| commit | old_file | new_file | old_contents | new_contents | subject | message | lang | license | repos | prompt | response | prompt_tagged | response_tagged | text | text_tagged |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| stringlengths 40-40 | stringlengths 4-118 | stringlengths 4-118 | stringlengths 0-2.94k | stringlengths 1-4.43k | stringlengths 15-444 | stringlengths 16-3.45k | stringclasses 1 value | stringclasses 13 values | stringlengths 5-43.2k | stringlengths 17-4.58k | stringlengths 1-4.43k | stringlengths 58-4.62k | stringlengths 1-4.43k | stringlengths 132-7.29k | stringlengths 173-7.33k |
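The `prompt_tagged`, `response_tagged`, and `text_tagged` columns follow the fixed template visible in the rows below: the old file contents wrapped in `<commit_before>`, the commit message after `<commit_msg>`, and the new contents after `<commit_after>`. As a minimal sketch (not part of the dataset itself), the snippet below reassembles that layout from the other columns; `build_text_tagged` and `example_row` are hypothetical names introduced only for illustration, and the exact whitespace handling in the real dataset may differ.

```python
def build_text_tagged(row: dict) -> str:
    """Recreate the `text_tagged` layout seen in the rows below:
    <commit_before>{old_contents}<commit_msg>{message}<commit_after>{new_contents}
    (Assumed template, inferred from the visible rows.)
    """
    return (
        "<commit_before>" + row["old_contents"]
        + "<commit_msg>" + row["message"]
        + "<commit_after>" + row["new_contents"]
    )


# Hypothetical row, shaped like the first entry below (pgmpy version bump).
example_row = {
    "old_contents": '__version__ = "v0.1.9.dev11"\n',
    "message": "Make the version number just dev instead of numbers and releases are more frequent now",
    "new_contents": '__version__ = "v0.1.9.dev"\n',
}

print(build_text_tagged(example_row))
```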
2268c381913dced911f3cb008d796f215233addf
|
pgmpy/__init__.py
|
pgmpy/__init__.py
|
from .global_vars import HAS_PANDAS, device
__all__ = ["HAS_PANDAS", "device"]
__version__ = "v0.1.9.dev11"
|
from .global_vars import HAS_PANDAS, device
__all__ = ["HAS_PANDAS", "device"]
__version__ = "v0.1.9.dev"
|
Make the version number just dev instead of numbers and releases are more frequent now
|
Make the version number just dev instead of numbers and releases are more frequent now
|
Python
|
mit
|
pgmpy/pgmpy,ankurankan/pgmpy,pgmpy/pgmpy,ankurankan/pgmpy
|
from .global_vars import HAS_PANDAS, device
__all__ = ["HAS_PANDAS", "device"]
__version__ = "v0.1.9.dev11"
Make the version number just dev instead of numbers and releases are more frequent now
|
from .global_vars import HAS_PANDAS, device
__all__ = ["HAS_PANDAS", "device"]
__version__ = "v0.1.9.dev"
|
<commit_before>from .global_vars import HAS_PANDAS, device
__all__ = ["HAS_PANDAS", "device"]
__version__ = "v0.1.9.dev11"
<commit_msg>Make the version number just dev instead of numbers and releases are more frequent now<commit_after>
|
from .global_vars import HAS_PANDAS, device
__all__ = ["HAS_PANDAS", "device"]
__version__ = "v0.1.9.dev"
|
from .global_vars import HAS_PANDAS, device
__all__ = ["HAS_PANDAS", "device"]
__version__ = "v0.1.9.dev11"
Make the version number just dev instead of numbers and releases are more frequent nowfrom .global_vars import HAS_PANDAS, device
__all__ = ["HAS_PANDAS", "device"]
__version__ = "v0.1.9.dev"
|
<commit_before>from .global_vars import HAS_PANDAS, device
__all__ = ["HAS_PANDAS", "device"]
__version__ = "v0.1.9.dev11"
<commit_msg>Make the version number just dev instead of numbers and releases are more frequent now<commit_after>from .global_vars import HAS_PANDAS, device
__all__ = ["HAS_PANDAS", "device"]
__version__ = "v0.1.9.dev"
|
81908e5f6304cc1c8e8627b0d4c859df194cc36d
|
ynr/apps/resultsbot/management/commands/store_modgov_urls.py
|
ynr/apps/resultsbot/management/commands/store_modgov_urls.py
|
import csv
import os
from django.core.management.base import BaseCommand
import resultsbot
from elections.models import Election
class Command(BaseCommand):
def handle(self, **options):
"""
Stores possible modgov urls stored in CSV file against the related election objects
"""
path = os.path.join(
os.path.dirname(resultsbot.__file__), "election_id_to_url.csv"
)
with open(path) as f:
csv_file = csv.reader(f)
for line in csv_file:
try:
election = Election.objects.get(slug=line[0])
election.modgov_url = line[1]
election.save()
except (IndexError, Election.DoesNotExist):
continue
|
import csv
import os
from django.core.management.base import BaseCommand
import resultsbot
from elections.models import Election
class Command(BaseCommand):
def handle(self, **options):
"""
Stores possible modgov urls stored in CSV file against the related election objects
"""
# remove existing values first as this allows us to remove bad urls from the csv file
Election.objects.update(modgov_url=None)
path = os.path.join(
os.path.dirname(resultsbot.__file__), "election_id_to_url.csv"
)
with open(path) as f:
csv_file = csv.reader(f)
for line in csv_file:
try:
election = Election.objects.get(slug=line[0])
election.modgov_url = line[1]
election.save()
except (IndexError, Election.DoesNotExist):
continue
|
Delete existing urls before each run
|
Delete existing urls before each run
|
Python
|
agpl-3.0
|
DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative
|
import csv
import os
from django.core.management.base import BaseCommand
import resultsbot
from elections.models import Election
class Command(BaseCommand):
def handle(self, **options):
"""
Stores possible modgov urls stored in CSV file against the related election objects
"""
path = os.path.join(
os.path.dirname(resultsbot.__file__), "election_id_to_url.csv"
)
with open(path) as f:
csv_file = csv.reader(f)
for line in csv_file:
try:
election = Election.objects.get(slug=line[0])
election.modgov_url = line[1]
election.save()
except (IndexError, Election.DoesNotExist):
continue
Delete existing urls before each run
|
import csv
import os
from django.core.management.base import BaseCommand
import resultsbot
from elections.models import Election
class Command(BaseCommand):
def handle(self, **options):
"""
Stores possible modgov urls stored in CSV file against the related election objects
"""
# remove existing values first as this allows us to remove bad urls from the csv file
Election.objects.update(modgov_url=None)
path = os.path.join(
os.path.dirname(resultsbot.__file__), "election_id_to_url.csv"
)
with open(path) as f:
csv_file = csv.reader(f)
for line in csv_file:
try:
election = Election.objects.get(slug=line[0])
election.modgov_url = line[1]
election.save()
except (IndexError, Election.DoesNotExist):
continue
|
<commit_before>import csv
import os
from django.core.management.base import BaseCommand
import resultsbot
from elections.models import Election
class Command(BaseCommand):
def handle(self, **options):
"""
Stores possible modgov urls stored in CSV file against the related election objects
"""
path = os.path.join(
os.path.dirname(resultsbot.__file__), "election_id_to_url.csv"
)
with open(path) as f:
csv_file = csv.reader(f)
for line in csv_file:
try:
election = Election.objects.get(slug=line[0])
election.modgov_url = line[1]
election.save()
except (IndexError, Election.DoesNotExist):
continue
<commit_msg>Delete existing urls before each run<commit_after>
|
import csv
import os
from django.core.management.base import BaseCommand
import resultsbot
from elections.models import Election
class Command(BaseCommand):
def handle(self, **options):
"""
Stores possible modgov urls stored in CSV file against the related election objects
"""
# remove existing values first as this allows us to remove bad urls from the csv file
Election.objects.update(modgov_url=None)
path = os.path.join(
os.path.dirname(resultsbot.__file__), "election_id_to_url.csv"
)
with open(path) as f:
csv_file = csv.reader(f)
for line in csv_file:
try:
election = Election.objects.get(slug=line[0])
election.modgov_url = line[1]
election.save()
except (IndexError, Election.DoesNotExist):
continue
|
import csv
import os
from django.core.management.base import BaseCommand
import resultsbot
from elections.models import Election
class Command(BaseCommand):
def handle(self, **options):
"""
Stores possible modgov urls stored in CSV file against the related election objects
"""
path = os.path.join(
os.path.dirname(resultsbot.__file__), "election_id_to_url.csv"
)
with open(path) as f:
csv_file = csv.reader(f)
for line in csv_file:
try:
election = Election.objects.get(slug=line[0])
election.modgov_url = line[1]
election.save()
except (IndexError, Election.DoesNotExist):
continue
Delete existing urls before each runimport csv
import os
from django.core.management.base import BaseCommand
import resultsbot
from elections.models import Election
class Command(BaseCommand):
def handle(self, **options):
"""
Stores possible modgov urls stored in CSV file against the related election objects
"""
# remove existing values first as this allows us to remove bad urls from the csv file
Election.objects.update(modgov_url=None)
path = os.path.join(
os.path.dirname(resultsbot.__file__), "election_id_to_url.csv"
)
with open(path) as f:
csv_file = csv.reader(f)
for line in csv_file:
try:
election = Election.objects.get(slug=line[0])
election.modgov_url = line[1]
election.save()
except (IndexError, Election.DoesNotExist):
continue
|
<commit_before>import csv
import os
from django.core.management.base import BaseCommand
import resultsbot
from elections.models import Election
class Command(BaseCommand):
def handle(self, **options):
"""
Stores possible modgov urls stored in CSV file against the related election objects
"""
path = os.path.join(
os.path.dirname(resultsbot.__file__), "election_id_to_url.csv"
)
with open(path) as f:
csv_file = csv.reader(f)
for line in csv_file:
try:
election = Election.objects.get(slug=line[0])
election.modgov_url = line[1]
election.save()
except (IndexError, Election.DoesNotExist):
continue
<commit_msg>Delete existing urls before each run<commit_after>import csv
import os
from django.core.management.base import BaseCommand
import resultsbot
from elections.models import Election
class Command(BaseCommand):
def handle(self, **options):
"""
Stores possible modgov urls stored in CSV file against the related election objects
"""
# remove existing values first as this allows us to remove bad urls from the csv file
Election.objects.update(modgov_url=None)
path = os.path.join(
os.path.dirname(resultsbot.__file__), "election_id_to_url.csv"
)
with open(path) as f:
csv_file = csv.reader(f)
for line in csv_file:
try:
election = Election.objects.get(slug=line[0])
election.modgov_url = line[1]
election.save()
except (IndexError, Election.DoesNotExist):
continue
|
afc959e23f21e086f710cbc7f3bb56d0b4d93329
|
bin/set_deploy_permissions.py
|
bin/set_deploy_permissions.py
|
#!/usr/bin/env python
"""
Set the file permissions appropriately for deployment. Call with the argument
of the webserver user (e.g. 'www-data') that should have permissions to uploads
and log files.
"""
import os
import sys
import subprocess
server_writable_directories = [
"vendor/solr/apache-solr-4.0.0/example/solr/collection1/data/",
"vendor/solr/apache-solr-4.0.0/example/solr-webapp/",
"lib/dotstorm/assets/dotstorm/uploads/",
"lib/www/assets/group_logos/",
"lib/www/assets/user_icons/",
]
BASE = os.path.join(os.path.dirname(__file__), "..")
def set_permissions(user):
for path in server_writable_directories:
print user, path
if not os.path.exists(path):
os.makedirs(path)
subprocess.check_call(["chown", "-R", user, os.path.join(BASE, path)])
if __name__ == "__main__":
try:
target_user = sys.argv[1]
except IndexError:
print "Missing required parameter `target user`."
print "Usage: set_deploy_permissions.py [username]"
sys.exit(1)
set_permissions(target_user)
|
#!/usr/bin/env python
"""
Set the file permissions appropriately for deployment. Call with the argument
of the webserver user (e.g. 'www-data') that should have permissions to uploads
and log files.
"""
import os
import sys
import subprocess
server_writable_directories = [
"vendor/solr/apache-solr-4.0.0/example/solr/collection1/data/",
"vendor/solr/apache-solr-4.0.0/example/solr-webapp/",
"lib/dotstorm/assets/dotstorm/uploads/",
"lib/www/assets/group_logos/",
"lib/www/assets/user_icons/",
"builtAssets/",
]
BASE = os.path.join(os.path.dirname(__file__), "..")
def set_permissions(user):
for path in server_writable_directories:
print user, path
if not os.path.exists(path):
os.makedirs(path)
subprocess.check_call(["chown", "-R", user, os.path.join(BASE, path)])
if __name__ == "__main__":
try:
target_user = sys.argv[1]
except IndexError:
print "Missing required parameter `target user`."
print "Usage: set_deploy_permissions.py [username]"
sys.exit(1)
set_permissions(target_user)
|
Add builtAssets to webserver-writable dirs
|
Add builtAssets to webserver-writable dirs
|
Python
|
bsd-2-clause
|
yourcelf/intertwinkles,yourcelf/intertwinkles
|
#!/usr/bin/env python
"""
Set the file permissions appropriately for deployment. Call with the argument
of the webserver user (e.g. 'www-data') that should have permissions to uploads
and log files.
"""
import os
import sys
import subprocess
server_writable_directories = [
"vendor/solr/apache-solr-4.0.0/example/solr/collection1/data/",
"vendor/solr/apache-solr-4.0.0/example/solr-webapp/",
"lib/dotstorm/assets/dotstorm/uploads/",
"lib/www/assets/group_logos/",
"lib/www/assets/user_icons/",
]
BASE = os.path.join(os.path.dirname(__file__), "..")
def set_permissions(user):
for path in server_writable_directories:
print user, path
if not os.path.exists(path):
os.makedirs(path)
subprocess.check_call(["chown", "-R", user, os.path.join(BASE, path)])
if __name__ == "__main__":
try:
target_user = sys.argv[1]
except IndexError:
print "Missing required parameter `target user`."
print "Usage: set_deploy_permissions.py [username]"
sys.exit(1)
set_permissions(target_user)
Add builtAssets to webserver-writable dirs
|
#!/usr/bin/env python
"""
Set the file permissions appropriately for deployment. Call with the argument
of the webserver user (e.g. 'www-data') that should have permissions to uploads
and log files.
"""
import os
import sys
import subprocess
server_writable_directories = [
"vendor/solr/apache-solr-4.0.0/example/solr/collection1/data/",
"vendor/solr/apache-solr-4.0.0/example/solr-webapp/",
"lib/dotstorm/assets/dotstorm/uploads/",
"lib/www/assets/group_logos/",
"lib/www/assets/user_icons/",
"builtAssets/",
]
BASE = os.path.join(os.path.dirname(__file__), "..")
def set_permissions(user):
for path in server_writable_directories:
print user, path
if not os.path.exists(path):
os.makedirs(path)
subprocess.check_call(["chown", "-R", user, os.path.join(BASE, path)])
if __name__ == "__main__":
try:
target_user = sys.argv[1]
except IndexError:
print "Missing required parameter `target user`."
print "Usage: set_deploy_permissions.py [username]"
sys.exit(1)
set_permissions(target_user)
|
<commit_before>#!/usr/bin/env python
"""
Set the file permissions appropriately for deployment. Call with the argument
of the webserver user (e.g. 'www-data') that should have permissions to uploads
and log files.
"""
import os
import sys
import subprocess
server_writable_directories = [
"vendor/solr/apache-solr-4.0.0/example/solr/collection1/data/",
"vendor/solr/apache-solr-4.0.0/example/solr-webapp/",
"lib/dotstorm/assets/dotstorm/uploads/",
"lib/www/assets/group_logos/",
"lib/www/assets/user_icons/",
]
BASE = os.path.join(os.path.dirname(__file__), "..")
def set_permissions(user):
for path in server_writable_directories:
print user, path
if not os.path.exists(path):
os.makedirs(path)
subprocess.check_call(["chown", "-R", user, os.path.join(BASE, path)])
if __name__ == "__main__":
try:
target_user = sys.argv[1]
except IndexError:
print "Missing required parameter `target user`."
print "Usage: set_deploy_permissions.py [username]"
sys.exit(1)
set_permissions(target_user)
<commit_msg>Add builtAssets to webserver-writable dirs<commit_after>
|
#!/usr/bin/env python
"""
Set the file permissions appropriately for deployment. Call with the argument
of the webserver user (e.g. 'www-data') that should have permissions to uploads
and log files.
"""
import os
import sys
import subprocess
server_writable_directories = [
"vendor/solr/apache-solr-4.0.0/example/solr/collection1/data/",
"vendor/solr/apache-solr-4.0.0/example/solr-webapp/",
"lib/dotstorm/assets/dotstorm/uploads/",
"lib/www/assets/group_logos/",
"lib/www/assets/user_icons/",
"builtAssets/",
]
BASE = os.path.join(os.path.dirname(__file__), "..")
def set_permissions(user):
for path in server_writable_directories:
print user, path
if not os.path.exists(path):
os.makedirs(path)
subprocess.check_call(["chown", "-R", user, os.path.join(BASE, path)])
if __name__ == "__main__":
try:
target_user = sys.argv[1]
except IndexError:
print "Missing required parameter `target user`."
print "Usage: set_deploy_permissions.py [username]"
sys.exit(1)
set_permissions(target_user)
|
#!/usr/bin/env python
"""
Set the file permissions appropriately for deployment. Call with the argument
of the webserver user (e.g. 'www-data') that should have permissions to uploads
and log files.
"""
import os
import sys
import subprocess
server_writable_directories = [
"vendor/solr/apache-solr-4.0.0/example/solr/collection1/data/",
"vendor/solr/apache-solr-4.0.0/example/solr-webapp/",
"lib/dotstorm/assets/dotstorm/uploads/",
"lib/www/assets/group_logos/",
"lib/www/assets/user_icons/",
]
BASE = os.path.join(os.path.dirname(__file__), "..")
def set_permissions(user):
for path in server_writable_directories:
print user, path
if not os.path.exists(path):
os.makedirs(path)
subprocess.check_call(["chown", "-R", user, os.path.join(BASE, path)])
if __name__ == "__main__":
try:
target_user = sys.argv[1]
except IndexError:
print "Missing required parameter `target user`."
print "Usage: set_deploy_permissions.py [username]"
sys.exit(1)
set_permissions(target_user)
Add builtAssets to webserver-writable dirs#!/usr/bin/env python
"""
Set the file permissions appropriately for deployment. Call with the argument
of the webserver user (e.g. 'www-data') that should have permissions to uploads
and log files.
"""
import os
import sys
import subprocess
server_writable_directories = [
"vendor/solr/apache-solr-4.0.0/example/solr/collection1/data/",
"vendor/solr/apache-solr-4.0.0/example/solr-webapp/",
"lib/dotstorm/assets/dotstorm/uploads/",
"lib/www/assets/group_logos/",
"lib/www/assets/user_icons/",
"builtAssets/",
]
BASE = os.path.join(os.path.dirname(__file__), "..")
def set_permissions(user):
for path in server_writable_directories:
print user, path
if not os.path.exists(path):
os.makedirs(path)
subprocess.check_call(["chown", "-R", user, os.path.join(BASE, path)])
if __name__ == "__main__":
try:
target_user = sys.argv[1]
except IndexError:
print "Missing required parameter `target user`."
print "Usage: set_deploy_permissions.py [username]"
sys.exit(1)
set_permissions(target_user)
|
<commit_before>#!/usr/bin/env python
"""
Set the file permissions appropriately for deployment. Call with the argument
of the webserver user (e.g. 'www-data') that should have permissions to uploads
and log files.
"""
import os
import sys
import subprocess
server_writable_directories = [
"vendor/solr/apache-solr-4.0.0/example/solr/collection1/data/",
"vendor/solr/apache-solr-4.0.0/example/solr-webapp/",
"lib/dotstorm/assets/dotstorm/uploads/",
"lib/www/assets/group_logos/",
"lib/www/assets/user_icons/",
]
BASE = os.path.join(os.path.dirname(__file__), "..")
def set_permissions(user):
for path in server_writable_directories:
print user, path
if not os.path.exists(path):
os.makedirs(path)
subprocess.check_call(["chown", "-R", user, os.path.join(BASE, path)])
if __name__ == "__main__":
try:
target_user = sys.argv[1]
except IndexError:
print "Missing required parameter `target user`."
print "Usage: set_deploy_permissions.py [username]"
sys.exit(1)
set_permissions(target_user)
<commit_msg>Add builtAssets to webserver-writable dirs<commit_after>#!/usr/bin/env python
"""
Set the file permissions appropriately for deployment. Call with the argument
of the webserver user (e.g. 'www-data') that should have permissions to uploads
and log files.
"""
import os
import sys
import subprocess
server_writable_directories = [
"vendor/solr/apache-solr-4.0.0/example/solr/collection1/data/",
"vendor/solr/apache-solr-4.0.0/example/solr-webapp/",
"lib/dotstorm/assets/dotstorm/uploads/",
"lib/www/assets/group_logos/",
"lib/www/assets/user_icons/",
"builtAssets/",
]
BASE = os.path.join(os.path.dirname(__file__), "..")
def set_permissions(user):
for path in server_writable_directories:
print user, path
if not os.path.exists(path):
os.makedirs(path)
subprocess.check_call(["chown", "-R", user, os.path.join(BASE, path)])
if __name__ == "__main__":
try:
target_user = sys.argv[1]
except IndexError:
print "Missing required parameter `target user`."
print "Usage: set_deploy_permissions.py [username]"
sys.exit(1)
set_permissions(target_user)
|
7a1b6d1999682ef114f81143a99d0f4d8e1f4af2
|
transactions_not_entry_line/models/account_invoice.py
|
transactions_not_entry_line/models/account_invoice.py
|
# -*- coding: utf-8 -*-
# © <YEAR(S)> <AUTHOR(S)>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp import api, _, models
from openerp.exceptions import UserError
class AccountInvoice(models.Model):
_inherit = 'account.invoice'
@api.multi
def action_move_create(self):
for inv in self:
for line in inv.invoice_line_ids:
if line.price_unit <= 0:
raise UserError(_('At least one of the lines of the \
invoice has price unit zero!' '\n Please make sure \
that all lines have successfully captured the unit price.')
)
return super(AccountInvoice, self).action_move_create()
|
# -*- coding: utf-8 -*-
# © <YEAR(S)> <AUTHOR(S)>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp import api, _, models
from openerp.exceptions import UserError
class AccountInvoice(models.Model):
_inherit = 'account.invoice'
@api.multi
def action_move_create(self):
for inv in self:
for line in inv.invoice_line_ids:
if line.product_id.id in (975887, 975888, 507890):
return []
elif line.price_unit <= 0:
raise UserError(_('At least one of the lines of the \
invoice has price unit zero!' '\n Please make sure \
that all lines have successfully captured the unit price.')
)
return super(AccountInvoice, self).action_move_create()
|
Add ids for balance product in transactions_not_entry_line
|
[FIX] Add ids for balance product in transactions_not_entry_line
|
Python
|
agpl-3.0
|
Gebesa-Dev/Addons-gebesa
|
# -*- coding: utf-8 -*-
# © <YEAR(S)> <AUTHOR(S)>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp import api, _, models
from openerp.exceptions import UserError
class AccountInvoice(models.Model):
_inherit = 'account.invoice'
@api.multi
def action_move_create(self):
for inv in self:
for line in inv.invoice_line_ids:
if line.price_unit <= 0:
raise UserError(_('At least one of the lines of the \
invoice has price unit zero!' '\n Please make sure \
that all lines have successfully captured the unit price.')
)
return super(AccountInvoice, self).action_move_create()
[FIX] Add ids for balance product in transactions_not_entry_line
|
# -*- coding: utf-8 -*-
# © <YEAR(S)> <AUTHOR(S)>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp import api, _, models
from openerp.exceptions import UserError
class AccountInvoice(models.Model):
_inherit = 'account.invoice'
@api.multi
def action_move_create(self):
for inv in self:
for line in inv.invoice_line_ids:
if line.product_id.id in (975887, 975888, 507890):
return []
elif line.price_unit <= 0:
raise UserError(_('At least one of the lines of the \
invoice has price unit zero!' '\n Please make sure \
that all lines have successfully captured the unit price.')
)
return super(AccountInvoice, self).action_move_create()
|
<commit_before># -*- coding: utf-8 -*-
# © <YEAR(S)> <AUTHOR(S)>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp import api, _, models
from openerp.exceptions import UserError
class AccountInvoice(models.Model):
_inherit = 'account.invoice'
@api.multi
def action_move_create(self):
for inv in self:
for line in inv.invoice_line_ids:
if line.price_unit <= 0:
raise UserError(_('At least one of the lines of the \
invoice has price unit zero!' '\n Please make sure \
that all lines have successfully captured the unit price.')
)
return super(AccountInvoice, self).action_move_create()
<commit_msg>[FIX] Add ids for balance product in transactions_not_entry_line<commit_after>
|
# -*- coding: utf-8 -*-
# © <YEAR(S)> <AUTHOR(S)>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp import api, _, models
from openerp.exceptions import UserError
class AccountInvoice(models.Model):
_inherit = 'account.invoice'
@api.multi
def action_move_create(self):
for inv in self:
for line in inv.invoice_line_ids:
if line.product_id.id in (975887, 975888, 507890):
return []
elif line.price_unit <= 0:
raise UserError(_('At least one of the lines of the \
invoice has price unit zero!' '\n Please make sure \
that all lines have successfully captured the unit price.')
)
return super(AccountInvoice, self).action_move_create()
|
# -*- coding: utf-8 -*-
# © <YEAR(S)> <AUTHOR(S)>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp import api, _, models
from openerp.exceptions import UserError
class AccountInvoice(models.Model):
_inherit = 'account.invoice'
@api.multi
def action_move_create(self):
for inv in self:
for line in inv.invoice_line_ids:
if line.price_unit <= 0:
raise UserError(_('At least one of the lines of the \
invoice has price unit zero!' '\n Please make sure \
that all lines have successfully captured the unit price.')
)
return super(AccountInvoice, self).action_move_create()
[FIX] Add ids for balance product in transactions_not_entry_line# -*- coding: utf-8 -*-
# © <YEAR(S)> <AUTHOR(S)>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp import api, _, models
from openerp.exceptions import UserError
class AccountInvoice(models.Model):
_inherit = 'account.invoice'
@api.multi
def action_move_create(self):
for inv in self:
for line in inv.invoice_line_ids:
if line.product_id.id in (975887, 975888, 507890):
return []
elif line.price_unit <= 0:
raise UserError(_('At least one of the lines of the \
invoice has price unit zero!' '\n Please make sure \
that all lines have successfully captured the unit price.')
)
return super(AccountInvoice, self).action_move_create()
|
<commit_before># -*- coding: utf-8 -*-
# © <YEAR(S)> <AUTHOR(S)>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp import api, _, models
from openerp.exceptions import UserError
class AccountInvoice(models.Model):
_inherit = 'account.invoice'
@api.multi
def action_move_create(self):
for inv in self:
for line in inv.invoice_line_ids:
if line.price_unit <= 0:
raise UserError(_('At least one of the lines of the \
invoice has price unit zero!' '\n Please make sure \
that all lines have successfully captured the unit price.')
)
return super(AccountInvoice, self).action_move_create()
<commit_msg>[FIX] Add ids for balance product in transactions_not_entry_line<commit_after># -*- coding: utf-8 -*-
# © <YEAR(S)> <AUTHOR(S)>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp import api, _, models
from openerp.exceptions import UserError
class AccountInvoice(models.Model):
_inherit = 'account.invoice'
@api.multi
def action_move_create(self):
for inv in self:
for line in inv.invoice_line_ids:
if line.product_id.id in (975887, 975888, 507890):
return []
elif line.price_unit <= 0:
raise UserError(_('At least one of the lines of the \
invoice has price unit zero!' '\n Please make sure \
that all lines have successfully captured the unit price.')
)
return super(AccountInvoice, self).action_move_create()
|
e90afe565a4d54e7fb81b4fbd29d44525b81aa89
|
data_structs/queue.py
|
data_structs/queue.py
|
#!/usr/bin/env python3
''' Linear queue '''
class Queue:
def __init__(self):
self.items = list()
def is_Empty(self):
return self.items == []
def size(self):
return len(self.items)
def set(self, item):
self.Queue.insert(0, item)
def get(self):
return self.items.pop()
def main():
pass
if __name__ == '__main__':
main()
|
#!/usr/bin/env python3
''' Linear queue '''
class Queue:
def __init__(self, items=[]):
self.items = items
def is_Empty(self):
return self.items == []
def size(self):
return len(self.items)
def enqueue(self, item):
self.Queue.insert(0, item)
def dequeue(self):
return self.items.pop()
def main():
pass
if __name__ == '__main__':
main()
|
Add default values and changed names for getter and setter
|
LinearQueue: Add default values and changed names for getter and setter
|
Python
|
apache-2.0
|
fedusia/python
|
#!/usr/bin/env python3
''' Linear queue '''
class Queue:
def __init__(self):
self.items = list()
def is_Empty(self):
return self.items == []
def size(self):
return len(self.items)
def set(self, item):
self.Queue.insert(0, item)
def get(self):
return self.items.pop()
def main():
pass
if __name__ == '__main__':
main()
LinearQueue: Add default values and changed names for getter and setter
|
#!/usr/bin/env python3
''' Linear queue '''
class Queue:
def __init__(self, items=[]):
self.items = items
def is_Empty(self):
return self.items == []
def size(self):
return len(self.items)
def enqueue(self, item):
self.Queue.insert(0, item)
def dequeue(self):
return self.items.pop()
def main():
pass
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python3
''' Linear queue '''
class Queue:
def __init__(self):
self.items = list()
def is_Empty(self):
return self.items == []
def size(self):
return len(self.items)
def set(self, item):
self.Queue.insert(0, item)
def get(self):
return self.items.pop()
def main():
pass
if __name__ == '__main__':
main()
<commit_msg>LinearQueue: Add default values and changed names for getter and setter<commit_after>
|
#!/usr/bin/env python3
''' Linear queue '''
class Queue:
def __init__(self, items=[]):
self.items = items
def is_Empty(self):
return self.items == []
def size(self):
return len(self.items)
def enqueue(self, item):
self.Queue.insert(0, item)
def dequeue(self):
return self.items.pop()
def main():
pass
if __name__ == '__main__':
main()
|
#!/usr/bin/env python3
''' Linear queue '''
class Queue:
def __init__(self):
self.items = list()
def is_Empty(self):
return self.items == []
def size(self):
return len(self.items)
def set(self, item):
self.Queue.insert(0, item)
def get(self):
return self.items.pop()
def main():
pass
if __name__ == '__main__':
main()
LinearQueue: Add default values and changed names for getter and setter#!/usr/bin/env python3
''' Linear queue '''
class Queue:
def __init__(self, items=[]):
self.items = items
def is_Empty(self):
return self.items == []
def size(self):
return len(self.items)
def enqueue(self, item):
self.Queue.insert(0, item)
def dequeue(self):
return self.items.pop()
def main():
pass
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python3
''' Linear queue '''
class Queue:
def __init__(self):
self.items = list()
def is_Empty(self):
return self.items == []
def size(self):
return len(self.items)
def set(self, item):
self.Queue.insert(0, item)
def get(self):
return self.items.pop()
def main():
pass
if __name__ == '__main__':
main()
<commit_msg>LinearQueue: Add default values and changed names for getter and setter<commit_after>#!/usr/bin/env python3
''' Linear queue '''
class Queue:
def __init__(self, items=[]):
self.items = items
def is_Empty(self):
return self.items == []
def size(self):
return len(self.items)
def enqueue(self, item):
self.Queue.insert(0, item)
def dequeue(self):
return self.items.pop()
def main():
pass
if __name__ == '__main__':
main()
|
ec3d63fb12ad73ee832f11ec5f93d7425e5ce0f0
|
kboard/board/urls.py
|
kboard/board/urls.py
|
# Created by JHJ on 2016. 10. 5.
from django.conf.urls import url
from . import views
app_name = 'board'
urlpatterns = [
url(r'^$', views.board_list, name='board_list'),
url(r'^(?P<board_slug>[-a-z]+)/$', views.post_list, name='post_list'),
url(r'^(?P<board_slug>[-a-z]+)/new/$', views.new_post, name='new_post'),
url(r'^(?P<post_id>\d+)/delete/$', views.delete_post, name='delete_post'),
url(r'^(?P<board_slug>[-a-z]+)/(?P<post_id>\d+)/$', views.view_post, name='view_post'),
url(r'^(?P<board_slug>[-a-z]+)/(?P<post_id>\d+)/comment/new/$', views.new_comment, name='new_comment'),
url(r'^(?P<post_id>\d+)/comment/delete/$', views.delete_comment, name='delete_comment'),
]
|
# Created by JHJ on 2016. 10. 5.
from django.conf.urls import url
from . import views
app_name = 'board'
urlpatterns = [
url(r'^$', views.board_list, name='board_list'),
url(r'^(?P<board_slug>[-a-z]+)/$', views.post_list, name='post_list'),
url(r'^(?P<board_slug>[-a-z]+)/new/$', views.new_post, name='new_post'),
url(r'^(?P<post_id>\d+)/delete/$', views.delete_post, name='delete_post'),
url(r'^(?P<post_id>\d+)/$', views.view_post, name='view_post'),
url(r'^(?P<board_slug>[-a-z]+)/(?P<post_id>\d+)/comment/new/$', views.new_comment, name='new_comment'),
url(r'^(?P<post_id>\d+)/comment/delete/$', views.delete_comment, name='delete_comment'),
]
|
Delete 'board_slug' in 'view_post' url
|
Delete 'board_slug' in 'view_post' url
|
Python
|
mit
|
kboard/kboard,cjh5414/kboard,guswnsxodlf/k-board,darjeeling/k-board,kboard/kboard,kboard/kboard,hyesun03/k-board,guswnsxodlf/k-board,cjh5414/kboard,guswnsxodlf/k-board,hyesun03/k-board,hyesun03/k-board,cjh5414/kboard
|
# Created by JHJ on 2016. 10. 5.
from django.conf.urls import url
from . import views
app_name = 'board'
urlpatterns = [
url(r'^$', views.board_list, name='board_list'),
url(r'^(?P<board_slug>[-a-z]+)/$', views.post_list, name='post_list'),
url(r'^(?P<board_slug>[-a-z]+)/new/$', views.new_post, name='new_post'),
url(r'^(?P<post_id>\d+)/delete/$', views.delete_post, name='delete_post'),
url(r'^(?P<board_slug>[-a-z]+)/(?P<post_id>\d+)/$', views.view_post, name='view_post'),
url(r'^(?P<board_slug>[-a-z]+)/(?P<post_id>\d+)/comment/new/$', views.new_comment, name='new_comment'),
url(r'^(?P<post_id>\d+)/comment/delete/$', views.delete_comment, name='delete_comment'),
]
Delete 'board_slug' in 'view_post' url
|
# Created by JHJ on 2016. 10. 5.
from django.conf.urls import url
from . import views
app_name = 'board'
urlpatterns = [
url(r'^$', views.board_list, name='board_list'),
url(r'^(?P<board_slug>[-a-z]+)/$', views.post_list, name='post_list'),
url(r'^(?P<board_slug>[-a-z]+)/new/$', views.new_post, name='new_post'),
url(r'^(?P<post_id>\d+)/delete/$', views.delete_post, name='delete_post'),
url(r'^(?P<post_id>\d+)/$', views.view_post, name='view_post'),
url(r'^(?P<board_slug>[-a-z]+)/(?P<post_id>\d+)/comment/new/$', views.new_comment, name='new_comment'),
url(r'^(?P<post_id>\d+)/comment/delete/$', views.delete_comment, name='delete_comment'),
]
|
<commit_before># Created by JHJ on 2016. 10. 5.
from django.conf.urls import url
from . import views
app_name = 'board'
urlpatterns = [
url(r'^$', views.board_list, name='board_list'),
url(r'^(?P<board_slug>[-a-z]+)/$', views.post_list, name='post_list'),
url(r'^(?P<board_slug>[-a-z]+)/new/$', views.new_post, name='new_post'),
url(r'^(?P<post_id>\d+)/delete/$', views.delete_post, name='delete_post'),
url(r'^(?P<board_slug>[-a-z]+)/(?P<post_id>\d+)/$', views.view_post, name='view_post'),
url(r'^(?P<board_slug>[-a-z]+)/(?P<post_id>\d+)/comment/new/$', views.new_comment, name='new_comment'),
url(r'^(?P<post_id>\d+)/comment/delete/$', views.delete_comment, name='delete_comment'),
]
<commit_msg>Delete 'board_slug' in 'view_post' url<commit_after>
|
# Created by JHJ on 2016. 10. 5.
from django.conf.urls import url
from . import views
app_name = 'board'
urlpatterns = [
url(r'^$', views.board_list, name='board_list'),
url(r'^(?P<board_slug>[-a-z]+)/$', views.post_list, name='post_list'),
url(r'^(?P<board_slug>[-a-z]+)/new/$', views.new_post, name='new_post'),
url(r'^(?P<post_id>\d+)/delete/$', views.delete_post, name='delete_post'),
url(r'^(?P<post_id>\d+)/$', views.view_post, name='view_post'),
url(r'^(?P<board_slug>[-a-z]+)/(?P<post_id>\d+)/comment/new/$', views.new_comment, name='new_comment'),
url(r'^(?P<post_id>\d+)/comment/delete/$', views.delete_comment, name='delete_comment'),
]
|
# Created by JHJ on 2016. 10. 5.
from django.conf.urls import url
from . import views
app_name = 'board'
urlpatterns = [
url(r'^$', views.board_list, name='board_list'),
url(r'^(?P<board_slug>[-a-z]+)/$', views.post_list, name='post_list'),
url(r'^(?P<board_slug>[-a-z]+)/new/$', views.new_post, name='new_post'),
url(r'^(?P<post_id>\d+)/delete/$', views.delete_post, name='delete_post'),
url(r'^(?P<board_slug>[-a-z]+)/(?P<post_id>\d+)/$', views.view_post, name='view_post'),
url(r'^(?P<board_slug>[-a-z]+)/(?P<post_id>\d+)/comment/new/$', views.new_comment, name='new_comment'),
url(r'^(?P<post_id>\d+)/comment/delete/$', views.delete_comment, name='delete_comment'),
]
Delete 'board_slug' in 'view_post' url# Created by JHJ on 2016. 10. 5.
from django.conf.urls import url
from . import views
app_name = 'board'
urlpatterns = [
url(r'^$', views.board_list, name='board_list'),
url(r'^(?P<board_slug>[-a-z]+)/$', views.post_list, name='post_list'),
url(r'^(?P<board_slug>[-a-z]+)/new/$', views.new_post, name='new_post'),
url(r'^(?P<post_id>\d+)/delete/$', views.delete_post, name='delete_post'),
url(r'^(?P<post_id>\d+)/$', views.view_post, name='view_post'),
url(r'^(?P<board_slug>[-a-z]+)/(?P<post_id>\d+)/comment/new/$', views.new_comment, name='new_comment'),
url(r'^(?P<post_id>\d+)/comment/delete/$', views.delete_comment, name='delete_comment'),
]
|
<commit_before># Created by JHJ on 2016. 10. 5.
from django.conf.urls import url
from . import views
app_name = 'board'
urlpatterns = [
url(r'^$', views.board_list, name='board_list'),
url(r'^(?P<board_slug>[-a-z]+)/$', views.post_list, name='post_list'),
url(r'^(?P<board_slug>[-a-z]+)/new/$', views.new_post, name='new_post'),
url(r'^(?P<post_id>\d+)/delete/$', views.delete_post, name='delete_post'),
url(r'^(?P<board_slug>[-a-z]+)/(?P<post_id>\d+)/$', views.view_post, name='view_post'),
url(r'^(?P<board_slug>[-a-z]+)/(?P<post_id>\d+)/comment/new/$', views.new_comment, name='new_comment'),
url(r'^(?P<post_id>\d+)/comment/delete/$', views.delete_comment, name='delete_comment'),
]
<commit_msg>Delete 'board_slug' in 'view_post' url<commit_after># Created by JHJ on 2016. 10. 5.
from django.conf.urls import url
from . import views
app_name = 'board'
urlpatterns = [
url(r'^$', views.board_list, name='board_list'),
url(r'^(?P<board_slug>[-a-z]+)/$', views.post_list, name='post_list'),
url(r'^(?P<board_slug>[-a-z]+)/new/$', views.new_post, name='new_post'),
url(r'^(?P<post_id>\d+)/delete/$', views.delete_post, name='delete_post'),
url(r'^(?P<post_id>\d+)/$', views.view_post, name='view_post'),
url(r'^(?P<board_slug>[-a-z]+)/(?P<post_id>\d+)/comment/new/$', views.new_comment, name='new_comment'),
url(r'^(?P<post_id>\d+)/comment/delete/$', views.delete_comment, name='delete_comment'),
]
|
97eabe6697e58f3b4dd8cced9a2c3bf05f3444c2
|
accounting/apps/books/context_processors.py
|
accounting/apps/books/context_processors.py
|
from .utils import organization_manager
from .models import Organization
def organizations(request):
"""
Add some generally useful metadata to the template context
"""
# selected organization
orga = organization_manager.get_selected_organization(request)
# all user authorized organizations
if not request.user or not request.user.is_authenticated():
user_organizations = None
else:
user_organizations = request.user.organizations.all()
return {
'user_organizations': user_organizations,
'selected_organization': orga,
}
|
from django.db.models import Q
from .utils import organization_manager
from .models import Organization
def organizations(request):
"""
Add some generally useful metadata to the template context
"""
# selected organization
orga = organization_manager.get_selected_organization(request)
# all user authorized organizations
if not request.user or not request.user.is_authenticated():
user_organizations = None
else:
user = request.user
user_organizations = (Organization.objects
.filter(Q(members=user) | Q(owner=user)))
return {
'user_organizations': user_organizations,
'selected_organization': orga,
}
|
Use owner or member filter for the dropdown
|
Use owner or member filter for the dropdown
|
Python
|
mit
|
kenjhim/django-accounting,dulaccc/django-accounting,dulaccc/django-accounting,dulaccc/django-accounting,kenjhim/django-accounting,kenjhim/django-accounting,dulaccc/django-accounting,kenjhim/django-accounting
|
from .utils import organization_manager
from .models import Organization
def organizations(request):
"""
Add some generally useful metadata to the template context
"""
# selected organization
orga = organization_manager.get_selected_organization(request)
# all user authorized organizations
if not request.user or not request.user.is_authenticated():
user_organizations = None
else:
user_organizations = request.user.organizations.all()
return {
'user_organizations': user_organizations,
'selected_organization': orga,
}
Use owner or member filter for the dropdown
|
from django.db.models import Q
from .utils import organization_manager
from .models import Organization
def organizations(request):
"""
Add some generally useful metadata to the template context
"""
# selected organization
orga = organization_manager.get_selected_organization(request)
# all user authorized organizations
if not request.user or not request.user.is_authenticated():
user_organizations = None
else:
user = request.user
user_organizations = (Organization.objects
.filter(Q(members=user) | Q(owner=user)))
return {
'user_organizations': user_organizations,
'selected_organization': orga,
}
|
<commit_before>from .utils import organization_manager
from .models import Organization
def organizations(request):
"""
Add some generally useful metadata to the template context
"""
# selected organization
orga = organization_manager.get_selected_organization(request)
# all user authorized organizations
if not request.user or not request.user.is_authenticated():
user_organizations = None
else:
user_organizations = request.user.organizations.all()
return {
'user_organizations': user_organizations,
'selected_organization': orga,
}
<commit_msg>Use owner or member filter for the dropdown<commit_after>
|
from django.db.models import Q
from .utils import organization_manager
from .models import Organization
def organizations(request):
"""
Add some generally useful metadata to the template context
"""
# selected organization
orga = organization_manager.get_selected_organization(request)
# all user authorized organizations
if not request.user or not request.user.is_authenticated():
user_organizations = None
else:
user = request.user
user_organizations = (Organization.objects
.filter(Q(members=user) | Q(owner=user)))
return {
'user_organizations': user_organizations,
'selected_organization': orga,
}
|
from .utils import organization_manager
from .models import Organization
def organizations(request):
"""
Add some generally useful metadata to the template context
"""
# selected organization
orga = organization_manager.get_selected_organization(request)
# all user authorized organizations
if not request.user or not request.user.is_authenticated():
user_organizations = None
else:
user_organizations = request.user.organizations.all()
return {
'user_organizations': user_organizations,
'selected_organization': orga,
}
Use owner or member filter for the dropdownfrom django.db.models import Q
from .utils import organization_manager
from .models import Organization
def organizations(request):
"""
Add some generally useful metadata to the template context
"""
# selected organization
orga = organization_manager.get_selected_organization(request)
# all user authorized organizations
if not request.user or not request.user.is_authenticated():
user_organizations = None
else:
user = request.user
user_organizations = (Organization.objects
.filter(Q(members=user) | Q(owner=user)))
return {
'user_organizations': user_organizations,
'selected_organization': orga,
}
|
<commit_before>from .utils import organization_manager
from .models import Organization
def organizations(request):
"""
Add some generally useful metadata to the template context
"""
# selected organization
orga = organization_manager.get_selected_organization(request)
# all user authorized organizations
if not request.user or not request.user.is_authenticated():
user_organizations = None
else:
user_organizations = request.user.organizations.all()
return {
'user_organizations': user_organizations,
'selected_organization': orga,
}
<commit_msg>Use owner or member filter for the dropdown<commit_after>from django.db.models import Q
from .utils import organization_manager
from .models import Organization
def organizations(request):
"""
Add some generally useful metadata to the template context
"""
# selected organization
orga = organization_manager.get_selected_organization(request)
# all user authorized organizations
if not request.user or not request.user.is_authenticated():
user_organizations = None
else:
user = request.user
user_organizations = (Organization.objects
.filter(Q(members=user) | Q(owner=user)))
return {
'user_organizations': user_organizations,
'selected_organization': orga,
}
|
5bf6d25148f627cd0e56b3530fe4c6c2fca6d913
|
index/views.py
|
index/views.py
|
from django.shortcuts import render, redirect, get_object_or_404
from django.urls import reverse
from mainmodels.models import Course, Review
# Create your views here.
def index(req):
mostPopularCourses = Course.objects.raw('SELECT * FROM mainmodels_course as main_course JOIN (SELECT main_review.course_id, AVG(main_review.rating) as "average_rating" FROM mainmodels_review as main_review) as avg_main_review ON main_course.courseID = avg_main_review.course_id ORDER BY avg_main_review.average_rating;')
return render(req, 'index/main.html', {'pageTitle': 'Coursing Field', 'mostPopularCourses': mostPopularCourses})
|
from django.shortcuts import render, redirect, get_object_or_404
from django.urls import reverse
from mainmodels.models import Course, Review
# Create your views here.
def index(req):
mostPopularCourses = Course.objects.raw('SELECT * FROM mainmodels_course as main_course JOIN (SELECT main_review.course_id, AVG(main_review.rating) as "average_rating" FROM mainmodels_review as main_review) as avg_main_review ON main_course.courseID = avg_main_review.course_id ORDER BY avg_main_review.average_rating LIMIT 10;')
return render(req, 'index/main.html', {'pageTitle': 'Coursing Field', 'mostPopularCourses': mostPopularCourses})
|
Add LIMIT to 10 rows
|
Add LIMIT to 10 rows
|
Python
|
apache-2.0
|
PNNutkung/Coursing-Field,PNNutkung/Coursing-Field,PNNutkung/Coursing-Field
|
from django.shortcuts import render, redirect, get_object_or_404
from django.urls import reverse
from mainmodels.models import Course, Review
# Create your views here.
def index(req):
mostPopularCourses = Course.objects.raw('SELECT * FROM mainmodels_course as main_course JOIN (SELECT main_review.course_id, AVG(main_review.rating) as "average_rating" FROM mainmodels_review as main_review) as avg_main_review ON main_course.courseID = avg_main_review.course_id ORDER BY avg_main_review.average_rating;')
return render(req, 'index/main.html', {'pageTitle': 'Coursing Field', 'mostPopularCourses': mostPopularCourses})
Add LIMIT to 10 rows
|
from django.shortcuts import render, redirect, get_object_or_404
from django.urls import reverse
from mainmodels.models import Course, Review
# Create your views here.
def index(req):
mostPopularCourses = Course.objects.raw('SELECT * FROM mainmodels_course as main_course JOIN (SELECT main_review.course_id, AVG(main_review.rating) as "average_rating" FROM mainmodels_review as main_review) as avg_main_review ON main_course.courseID = avg_main_review.course_id ORDER BY avg_main_review.average_rating LIMIT 10;')
return render(req, 'index/main.html', {'pageTitle': 'Coursing Field', 'mostPopularCourses': mostPopularCourses})
|
<commit_before>from django.shortcuts import render, redirect, get_object_or_404
from django.urls import reverse
from mainmodels.models import Course, Review
# Create your views here.
def index(req):
mostPopularCourses = Course.objects.raw('SELECT * FROM mainmodels_course as main_course JOIN (SELECT main_review.course_id, AVG(main_review.rating) as "average_rating" FROM mainmodels_review as main_review) as avg_main_review ON main_course.courseID = avg_main_review.course_id ORDER BY avg_main_review.average_rating;')
return render(req, 'index/main.html', {'pageTitle': 'Coursing Field', 'mostPopularCourses': mostPopularCourses})
<commit_msg>Add LIMIT to 10 rows<commit_after>
|
from django.shortcuts import render, redirect, get_object_or_404
from django.urls import reverse
from mainmodels.models import Course, Review
# Create your views here.
def index(req):
mostPopularCourses = Course.objects.raw('SELECT * FROM mainmodels_course as main_course JOIN (SELECT main_review.course_id, AVG(main_review.rating) as "average_rating" FROM mainmodels_review as main_review) as avg_main_review ON main_course.courseID = avg_main_review.course_id ORDER BY avg_main_review.average_rating LIMIT 10;')
return render(req, 'index/main.html', {'pageTitle': 'Coursing Field', 'mostPopularCourses': mostPopularCourses})
|
from django.shortcuts import render, redirect, get_object_or_404
from django.urls import reverse
from mainmodels.models import Course, Review
# Create your views here.
def index(req):
mostPopularCourses = Course.objects.raw('SELECT * FROM mainmodels_course as main_course JOIN (SELECT main_review.course_id, AVG(main_review.rating) as "average_rating" FROM mainmodels_review as main_review) as avg_main_review ON main_course.courseID = avg_main_review.course_id ORDER BY avg_main_review.average_rating;')
return render(req, 'index/main.html', {'pageTitle': 'Coursing Field', 'mostPopularCourses': mostPopularCourses})
Add LIMIT to 10 rowsfrom django.shortcuts import render, redirect, get_object_or_404
from django.urls import reverse
from mainmodels.models import Course, Review
# Create your views here.
def index(req):
mostPopularCourses = Course.objects.raw('SELECT * FROM mainmodels_course as main_course JOIN (SELECT main_review.course_id, AVG(main_review.rating) as "average_rating" FROM mainmodels_review as main_review) as avg_main_review ON main_course.courseID = avg_main_review.course_id ORDER BY avg_main_review.average_rating LIMIT 10;')
return render(req, 'index/main.html', {'pageTitle': 'Coursing Field', 'mostPopularCourses': mostPopularCourses})
|
<commit_before>from django.shortcuts import render, redirect, get_object_or_404
from django.urls import reverse
from mainmodels.models import Course, Review
# Create your views here.
def index(req):
mostPopularCourses = Course.objects.raw('SELECT * FROM mainmodels_course as main_course JOIN (SELECT main_review.course_id, AVG(main_review.rating) as "average_rating" FROM mainmodels_review as main_review) as avg_main_review ON main_course.courseID = avg_main_review.course_id ORDER BY avg_main_review.average_rating;')
return render(req, 'index/main.html', {'pageTitle': 'Coursing Field', 'mostPopularCourses': mostPopularCourses})
<commit_msg>Add LIMIT to 10 rows<commit_after>from django.shortcuts import render, redirect, get_object_or_404
from django.urls import reverse
from mainmodels.models import Course, Review
# Create your views here.
def index(req):
mostPopularCourses = Course.objects.raw('SELECT * FROM mainmodels_course as main_course JOIN (SELECT main_review.course_id, AVG(main_review.rating) as "average_rating" FROM mainmodels_review as main_review) as avg_main_review ON main_course.courseID = avg_main_review.course_id ORDER BY avg_main_review.average_rating LIMIT 10;')
return render(req, 'index/main.html', {'pageTitle': 'Coursing Field', 'mostPopularCourses': mostPopularCourses})
|
f11528381ba055ebc6042bde4cb35e0dd0512a3c
|
wandb/integration/sagemaker/resources.py
|
wandb/integration/sagemaker/resources.py
|
import json
import os
import socket
from . import files as sm_files
def parse_sm_secrets():
"""We read our api_key from secrets.env in SageMaker"""
env_dict = dict()
# Set secret variables
if os.path.exists(sm_files.SM_SECRETS):
for line in open(sm_files.SM_SECRETS, "r"):
key, val = line.strip().split("=", 1)
env_dict[key] = val
return env_dict
def parse_sm_resources():
run_dict = dict()
env_dict = dict()
run_id = os.getenv("TRAINING_JOB_NAME")
if run_id:
run_dict["run_id"] = "-".join(
[run_id, os.getenv("CURRENT_HOST", socket.gethostname())]
)
conf = json.load(open(sm_files.SM_RESOURCE_CONFIG))
if len(conf["hosts"]) > 1:
run_dict["run_group"] = os.getenv("TRAINING_JOB_NAME")
env_dict = parse_sm_secrets()
return run_dict, env_dict
|
import json
import os
import socket
from . import files as sm_files
def parse_sm_secrets():
"""We read our api_key from secrets.env in SageMaker"""
env_dict = dict()
# Set secret variables
if os.path.exists(sm_files.SM_SECRETS):
for line in open(sm_files.SM_SECRETS, "r"):
key, val = line.strip().split("=", 1)
env_dict[key] = val
return env_dict
def parse_sm_resources():
run_dict = dict()
env_dict = dict()
run_id = os.getenv("TRAINING_JOB_NAME")
if run_id and os.getenv("WANDB_RUN_ID") is None:
run_dict["run_id"] = "-".join(
[run_id, os.getenv("CURRENT_HOST", socket.gethostname())]
)
conf = json.load(open(sm_files.SM_RESOURCE_CONFIG))
if len(conf["hosts"]) > 1:
run_dict["run_group"] = os.getenv("TRAINING_JOB_NAME")
env_dict = parse_sm_secrets()
return run_dict, env_dict
|
Fix issue where sagemaker run ids break run queues
|
[WB-8591] Fix issue where sagemaker run ids break run queues
|
Python
|
mit
|
wandb/client,wandb/client,wandb/client
|
import json
import os
import socket
from . import files as sm_files
def parse_sm_secrets():
"""We read our api_key from secrets.env in SageMaker"""
env_dict = dict()
# Set secret variables
if os.path.exists(sm_files.SM_SECRETS):
for line in open(sm_files.SM_SECRETS, "r"):
key, val = line.strip().split("=", 1)
env_dict[key] = val
return env_dict
def parse_sm_resources():
run_dict = dict()
env_dict = dict()
run_id = os.getenv("TRAINING_JOB_NAME")
if run_id:
run_dict["run_id"] = "-".join(
[run_id, os.getenv("CURRENT_HOST", socket.gethostname())]
)
conf = json.load(open(sm_files.SM_RESOURCE_CONFIG))
if len(conf["hosts"]) > 1:
run_dict["run_group"] = os.getenv("TRAINING_JOB_NAME")
env_dict = parse_sm_secrets()
return run_dict, env_dict
[WB-8591] Fix issue where sagemaker run ids break run queues
|
import json
import os
import socket
from . import files as sm_files
def parse_sm_secrets():
"""We read our api_key from secrets.env in SageMaker"""
env_dict = dict()
# Set secret variables
if os.path.exists(sm_files.SM_SECRETS):
for line in open(sm_files.SM_SECRETS, "r"):
key, val = line.strip().split("=", 1)
env_dict[key] = val
return env_dict
def parse_sm_resources():
run_dict = dict()
env_dict = dict()
run_id = os.getenv("TRAINING_JOB_NAME")
if run_id and os.getenv("WANDB_RUN_ID") is None:
run_dict["run_id"] = "-".join(
[run_id, os.getenv("CURRENT_HOST", socket.gethostname())]
)
conf = json.load(open(sm_files.SM_RESOURCE_CONFIG))
if len(conf["hosts"]) > 1:
run_dict["run_group"] = os.getenv("TRAINING_JOB_NAME")
env_dict = parse_sm_secrets()
return run_dict, env_dict
|
<commit_before>import json
import os
import socket
from . import files as sm_files
def parse_sm_secrets():
"""We read our api_key from secrets.env in SageMaker"""
env_dict = dict()
# Set secret variables
if os.path.exists(sm_files.SM_SECRETS):
for line in open(sm_files.SM_SECRETS, "r"):
key, val = line.strip().split("=", 1)
env_dict[key] = val
return env_dict
def parse_sm_resources():
run_dict = dict()
env_dict = dict()
run_id = os.getenv("TRAINING_JOB_NAME")
if run_id:
run_dict["run_id"] = "-".join(
[run_id, os.getenv("CURRENT_HOST", socket.gethostname())]
)
conf = json.load(open(sm_files.SM_RESOURCE_CONFIG))
if len(conf["hosts"]) > 1:
run_dict["run_group"] = os.getenv("TRAINING_JOB_NAME")
env_dict = parse_sm_secrets()
return run_dict, env_dict
<commit_msg>[WB-8591] Fix issue where sagemaker run ids break run queues<commit_after>
|
import json
import os
import socket
from . import files as sm_files
def parse_sm_secrets():
"""We read our api_key from secrets.env in SageMaker"""
env_dict = dict()
# Set secret variables
if os.path.exists(sm_files.SM_SECRETS):
for line in open(sm_files.SM_SECRETS, "r"):
key, val = line.strip().split("=", 1)
env_dict[key] = val
return env_dict
def parse_sm_resources():
run_dict = dict()
env_dict = dict()
run_id = os.getenv("TRAINING_JOB_NAME")
if run_id and os.getenv("WANDB_RUN_ID") is None:
run_dict["run_id"] = "-".join(
[run_id, os.getenv("CURRENT_HOST", socket.gethostname())]
)
conf = json.load(open(sm_files.SM_RESOURCE_CONFIG))
if len(conf["hosts"]) > 1:
run_dict["run_group"] = os.getenv("TRAINING_JOB_NAME")
env_dict = parse_sm_secrets()
return run_dict, env_dict
|
import json
import os
import socket
from . import files as sm_files
def parse_sm_secrets():
"""We read our api_key from secrets.env in SageMaker"""
env_dict = dict()
# Set secret variables
if os.path.exists(sm_files.SM_SECRETS):
for line in open(sm_files.SM_SECRETS, "r"):
key, val = line.strip().split("=", 1)
env_dict[key] = val
return env_dict
def parse_sm_resources():
run_dict = dict()
env_dict = dict()
run_id = os.getenv("TRAINING_JOB_NAME")
if run_id:
run_dict["run_id"] = "-".join(
[run_id, os.getenv("CURRENT_HOST", socket.gethostname())]
)
conf = json.load(open(sm_files.SM_RESOURCE_CONFIG))
if len(conf["hosts"]) > 1:
run_dict["run_group"] = os.getenv("TRAINING_JOB_NAME")
env_dict = parse_sm_secrets()
return run_dict, env_dict
[WB-8591] Fix issue where sagemaker run ids break run queuesimport json
import os
import socket
from . import files as sm_files
def parse_sm_secrets():
"""We read our api_key from secrets.env in SageMaker"""
env_dict = dict()
# Set secret variables
if os.path.exists(sm_files.SM_SECRETS):
for line in open(sm_files.SM_SECRETS, "r"):
key, val = line.strip().split("=", 1)
env_dict[key] = val
return env_dict
def parse_sm_resources():
run_dict = dict()
env_dict = dict()
run_id = os.getenv("TRAINING_JOB_NAME")
if run_id and os.getenv("WANDB_RUN_ID") is None:
run_dict["run_id"] = "-".join(
[run_id, os.getenv("CURRENT_HOST", socket.gethostname())]
)
conf = json.load(open(sm_files.SM_RESOURCE_CONFIG))
if len(conf["hosts"]) > 1:
run_dict["run_group"] = os.getenv("TRAINING_JOB_NAME")
env_dict = parse_sm_secrets()
return run_dict, env_dict
|
<commit_before>import json
import os
import socket
from . import files as sm_files
def parse_sm_secrets():
"""We read our api_key from secrets.env in SageMaker"""
env_dict = dict()
# Set secret variables
if os.path.exists(sm_files.SM_SECRETS):
for line in open(sm_files.SM_SECRETS, "r"):
key, val = line.strip().split("=", 1)
env_dict[key] = val
return env_dict
def parse_sm_resources():
run_dict = dict()
env_dict = dict()
run_id = os.getenv("TRAINING_JOB_NAME")
if run_id:
run_dict["run_id"] = "-".join(
[run_id, os.getenv("CURRENT_HOST", socket.gethostname())]
)
conf = json.load(open(sm_files.SM_RESOURCE_CONFIG))
if len(conf["hosts"]) > 1:
run_dict["run_group"] = os.getenv("TRAINING_JOB_NAME")
env_dict = parse_sm_secrets()
return run_dict, env_dict
<commit_msg>[WB-8591] Fix issue where sagemaker run ids break run queues<commit_after>import json
import os
import socket
from . import files as sm_files
def parse_sm_secrets():
"""We read our api_key from secrets.env in SageMaker"""
env_dict = dict()
# Set secret variables
if os.path.exists(sm_files.SM_SECRETS):
for line in open(sm_files.SM_SECRETS, "r"):
key, val = line.strip().split("=", 1)
env_dict[key] = val
return env_dict
def parse_sm_resources():
run_dict = dict()
env_dict = dict()
run_id = os.getenv("TRAINING_JOB_NAME")
if run_id and os.getenv("WANDB_RUN_ID") is None:
run_dict["run_id"] = "-".join(
[run_id, os.getenv("CURRENT_HOST", socket.gethostname())]
)
conf = json.load(open(sm_files.SM_RESOURCE_CONFIG))
if len(conf["hosts"]) > 1:
run_dict["run_group"] = os.getenv("TRAINING_JOB_NAME")
env_dict = parse_sm_secrets()
return run_dict, env_dict
|
c48a7617f0555b264ac25ec76db2bdfe949a1695
|
dimod/package_info.py
|
dimod/package_info.py
|
# Copyright 2018 D-Wave Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ================================================================================================
__version__ = '0.8.16'
__author__ = 'D-Wave Systems Inc.'
__authoremail__ = 'acondello@dwavesys.com'
__description__ = 'A shared API for binary quadratic model samplers.'
|
# Copyright 2018 D-Wave Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ================================================================================================
__version__ = '0.8.17'
__author__ = 'D-Wave Systems Inc.'
__authoremail__ = 'acondello@dwavesys.com'
__description__ = 'A shared API for binary quadratic model samplers.'
|
Update version 0.8.16 -> 0.8.17
|
Update version 0.8.16 -> 0.8.17
Fixes
-----
* Sample sets with non-integer sample dtypes now correctly serialize
|
Python
|
apache-2.0
|
dwavesystems/dimod,dwavesystems/dimod
|
# Copyright 2018 D-Wave Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ================================================================================================
__version__ = '0.8.16'
__author__ = 'D-Wave Systems Inc.'
__authoremail__ = 'acondello@dwavesys.com'
__description__ = 'A shared API for binary quadratic model samplers.'
Update version 0.8.16 -> 0.8.17
Fixes
-----
* Sample sets with non-integer sample dtypes now correctly serialize
|
# Copyright 2018 D-Wave Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ================================================================================================
__version__ = '0.8.17'
__author__ = 'D-Wave Systems Inc.'
__authoremail__ = 'acondello@dwavesys.com'
__description__ = 'A shared API for binary quadratic model samplers.'
|
<commit_before># Copyright 2018 D-Wave Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ================================================================================================
__version__ = '0.8.16'
__author__ = 'D-Wave Systems Inc.'
__authoremail__ = 'acondello@dwavesys.com'
__description__ = 'A shared API for binary quadratic model samplers.'
<commit_msg>Update version 0.8.16 -> 0.8.17
Fixes
-----
* Sample sets with non-integer sample dtypes now correctly serialize<commit_after>
|
# Copyright 2018 D-Wave Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ================================================================================================
__version__ = '0.8.17'
__author__ = 'D-Wave Systems Inc.'
__authoremail__ = 'acondello@dwavesys.com'
__description__ = 'A shared API for binary quadratic model samplers.'
|
# Copyright 2018 D-Wave Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ================================================================================================
__version__ = '0.8.16'
__author__ = 'D-Wave Systems Inc.'
__authoremail__ = 'acondello@dwavesys.com'
__description__ = 'A shared API for binary quadratic model samplers.'
Update version 0.8.16 -> 0.8.17
Fixes
-----
* Sample sets with non-integer sample dtypes now correctly serialize
# Copyright 2018 D-Wave Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ================================================================================================
__version__ = '0.8.17'
__author__ = 'D-Wave Systems Inc.'
__authoremail__ = 'acondello@dwavesys.com'
__description__ = 'A shared API for binary quadratic model samplers.'
|
<commit_before># Copyright 2018 D-Wave Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ================================================================================================
__version__ = '0.8.16'
__author__ = 'D-Wave Systems Inc.'
__authoremail__ = 'acondello@dwavesys.com'
__description__ = 'A shared API for binary quadratic model samplers.'
<commit_msg>Update version 0.8.16 -> 0.8.17
Fixes
-----
* Sample sets with non-integer sample dtypes now correctly serialize<commit_after># Copyright 2018 D-Wave Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ================================================================================================
__version__ = '0.8.17'
__author__ = 'D-Wave Systems Inc.'
__authoremail__ = 'acondello@dwavesys.com'
__description__ = 'A shared API for binary quadratic model samplers.'
|
3bd116a301ce8de9d3ea1b0dd4c0a969c278455a
|
wsgi.py
|
wsgi.py
|
from shale import app
if __name__ == '__main__':
app.run(
host='127.0.0.1',
)
|
from shale import app
if __name__ == '__main__':
app.run()
|
Revert "bind flask to 127.0.0.1"
|
Revert "bind flask to 127.0.0.1"
This reverts commit 097b126e511d3d7bf5f431cc6df552843fad4477.
I guess I was way wrong about that.
|
Python
|
mit
|
mhluongo/shale,mhluongo/shale,cardforcoin/shale,cardforcoin/shale
|
from shale import app
if __name__ == '__main__':
app.run(
host='127.0.0.1',
)
Revert "bind flask to 127.0.0.1"
This reverts commit 097b126e511d3d7bf5f431cc6df552843fad4477.
I guess I was way wrong about that.
|
from shale import app
if __name__ == '__main__':
app.run()
|
<commit_before>from shale import app
if __name__ == '__main__':
app.run(
host='127.0.0.1',
)
<commit_msg>Revert "bind flask to 127.0.0.1"
This reverts commit 097b126e511d3d7bf5f431cc6df552843fad4477.
I guess I was way wrong about that.<commit_after>
|
from shale import app
if __name__ == '__main__':
app.run()
|
from shale import app
if __name__ == '__main__':
app.run(
host='127.0.0.1',
)
Revert "bind flask to 127.0.0.1"
This reverts commit 097b126e511d3d7bf5f431cc6df552843fad4477.
I guess I was way wrong about that.
from shale import app
if __name__ == '__main__':
app.run()
|
<commit_before>from shale import app
if __name__ == '__main__':
app.run(
host='127.0.0.1',
)
<commit_msg>Revert "bind flask to 127.0.0.1"
This reverts commit 097b126e511d3d7bf5f431cc6df552843fad4477.
I guess I was way wrong about that.<commit_after>from shale import app
if __name__ == '__main__':
app.run()
|
d77c0dd6b4b7718421bfde323b8ff4d9667fb696
|
jasylibrary.py
|
jasylibrary.py
|
#import os, json
#from jasy.core.Util import executeCommand
#import jasy.core.Console as Console
#import urllib.parse
# Little helper to allow python modules in current jasylibrarys path
import sys, os.path, inspect
filename = inspect.getframeinfo(inspect.currentframe()).filename
path = os.path.dirname(os.path.abspath(filename))
sys.path.append(path)
import konstrukteur.Konstrukteur
import jasy.asset.Manager
@share
def build(profile, regenerate = False):
""" Build static website """
def getPartUrl(part, type):
folder = ""
if type == "css":
folder = profile.getCssFolder()
outputPath = folder #os.path.join(profile.getDestinationPath(), folder)
filename = profile.expandFileName("%s/%s-{{id}}.%s" % (outputPath, part, type))
return filename
profile.addCommand("part.url", getPartUrl, "url")
for permutation in profile.permutate():
konstrukteur.Konstrukteur.build(regenerate, profile)
|
#import os, json
#from jasy.core.Util import executeCommand
#import jasy.core.Console as Console
#import urllib.parse
# Little helper to allow python modules in current jasylibrarys path
import sys, os.path, inspect
filename = inspect.getframeinfo(inspect.currentframe()).filename
path = os.path.dirname(os.path.abspath(filename))
sys.path.append(path)
import konstrukteur.Konstrukteur
import jasy.asset.Manager
@share
def build(profile, regenerate = False):
""" Build static website """
def getPartUrl(part, type):
folder = ""
if type == "css":
folder = profile.getCssFolder()
#outputPath = folder
outputPath = os.path.relpath("%s/%s" % (profile.getDestinationPath(), folder), profile.getWorkingPath())
filename = profile.expandFileName("%s/%s-{{id}}.%s" % (outputPath, part, type))
return filename
profile.addCommand("part.url", getPartUrl, "url")
for permutation in profile.permutate():
konstrukteur.Konstrukteur.build(regenerate, profile)
|
Fix path resolving in part.url
|
Fix path resolving in part.url
|
Python
|
mit
|
fastner/konstrukteur,fastner/konstrukteur,fastner/konstrukteur
|
#import os, json
#from jasy.core.Util import executeCommand
#import jasy.core.Console as Console
#import urllib.parse
# Little helper to allow python modules in current jasylibrarys path
import sys, os.path, inspect
filename = inspect.getframeinfo(inspect.currentframe()).filename
path = os.path.dirname(os.path.abspath(filename))
sys.path.append(path)
import konstrukteur.Konstrukteur
import jasy.asset.Manager
@share
def build(profile, regenerate = False):
""" Build static website """
def getPartUrl(part, type):
folder = ""
if type == "css":
folder = profile.getCssFolder()
outputPath = folder #os.path.join(profile.getDestinationPath(), folder)
filename = profile.expandFileName("%s/%s-{{id}}.%s" % (outputPath, part, type))
return filename
profile.addCommand("part.url", getPartUrl, "url")
for permutation in profile.permutate():
konstrukteur.Konstrukteur.build(regenerate, profile)
Fix path resolving in part.url
|
#import os, json
#from jasy.core.Util import executeCommand
#import jasy.core.Console as Console
#import urllib.parse
# Little helper to allow python modules in current jasylibrarys path
import sys, os.path, inspect
filename = inspect.getframeinfo(inspect.currentframe()).filename
path = os.path.dirname(os.path.abspath(filename))
sys.path.append(path)
import konstrukteur.Konstrukteur
import jasy.asset.Manager
@share
def build(profile, regenerate = False):
""" Build static website """
def getPartUrl(part, type):
folder = ""
if type == "css":
folder = profile.getCssFolder()
#outputPath = folder
outputPath = os.path.relpath("%s/%s" % (profile.getDestinationPath(), folder), profile.getWorkingPath())
filename = profile.expandFileName("%s/%s-{{id}}.%s" % (outputPath, part, type))
return filename
profile.addCommand("part.url", getPartUrl, "url")
for permutation in profile.permutate():
konstrukteur.Konstrukteur.build(regenerate, profile)
|
<commit_before>#import os, json
#from jasy.core.Util import executeCommand
#import jasy.core.Console as Console
#import urllib.parse
# Little helper to allow python modules in current jasylibrarys path
import sys, os.path, inspect
filename = inspect.getframeinfo(inspect.currentframe()).filename
path = os.path.dirname(os.path.abspath(filename))
sys.path.append(path)
import konstrukteur.Konstrukteur
import jasy.asset.Manager
@share
def build(profile, regenerate = False):
""" Build static website """
def getPartUrl(part, type):
folder = ""
if type == "css":
folder = profile.getCssFolder()
outputPath = folder #os.path.join(profile.getDestinationPath(), folder)
filename = profile.expandFileName("%s/%s-{{id}}.%s" % (outputPath, part, type))
return filename
profile.addCommand("part.url", getPartUrl, "url")
for permutation in profile.permutate():
konstrukteur.Konstrukteur.build(regenerate, profile)
<commit_msg>Fix path resolving in part.url<commit_after>
|
#import os, json
#from jasy.core.Util import executeCommand
#import jasy.core.Console as Console
#import urllib.parse
# Little helper to allow python modules in current jasylibrarys path
import sys, os.path, inspect
filename = inspect.getframeinfo(inspect.currentframe()).filename
path = os.path.dirname(os.path.abspath(filename))
sys.path.append(path)
import konstrukteur.Konstrukteur
import jasy.asset.Manager
@share
def build(profile, regenerate = False):
""" Build static website """
def getPartUrl(part, type):
folder = ""
if type == "css":
folder = profile.getCssFolder()
#outputPath = folder
outputPath = os.path.relpath("%s/%s" % (profile.getDestinationPath(), folder), profile.getWorkingPath())
filename = profile.expandFileName("%s/%s-{{id}}.%s" % (outputPath, part, type))
return filename
profile.addCommand("part.url", getPartUrl, "url")
for permutation in profile.permutate():
konstrukteur.Konstrukteur.build(regenerate, profile)
|
#import os, json
#from jasy.core.Util import executeCommand
#import jasy.core.Console as Console
#import urllib.parse
# Little helper to allow python modules in current jasylibrarys path
import sys, os.path, inspect
filename = inspect.getframeinfo(inspect.currentframe()).filename
path = os.path.dirname(os.path.abspath(filename))
sys.path.append(path)
import konstrukteur.Konstrukteur
import jasy.asset.Manager
@share
def build(profile, regenerate = False):
""" Build static website """
def getPartUrl(part, type):
folder = ""
if type == "css":
folder = profile.getCssFolder()
outputPath = folder #os.path.join(profile.getDestinationPath(), folder)
filename = profile.expandFileName("%s/%s-{{id}}.%s" % (outputPath, part, type))
return filename
profile.addCommand("part.url", getPartUrl, "url")
for permutation in profile.permutate():
konstrukteur.Konstrukteur.build(regenerate, profile)
Fix path resolving in part.url
#import os, json
#from jasy.core.Util import executeCommand
#import jasy.core.Console as Console
#import urllib.parse
# Little helper to allow python modules in current jasylibrarys path
import sys, os.path, inspect
filename = inspect.getframeinfo(inspect.currentframe()).filename
path = os.path.dirname(os.path.abspath(filename))
sys.path.append(path)
import konstrukteur.Konstrukteur
import jasy.asset.Manager
@share
def build(profile, regenerate = False):
""" Build static website """
def getPartUrl(part, type):
folder = ""
if type == "css":
folder = profile.getCssFolder()
#outputPath = folder
outputPath = os.path.relpath("%s/%s" % (profile.getDestinationPath(), folder), profile.getWorkingPath())
filename = profile.expandFileName("%s/%s-{{id}}.%s" % (outputPath, part, type))
return filename
profile.addCommand("part.url", getPartUrl, "url")
for permutation in profile.permutate():
konstrukteur.Konstrukteur.build(regenerate, profile)
|
<commit_before>#import os, json
#from jasy.core.Util import executeCommand
#import jasy.core.Console as Console
#import urllib.parse
# Little helper to allow python modules in current jasylibrarys path
import sys, os.path, inspect
filename = inspect.getframeinfo(inspect.currentframe()).filename
path = os.path.dirname(os.path.abspath(filename))
sys.path.append(path)
import konstrukteur.Konstrukteur
import jasy.asset.Manager
@share
def build(profile, regenerate = False):
""" Build static website """
def getPartUrl(part, type):
folder = ""
if type == "css":
folder = profile.getCssFolder()
outputPath = folder #os.path.join(profile.getDestinationPath(), folder)
filename = profile.expandFileName("%s/%s-{{id}}.%s" % (outputPath, part, type))
return filename
profile.addCommand("part.url", getPartUrl, "url")
for permutation in profile.permutate():
konstrukteur.Konstrukteur.build(regenerate, profile)
<commit_msg>Fix path resolving in part.url<commit_after>#import os, json
#from jasy.core.Util import executeCommand
#import jasy.core.Console as Console
#import urllib.parse
# Little helper to allow python modules in current jasylibrarys path
import sys, os.path, inspect
filename = inspect.getframeinfo(inspect.currentframe()).filename
path = os.path.dirname(os.path.abspath(filename))
sys.path.append(path)
import konstrukteur.Konstrukteur
import jasy.asset.Manager
@share
def build(profile, regenerate = False):
""" Build static website """
def getPartUrl(part, type):
folder = ""
if type == "css":
folder = profile.getCssFolder()
#outputPath = folder
outputPath = os.path.relpath("%s/%s" % (profile.getDestinationPath(), folder), profile.getWorkingPath())
filename = profile.expandFileName("%s/%s-{{id}}.%s" % (outputPath, part, type))
return filename
profile.addCommand("part.url", getPartUrl, "url")
for permutation in profile.permutate():
konstrukteur.Konstrukteur.build(regenerate, profile)
|
4620502ff75cab02650a0e28628afae27084fdb4
|
magnum_ui/version.py
|
magnum_ui/version.py
|
import pbr.version
version_info = pbr.version.VersionInfo('magnum-ui')
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pbr.version
version_info = pbr.version.VersionInfo('magnum-ui')
|
Add Apache 2.0 license to source file
|
Add Apache 2.0 license to source file
As per OpenStack licensing guide lines [1]:
[H102 H103] Newly contributed Source Code should be licensed under
the Apache 2.0 license.
[H104] Files with no code shouldn't contain any license header nor
comments, and must be left completely empty.
[1] http://docs.openstack.org/developer/hacking/#openstack-licensing
Change-Id: I3c3b43af534dbfd0a0b4854265a42999c14c53b8
|
Python
|
apache-2.0
|
openstack/magnum-ui,openstack/magnum-ui,openstack/magnum-ui,openstack/magnum-ui
|
import pbr.version
version_info = pbr.version.VersionInfo('magnum-ui')
Add Apache 2.0 license to source file
As per OpenStack licensing guide lines [1]:
[H102 H103] Newly contributed Source Code should be licensed under
the Apache 2.0 license.
[H104] Files with no code shouldn't contain any license header nor
comments, and must be left completely empty.
[1] http://docs.openstack.org/developer/hacking/#openstack-licensing
Change-Id: I3c3b43af534dbfd0a0b4854265a42999c14c53b8
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pbr.version
version_info = pbr.version.VersionInfo('magnum-ui')
|
<commit_before>import pbr.version
version_info = pbr.version.VersionInfo('magnum-ui')
<commit_msg>Add Apache 2.0 license to source file
As per OpenStack licensing guide lines [1]:
[H102 H103] Newly contributed Source Code should be licensed under
the Apache 2.0 license.
[H104] Files with no code shouldn't contain any license header nor
comments, and must be left completely empty.
[1] http://docs.openstack.org/developer/hacking/#openstack-licensing
Change-Id: I3c3b43af534dbfd0a0b4854265a42999c14c53b8<commit_after>
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pbr.version
version_info = pbr.version.VersionInfo('magnum-ui')
|
import pbr.version
version_info = pbr.version.VersionInfo('magnum-ui')
Add Apache 2.0 license to source file
As per OpenStack licensing guide lines [1]:
[H102 H103] Newly contributed Source Code should be licensed under
the Apache 2.0 license.
[H104] Files with no code shouldn't contain any license header nor
comments, and must be left completely empty.
[1] http://docs.openstack.org/developer/hacking/#openstack-licensing
Change-Id: I3c3b43af534dbfd0a0b4854265a42999c14c53b8
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pbr.version
version_info = pbr.version.VersionInfo('magnum-ui')
|
<commit_before>import pbr.version
version_info = pbr.version.VersionInfo('magnum-ui')
<commit_msg>Add Apache 2.0 license to source file
As per OpenStack licensing guide lines [1]:
[H102 H103] Newly contributed Source Code should be licensed under
the Apache 2.0 license.
[H104] Files with no code shouldn't contain any license header nor
comments, and must be left completely empty.
[1] http://docs.openstack.org/developer/hacking/#openstack-licensing
Change-Id: I3c3b43af534dbfd0a0b4854265a42999c14c53b8<commit_after># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pbr.version
version_info = pbr.version.VersionInfo('magnum-ui')
|
b7e38f3fc299d906ab81ab7826af96ea4769d066
|
fireplace/cards/wog/neutral_common.py
|
fireplace/cards/wog/neutral_common.py
|
from ..utils import *
##
# Minions
class OG_151:
"Tentacle of N'Zoth"
deathrattle = Hit(ALL_MINIONS, 1)
class OG_156:
"Bilefin Tidehunter"
play = Summon(CONTROLLER, "OG_156a")
class OG_158:
"Zealous Initiate"
deathrattle = Buff(RANDOM_FRIENDLY_MINION, "OG_158e")
OG_158e = buff(+1, +1)
class OG_323:
"Polluted Hoarder"
deathrattle = Draw(CONTROLLER)
|
from ..utils import *
##
# Minions
class OG_150:
"Aberrant Berserker"
enrage = Refresh(SELF, buff="OG_150e")
OG_150e = buff(atk=2)
class OG_151:
"Tentacle of N'Zoth"
deathrattle = Hit(ALL_MINIONS, 1)
class OG_156:
"Bilefin Tidehunter"
play = Summon(CONTROLLER, "OG_156a")
class OG_158:
"Zealous Initiate"
deathrattle = Buff(RANDOM_FRIENDLY_MINION, "OG_158e")
OG_158e = buff(+1, +1)
class OG_249:
"Infested Tauren"
deathrattle = Summon(CONTROLLER, "OG_249a")
class OG_256:
"Spawn of N'Zoth"
deathrattle = Buff(FRIENDLY_MINIONS, "OG_256e")
OG_256e = buff(+1, +1)
class OG_323:
"Polluted Hoarder"
deathrattle = Draw(CONTROLLER)
|
Implement Aberrant Berserker, Infested Tauren, and Spawn of N'Zoth
|
Implement Aberrant Berserker, Infested Tauren, and Spawn of N'Zoth
|
Python
|
agpl-3.0
|
NightKev/fireplace,jleclanche/fireplace,beheh/fireplace
|
from ..utils import *
##
# Minions
class OG_151:
"Tentacle of N'Zoth"
deathrattle = Hit(ALL_MINIONS, 1)
class OG_156:
"Bilefin Tidehunter"
play = Summon(CONTROLLER, "OG_156a")
class OG_158:
"Zealous Initiate"
deathrattle = Buff(RANDOM_FRIENDLY_MINION, "OG_158e")
OG_158e = buff(+1, +1)
class OG_323:
"Polluted Hoarder"
deathrattle = Draw(CONTROLLER)
Implement Aberrant Berserker, Infested Tauren, and Spawn of N'Zoth
|
from ..utils import *
##
# Minions
class OG_150:
"Aberrant Berserker"
enrage = Refresh(SELF, buff="OG_150e")
OG_150e = buff(atk=2)
class OG_151:
"Tentacle of N'Zoth"
deathrattle = Hit(ALL_MINIONS, 1)
class OG_156:
"Bilefin Tidehunter"
play = Summon(CONTROLLER, "OG_156a")
class OG_158:
"Zealous Initiate"
deathrattle = Buff(RANDOM_FRIENDLY_MINION, "OG_158e")
OG_158e = buff(+1, +1)
class OG_249:
"Infested Tauren"
deathrattle = Summon(CONTROLLER, "OG_249a")
class OG_256:
"Spawn of N'Zoth"
deathrattle = Buff(FRIENDLY_MINIONS, "OG_256e")
OG_256e = buff(+1, +1)
class OG_323:
"Polluted Hoarder"
deathrattle = Draw(CONTROLLER)
|
<commit_before>from ..utils import *
##
# Minions
class OG_151:
"Tentacle of N'Zoth"
deathrattle = Hit(ALL_MINIONS, 1)
class OG_156:
"Bilefin Tidehunter"
play = Summon(CONTROLLER, "OG_156a")
class OG_158:
"Zealous Initiate"
deathrattle = Buff(RANDOM_FRIENDLY_MINION, "OG_158e")
OG_158e = buff(+1, +1)
class OG_323:
"Polluted Hoarder"
deathrattle = Draw(CONTROLLER)
<commit_msg>Implement Aberrant Berserker, Infested Tauren, and Spawn of N'Zoth<commit_after>
|
from ..utils import *
##
# Minions
class OG_150:
"Aberrant Berserker"
enrage = Refresh(SELF, buff="OG_150e")
OG_150e = buff(atk=2)
class OG_151:
"Tentacle of N'Zoth"
deathrattle = Hit(ALL_MINIONS, 1)
class OG_156:
"Bilefin Tidehunter"
play = Summon(CONTROLLER, "OG_156a")
class OG_158:
"Zealous Initiate"
deathrattle = Buff(RANDOM_FRIENDLY_MINION, "OG_158e")
OG_158e = buff(+1, +1)
class OG_249:
"Infested Tauren"
deathrattle = Summon(CONTROLLER, "OG_249a")
class OG_256:
"Spawn of N'Zoth"
deathrattle = Buff(FRIENDLY_MINIONS, "OG_256e")
OG_256e = buff(+1, +1)
class OG_323:
"Polluted Hoarder"
deathrattle = Draw(CONTROLLER)
|
from ..utils import *
##
# Minions
class OG_151:
"Tentacle of N'Zoth"
deathrattle = Hit(ALL_MINIONS, 1)
class OG_156:
"Bilefin Tidehunter"
play = Summon(CONTROLLER, "OG_156a")
class OG_158:
"Zealous Initiate"
deathrattle = Buff(RANDOM_FRIENDLY_MINION, "OG_158e")
OG_158e = buff(+1, +1)
class OG_323:
"Polluted Hoarder"
deathrattle = Draw(CONTROLLER)
Implement Aberrant Berserker, Infested Tauren, and Spawn of N'Zoth
from ..utils import *
##
# Minions
class OG_150:
"Aberrant Berserker"
enrage = Refresh(SELF, buff="OG_150e")
OG_150e = buff(atk=2)
class OG_151:
"Tentacle of N'Zoth"
deathrattle = Hit(ALL_MINIONS, 1)
class OG_156:
"Bilefin Tidehunter"
play = Summon(CONTROLLER, "OG_156a")
class OG_158:
"Zealous Initiate"
deathrattle = Buff(RANDOM_FRIENDLY_MINION, "OG_158e")
OG_158e = buff(+1, +1)
class OG_249:
"Infested Tauren"
deathrattle = Summon(CONTROLLER, "OG_249a")
class OG_256:
"Spawn of N'Zoth"
deathrattle = Buff(FRIENDLY_MINIONS, "OG_256e")
OG_256e = buff(+1, +1)
class OG_323:
"Polluted Hoarder"
deathrattle = Draw(CONTROLLER)
|
<commit_before>from ..utils import *
##
# Minions
class OG_151:
"Tentacle of N'Zoth"
deathrattle = Hit(ALL_MINIONS, 1)
class OG_156:
"Bilefin Tidehunter"
play = Summon(CONTROLLER, "OG_156a")
class OG_158:
"Zealous Initiate"
deathrattle = Buff(RANDOM_FRIENDLY_MINION, "OG_158e")
OG_158e = buff(+1, +1)
class OG_323:
"Polluted Hoarder"
deathrattle = Draw(CONTROLLER)
<commit_msg>Implement Aberrant Berserker, Infested Tauren, and Spawn of N'Zoth<commit_after>from ..utils import *
##
# Minions
class OG_150:
"Aberrant Berserker"
enrage = Refresh(SELF, buff="OG_150e")
OG_150e = buff(atk=2)
class OG_151:
"Tentacle of N'Zoth"
deathrattle = Hit(ALL_MINIONS, 1)
class OG_156:
"Bilefin Tidehunter"
play = Summon(CONTROLLER, "OG_156a")
class OG_158:
"Zealous Initiate"
deathrattle = Buff(RANDOM_FRIENDLY_MINION, "OG_158e")
OG_158e = buff(+1, +1)
class OG_249:
"Infested Tauren"
deathrattle = Summon(CONTROLLER, "OG_249a")
class OG_256:
"Spawn of N'Zoth"
deathrattle = Buff(FRIENDLY_MINIONS, "OG_256e")
OG_256e = buff(+1, +1)
class OG_323:
"Polluted Hoarder"
deathrattle = Draw(CONTROLLER)
|
fc97ef04bd5e6f6483b7a26348c5168626180941
|
rafem/__init__.py
|
rafem/__init__.py
|
"""River Avulsion Module."""
from .riverbmi import BmiRiverModule
from .rivermodule import RiverModule
__all__ = ['BmiRiverModule', 'RiverModule']
|
"""River Avulsion Module."""
from .riverbmi import BmiRiverModule
from .rivermodule import rivermodule
__all__ = ['BmiRiverModule', 'rivermodule']
|
Rename package from avulsion to rafem.
|
Rename package from avulsion to rafem.
|
Python
|
mit
|
mcflugen/avulsion-bmi,katmratliff/avulsion-bmi
|
"""River Avulsion Module."""
from .riverbmi import BmiRiverModule
from .rivermodule import RiverModule
__all__ = ['BmiRiverModule', 'RiverModule']
Rename package from avulsion to rafem.
|
"""River Avulsion Module."""
from .riverbmi import BmiRiverModule
from .rivermodule import rivermodule
__all__ = ['BmiRiverModule', 'rivermodule']
|
<commit_before>"""River Avulsion Module."""
from .riverbmi import BmiRiverModule
from .rivermodule import RiverModule
__all__ = ['BmiRiverModule', 'RiverModule']
<commit_msg>Rename package from avulsion to rafem.<commit_after>
|
"""River Avulsion Module."""
from .riverbmi import BmiRiverModule
from .rivermodule import rivermodule
__all__ = ['BmiRiverModule', 'rivermodule']
|
"""River Avulsion Module."""
from .riverbmi import BmiRiverModule
from .rivermodule import RiverModule
__all__ = ['BmiRiverModule', 'RiverModule']
Rename package from avulsion to rafem.
"""River Avulsion Module."""
from .riverbmi import BmiRiverModule
from .rivermodule import rivermodule
__all__ = ['BmiRiverModule', 'rivermodule']
|
<commit_before>"""River Avulsion Module."""
from .riverbmi import BmiRiverModule
from .rivermodule import RiverModule
__all__ = ['BmiRiverModule', 'RiverModule']
<commit_msg>Rename package from avulsion to rafem.<commit_after>"""River Avulsion Module."""
from .riverbmi import BmiRiverModule
from .rivermodule import rivermodule
__all__ = ['BmiRiverModule', 'rivermodule']
|
6fba51e47053d60eb8cb2f44178e548d8f2c3a8e
|
api/urls.py
|
api/urls.py
|
from django.conf.urls import patterns, url, include
from api import views
# The API URLs are now determined automatically by the router.
# Additionally, we include the login URLs for the browseable API.
urlpatterns = patterns('',
# list of all readings
url(r'^api/all', views.ApiRoot.as_view()),
# list of all readings from a single sensor
url(r'^api/readings/$', views.Read.as_view()),
# put method to update data
url(r'^api/record/$', views.record),
url(r'^api/create/$', views.CreateRecord.as_view()),
)
|
from django.conf.urls import url, include
from api.views import ReadingViewSet, UserViewSet
from rest_framework.routers import DefaultRouter
router = DefaultRouter()
router.register(r'readings', ReadingViewSet)
router.register(r'users', UserViewSet)
urlpatterns = [
url(r'^', include(router.urls)),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework'))
]
|
Use routers instead of manual routes
|
Use routers instead of manual routes
|
Python
|
bsd-3-clause
|
codefornigeria/dustduino-server,codefornigeria/dustduino-server,codefornigeria/dustduino-server,developmentseed/dustduino-server,developmentseed/dustduino-server,developmentseed/dustduino-server
|
from django.conf.urls import patterns, url, include
from api import views
# The API URLs are now determined automatically by the router.
# Additionally, we include the login URLs for the browseable API.
urlpatterns = patterns('',
# list of all readings
url(r'^api/all', views.ApiRoot.as_view()),
# list of all readings from a single sensor
url(r'^api/readings/$', views.Read.as_view()),
# put method to update data
url(r'^api/record/$', views.record),
url(r'^api/create/$', views.CreateRecord.as_view()),
)
Use routers instead of manual routes
|
from django.conf.urls import url, include
from api.views import ReadingViewSet, UserViewSet
from rest_framework.routers import DefaultRouter
router = DefaultRouter()
router.register(r'readings', ReadingViewSet)
router.register(r'users', UserViewSet)
urlpatterns = [
url(r'^', include(router.urls)),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework'))
]
|
<commit_before>from django.conf.urls import patterns, url, include
from api import views
# The API URLs are now determined automatically by the router.
# Additionally, we include the login URLs for the browseable API.
urlpatterns = patterns('',
# list of all readings
url(r'^api/all', views.ApiRoot.as_view()),
# list of all readings from a single sensor
url(r'^api/readings/$', views.Read.as_view()),
# put method to update data
url(r'^api/record/$', views.record),
url(r'^api/create/$', views.CreateRecord.as_view()),
)
<commit_msg>Use routers instead of manual routes<commit_after>
|
from django.conf.urls import url, include
from api.views import ReadingViewSet, UserViewSet
from rest_framework.routers import DefaultRouter
router = DefaultRouter()
router.register(r'readings', ReadingViewSet)
router.register(r'users', UserViewSet)
urlpatterns = [
url(r'^', include(router.urls)),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework'))
]
|
from django.conf.urls import patterns, url, include
from api import views
# The API URLs are now determined automatically by the router.
# Additionally, we include the login URLs for the browseable API.
urlpatterns = patterns('',
# list of all readings
url(r'^api/all', views.ApiRoot.as_view()),
# list of all readings from a single sensor
url(r'^api/readings/$', views.Read.as_view()),
# put method to update data
url(r'^api/record/$', views.record),
url(r'^api/create/$', views.CreateRecord.as_view()),
)
Use routers instead of manual routes
from django.conf.urls import url, include
from api.views import ReadingViewSet, UserViewSet
from rest_framework.routers import DefaultRouter
router = DefaultRouter()
router.register(r'readings', ReadingViewSet)
router.register(r'users', UserViewSet)
urlpatterns = [
url(r'^', include(router.urls)),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework'))
]
|
<commit_before>from django.conf.urls import patterns, url, include
from api import views
# The API URLs are now determined automatically by the router.
# Additionally, we include the login URLs for the browseable API.
urlpatterns = patterns('',
# list of all readings
url(r'^api/all', views.ApiRoot.as_view()),
# list of all readings from a single sensor
url(r'^api/readings/$', views.Read.as_view()),
# put method to update data
url(r'^api/record/$', views.record),
url(r'^api/create/$', views.CreateRecord.as_view()),
)
<commit_msg>Use routers instead of manual routes<commit_after>from django.conf.urls import url, include
from api.views import ReadingViewSet, UserViewSet
from rest_framework.routers import DefaultRouter
router = DefaultRouter()
router.register(r'readings', ReadingViewSet)
router.register(r'users', UserViewSet)
urlpatterns = [
url(r'^', include(router.urls)),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework'))
]
|
7e87a91f48ef9d5a031033991ce68c2596193f01
|
tests/test_pipe.py
|
tests/test_pipe.py
|
from slug import Pipe
def test_goesthrough():
p = Pipe()
p.side_in.write(b"Hello")
p.side_in.close()
data = p.side_out.read()
assert data == b'Hello'
def test_eof():
p = Pipe()
p.side_in.write(b"spam")
data = p.side_out.read()
assert data == b'spam'
p.side_in.close()
data = p.side_out.read()
assert data == b''
|
import pytest
from slug import Pipe
def test_goesthrough():
p = Pipe()
p.side_in.write(b"Hello")
p.side_in.close()
data = p.side_out.read()
assert data == b'Hello'
def test_eof():
p = Pipe()
p.side_in.write(b"spam")
data = p.side_out.read()
assert data == b'spam'
p.side_in.close()
data = p.side_out.read()
assert data == b''
def test_iter():
p = Pipe()
p.side_in.write(b"Hello")
p.side_in.close()
riter = iter(p.side_out)
data = next(riter)
assert data == b'Hello'
with pytest.raises(StopIteration):
next(riter)
def test_iter_eof():
p = Pipe()
riter = iter(p.side_out)
p.side_in.write(b"Hello\n")
data = next(riter)
assert data == b'Hello\n'
p.side_in.close()
with pytest.raises(StopIteration):
next(riter)
|
Add iteration tests on pipes
|
Add iteration tests on pipes
|
Python
|
bsd-3-clause
|
xonsh/slug
|
from slug import Pipe
def test_goesthrough():
p = Pipe()
p.side_in.write(b"Hello")
p.side_in.close()
data = p.side_out.read()
assert data == b'Hello'
def test_eof():
p = Pipe()
p.side_in.write(b"spam")
data = p.side_out.read()
assert data == b'spam'
p.side_in.close()
data = p.side_out.read()
assert data == b''
Add iteration tests on pipes
|
import pytest
from slug import Pipe
def test_goesthrough():
p = Pipe()
p.side_in.write(b"Hello")
p.side_in.close()
data = p.side_out.read()
assert data == b'Hello'
def test_eof():
p = Pipe()
p.side_in.write(b"spam")
data = p.side_out.read()
assert data == b'spam'
p.side_in.close()
data = p.side_out.read()
assert data == b''
def test_iter():
p = Pipe()
p.side_in.write(b"Hello")
p.side_in.close()
riter = iter(p.side_out)
data = next(riter)
assert data == b'Hello'
with pytest.raises(StopIteration):
next(riter)
def test_iter_eof():
p = Pipe()
riter = iter(p.side_out)
p.side_in.write(b"Hello\n")
data = next(riter)
assert data == b'Hello\n'
p.side_in.close()
with pytest.raises(StopIteration):
next(riter)
|
<commit_before>from slug import Pipe
def test_goesthrough():
p = Pipe()
p.side_in.write(b"Hello")
p.side_in.close()
data = p.side_out.read()
assert data == b'Hello'
def test_eof():
p = Pipe()
p.side_in.write(b"spam")
data = p.side_out.read()
assert data == b'spam'
p.side_in.close()
data = p.side_out.read()
assert data == b''
<commit_msg>Add iteration tests on pipes<commit_after>
|
import pytest
from slug import Pipe
def test_goesthrough():
p = Pipe()
p.side_in.write(b"Hello")
p.side_in.close()
data = p.side_out.read()
assert data == b'Hello'
def test_eof():
p = Pipe()
p.side_in.write(b"spam")
data = p.side_out.read()
assert data == b'spam'
p.side_in.close()
data = p.side_out.read()
assert data == b''
def test_iter():
p = Pipe()
p.side_in.write(b"Hello")
p.side_in.close()
riter = iter(p.side_out)
data = next(riter)
assert data == b'Hello'
with pytest.raises(StopIteration):
next(riter)
def test_iter_eof():
p = Pipe()
riter = iter(p.side_out)
p.side_in.write(b"Hello\n")
data = next(riter)
assert data == b'Hello\n'
p.side_in.close()
with pytest.raises(StopIteration):
next(riter)
|
from slug import Pipe
def test_goesthrough():
p = Pipe()
p.side_in.write(b"Hello")
p.side_in.close()
data = p.side_out.read()
assert data == b'Hello'
def test_eof():
p = Pipe()
p.side_in.write(b"spam")
data = p.side_out.read()
assert data == b'spam'
p.side_in.close()
data = p.side_out.read()
assert data == b''
Add iteration tests on pipes
import pytest
from slug import Pipe
def test_goesthrough():
p = Pipe()
p.side_in.write(b"Hello")
p.side_in.close()
data = p.side_out.read()
assert data == b'Hello'
def test_eof():
p = Pipe()
p.side_in.write(b"spam")
data = p.side_out.read()
assert data == b'spam'
p.side_in.close()
data = p.side_out.read()
assert data == b''
def test_iter():
p = Pipe()
p.side_in.write(b"Hello")
p.side_in.close()
riter = iter(p.side_out)
data = next(riter)
assert data == b'Hello'
with pytest.raises(StopIteration):
next(riter)
def test_iter_eof():
p = Pipe()
riter = iter(p.side_out)
p.side_in.write(b"Hello\n")
data = next(riter)
assert data == b'Hello\n'
p.side_in.close()
with pytest.raises(StopIteration):
next(riter)
|
<commit_before>from slug import Pipe
def test_goesthrough():
p = Pipe()
p.side_in.write(b"Hello")
p.side_in.close()
data = p.side_out.read()
assert data == b'Hello'
def test_eof():
p = Pipe()
p.side_in.write(b"spam")
data = p.side_out.read()
assert data == b'spam'
p.side_in.close()
data = p.side_out.read()
assert data == b''
<commit_msg>Add iteration tests on pipes<commit_after>import pytest
from slug import Pipe
def test_goesthrough():
p = Pipe()
p.side_in.write(b"Hello")
p.side_in.close()
data = p.side_out.read()
assert data == b'Hello'
def test_eof():
p = Pipe()
p.side_in.write(b"spam")
data = p.side_out.read()
assert data == b'spam'
p.side_in.close()
data = p.side_out.read()
assert data == b''
def test_iter():
p = Pipe()
p.side_in.write(b"Hello")
p.side_in.close()
riter = iter(p.side_out)
data = next(riter)
assert data == b'Hello'
with pytest.raises(StopIteration):
next(riter)
def test_iter_eof():
p = Pipe()
riter = iter(p.side_out)
p.side_in.write(b"Hello\n")
data = next(riter)
assert data == b'Hello\n'
p.side_in.close()
with pytest.raises(StopIteration):
next(riter)
|
4975361a86fb2288e84beff0056e90a22225bdae
|
htmlmin/tests/mocks.py
|
htmlmin/tests/mocks.py
|
# Copyright 2013 django-htmlmin authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
class RequestMock(object):
def __init__(self, path="/"):
self.path = path
class ResponseMock(dict):
def __init__(self, *args, **kwargs):
super(ResponseMock, self).__init__(*args, **kwargs)
self['Content-Type'] = 'text/html'
status_code = 200
content = "<html> <body>some text here</body> </html>"
class ResponseWithCommentMock(ResponseMock):
content = "<html> <!-- some comment --><body>some " + \
"text here</body> </html>"
|
# Copyright 2013 django-htmlmin authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
class RequestMock(object):
def __init__(self, path="/"):
self.path = path
self._hit_htmlmin = True
class RequestBareMock(object):
def __init__(self, path="/"):
self.path = path
class ResponseMock(dict):
def __init__(self, *args, **kwargs):
super(ResponseMock, self).__init__(*args, **kwargs)
self['Content-Type'] = 'text/html'
status_code = 200
content = "<html> <body>some text here</body> </html>"
class ResponseWithCommentMock(ResponseMock):
content = "<html> <!-- some comment --><body>some " + \
"text here</body> </html>"
|
Extend RequestMock, add RequestBareMock w/o flag
|
Extend RequestMock, add RequestBareMock w/o flag
RequestMock always pretends that htmlmin has seen the request, so
all other tests work normally.
|
Python
|
bsd-2-clause
|
argollo/django-htmlmin,cobrateam/django-htmlmin,erikdejonge/django-htmlmin,erikdejonge/django-htmlmin,argollo/django-htmlmin,erikdejonge/django-htmlmin,Alcolo47/django-htmlmin,Zowie/django-htmlmin,Alcolo47/django-htmlmin,Zowie/django-htmlmin,cobrateam/django-htmlmin
|
# Copyright 2013 django-htmlmin authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
class RequestMock(object):
def __init__(self, path="/"):
self.path = path
class ResponseMock(dict):
def __init__(self, *args, **kwargs):
super(ResponseMock, self).__init__(*args, **kwargs)
self['Content-Type'] = 'text/html'
status_code = 200
content = "<html> <body>some text here</body> </html>"
class ResponseWithCommentMock(ResponseMock):
content = "<html> <!-- some comment --><body>some " + \
"text here</body> </html>"
Extend RequestMock, add RequestBareMock w/o flag
RequestMock always pretends that htmlmin has seen the request, so
all other tests work normally.
|
# Copyright 2013 django-htmlmin authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
class RequestMock(object):
def __init__(self, path="/"):
self.path = path
self._hit_htmlmin = True
class RequestBareMock(object):
def __init__(self, path="/"):
self.path = path
class ResponseMock(dict):
def __init__(self, *args, **kwargs):
super(ResponseMock, self).__init__(*args, **kwargs)
self['Content-Type'] = 'text/html'
status_code = 200
content = "<html> <body>some text here</body> </html>"
class ResponseWithCommentMock(ResponseMock):
content = "<html> <!-- some comment --><body>some " + \
"text here</body> </html>"
|
<commit_before># Copyright 2013 django-htmlmin authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
class RequestMock(object):
def __init__(self, path="/"):
self.path = path
class ResponseMock(dict):
def __init__(self, *args, **kwargs):
super(ResponseMock, self).__init__(*args, **kwargs)
self['Content-Type'] = 'text/html'
status_code = 200
content = "<html> <body>some text here</body> </html>"
class ResponseWithCommentMock(ResponseMock):
content = "<html> <!-- some comment --><body>some " + \
"text here</body> </html>"
<commit_msg>Extend RequestMock, add RequestBareMock w/o flag
RequestMock always pretends that htmlmin has seen the request, so
all other tests work normally.<commit_after>
|
# Copyright 2013 django-htmlmin authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
class RequestMock(object):
def __init__(self, path="/"):
self.path = path
self._hit_htmlmin = True
class RequestBareMock(object):
def __init__(self, path="/"):
self.path = path
class ResponseMock(dict):
def __init__(self, *args, **kwargs):
super(ResponseMock, self).__init__(*args, **kwargs)
self['Content-Type'] = 'text/html'
status_code = 200
content = "<html> <body>some text here</body> </html>"
class ResponseWithCommentMock(ResponseMock):
content = "<html> <!-- some comment --><body>some " + \
"text here</body> </html>"
|
# Copyright 2013 django-htmlmin authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
class RequestMock(object):
def __init__(self, path="/"):
self.path = path
class ResponseMock(dict):
def __init__(self, *args, **kwargs):
super(ResponseMock, self).__init__(*args, **kwargs)
self['Content-Type'] = 'text/html'
status_code = 200
content = "<html> <body>some text here</body> </html>"
class ResponseWithCommentMock(ResponseMock):
content = "<html> <!-- some comment --><body>some " + \
"text here</body> </html>"
Extend RequestMock, add RequestBareMock w/o flag
RequestMock always pretends that htmlmin has seen the request, so
all other tests work normally.
# Copyright 2013 django-htmlmin authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
class RequestMock(object):
def __init__(self, path="/"):
self.path = path
self._hit_htmlmin = True
class RequestBareMock(object):
def __init__(self, path="/"):
self.path = path
class ResponseMock(dict):
def __init__(self, *args, **kwargs):
super(ResponseMock, self).__init__(*args, **kwargs)
self['Content-Type'] = 'text/html'
status_code = 200
content = "<html> <body>some text here</body> </html>"
class ResponseWithCommentMock(ResponseMock):
content = "<html> <!-- some comment --><body>some " + \
"text here</body> </html>"
|
<commit_before># Copyright 2013 django-htmlmin authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
class RequestMock(object):
def __init__(self, path="/"):
self.path = path
class ResponseMock(dict):
def __init__(self, *args, **kwargs):
super(ResponseMock, self).__init__(*args, **kwargs)
self['Content-Type'] = 'text/html'
status_code = 200
content = "<html> <body>some text here</body> </html>"
class ResponseWithCommentMock(ResponseMock):
content = "<html> <!-- some comment --><body>some " + \
"text here</body> </html>"
<commit_msg>Extend RequestMock, add RequestBareMock w/o flag
RequestMock always pretends that htmlmin has seen the request, so
all other tests work normally.<commit_after># Copyright 2013 django-htmlmin authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
class RequestMock(object):
def __init__(self, path="/"):
self.path = path
self._hit_htmlmin = True
class RequestBareMock(object):
def __init__(self, path="/"):
self.path = path
class ResponseMock(dict):
def __init__(self, *args, **kwargs):
super(ResponseMock, self).__init__(*args, **kwargs)
self['Content-Type'] = 'text/html'
status_code = 200
content = "<html> <body>some text here</body> </html>"
class ResponseWithCommentMock(ResponseMock):
content = "<html> <!-- some comment --><body>some " + \
"text here</body> </html>"
|
9b2a56c0a0fce70edb644f8a1f07eb2709867201
|
examples/quickstart/response_info.py
|
examples/quickstart/response_info.py
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# vi:ts=4:et
import pycurl
try:
from io import BytesIO
except ImportError:
from StringIO import StringIO as BytesIO
buffer = BytesIO()
c = pycurl.Curl()
c.setopt(c.URL, 'http://pycurl.io/')
c.setopt(c.WRITEDATA, buffer)
c.perform()
# HTTP response code, e.g. 200.
print('Status: %d' % c.getinfo(c.RESPONSE_CODE))
# Elapsed time for the transfer.
print('Status: %f' % c.getinfo(c.TOTAL_TIME))
# getinfo must be called before close.
c.close()
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# vi:ts=4:et
import pycurl
try:
from io import BytesIO
except ImportError:
from StringIO import StringIO as BytesIO
buffer = BytesIO()
c = pycurl.Curl()
c.setopt(c.URL, 'http://pycurl.io/')
c.setopt(c.WRITEDATA, buffer)
c.perform()
# HTTP response code, e.g. 200.
print('Status: %d' % c.getinfo(c.RESPONSE_CODE))
# Elapsed time for the transfer.
print('Time: %f' % c.getinfo(c.TOTAL_TIME))
# getinfo must be called before close.
c.close()
|
Fix response example time output
|
Fix response example time output
|
Python
|
lgpl-2.1
|
pycurl/pycurl,pycurl/pycurl,pycurl/pycurl
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# vi:ts=4:et
import pycurl
try:
from io import BytesIO
except ImportError:
from StringIO import StringIO as BytesIO
buffer = BytesIO()
c = pycurl.Curl()
c.setopt(c.URL, 'http://pycurl.io/')
c.setopt(c.WRITEDATA, buffer)
c.perform()
# HTTP response code, e.g. 200.
print('Status: %d' % c.getinfo(c.RESPONSE_CODE))
# Elapsed time for the transfer.
print('Status: %f' % c.getinfo(c.TOTAL_TIME))
# getinfo must be called before close.
c.close()
Fix response example time output
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# vi:ts=4:et
import pycurl
try:
from io import BytesIO
except ImportError:
from StringIO import StringIO as BytesIO
buffer = BytesIO()
c = pycurl.Curl()
c.setopt(c.URL, 'http://pycurl.io/')
c.setopt(c.WRITEDATA, buffer)
c.perform()
# HTTP response code, e.g. 200.
print('Status: %d' % c.getinfo(c.RESPONSE_CODE))
# Elapsed time for the transfer.
print('Time: %f' % c.getinfo(c.TOTAL_TIME))
# getinfo must be called before close.
c.close()
|
<commit_before>#! /usr/bin/env python
# -*- coding: utf-8 -*-
# vi:ts=4:et
import pycurl
try:
from io import BytesIO
except ImportError:
from StringIO import StringIO as BytesIO
buffer = BytesIO()
c = pycurl.Curl()
c.setopt(c.URL, 'http://pycurl.io/')
c.setopt(c.WRITEDATA, buffer)
c.perform()
# HTTP response code, e.g. 200.
print('Status: %d' % c.getinfo(c.RESPONSE_CODE))
# Elapsed time for the transfer.
print('Status: %f' % c.getinfo(c.TOTAL_TIME))
# getinfo must be called before close.
c.close()
<commit_msg>Fix response example time output<commit_after>
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# vi:ts=4:et
import pycurl
try:
from io import BytesIO
except ImportError:
from StringIO import StringIO as BytesIO
buffer = BytesIO()
c = pycurl.Curl()
c.setopt(c.URL, 'http://pycurl.io/')
c.setopt(c.WRITEDATA, buffer)
c.perform()
# HTTP response code, e.g. 200.
print('Status: %d' % c.getinfo(c.RESPONSE_CODE))
# Elapsed time for the transfer.
print('Time: %f' % c.getinfo(c.TOTAL_TIME))
# getinfo must be called before close.
c.close()
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# vi:ts=4:et
import pycurl
try:
from io import BytesIO
except ImportError:
from StringIO import StringIO as BytesIO
buffer = BytesIO()
c = pycurl.Curl()
c.setopt(c.URL, 'http://pycurl.io/')
c.setopt(c.WRITEDATA, buffer)
c.perform()
# HTTP response code, e.g. 200.
print('Status: %d' % c.getinfo(c.RESPONSE_CODE))
# Elapsed time for the transfer.
print('Status: %f' % c.getinfo(c.TOTAL_TIME))
# getinfo must be called before close.
c.close()
Fix response example time output#! /usr/bin/env python
# -*- coding: utf-8 -*-
# vi:ts=4:et
import pycurl
try:
from io import BytesIO
except ImportError:
from StringIO import StringIO as BytesIO
buffer = BytesIO()
c = pycurl.Curl()
c.setopt(c.URL, 'http://pycurl.io/')
c.setopt(c.WRITEDATA, buffer)
c.perform()
# HTTP response code, e.g. 200.
print('Status: %d' % c.getinfo(c.RESPONSE_CODE))
# Elapsed time for the transfer.
print('Time: %f' % c.getinfo(c.TOTAL_TIME))
# getinfo must be called before close.
c.close()
|
<commit_before>#! /usr/bin/env python
# -*- coding: utf-8 -*-
# vi:ts=4:et
import pycurl
try:
from io import BytesIO
except ImportError:
from StringIO import StringIO as BytesIO
buffer = BytesIO()
c = pycurl.Curl()
c.setopt(c.URL, 'http://pycurl.io/')
c.setopt(c.WRITEDATA, buffer)
c.perform()
# HTTP response code, e.g. 200.
print('Status: %d' % c.getinfo(c.RESPONSE_CODE))
# Elapsed time for the transfer.
print('Status: %f' % c.getinfo(c.TOTAL_TIME))
# getinfo must be called before close.
c.close()
<commit_msg>Fix response example time output<commit_after>#! /usr/bin/env python
# -*- coding: utf-8 -*-
# vi:ts=4:et
import pycurl
try:
from io import BytesIO
except ImportError:
from StringIO import StringIO as BytesIO
buffer = BytesIO()
c = pycurl.Curl()
c.setopt(c.URL, 'http://pycurl.io/')
c.setopt(c.WRITEDATA, buffer)
c.perform()
# HTTP response code, e.g. 200.
print('Status: %d' % c.getinfo(c.RESPONSE_CODE))
# Elapsed time for the transfer.
print('Time: %f' % c.getinfo(c.TOTAL_TIME))
# getinfo must be called before close.
c.close()
|
47b00f384dbee0fb3b82696406978669ae80a3c6
|
tests/test_config.py
|
tests/test_config.py
|
# Copyright 2015-2016 Masayuki Yamamoto
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unittest of configuration loading."""
import os
import unittest
from unittest import mock
from yanico import config
class TestUserPath(unittest.TestCase):
"""Test for yanico.config.user_path()."""
@mock.patch.dict(os.environ, {'HOME': 'spam'})
def test_path(self):
"""Expect filepath joinning '.yanico.conf' under $HOME."""
if os.sep == '\\':
expect = 'spam\\.yanico.conf'
elif os.sep == '/':
expect = 'spam/.yanico.conf'
result = config.user_path()
self.assertEqual(result, expect)
|
# Copyright 2015-2016 Masayuki Yamamoto
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unittest of configuration loading."""
import os
import unittest
from unittest import mock
from yanico import config
class TestUserPath(unittest.TestCase):
"""Test for yanico.config.user_path()."""
@mock.patch.dict(os.environ, {'HOME': 'spam'})
def test_path(self):
"""Expect filepath joinning '.yanico.conf' under $HOME."""
if os.sep == '\\':
expect = 'spam\\.yanico.conf'
elif os.sep == '/':
expect = 'spam/.yanico.conf'
result = config.user_path()
self.assertEqual(result, expect)
@mock.patch('yanico.config.CONFIG_FILENAME', new='ham.egg')
def test_dependence_constants(self):
"""Expect to depend filename by 'CONFIG_FILENAME' constants."""
result = config.user_path()
self.assertEqual(os.path.basename(result), 'ham.egg')
|
Add test case for dependence constants
|
Add test case for dependence constants
Expect to depend filename by 'CONFIG_FILENAME' constants.
|
Python
|
apache-2.0
|
ma8ma/yanico
|
# Copyright 2015-2016 Masayuki Yamamoto
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unittest of configuration loading."""
import os
import unittest
from unittest import mock
from yanico import config
class TestUserPath(unittest.TestCase):
"""Test for yanico.config.user_path()."""
@mock.patch.dict(os.environ, {'HOME': 'spam'})
def test_path(self):
"""Expect filepath joinning '.yanico.conf' under $HOME."""
if os.sep == '\\':
expect = 'spam\\.yanico.conf'
elif os.sep == '/':
expect = 'spam/.yanico.conf'
result = config.user_path()
self.assertEqual(result, expect)
Add test case for dependence constants
Expect to depend filename by 'CONFIG_FILENAME' constants.
|
# Copyright 2015-2016 Masayuki Yamamoto
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unittest of configuration loading."""
import os
import unittest
from unittest import mock
from yanico import config
class TestUserPath(unittest.TestCase):
"""Test for yanico.config.user_path()."""
@mock.patch.dict(os.environ, {'HOME': 'spam'})
def test_path(self):
"""Expect filepath joinning '.yanico.conf' under $HOME."""
if os.sep == '\\':
expect = 'spam\\.yanico.conf'
elif os.sep == '/':
expect = 'spam/.yanico.conf'
result = config.user_path()
self.assertEqual(result, expect)
@mock.patch('yanico.config.CONFIG_FILENAME', new='ham.egg')
def test_dependence_constants(self):
"""Expect to depend filename by 'CONFIG_FILENAME' constants."""
result = config.user_path()
self.assertEqual(os.path.basename(result), 'ham.egg')
|
<commit_before># Copyright 2015-2016 Masayuki Yamamoto
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unittest of configuration loading."""
import os
import unittest
from unittest import mock
from yanico import config
class TestUserPath(unittest.TestCase):
"""Test for yanico.config.user_path()."""
@mock.patch.dict(os.environ, {'HOME': 'spam'})
def test_path(self):
"""Expect filepath joinning '.yanico.conf' under $HOME."""
if os.sep == '\\':
expect = 'spam\\.yanico.conf'
elif os.sep == '/':
expect = 'spam/.yanico.conf'
result = config.user_path()
self.assertEqual(result, expect)
<commit_msg>Add test case for dependence constants
Expect to depend filename by 'CONFIG_FILENAME' constants.<commit_after>
|
# Copyright 2015-2016 Masayuki Yamamoto
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unittest of configuration loading."""
import os
import unittest
from unittest import mock
from yanico import config
class TestUserPath(unittest.TestCase):
"""Test for yanico.config.user_path()."""
@mock.patch.dict(os.environ, {'HOME': 'spam'})
def test_path(self):
"""Expect filepath joinning '.yanico.conf' under $HOME."""
if os.sep == '\\':
expect = 'spam\\.yanico.conf'
elif os.sep == '/':
expect = 'spam/.yanico.conf'
result = config.user_path()
self.assertEqual(result, expect)
@mock.patch('yanico.config.CONFIG_FILENAME', new='ham.egg')
def test_dependence_constants(self):
"""Expect to depend filename by 'CONFIG_FILENAME' constants."""
result = config.user_path()
self.assertEqual(os.path.basename(result), 'ham.egg')
|
# Copyright 2015-2016 Masayuki Yamamoto
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unittest of configuration loading."""
import os
import unittest
from unittest import mock
from yanico import config
class TestUserPath(unittest.TestCase):
"""Test for yanico.config.user_path()."""
@mock.patch.dict(os.environ, {'HOME': 'spam'})
def test_path(self):
"""Expect filepath joinning '.yanico.conf' under $HOME."""
if os.sep == '\\':
expect = 'spam\\.yanico.conf'
elif os.sep == '/':
expect = 'spam/.yanico.conf'
result = config.user_path()
self.assertEqual(result, expect)
Add test case for dependence constants
Expect to depend filename by 'CONFIG_FILENAME' constants.# Copyright 2015-2016 Masayuki Yamamoto
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unittest of configuration loading."""
import os
import unittest
from unittest import mock
from yanico import config
class TestUserPath(unittest.TestCase):
"""Test for yanico.config.user_path()."""
@mock.patch.dict(os.environ, {'HOME': 'spam'})
def test_path(self):
"""Expect filepath joinning '.yanico.conf' under $HOME."""
if os.sep == '\\':
expect = 'spam\\.yanico.conf'
elif os.sep == '/':
expect = 'spam/.yanico.conf'
result = config.user_path()
self.assertEqual(result, expect)
@mock.patch('yanico.config.CONFIG_FILENAME', new='ham.egg')
def test_dependence_constants(self):
"""Expect to depend filename by 'CONFIG_FILENAME' constants."""
result = config.user_path()
self.assertEqual(os.path.basename(result), 'ham.egg')
|
<commit_before># Copyright 2015-2016 Masayuki Yamamoto
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unittest of configuration loading."""
import os
import unittest
from unittest import mock
from yanico import config
class TestUserPath(unittest.TestCase):
"""Test for yanico.config.user_path()."""
@mock.patch.dict(os.environ, {'HOME': 'spam'})
def test_path(self):
"""Expect filepath joinning '.yanico.conf' under $HOME."""
if os.sep == '\\':
expect = 'spam\\.yanico.conf'
elif os.sep == '/':
expect = 'spam/.yanico.conf'
result = config.user_path()
self.assertEqual(result, expect)
<commit_msg>Add test case for dependence constants
Expect to depend filename by 'CONFIG_FILENAME' constants.<commit_after># Copyright 2015-2016 Masayuki Yamamoto
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unittest of configuration loading."""
import os
import unittest
from unittest import mock
from yanico import config
class TestUserPath(unittest.TestCase):
"""Test for yanico.config.user_path()."""
@mock.patch.dict(os.environ, {'HOME': 'spam'})
def test_path(self):
"""Expect filepath joinning '.yanico.conf' under $HOME."""
if os.sep == '\\':
expect = 'spam\\.yanico.conf'
elif os.sep == '/':
expect = 'spam/.yanico.conf'
result = config.user_path()
self.assertEqual(result, expect)
@mock.patch('yanico.config.CONFIG_FILENAME', new='ham.egg')
def test_dependence_constants(self):
"""Expect to depend filename by 'CONFIG_FILENAME' constants."""
result = config.user_path()
self.assertEqual(os.path.basename(result), 'ham.egg')
|
765e6d3c81925c5ed6f6a944eb15e25c6751819f
|
openstack_dashboard/views.py
|
openstack_dashboard/views.py
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django import shortcuts
from django.views.decorators import vary
import horizon
from openstack_auth import views
def get_user_home(user):
if user.is_superuser:
return horizon.get_dashboard('admin').get_absolute_url()
return horizon.get_dashboard('project').get_absolute_url()
@vary.vary_on_cookie
def splash(request):
if request.user.is_authenticated():
return shortcuts.redirect(horizon.get_user_home(request.user))
form = views.Login(request)
request.session.clear()
request.session.set_test_cookie()
return shortcuts.render(request, 'splash.html', {'form': form})
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django import shortcuts
from django.views.decorators import vary
import horizon
from openstack_auth import forms
def get_user_home(user):
if user.is_superuser:
return horizon.get_dashboard('admin').get_absolute_url()
return horizon.get_dashboard('project').get_absolute_url()
@vary.vary_on_cookie
def splash(request):
if request.user.is_authenticated():
return shortcuts.redirect(horizon.get_user_home(request.user))
form = forms.Login(request)
request.session.clear()
request.session.set_test_cookie()
return shortcuts.render(request, 'splash.html', {'form': form})
|
Fix issues with importing the Login form
|
Fix issues with importing the Login form
The Login form lives in openstack_auth.forms and should be directly
imported from that file.
Change-Id: I42808530024bebb01604adbf4828769812856bf3
Closes-Bug: #1332149
(cherry picked from commit 345ccc9d503e6e55fe46d7813958c0081cc1cffe)
|
Python
|
apache-2.0
|
yanheven/console,yanheven/console,yanheven/console
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django import shortcuts
from django.views.decorators import vary
import horizon
from openstack_auth import views
def get_user_home(user):
if user.is_superuser:
return horizon.get_dashboard('admin').get_absolute_url()
return horizon.get_dashboard('project').get_absolute_url()
@vary.vary_on_cookie
def splash(request):
if request.user.is_authenticated():
return shortcuts.redirect(horizon.get_user_home(request.user))
form = views.Login(request)
request.session.clear()
request.session.set_test_cookie()
return shortcuts.render(request, 'splash.html', {'form': form})
Fix issues with importing the Login form
The Login form lives in openstack_auth.forms and should be directly
imported from that file.
Change-Id: I42808530024bebb01604adbf4828769812856bf3
Closes-Bug: #1332149
(cherry picked from commit 345ccc9d503e6e55fe46d7813958c0081cc1cffe)
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django import shortcuts
from django.views.decorators import vary
import horizon
from openstack_auth import forms
def get_user_home(user):
if user.is_superuser:
return horizon.get_dashboard('admin').get_absolute_url()
return horizon.get_dashboard('project').get_absolute_url()
@vary.vary_on_cookie
def splash(request):
if request.user.is_authenticated():
return shortcuts.redirect(horizon.get_user_home(request.user))
form = forms.Login(request)
request.session.clear()
request.session.set_test_cookie()
return shortcuts.render(request, 'splash.html', {'form': form})
|
<commit_before># vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django import shortcuts
from django.views.decorators import vary
import horizon
from openstack_auth import views
def get_user_home(user):
if user.is_superuser:
return horizon.get_dashboard('admin').get_absolute_url()
return horizon.get_dashboard('project').get_absolute_url()
@vary.vary_on_cookie
def splash(request):
if request.user.is_authenticated():
return shortcuts.redirect(horizon.get_user_home(request.user))
form = views.Login(request)
request.session.clear()
request.session.set_test_cookie()
return shortcuts.render(request, 'splash.html', {'form': form})
<commit_msg>Fix issues with importing the Login form
The Login form lives in openstack_auth.forms and should be directly
imported from that file.
Change-Id: I42808530024bebb01604adbf4828769812856bf3
Closes-Bug: #1332149
(cherry picked from commit 345ccc9d503e6e55fe46d7813958c0081cc1cffe)<commit_after>
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django import shortcuts
from django.views.decorators import vary
import horizon
from openstack_auth import forms
def get_user_home(user):
if user.is_superuser:
return horizon.get_dashboard('admin').get_absolute_url()
return horizon.get_dashboard('project').get_absolute_url()
@vary.vary_on_cookie
def splash(request):
if request.user.is_authenticated():
return shortcuts.redirect(horizon.get_user_home(request.user))
form = forms.Login(request)
request.session.clear()
request.session.set_test_cookie()
return shortcuts.render(request, 'splash.html', {'form': form})
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django import shortcuts
from django.views.decorators import vary
import horizon
from openstack_auth import views
def get_user_home(user):
if user.is_superuser:
return horizon.get_dashboard('admin').get_absolute_url()
return horizon.get_dashboard('project').get_absolute_url()
@vary.vary_on_cookie
def splash(request):
if request.user.is_authenticated():
return shortcuts.redirect(horizon.get_user_home(request.user))
form = views.Login(request)
request.session.clear()
request.session.set_test_cookie()
return shortcuts.render(request, 'splash.html', {'form': form})
Fix issues with importing the Login form
The Login form lives in openstack_auth.forms and should be directly
imported from that file.
Change-Id: I42808530024bebb01604adbf4828769812856bf3
Closes-Bug: #1332149
(cherry picked from commit 345ccc9d503e6e55fe46d7813958c0081cc1cffe)# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django import shortcuts
from django.views.decorators import vary
import horizon
from openstack_auth import forms
def get_user_home(user):
if user.is_superuser:
return horizon.get_dashboard('admin').get_absolute_url()
return horizon.get_dashboard('project').get_absolute_url()
@vary.vary_on_cookie
def splash(request):
if request.user.is_authenticated():
return shortcuts.redirect(horizon.get_user_home(request.user))
form = forms.Login(request)
request.session.clear()
request.session.set_test_cookie()
return shortcuts.render(request, 'splash.html', {'form': form})
|
<commit_before># vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django import shortcuts
from django.views.decorators import vary
import horizon
from openstack_auth import views
def get_user_home(user):
if user.is_superuser:
return horizon.get_dashboard('admin').get_absolute_url()
return horizon.get_dashboard('project').get_absolute_url()
@vary.vary_on_cookie
def splash(request):
if request.user.is_authenticated():
return shortcuts.redirect(horizon.get_user_home(request.user))
form = views.Login(request)
request.session.clear()
request.session.set_test_cookie()
return shortcuts.render(request, 'splash.html', {'form': form})
<commit_msg>Fix issues with importing the Login form
The Login form lives in openstack_auth.forms and should be directly
imported from that file.
Change-Id: I42808530024bebb01604adbf4828769812856bf3
Closes-Bug: #1332149
(cherry picked from commit 345ccc9d503e6e55fe46d7813958c0081cc1cffe)<commit_after># vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django import shortcuts
from django.views.decorators import vary
import horizon
from openstack_auth import forms
def get_user_home(user):
if user.is_superuser:
return horizon.get_dashboard('admin').get_absolute_url()
return horizon.get_dashboard('project').get_absolute_url()
@vary.vary_on_cookie
def splash(request):
if request.user.is_authenticated():
return shortcuts.redirect(horizon.get_user_home(request.user))
form = forms.Login(request)
request.session.clear()
request.session.set_test_cookie()
return shortcuts.render(request, 'splash.html', {'form': form})
|
798f5af95f94e29e7aa61d157fbe31a72c877b50
|
django_classified/admin.py
|
django_classified/admin.py
|
from django.contrib import admin
from sorl.thumbnail.admin import AdminImageMixin
from .models import Section, Group, Item, Image, Area, Profile
class ImageInline(AdminImageMixin, admin.StackedInline):
model = Image
extra = 5
class ItemAdmin(admin.ModelAdmin):
prepopulated_fields = {'slug': ('title',), }
list_display = ('title', 'group', 'area', 'user', 'is_active', 'posted', 'updated')
list_filter = ('area', 'group', 'is_active', 'posted',)
search_fields = ('title', 'description', 'user__email')
inlines = [ImageInline]
class GroupAdmin(admin.ModelAdmin):
prepopulated_fields = {'slug': ('title',), }
list_display = ('title', 'slug', 'section', 'count')
list_filter = ('section',)
search_fields = ('title', 'section__title')
class SectionAdmin(admin.ModelAdmin):
list_display = ('title',)
class AreaAdmin(admin.ModelAdmin):
list_display = (
'title',
)
prepopulated_fields = {'slug': ('title',)}
class ProfileAdmin(admin.ModelAdmin):
list_display = ('user', 'phone')
search_fields = ('user__username', 'phone')
admin.site.register(Area, AreaAdmin)
admin.site.register(Section, SectionAdmin)
admin.site.register(Group, GroupAdmin)
admin.site.register(Item, ItemAdmin)
admin.site.register(Profile, ProfileAdmin)
|
from django.contrib import admin
from sorl.thumbnail.admin import AdminImageMixin
from .models import Section, Group, Item, Image, Area, Profile
class ImageInline(AdminImageMixin, admin.StackedInline):
model = Image
extra = 5
class ItemAdmin(admin.ModelAdmin):
prepopulated_fields = {'slug': ('title',), }
list_display = ('title', 'group', 'area', 'user', 'is_active', 'posted', 'updated')
list_filter = ('area', 'group', 'is_active', 'posted',)
search_fields = ('title', 'description', 'user__email')
inlines = [ImageInline]
class GroupAdmin(admin.ModelAdmin):
prepopulated_fields = {'slug': ('title',), }
list_display = ('title', 'slug', 'section', 'count')
list_filter = ('section',)
search_fields = ('title', 'section__title')
class SectionAdmin(admin.ModelAdmin):
list_display = ('title',)
class AreaAdmin(admin.ModelAdmin):
list_display = (
'title',
)
prepopulated_fields = {'slug': ('title',)}
class ProfileAdmin(admin.ModelAdmin):
list_display = ('user', 'phone')
search_fields = ('user__username', 'phone')
admin.site.register(Area, AreaAdmin)
admin.site.register(Section, SectionAdmin)
admin.site.register(Group, GroupAdmin)
admin.site.register(Item, ItemAdmin)
admin.site.register(Profile, ProfileAdmin)
|
Build eror complaining of blank lines?
|
Build eror complaining of blank lines?
Build error complaining of blank lines?
|
Python
|
mit
|
inoks/dcf,inoks/dcf
|
from django.contrib import admin
from sorl.thumbnail.admin import AdminImageMixin
from .models import Section, Group, Item, Image, Area, Profile
class ImageInline(AdminImageMixin, admin.StackedInline):
model = Image
extra = 5
class ItemAdmin(admin.ModelAdmin):
prepopulated_fields = {'slug': ('title',), }
list_display = ('title', 'group', 'area', 'user', 'is_active', 'posted', 'updated')
list_filter = ('area', 'group', 'is_active', 'posted',)
search_fields = ('title', 'description', 'user__email')
inlines = [ImageInline]
class GroupAdmin(admin.ModelAdmin):
prepopulated_fields = {'slug': ('title',), }
list_display = ('title', 'slug', 'section', 'count')
list_filter = ('section',)
search_fields = ('title', 'section__title')
class SectionAdmin(admin.ModelAdmin):
list_display = ('title',)
class AreaAdmin(admin.ModelAdmin):
list_display = (
'title',
)
prepopulated_fields = {'slug': ('title',)}
class ProfileAdmin(admin.ModelAdmin):
list_display = ('user', 'phone')
search_fields = ('user__username', 'phone')
admin.site.register(Area, AreaAdmin)
admin.site.register(Section, SectionAdmin)
admin.site.register(Group, GroupAdmin)
admin.site.register(Item, ItemAdmin)
admin.site.register(Profile, ProfileAdmin)
Build eror complaining of blank lines?
Build error complaining of blank lines?
|
from django.contrib import admin
from sorl.thumbnail.admin import AdminImageMixin
from .models import Section, Group, Item, Image, Area, Profile
class ImageInline(AdminImageMixin, admin.StackedInline):
model = Image
extra = 5
class ItemAdmin(admin.ModelAdmin):
prepopulated_fields = {'slug': ('title',), }
list_display = ('title', 'group', 'area', 'user', 'is_active', 'posted', 'updated')
list_filter = ('area', 'group', 'is_active', 'posted',)
search_fields = ('title', 'description', 'user__email')
inlines = [ImageInline]
class GroupAdmin(admin.ModelAdmin):
prepopulated_fields = {'slug': ('title',), }
list_display = ('title', 'slug', 'section', 'count')
list_filter = ('section',)
search_fields = ('title', 'section__title')
class SectionAdmin(admin.ModelAdmin):
list_display = ('title',)
class AreaAdmin(admin.ModelAdmin):
list_display = (
'title',
)
prepopulated_fields = {'slug': ('title',)}
class ProfileAdmin(admin.ModelAdmin):
list_display = ('user', 'phone')
search_fields = ('user__username', 'phone')
admin.site.register(Area, AreaAdmin)
admin.site.register(Section, SectionAdmin)
admin.site.register(Group, GroupAdmin)
admin.site.register(Item, ItemAdmin)
admin.site.register(Profile, ProfileAdmin)
|
<commit_before>from django.contrib import admin
from sorl.thumbnail.admin import AdminImageMixin
from .models import Section, Group, Item, Image, Area, Profile
class ImageInline(AdminImageMixin, admin.StackedInline):
model = Image
extra = 5
class ItemAdmin(admin.ModelAdmin):
prepopulated_fields = {'slug': ('title',), }
list_display = ('title', 'group', 'area', 'user', 'is_active', 'posted', 'updated')
list_filter = ('area', 'group', 'is_active', 'posted',)
search_fields = ('title', 'description', 'user__email')
inlines = [ImageInline]
class GroupAdmin(admin.ModelAdmin):
prepopulated_fields = {'slug': ('title',), }
list_display = ('title', 'slug', 'section', 'count')
list_filter = ('section',)
search_fields = ('title', 'section__title')
class SectionAdmin(admin.ModelAdmin):
list_display = ('title',)
class AreaAdmin(admin.ModelAdmin):
list_display = (
'title',
)
prepopulated_fields = {'slug': ('title',)}
class ProfileAdmin(admin.ModelAdmin):
list_display = ('user', 'phone')
search_fields = ('user__username', 'phone')
admin.site.register(Area, AreaAdmin)
admin.site.register(Section, SectionAdmin)
admin.site.register(Group, GroupAdmin)
admin.site.register(Item, ItemAdmin)
admin.site.register(Profile, ProfileAdmin)
<commit_msg>Build eror complaining of blank lines?
Build error complaining of blank lines?<commit_after>
|
from django.contrib import admin
from sorl.thumbnail.admin import AdminImageMixin
from .models import Section, Group, Item, Image, Area, Profile
class ImageInline(AdminImageMixin, admin.StackedInline):
model = Image
extra = 5
class ItemAdmin(admin.ModelAdmin):
prepopulated_fields = {'slug': ('title',), }
list_display = ('title', 'group', 'area', 'user', 'is_active', 'posted', 'updated')
list_filter = ('area', 'group', 'is_active', 'posted',)
search_fields = ('title', 'description', 'user__email')
inlines = [ImageInline]
class GroupAdmin(admin.ModelAdmin):
prepopulated_fields = {'slug': ('title',), }
list_display = ('title', 'slug', 'section', 'count')
list_filter = ('section',)
search_fields = ('title', 'section__title')
class SectionAdmin(admin.ModelAdmin):
list_display = ('title',)
class AreaAdmin(admin.ModelAdmin):
list_display = (
'title',
)
prepopulated_fields = {'slug': ('title',)}
class ProfileAdmin(admin.ModelAdmin):
list_display = ('user', 'phone')
search_fields = ('user__username', 'phone')
admin.site.register(Area, AreaAdmin)
admin.site.register(Section, SectionAdmin)
admin.site.register(Group, GroupAdmin)
admin.site.register(Item, ItemAdmin)
admin.site.register(Profile, ProfileAdmin)
|
from django.contrib import admin
from sorl.thumbnail.admin import AdminImageMixin
from .models import Section, Group, Item, Image, Area, Profile
class ImageInline(AdminImageMixin, admin.StackedInline):
model = Image
extra = 5
class ItemAdmin(admin.ModelAdmin):
prepopulated_fields = {'slug': ('title',), }
list_display = ('title', 'group', 'area', 'user', 'is_active', 'posted', 'updated')
list_filter = ('area', 'group', 'is_active', 'posted',)
search_fields = ('title', 'description', 'user__email')
inlines = [ImageInline]
class GroupAdmin(admin.ModelAdmin):
prepopulated_fields = {'slug': ('title',), }
list_display = ('title', 'slug', 'section', 'count')
list_filter = ('section',)
search_fields = ('title', 'section__title')
class SectionAdmin(admin.ModelAdmin):
list_display = ('title',)
class AreaAdmin(admin.ModelAdmin):
list_display = (
'title',
)
prepopulated_fields = {'slug': ('title',)}
class ProfileAdmin(admin.ModelAdmin):
list_display = ('user', 'phone')
search_fields = ('user__username', 'phone')
admin.site.register(Area, AreaAdmin)
admin.site.register(Section, SectionAdmin)
admin.site.register(Group, GroupAdmin)
admin.site.register(Item, ItemAdmin)
admin.site.register(Profile, ProfileAdmin)
Build eror complaining of blank lines?
Build error complaining of blank lines?from django.contrib import admin
from sorl.thumbnail.admin import AdminImageMixin
from .models import Section, Group, Item, Image, Area, Profile
class ImageInline(AdminImageMixin, admin.StackedInline):
model = Image
extra = 5
class ItemAdmin(admin.ModelAdmin):
prepopulated_fields = {'slug': ('title',), }
list_display = ('title', 'group', 'area', 'user', 'is_active', 'posted', 'updated')
list_filter = ('area', 'group', 'is_active', 'posted',)
search_fields = ('title', 'description', 'user__email')
inlines = [ImageInline]
class GroupAdmin(admin.ModelAdmin):
prepopulated_fields = {'slug': ('title',), }
list_display = ('title', 'slug', 'section', 'count')
list_filter = ('section',)
search_fields = ('title', 'section__title')
class SectionAdmin(admin.ModelAdmin):
list_display = ('title',)
class AreaAdmin(admin.ModelAdmin):
list_display = (
'title',
)
prepopulated_fields = {'slug': ('title',)}
class ProfileAdmin(admin.ModelAdmin):
list_display = ('user', 'phone')
search_fields = ('user__username', 'phone')
admin.site.register(Area, AreaAdmin)
admin.site.register(Section, SectionAdmin)
admin.site.register(Group, GroupAdmin)
admin.site.register(Item, ItemAdmin)
admin.site.register(Profile, ProfileAdmin)
|
<commit_before>from django.contrib import admin
from sorl.thumbnail.admin import AdminImageMixin
from .models import Section, Group, Item, Image, Area, Profile
class ImageInline(AdminImageMixin, admin.StackedInline):
model = Image
extra = 5
class ItemAdmin(admin.ModelAdmin):
prepopulated_fields = {'slug': ('title',), }
list_display = ('title', 'group', 'area', 'user', 'is_active', 'posted', 'updated')
list_filter = ('area', 'group', 'is_active', 'posted',)
search_fields = ('title', 'description', 'user__email')
inlines = [ImageInline]
class GroupAdmin(admin.ModelAdmin):
prepopulated_fields = {'slug': ('title',), }
list_display = ('title', 'slug', 'section', 'count')
list_filter = ('section',)
search_fields = ('title', 'section__title')
class SectionAdmin(admin.ModelAdmin):
list_display = ('title',)
class AreaAdmin(admin.ModelAdmin):
list_display = (
'title',
)
prepopulated_fields = {'slug': ('title',)}
class ProfileAdmin(admin.ModelAdmin):
list_display = ('user', 'phone')
search_fields = ('user__username', 'phone')
admin.site.register(Area, AreaAdmin)
admin.site.register(Section, SectionAdmin)
admin.site.register(Group, GroupAdmin)
admin.site.register(Item, ItemAdmin)
admin.site.register(Profile, ProfileAdmin)
<commit_msg>Build eror complaining of blank lines?
Build error complaining of blank lines?<commit_after>from django.contrib import admin
from sorl.thumbnail.admin import AdminImageMixin
from .models import Section, Group, Item, Image, Area, Profile
class ImageInline(AdminImageMixin, admin.StackedInline):
model = Image
extra = 5
class ItemAdmin(admin.ModelAdmin):
prepopulated_fields = {'slug': ('title',), }
list_display = ('title', 'group', 'area', 'user', 'is_active', 'posted', 'updated')
list_filter = ('area', 'group', 'is_active', 'posted',)
search_fields = ('title', 'description', 'user__email')
inlines = [ImageInline]
class GroupAdmin(admin.ModelAdmin):
prepopulated_fields = {'slug': ('title',), }
list_display = ('title', 'slug', 'section', 'count')
list_filter = ('section',)
search_fields = ('title', 'section__title')
class SectionAdmin(admin.ModelAdmin):
list_display = ('title',)
class AreaAdmin(admin.ModelAdmin):
list_display = (
'title',
)
prepopulated_fields = {'slug': ('title',)}
class ProfileAdmin(admin.ModelAdmin):
list_display = ('user', 'phone')
search_fields = ('user__username', 'phone')
admin.site.register(Area, AreaAdmin)
admin.site.register(Section, SectionAdmin)
admin.site.register(Group, GroupAdmin)
admin.site.register(Item, ItemAdmin)
admin.site.register(Profile, ProfileAdmin)
|
f70a1ae6d86b5e789b5f6120db2772ec492bc088
|
mardek_sol_reader.py
|
mardek_sol_reader.py
|
from struct import unpack
from os import walk, sep
from os.path import expanduser
from re import search
shared_objects_dirs = [
'Library/Application Support/Google/Chrome/Default/Pepper Data/Shockwave Flash/WritableRoot/#SharedObjects',
'Library/Preferences/Macromedia/Flash Player/#SharedObjects',
'AppData\Local\Google\Chrome',
'AppData\Macromedia\Flash Player\#SharedObjects',
]
savefiles = []
for shared_objects_dir in shared_objects_dirs:
for root, dirs, files in walk(expanduser('~')+sep+shared_objects_dir):
for file in files:
if file[:13] == 'MARDEKv3__sg_':
savefiles.append((root.split(sep)[-1], int(file[13:-4]), root+sep+file))
savefiles.sort()
for dir, file_num, path in savefiles:
f = open(path).read()
# 8 represents array, 0003 means 3 elements
m = search('playtime\x08\x00\x00\x00\x03(.{36})', f)
print 'Save file %d from "%s":' % (file_num, dir),
print '\t%d:%02d:%02d' % unpack('>4xd4xd4xd', m.group(1))
|
from struct import unpack
from os import walk, sep
from os.path import expanduser
from re import search
shared_objects_dirs = [
'Library/Application Support/Google/Chrome/Default/Pepper Data/Shockwave Flash/WritableRoot/#SharedObjects',
'Library/Preferences/Macromedia/Flash Player/#SharedObjects',
'AppData\Local\Google\Chrome',
'AppData\Macromedia\Flash Player\#SharedObjects',
]
savefiles = []
for shared_objects_dir in shared_objects_dirs:
for root, dirs, files in walk(expanduser('~')+sep+shared_objects_dir):
for file in files:
if file[:13] == 'MARDEKv3__sg_':
savefiles.append((root.split(sep)[-1], int(file[13:-4]), root+sep+file))
savefiles.sort()
for dir, file_num, path in savefiles:
f = open(path, 'rb').read()
# 8 represents array, 0003 means 3 elements
m = search('playtime\x08\x00\x00\x00\x03(.{36})', f)
if m:
print 'Save file %d from "%s":' % (file_num, dir),
print '\t%d:%02d:%02d' % unpack('>4xd4xd4xd', m.group(1))
|
Fix to use binary read format
|
Fix to use binary read format
|
Python
|
apache-2.0
|
jbzdarkid/Random,jbzdarkid/Random,jbzdarkid/Random,jbzdarkid/Random,jbzdarkid/Random,jbzdarkid/Random,jbzdarkid/Random,jbzdarkid/Random,jbzdarkid/Random
|
from struct import unpack
from os import walk, sep
from os.path import expanduser
from re import search
shared_objects_dirs = [
'Library/Application Support/Google/Chrome/Default/Pepper Data/Shockwave Flash/WritableRoot/#SharedObjects',
'Library/Preferences/Macromedia/Flash Player/#SharedObjects',
'AppData\Local\Google\Chrome',
'AppData\Macromedia\Flash Player\#SharedObjects',
]
savefiles = []
for shared_objects_dir in shared_objects_dirs:
for root, dirs, files in walk(expanduser('~')+sep+shared_objects_dir):
for file in files:
if file[:13] == 'MARDEKv3__sg_':
savefiles.append((root.split(sep)[-1], int(file[13:-4]), root+sep+file))
savefiles.sort()
for dir, file_num, path in savefiles:
f = open(path).read()
# 8 represents array, 0003 means 3 elements
m = search('playtime\x08\x00\x00\x00\x03(.{36})', f)
print 'Save file %d from "%s":' % (file_num, dir),
print '\t%d:%02d:%02d' % unpack('>4xd4xd4xd', m.group(1))
Fix to use binary read format
|
from struct import unpack
from os import walk, sep
from os.path import expanduser
from re import search
shared_objects_dirs = [
'Library/Application Support/Google/Chrome/Default/Pepper Data/Shockwave Flash/WritableRoot/#SharedObjects',
'Library/Preferences/Macromedia/Flash Player/#SharedObjects',
'AppData\Local\Google\Chrome',
'AppData\Macromedia\Flash Player\#SharedObjects',
]
savefiles = []
for shared_objects_dir in shared_objects_dirs:
for root, dirs, files in walk(expanduser('~')+sep+shared_objects_dir):
for file in files:
if file[:13] == 'MARDEKv3__sg_':
savefiles.append((root.split(sep)[-1], int(file[13:-4]), root+sep+file))
savefiles.sort()
for dir, file_num, path in savefiles:
f = open(path, 'rb').read()
# 8 represents array, 0003 means 3 elements
m = search('playtime\x08\x00\x00\x00\x03(.{36})', f)
if m:
print 'Save file %d from "%s":' % (file_num, dir),
print '\t%d:%02d:%02d' % unpack('>4xd4xd4xd', m.group(1))
|
<commit_before>from struct import unpack
from os import walk, sep
from os.path import expanduser
from re import search
shared_objects_dirs = [
'Library/Application Support/Google/Chrome/Default/Pepper Data/Shockwave Flash/WritableRoot/#SharedObjects',
'Library/Preferences/Macromedia/Flash Player/#SharedObjects',
'AppData\Local\Google\Chrome',
'AppData\Macromedia\Flash Player\#SharedObjects',
]
savefiles = []
for shared_objects_dir in shared_objects_dirs:
for root, dirs, files in walk(expanduser('~')+sep+shared_objects_dir):
for file in files:
if file[:13] == 'MARDEKv3__sg_':
savefiles.append((root.split(sep)[-1], int(file[13:-4]), root+sep+file))
savefiles.sort()
for dir, file_num, path in savefiles:
f = open(path).read()
# 8 represents array, 0003 means 3 elements
m = search('playtime\x08\x00\x00\x00\x03(.{36})', f)
print 'Save file %d from "%s":' % (file_num, dir),
print '\t%d:%02d:%02d' % unpack('>4xd4xd4xd', m.group(1))
<commit_msg>Fix to use binary read format<commit_after>
|
from struct import unpack
from os import walk, sep
from os.path import expanduser
from re import search
shared_objects_dirs = [
'Library/Application Support/Google/Chrome/Default/Pepper Data/Shockwave Flash/WritableRoot/#SharedObjects',
'Library/Preferences/Macromedia/Flash Player/#SharedObjects',
'AppData\Local\Google\Chrome',
'AppData\Macromedia\Flash Player\#SharedObjects',
]
savefiles = []
for shared_objects_dir in shared_objects_dirs:
for root, dirs, files in walk(expanduser('~')+sep+shared_objects_dir):
for file in files:
if file[:13] == 'MARDEKv3__sg_':
savefiles.append((root.split(sep)[-1], int(file[13:-4]), root+sep+file))
savefiles.sort()
for dir, file_num, path in savefiles:
f = open(path, 'rb').read()
# 8 represents array, 0003 means 3 elements
m = search('playtime\x08\x00\x00\x00\x03(.{36})', f)
if m:
print 'Save file %d from "%s":' % (file_num, dir),
print '\t%d:%02d:%02d' % unpack('>4xd4xd4xd', m.group(1))
|
from struct import unpack
from os import walk, sep
from os.path import expanduser
from re import search
shared_objects_dirs = [
'Library/Application Support/Google/Chrome/Default/Pepper Data/Shockwave Flash/WritableRoot/#SharedObjects',
'Library/Preferences/Macromedia/Flash Player/#SharedObjects',
'AppData\Local\Google\Chrome',
'AppData\Macromedia\Flash Player\#SharedObjects',
]
savefiles = []
for shared_objects_dir in shared_objects_dirs:
for root, dirs, files in walk(expanduser('~')+sep+shared_objects_dir):
for file in files:
if file[:13] == 'MARDEKv3__sg_':
savefiles.append((root.split(sep)[-1], int(file[13:-4]), root+sep+file))
savefiles.sort()
for dir, file_num, path in savefiles:
f = open(path).read()
# 8 represents array, 0003 means 3 elements
m = search('playtime\x08\x00\x00\x00\x03(.{36})', f)
print 'Save file %d from "%s":' % (file_num, dir),
print '\t%d:%02d:%02d' % unpack('>4xd4xd4xd', m.group(1))
Fix to use binary read formatfrom struct import unpack
from os import walk, sep
from os.path import expanduser
from re import search
shared_objects_dirs = [
'Library/Application Support/Google/Chrome/Default/Pepper Data/Shockwave Flash/WritableRoot/#SharedObjects',
'Library/Preferences/Macromedia/Flash Player/#SharedObjects',
'AppData\Local\Google\Chrome',
'AppData\Macromedia\Flash Player\#SharedObjects',
]
savefiles = []
for shared_objects_dir in shared_objects_dirs:
for root, dirs, files in walk(expanduser('~')+sep+shared_objects_dir):
for file in files:
if file[:13] == 'MARDEKv3__sg_':
savefiles.append((root.split(sep)[-1], int(file[13:-4]), root+sep+file))
savefiles.sort()
for dir, file_num, path in savefiles:
f = open(path, 'rb').read()
# 8 represents array, 0003 means 3 elements
m = search('playtime\x08\x00\x00\x00\x03(.{36})', f)
if m:
print 'Save file %d from "%s":' % (file_num, dir),
print '\t%d:%02d:%02d' % unpack('>4xd4xd4xd', m.group(1))
|
<commit_before>from struct import unpack
from os import walk, sep
from os.path import expanduser
from re import search
shared_objects_dirs = [
'Library/Application Support/Google/Chrome/Default/Pepper Data/Shockwave Flash/WritableRoot/#SharedObjects',
'Library/Preferences/Macromedia/Flash Player/#SharedObjects',
'AppData\Local\Google\Chrome',
'AppData\Macromedia\Flash Player\#SharedObjects',
]
savefiles = []
for shared_objects_dir in shared_objects_dirs:
for root, dirs, files in walk(expanduser('~')+sep+shared_objects_dir):
for file in files:
if file[:13] == 'MARDEKv3__sg_':
savefiles.append((root.split(sep)[-1], int(file[13:-4]), root+sep+file))
savefiles.sort()
for dir, file_num, path in savefiles:
f = open(path).read()
# 8 represents array, 0003 means 3 elements
m = search('playtime\x08\x00\x00\x00\x03(.{36})', f)
print 'Save file %d from "%s":' % (file_num, dir),
print '\t%d:%02d:%02d' % unpack('>4xd4xd4xd', m.group(1))
<commit_msg>Fix to use binary read format<commit_after>from struct import unpack
from os import walk, sep
from os.path import expanduser
from re import search
shared_objects_dirs = [
'Library/Application Support/Google/Chrome/Default/Pepper Data/Shockwave Flash/WritableRoot/#SharedObjects',
'Library/Preferences/Macromedia/Flash Player/#SharedObjects',
'AppData\Local\Google\Chrome',
'AppData\Macromedia\Flash Player\#SharedObjects',
]
savefiles = []
for shared_objects_dir in shared_objects_dirs:
for root, dirs, files in walk(expanduser('~')+sep+shared_objects_dir):
for file in files:
if file[:13] == 'MARDEKv3__sg_':
savefiles.append((root.split(sep)[-1], int(file[13:-4]), root+sep+file))
savefiles.sort()
for dir, file_num, path in savefiles:
f = open(path, 'rb').read()
# 8 represents array, 0003 means 3 elements
m = search('playtime\x08\x00\x00\x00\x03(.{36})', f)
if m:
print 'Save file %d from "%s":' % (file_num, dir),
print '\t%d:%02d:%02d' % unpack('>4xd4xd4xd', m.group(1))
|
e21260b57873ed70bd6b1690b62a754af58020fc
|
otp_twilio/migrations/0002_last_t.py
|
otp_twilio/migrations/0002_last_t.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('otp_twilio_encrypted', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='twiliosmsdevice',
name='last_t',
field=models.BigIntegerField(default=-1, help_text='The t value of the latest verified token. The next token must be at a higher time step.'),
preserve_default=True,
),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('otp_twilio', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='twiliosmsdevice',
name='last_t',
field=models.BigIntegerField(default=-1, help_text='The t value of the latest verified token. The next token must be at a higher time step.'),
preserve_default=True,
),
]
|
Undo dependency name change in last migration
|
Undo dependency name change in last migration
|
Python
|
bsd-2-clause
|
prototypsthlm/otp_twilio_encrypted,gustavrannestig/otp_twilio_encrypted
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('otp_twilio_encrypted', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='twiliosmsdevice',
name='last_t',
field=models.BigIntegerField(default=-1, help_text='The t value of the latest verified token. The next token must be at a higher time step.'),
preserve_default=True,
),
]
Undo dependency name change in last migration
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('otp_twilio', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='twiliosmsdevice',
name='last_t',
field=models.BigIntegerField(default=-1, help_text='The t value of the latest verified token. The next token must be at a higher time step.'),
preserve_default=True,
),
]
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('otp_twilio_encrypted', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='twiliosmsdevice',
name='last_t',
field=models.BigIntegerField(default=-1, help_text='The t value of the latest verified token. The next token must be at a higher time step.'),
preserve_default=True,
),
]
<commit_msg>Undo dependency name change in last migration<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('otp_twilio', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='twiliosmsdevice',
name='last_t',
field=models.BigIntegerField(default=-1, help_text='The t value of the latest verified token. The next token must be at a higher time step.'),
preserve_default=True,
),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('otp_twilio_encrypted', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='twiliosmsdevice',
name='last_t',
field=models.BigIntegerField(default=-1, help_text='The t value of the latest verified token. The next token must be at a higher time step.'),
preserve_default=True,
),
]
Undo dependency name change in last migration# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('otp_twilio', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='twiliosmsdevice',
name='last_t',
field=models.BigIntegerField(default=-1, help_text='The t value of the latest verified token. The next token must be at a higher time step.'),
preserve_default=True,
),
]
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('otp_twilio_encrypted', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='twiliosmsdevice',
name='last_t',
field=models.BigIntegerField(default=-1, help_text='The t value of the latest verified token. The next token must be at a higher time step.'),
preserve_default=True,
),
]
<commit_msg>Undo dependency name change in last migration<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('otp_twilio', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='twiliosmsdevice',
name='last_t',
field=models.BigIntegerField(default=-1, help_text='The t value of the latest verified token. The next token must be at a higher time step.'),
preserve_default=True,
),
]
|
7a0243728ae5079b2409c9ccbf500d05f69886f3
|
examples/simple/schemas.py
|
examples/simple/schemas.py
|
from aiohttp_json_api.schema import BaseSchema, fields, relationships
from .models import Article, Comment, People
class SchemaWithStorage(BaseSchema):
@property
def storage(self):
return self.app['storage'][self.resource_class.__name__]
async def query_collection(self, context, **kwargs):
return self.storage
async def query_resource(self, resource_id, context, **kwargs):
return self.storage.get(resource_id)
class PeopleSchema(SchemaWithStorage):
type = 'people'
resource_class = People
first_name = fields.String()
last_name = fields.String(allow_blank=True)
twitter = fields.String(allow_none=True)
async def fetch_resource(self, resource_id, context, **kwargs):
pass
async def delete_resource(self, resource_id, context, **kwargs):
pass
class CommentSchema(SchemaWithStorage):
resource_class = Comment
body = fields.String()
author = relationships.ToOne(foreign_types=(PeopleSchema.type,))
async def fetch_resource(self, resource_id, context, **kwargs):
pass
async def delete_resource(self, resource_id, context, **kwargs):
pass
class ArticleSchema(SchemaWithStorage):
resource_class = Article
title = fields.String()
author = relationships.ToOne(foreign_types=(PeopleSchema.type,))
comments = relationships.ToMany(foreign_types=(CommentSchema.type,))
async def fetch_resource(self, resource_id, context, **kwargs):
pass
async def delete_resource(self, resource_id, context, **kwargs):
pass
|
from aiohttp_json_api.schema import BaseSchema, fields, relationships
from .models import Article, Comment, People
class SchemaWithStorage(BaseSchema):
@property
def storage(self):
return self.app['storage'][self.resource_class.__name__]
async def fetch_resource(self, resource_id, context, **kwargs):
return self.storage.get(resource_id)
async def query_collection(self, context, **kwargs):
return self.storage
async def query_resource(self, resource_id, context, **kwargs):
return await self.fetch_resource(resource_id, context, **kwargs)
class PeopleSchema(SchemaWithStorage):
type = 'people'
resource_class = People
first_name = fields.String()
last_name = fields.String(allow_blank=True)
twitter = fields.String(allow_none=True)
async def delete_resource(self, resource_id, context, **kwargs):
pass
class CommentSchema(SchemaWithStorage):
resource_class = Comment # type will be "comments"
body = fields.String()
author = relationships.ToOne(foreign_types=(PeopleSchema.type,))
async def delete_resource(self, resource_id, context, **kwargs):
pass
class ArticleSchema(SchemaWithStorage):
resource_class = Article # type will be "articles"
title = fields.String()
author = relationships.ToOne(foreign_types=(PeopleSchema.type,))
comments = relationships.ToMany(foreign_types=(CommentSchema.type,))
async def delete_resource(self, resource_id, context, **kwargs):
pass
|
Add support for query relatives in simple example
|
Add support for query relatives in simple example
|
Python
|
mit
|
vovanbo/aiohttp_json_api
|
from aiohttp_json_api.schema import BaseSchema, fields, relationships
from .models import Article, Comment, People
class SchemaWithStorage(BaseSchema):
@property
def storage(self):
return self.app['storage'][self.resource_class.__name__]
async def query_collection(self, context, **kwargs):
return self.storage
async def query_resource(self, resource_id, context, **kwargs):
return self.storage.get(resource_id)
class PeopleSchema(SchemaWithStorage):
type = 'people'
resource_class = People
first_name = fields.String()
last_name = fields.String(allow_blank=True)
twitter = fields.String(allow_none=True)
async def fetch_resource(self, resource_id, context, **kwargs):
pass
async def delete_resource(self, resource_id, context, **kwargs):
pass
class CommentSchema(SchemaWithStorage):
resource_class = Comment
body = fields.String()
author = relationships.ToOne(foreign_types=(PeopleSchema.type,))
async def fetch_resource(self, resource_id, context, **kwargs):
pass
async def delete_resource(self, resource_id, context, **kwargs):
pass
class ArticleSchema(SchemaWithStorage):
resource_class = Article
title = fields.String()
author = relationships.ToOne(foreign_types=(PeopleSchema.type,))
comments = relationships.ToMany(foreign_types=(CommentSchema.type,))
async def fetch_resource(self, resource_id, context, **kwargs):
pass
async def delete_resource(self, resource_id, context, **kwargs):
pass
Add support for query relatives in simple example
|
from aiohttp_json_api.schema import BaseSchema, fields, relationships
from .models import Article, Comment, People
class SchemaWithStorage(BaseSchema):
@property
def storage(self):
return self.app['storage'][self.resource_class.__name__]
async def fetch_resource(self, resource_id, context, **kwargs):
return self.storage.get(resource_id)
async def query_collection(self, context, **kwargs):
return self.storage
async def query_resource(self, resource_id, context, **kwargs):
return await self.fetch_resource(resource_id, context, **kwargs)
class PeopleSchema(SchemaWithStorage):
type = 'people'
resource_class = People
first_name = fields.String()
last_name = fields.String(allow_blank=True)
twitter = fields.String(allow_none=True)
async def delete_resource(self, resource_id, context, **kwargs):
pass
class CommentSchema(SchemaWithStorage):
resource_class = Comment # type will be "comments"
body = fields.String()
author = relationships.ToOne(foreign_types=(PeopleSchema.type,))
async def delete_resource(self, resource_id, context, **kwargs):
pass
class ArticleSchema(SchemaWithStorage):
resource_class = Article # type will be "articles"
title = fields.String()
author = relationships.ToOne(foreign_types=(PeopleSchema.type,))
comments = relationships.ToMany(foreign_types=(CommentSchema.type,))
async def delete_resource(self, resource_id, context, **kwargs):
pass
|
<commit_before>from aiohttp_json_api.schema import BaseSchema, fields, relationships
from .models import Article, Comment, People
class SchemaWithStorage(BaseSchema):
@property
def storage(self):
return self.app['storage'][self.resource_class.__name__]
async def query_collection(self, context, **kwargs):
return self.storage
async def query_resource(self, resource_id, context, **kwargs):
return self.storage.get(resource_id)
class PeopleSchema(SchemaWithStorage):
type = 'people'
resource_class = People
first_name = fields.String()
last_name = fields.String(allow_blank=True)
twitter = fields.String(allow_none=True)
async def fetch_resource(self, resource_id, context, **kwargs):
pass
async def delete_resource(self, resource_id, context, **kwargs):
pass
class CommentSchema(SchemaWithStorage):
resource_class = Comment
body = fields.String()
author = relationships.ToOne(foreign_types=(PeopleSchema.type,))
async def fetch_resource(self, resource_id, context, **kwargs):
pass
async def delete_resource(self, resource_id, context, **kwargs):
pass
class ArticleSchema(SchemaWithStorage):
resource_class = Article
title = fields.String()
author = relationships.ToOne(foreign_types=(PeopleSchema.type,))
comments = relationships.ToMany(foreign_types=(CommentSchema.type,))
async def fetch_resource(self, resource_id, context, **kwargs):
pass
async def delete_resource(self, resource_id, context, **kwargs):
pass
<commit_msg>Add support for query relatives in simple example<commit_after>
|
from aiohttp_json_api.schema import BaseSchema, fields, relationships
from .models import Article, Comment, People
class SchemaWithStorage(BaseSchema):
@property
def storage(self):
return self.app['storage'][self.resource_class.__name__]
async def fetch_resource(self, resource_id, context, **kwargs):
return self.storage.get(resource_id)
async def query_collection(self, context, **kwargs):
return self.storage
async def query_resource(self, resource_id, context, **kwargs):
return await self.fetch_resource(resource_id, context, **kwargs)
class PeopleSchema(SchemaWithStorage):
type = 'people'
resource_class = People
first_name = fields.String()
last_name = fields.String(allow_blank=True)
twitter = fields.String(allow_none=True)
async def delete_resource(self, resource_id, context, **kwargs):
pass
class CommentSchema(SchemaWithStorage):
resource_class = Comment # type will be "comments"
body = fields.String()
author = relationships.ToOne(foreign_types=(PeopleSchema.type,))
async def delete_resource(self, resource_id, context, **kwargs):
pass
class ArticleSchema(SchemaWithStorage):
resource_class = Article # type will be "articles"
title = fields.String()
author = relationships.ToOne(foreign_types=(PeopleSchema.type,))
comments = relationships.ToMany(foreign_types=(CommentSchema.type,))
async def delete_resource(self, resource_id, context, **kwargs):
pass
|
from aiohttp_json_api.schema import BaseSchema, fields, relationships
from .models import Article, Comment, People
class SchemaWithStorage(BaseSchema):
@property
def storage(self):
return self.app['storage'][self.resource_class.__name__]
async def query_collection(self, context, **kwargs):
return self.storage
async def query_resource(self, resource_id, context, **kwargs):
return self.storage.get(resource_id)
class PeopleSchema(SchemaWithStorage):
type = 'people'
resource_class = People
first_name = fields.String()
last_name = fields.String(allow_blank=True)
twitter = fields.String(allow_none=True)
async def fetch_resource(self, resource_id, context, **kwargs):
pass
async def delete_resource(self, resource_id, context, **kwargs):
pass
class CommentSchema(SchemaWithStorage):
resource_class = Comment
body = fields.String()
author = relationships.ToOne(foreign_types=(PeopleSchema.type,))
async def fetch_resource(self, resource_id, context, **kwargs):
pass
async def delete_resource(self, resource_id, context, **kwargs):
pass
class ArticleSchema(SchemaWithStorage):
resource_class = Article
title = fields.String()
author = relationships.ToOne(foreign_types=(PeopleSchema.type,))
comments = relationships.ToMany(foreign_types=(CommentSchema.type,))
async def fetch_resource(self, resource_id, context, **kwargs):
pass
async def delete_resource(self, resource_id, context, **kwargs):
pass
Add support for query relatives in simple example
from aiohttp_json_api.schema import BaseSchema, fields, relationships
from .models import Article, Comment, People
class SchemaWithStorage(BaseSchema):
@property
def storage(self):
return self.app['storage'][self.resource_class.__name__]
async def fetch_resource(self, resource_id, context, **kwargs):
return self.storage.get(resource_id)
async def query_collection(self, context, **kwargs):
return self.storage
async def query_resource(self, resource_id, context, **kwargs):
return await self.fetch_resource(resource_id, context, **kwargs)
class PeopleSchema(SchemaWithStorage):
type = 'people'
resource_class = People
first_name = fields.String()
last_name = fields.String(allow_blank=True)
twitter = fields.String(allow_none=True)
async def delete_resource(self, resource_id, context, **kwargs):
pass
class CommentSchema(SchemaWithStorage):
resource_class = Comment # type will be "comments"
body = fields.String()
author = relationships.ToOne(foreign_types=(PeopleSchema.type,))
async def delete_resource(self, resource_id, context, **kwargs):
pass
class ArticleSchema(SchemaWithStorage):
resource_class = Article # type will be "articles"
title = fields.String()
author = relationships.ToOne(foreign_types=(PeopleSchema.type,))
comments = relationships.ToMany(foreign_types=(CommentSchema.type,))
async def delete_resource(self, resource_id, context, **kwargs):
pass
|
<commit_before>from aiohttp_json_api.schema import BaseSchema, fields, relationships
from .models import Article, Comment, People
class SchemaWithStorage(BaseSchema):
@property
def storage(self):
return self.app['storage'][self.resource_class.__name__]
async def query_collection(self, context, **kwargs):
return self.storage
async def query_resource(self, resource_id, context, **kwargs):
return self.storage.get(resource_id)
class PeopleSchema(SchemaWithStorage):
type = 'people'
resource_class = People
first_name = fields.String()
last_name = fields.String(allow_blank=True)
twitter = fields.String(allow_none=True)
async def fetch_resource(self, resource_id, context, **kwargs):
pass
async def delete_resource(self, resource_id, context, **kwargs):
pass
class CommentSchema(SchemaWithStorage):
resource_class = Comment
body = fields.String()
author = relationships.ToOne(foreign_types=(PeopleSchema.type,))
async def fetch_resource(self, resource_id, context, **kwargs):
pass
async def delete_resource(self, resource_id, context, **kwargs):
pass
class ArticleSchema(SchemaWithStorage):
resource_class = Article
title = fields.String()
author = relationships.ToOne(foreign_types=(PeopleSchema.type,))
comments = relationships.ToMany(foreign_types=(CommentSchema.type,))
async def fetch_resource(self, resource_id, context, **kwargs):
pass
async def delete_resource(self, resource_id, context, **kwargs):
pass
<commit_msg>Add support for query relatives in simple example<commit_after>from aiohttp_json_api.schema import BaseSchema, fields, relationships
from .models import Article, Comment, People
class SchemaWithStorage(BaseSchema):
@property
def storage(self):
return self.app['storage'][self.resource_class.__name__]
async def fetch_resource(self, resource_id, context, **kwargs):
return self.storage.get(resource_id)
async def query_collection(self, context, **kwargs):
return self.storage
async def query_resource(self, resource_id, context, **kwargs):
return await self.fetch_resource(resource_id, context, **kwargs)
class PeopleSchema(SchemaWithStorage):
type = 'people'
resource_class = People
first_name = fields.String()
last_name = fields.String(allow_blank=True)
twitter = fields.String(allow_none=True)
async def delete_resource(self, resource_id, context, **kwargs):
pass
class CommentSchema(SchemaWithStorage):
resource_class = Comment # type will be "comments"
body = fields.String()
author = relationships.ToOne(foreign_types=(PeopleSchema.type,))
async def delete_resource(self, resource_id, context, **kwargs):
pass
class ArticleSchema(SchemaWithStorage):
resource_class = Article # type will be "articles"
title = fields.String()
author = relationships.ToOne(foreign_types=(PeopleSchema.type,))
comments = relationships.ToMany(foreign_types=(CommentSchema.type,))
async def delete_resource(self, resource_id, context, **kwargs):
pass
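The change above centralises single-resource lookup so that related-resource ("relatives") queries can reuse it. A minimal, library-agnostic sketch of that delegation pattern, using plain asyncio with illustrative names (not part of the aiohttp_json_api API):
import asyncio
class StorageBackedLookup:
    # Illustrative stand-in for SchemaWithStorage: one coroutine owns the
    # "get one object by id" logic, and other query paths delegate to it.
    def __init__(self, storage):
        self.storage = storage
    async def fetch_resource(self, resource_id):
        # Single source of truth for fetching one resource.
        return self.storage.get(resource_id)
    async def query_resource(self, resource_id):
        # Relationship ("relatives") lookups reuse fetch_resource instead of
        # duplicating the storage access.
        return await self.fetch_resource(resource_id)
async def demo():
    lookup = StorageBackedLookup({'1': {'title': 'Hello JSON:API'}})
    print(await lookup.query_resource('1'))  # {'title': 'Hello JSON:API'}
asyncio.run(demo())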
|
19d366141ffedbabc93de487d140333de30e4b7a
|
rcamp/lib/pam_backend.py
|
rcamp/lib/pam_backend.py
|
from django.conf import settings
from accounts.models import (
RcLdapUser,
User
)
import pam
import logging
logger = logging.getLogger('accounts')
class PamBackend():
def authenticate(self, request, username=None, password=None):
rc_user = RcLdapUser.objects.get_user_from_suffixed_username(username)
if not rc_user:
return None
p = pam.pam()
authed = p.authenticate(username, password, service=settings.PAM_SERVICES['default'])
logging.info('User {} auth attempt: {}'.format(username, authed))
if authed:
user_dict = {
'first_name': rc_user.first_name,
'last_name': rc_user.last_name,
'email': rc_user.email,
}
user, created = User.objects.update_or_create(
username=username,
defaults=user_dict
)
return user
return None
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
|
from django.conf import settings
from accounts.models import (
RcLdapUser,
User
)
import pam
import logging
logger = logging.getLogger('accounts')
class PamBackend():
def authenticate(self, request, username=None, password=None):
rc_user = RcLdapUser.objects.get_user_from_suffixed_username(username)
if not rc_user:
return None
logging.info('User {} auth attempt'.format(username))
p = pam.pam()
authed = p.authenticate(username, password, service=settings.PAM_SERVICES['default'])
logging.info('User {} auth attempt status: {}'.format(username, authed))
if authed:
user_dict = {
'first_name': rc_user.first_name,
'last_name': rc_user.last_name,
'email': rc_user.email,
}
user, created = User.objects.update_or_create(
username=username,
defaults=user_dict
)
return user
return None
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
|
Add logging to debug hanging auth
|
Add logging to debug hanging auth
|
Python
|
mit
|
ResearchComputing/RCAMP,ResearchComputing/RCAMP,ResearchComputing/RCAMP,ResearchComputing/RCAMP
|
from django.conf import settings
from accounts.models import (
RcLdapUser,
User
)
import pam
import logging
logger = logging.getLogger('accounts')
class PamBackend():
def authenticate(self, request, username=None, password=None):
rc_user = RcLdapUser.objects.get_user_from_suffixed_username(username)
if not rc_user:
return None
p = pam.pam()
authed = p.authenticate(username, password, service=settings.PAM_SERVICES['default'])
logging.info('User {} auth attempt: {}'.format(username, authed))
if authed:
user_dict = {
'first_name': rc_user.first_name,
'last_name': rc_user.last_name,
'email': rc_user.email,
}
user, created = User.objects.update_or_create(
username=username,
defaults=user_dict
)
return user
return None
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
Add logging to debug hanging auth
|
from django.conf import settings
from accounts.models import (
RcLdapUser,
User
)
import pam
import logging
logger = logging.getLogger('accounts')
class PamBackend():
def authenticate(self, request, username=None, password=None):
rc_user = RcLdapUser.objects.get_user_from_suffixed_username(username)
if not rc_user:
return None
logging.info('User {} auth attempt'.format(username))
p = pam.pam()
authed = p.authenticate(username, password, service=settings.PAM_SERVICES['default'])
logging.info('User {} auth attempt status: {}'.format(username, authed))
if authed:
user_dict = {
'first_name': rc_user.first_name,
'last_name': rc_user.last_name,
'email': rc_user.email,
}
user, created = User.objects.update_or_create(
username=username,
defaults=user_dict
)
return user
return None
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
|
<commit_before>from django.conf import settings
from accounts.models import (
RcLdapUser,
User
)
import pam
import logging
logger = logging.getLogger('accounts')
class PamBackend():
def authenticate(self, request, username=None, password=None):
rc_user = RcLdapUser.objects.get_user_from_suffixed_username(username)
if not rc_user:
return None
p = pam.pam()
authed = p.authenticate(username, password, service=settings.PAM_SERVICES['default'])
logging.info('User {} auth attempt: {}'.format(username, authed))
if authed:
user_dict = {
'first_name': rc_user.first_name,
'last_name': rc_user.last_name,
'email': rc_user.email,
}
user, created = User.objects.update_or_create(
username=username,
defaults=user_dict
)
return user
return None
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
<commit_msg>Add logging to debug hanging auth<commit_after>
|
from django.conf import settings
from accounts.models import (
RcLdapUser,
User
)
import pam
import logging
logger = logging.getLogger('accounts')
class PamBackend():
def authenticate(self, request, username=None, password=None):
rc_user = RcLdapUser.objects.get_user_from_suffixed_username(username)
if not rc_user:
return None
logging.info('User {} auth attempt'.format(username))
p = pam.pam()
authed = p.authenticate(username, password, service=settings.PAM_SERVICES['default'])
logging.info('User {} auth attempt status: {}'.format(username, authed))
if authed:
user_dict = {
'first_name': rc_user.first_name,
'last_name': rc_user.last_name,
'email': rc_user.email,
}
user, created = User.objects.update_or_create(
username=username,
defaults=user_dict
)
return user
return None
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
|
from django.conf import settings
from accounts.models import (
RcLdapUser,
User
)
import pam
import logging
logger = logging.getLogger('accounts')
class PamBackend():
def authenticate(self, request, username=None, password=None):
rc_user = RcLdapUser.objects.get_user_from_suffixed_username(username)
if not rc_user:
return None
p = pam.pam()
authed = p.authenticate(username, password, service=settings.PAM_SERVICES['default'])
logging.info('User {} auth attempt: {}'.format(username, authed))
if authed:
user_dict = {
'first_name': rc_user.first_name,
'last_name': rc_user.last_name,
'email': rc_user.email,
}
user, created = User.objects.update_or_create(
username=username,
defaults=user_dict
)
return user
return None
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
Add logging to debug hanging auth
from django.conf import settings
from accounts.models import (
RcLdapUser,
User
)
import pam
import logging
logger = logging.getLogger('accounts')
class PamBackend():
def authenticate(self, request, username=None, password=None):
rc_user = RcLdapUser.objects.get_user_from_suffixed_username(username)
if not rc_user:
return None
logging.info('User {} auth attempt'.format(username))
p = pam.pam()
authed = p.authenticate(username, password, service=settings.PAM_SERVICES['default'])
logging.info('User {} auth attempt status: {}'.format(username, authed))
if authed:
user_dict = {
'first_name': rc_user.first_name,
'last_name': rc_user.last_name,
'email': rc_user.email,
}
user, created = User.objects.update_or_create(
username=username,
defaults=user_dict
)
return user
return None
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
|
<commit_before>from django.conf import settings
from accounts.models import (
RcLdapUser,
User
)
import pam
import logging
logger = logging.getLogger('accounts')
class PamBackend():
def authenticate(self, request, username=None, password=None):
rc_user = RcLdapUser.objects.get_user_from_suffixed_username(username)
if not rc_user:
return None
p = pam.pam()
authed = p.authenticate(username, password, service=settings.PAM_SERVICES['default'])
logging.info('User {} auth attempt: {}'.format(username, authed))
if authed:
user_dict = {
'first_name': rc_user.first_name,
'last_name': rc_user.last_name,
'email': rc_user.email,
}
user, created = User.objects.update_or_create(
username=username,
defaults=user_dict
)
return user
return None
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
<commit_msg>Add logging to debug hanging auth<commit_after>from django.conf import settings
from accounts.models import (
RcLdapUser,
User
)
import pam
import logging
logger = logging.getLogger('accounts')
class PamBackend():
def authenticate(self, request, username=None, password=None):
rc_user = RcLdapUser.objects.get_user_from_suffixed_username(username)
if not rc_user:
return None
logging.info('User {} auth attempt'.format(username))
p = pam.pam()
authed = p.authenticate(username, password, service=settings.PAM_SERVICES['default'])
logging.info('User {} auth attempt status: {}'.format(username, authed))
if authed:
user_dict = {
'first_name': rc_user.first_name,
'last_name': rc_user.last_name,
'email': rc_user.email,
}
user, created = User.objects.update_or_create(
username=username,
defaults=user_dict
)
return user
return None
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
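One detail worth noting above: the module creates a named logger ('accounts') but the log calls go through the root logging module. A minimal standard-library sketch of the conventional pattern, with illustrative names and not a change to the RCAMP code itself:
import logging
logger = logging.getLogger('accounts')  # named logger; the 'accounts' handler/level config applies
def record_auth_attempt(username, authed):
    # Routing through the module-level logger (rather than logging.info) keeps
    # these messages under 'accounts'; %-style args defer string formatting.
    logger.info('User %s auth attempt status: %s', username, authed)
if __name__ == '__main__':
    logging.basicConfig(level=logging.INFO)
    record_auth_attempt('demo-user', True)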
|
ea24974502e0e293905b493d0993ab2fc1812192
|
op_robot_tests/tests_files/brokers/openprocurement_client_helper.py
|
op_robot_tests/tests_files/brokers/openprocurement_client_helper.py
|
from openprocurement_client.client import Client
import sys
def prepare_api_wrapper(key='', host_url="https://api-sandbox.openprocurement.org", api_version='0.8'):
return Client(key, host_url, api_version)
def get_tenders(client, offset=None):
params = {'opt_fields': 'tenderID', 'descending': 1}
if offset:
params['offset'] = offset
return client.get_tenders(params)
|
from openprocurement_client.client import Client
def prepare_api_wrapper(key='',
host_url='https://api-sandbox.openprocurement.org',
api_version='0.8'):
return Client(key, host_url, api_version)
def get_tenders(client, offset=None):
params = {'opt_fields': 'tenderID', 'descending': 1}
if offset:
params['offset'] = offset
return client.get_tenders(params)
|
Remove unused import; split a long line of code
|
Remove unused import; split a long line of code
|
Python
|
apache-2.0
|
Rzaporozhets/robot_tests,mykhaly/robot_tests,bubanoid/robot_tests,VadimShurhal/robot_tests.broker.aps,SlaOne/robot_tests,selurvedu/robot_tests,Leits/robot_tests,cleardevice/robot_tests,openprocurement/robot_tests,kosaniak/robot_tests
|
from openprocurement_client.client import Client
import sys
def prepare_api_wrapper(key='', host_url="https://api-sandbox.openprocurement.org", api_version='0.8'):
return Client(key, host_url, api_version)
def get_tenders(client, offset=None):
params = {'opt_fields': 'tenderID', 'descending': 1}
if offset:
params['offset'] = offset
return client.get_tenders(params)
Remove unused import; split a long line of code
|
from openprocurement_client.client import Client
def prepare_api_wrapper(key='',
host_url='https://api-sandbox.openprocurement.org',
api_version='0.8'):
return Client(key, host_url, api_version)
def get_tenders(client, offset=None):
params = {'opt_fields': 'tenderID', 'descending': 1}
if offset:
params['offset'] = offset
return client.get_tenders(params)
|
<commit_before>from openprocurement_client.client import Client
import sys
def prepare_api_wrapper(key='', host_url="https://api-sandbox.openprocurement.org", api_version='0.8'):
return Client(key, host_url, api_version)
def get_tenders(client, offset=None):
params = {'opt_fields': 'tenderID', 'descending': 1}
if offset:
params['offset'] = offset
return client.get_tenders(params)
<commit_msg>Remove unused import; split a long line of code<commit_after>
|
from openprocurement_client.client import Client
def prepare_api_wrapper(key='',
host_url='https://api-sandbox.openprocurement.org',
api_version='0.8'):
return Client(key, host_url, api_version)
def get_tenders(client, offset=None):
params = {'opt_fields': 'tenderID', 'descending': 1}
if offset:
params['offset'] = offset
return client.get_tenders(params)
|
from openprocurement_client.client import Client
import sys
def prepare_api_wrapper(key='', host_url="https://api-sandbox.openprocurement.org", api_version='0.8'):
return Client(key, host_url, api_version)
def get_tenders(client, offset=None):
params = {'opt_fields': 'tenderID', 'descending': 1}
if offset:
params['offset'] = offset
return client.get_tenders(params)
Remove unused import; split a long line of code
from openprocurement_client.client import Client
def prepare_api_wrapper(key='',
host_url='https://api-sandbox.openprocurement.org',
api_version='0.8'):
return Client(key, host_url, api_version)
def get_tenders(client, offset=None):
params = {'opt_fields': 'tenderID', 'descending': 1}
if offset:
params['offset'] = offset
return client.get_tenders(params)
|
<commit_before>from openprocurement_client.client import Client
import sys
def prepare_api_wrapper(key='', host_url="https://api-sandbox.openprocurement.org", api_version='0.8'):
return Client(key, host_url, api_version)
def get_tenders(client, offset=None):
params = {'opt_fields': 'tenderID', 'descending': 1}
if offset:
params['offset'] = offset
return client.get_tenders(params)
<commit_msg>Remove unused import; split a long line of code<commit_after>from openprocurement_client.client import Client
def prepare_api_wrapper(key='',
host_url='https://api-sandbox.openprocurement.org',
api_version='0.8'):
return Client(key, host_url, api_version)
def get_tenders(client, offset=None):
params = {'opt_fields': 'tenderID', 'descending': 1}
if offset:
params['offset'] = offset
return client.get_tenders(params)
|
d9a8d30ba12f4fb61fdffe353d225c2ffcd074fa
|
fabfile.py
|
fabfile.py
|
from fabric.api import cd, run, sudo, env, execute
from datetime import datetime
env.hosts = ['andrewlorente.com']
apps = {
'bloge': ['bloge@andrewlorente.com'],
'andrewlorente': ['andrewlorente@andrewlorente.com'],
}
def deploy(app):
if app not in apps.keys():
raise Exception("Unknown deploy target '{0}'".format(app))
release_id = datetime.now().strftime("%Y%m%d%H%M%S")
execute(build, app, release_id, hosts=apps[app])
execute(release, app, hosts=['alorente@andrewlorente.com'])
def build(app, release_id):
release_dir = "/u/apps/{0}/releases/{1}".format(app, release_id)
repo = "https://github.com/AndrewLorente/{0}.git".format(app)
run("git clone -q {0} {1}".format(repo, release_dir))
with cd(release_dir):
run("cabal update")
run("cabal install --constraint 'template-haskell installed' "
"--dependencies-only --force-reinstall")
run("cabal configure")
run("cabal build")
run("ln -nfs {0} /u/apps/{1}/current".format(release_dir, app))
def release(app):
sudo("initctl restart " + app)
|
from fabric.api import cd, run, sudo, env, execute, task
from datetime import datetime
env.hosts = ['andrewlorente.com']
apps = {
'bloge': ['bloge@andrewlorente.com'],
'andrewlorente': ['andrewlorente@andrewlorente.com'],
}
@task
def deploy(app):
if app not in apps.keys():
raise Exception("Unknown deploy target '{0}'".format(app))
release_id = datetime.now().strftime("%Y%m%d%H%M%S")
execute(build, app, release_id, hosts=apps[app])
execute(release, app, hosts=['alorente@andrewlorente.com'])
def build(app, release_id):
release_dir = "/u/apps/{0}/releases/{1}".format(app, release_id)
repo = "https://github.com/AndrewLorente/{0}.git".format(app)
run("git clone -q {0} {1}".format(repo, release_dir))
with cd(release_dir):
run("cabal update")
run("cabal install --constraint 'template-haskell installed' "
"--dependencies-only --force-reinstall")
run("cabal configure")
run("cabal build")
run("ln -nfs {0} /u/apps/{1}/current".format(release_dir, app))
def release(app):
sudo("initctl restart " + app)
|
Hide support functions from the public interface
|
Hide support functions from the public interface
|
Python
|
mit
|
ErinCall/andrewlorente
|
from fabric.api import cd, run, sudo, env, execute
from datetime import datetime
env.hosts = ['andrewlorente.com']
apps = {
'bloge': ['bloge@andrewlorente.com'],
'andrewlorente': ['andrewlorente@andrewlorente.com'],
}
def deploy(app):
if app not in apps.keys():
raise Exception("Unknown deploy target '{0}'".format(app))
release_id = datetime.now().strftime("%Y%m%d%H%M%S")
execute(build, app, release_id, hosts=apps[app])
execute(release, app, hosts=['alorente@andrewlorente.com'])
def build(app, release_id):
release_dir = "/u/apps/{0}/releases/{1}".format(app, release_id)
repo = "https://github.com/AndrewLorente/{0}.git".format(app)
run("git clone -q {0} {1}".format(repo, release_dir))
with cd(release_dir):
run("cabal update")
run("cabal install --constraint 'template-haskell installed' "
"--dependencies-only --force-reinstall")
run("cabal configure")
run("cabal build")
run("ln -nfs {0} /u/apps/{1}/current".format(release_dir, app))
def release(app):
sudo("initctl restart " + app)
Hide support functions from the public interface
|
from fabric.api import cd, run, sudo, env, execute, task
from datetime import datetime
env.hosts = ['andrewlorente.com']
apps = {
'bloge': ['bloge@andrewlorente.com'],
'andrewlorente': ['andrewlorente@andrewlorente.com'],
}
@task
def deploy(app):
if app not in apps.keys():
raise Exception("Unknown deploy target '{0}'".format(app))
release_id = datetime.now().strftime("%Y%m%d%H%M%S")
execute(build, app, release_id, hosts=apps[app])
execute(release, app, hosts=['alorente@andrewlorente.com'])
def build(app, release_id):
release_dir = "/u/apps/{0}/releases/{1}".format(app, release_id)
repo = "https://github.com/AndrewLorente/{0}.git".format(app)
run("git clone -q {0} {1}".format(repo, release_dir))
with cd(release_dir):
run("cabal update")
run("cabal install --constraint 'template-haskell installed' "
"--dependencies-only --force-reinstall")
run("cabal configure")
run("cabal build")
run("ln -nfs {0} /u/apps/{1}/current".format(release_dir, app))
def release(app):
sudo("initctl restart " + app)
|
<commit_before>from fabric.api import cd, run, sudo, env, execute
from datetime import datetime
env.hosts = ['andrewlorente.com']
apps = {
'bloge': ['bloge@andrewlorente.com'],
'andrewlorente': ['andrewlorente@andrewlorente.com'],
}
def deploy(app):
if app not in apps.keys():
raise Exception("Unknown deploy target '{0}'".format(app))
release_id = datetime.now().strftime("%Y%m%d%H%M%S")
execute(build, app, release_id, hosts=apps[app])
execute(release, app, hosts=['alorente@andrewlorente.com'])
def build(app, release_id):
release_dir = "/u/apps/{0}/releases/{1}".format(app, release_id)
repo = "https://github.com/AndrewLorente/{0}.git".format(app)
run("git clone -q {0} {1}".format(repo, release_dir))
with cd(release_dir):
run("cabal update")
run("cabal install --constraint 'template-haskell installed' "
"--dependencies-only --force-reinstall")
run("cabal configure")
run("cabal build")
run("ln -nfs {0} /u/apps/{1}/current".format(release_dir, app))
def release(app):
sudo("initctl restart " + app)
<commit_msg>Hide support functions from the public interface<commit_after>
|
from fabric.api import cd, run, sudo, env, execute, task
from datetime import datetime
env.hosts = ['andrewlorente.com']
apps = {
'bloge': ['bloge@andrewlorente.com'],
'andrewlorente': ['andrewlorente@andrewlorente.com'],
}
@task
def deploy(app):
if app not in apps.keys():
raise Exception("Unknown deploy target '{0}'".format(app))
release_id = datetime.now().strftime("%Y%m%d%H%M%S")
execute(build, app, release_id, hosts=apps[app])
execute(release, app, hosts=['alorente@andrewlorente.com'])
def build(app, release_id):
release_dir = "/u/apps/{0}/releases/{1}".format(app, release_id)
repo = "https://github.com/AndrewLorente/{0}.git".format(app)
run("git clone -q {0} {1}".format(repo, release_dir))
with cd(release_dir):
run("cabal update")
run("cabal install --constraint 'template-haskell installed' "
"--dependencies-only --force-reinstall")
run("cabal configure")
run("cabal build")
run("ln -nfs {0} /u/apps/{1}/current".format(release_dir, app))
def release(app):
sudo("initctl restart " + app)
|
from fabric.api import cd, run, sudo, env, execute
from datetime import datetime
env.hosts = ['andrewlorente.com']
apps = {
'bloge': ['bloge@andrewlorente.com'],
'andrewlorente': ['andrewlorente@andrewlorente.com'],
}
def deploy(app):
if app not in apps.keys():
raise Exception("Unknown deploy target '{0}'".format(app))
release_id = datetime.now().strftime("%Y%m%d%H%M%S")
execute(build, app, release_id, hosts=apps[app])
execute(release, app, hosts=['alorente@andrewlorente.com'])
def build(app, release_id):
release_dir = "/u/apps/{0}/releases/{1}".format(app, release_id)
repo = "https://github.com/AndrewLorente/{0}.git".format(app)
run("git clone -q {0} {1}".format(repo, release_dir))
with cd(release_dir):
run("cabal update")
run("cabal install --constraint 'template-haskell installed' "
"--dependencies-only --force-reinstall")
run("cabal configure")
run("cabal build")
run("ln -nfs {0} /u/apps/{1}/current".format(release_dir, app))
def release(app):
sudo("initctl restart " + app)
Hide support functions from the public interface
from fabric.api import cd, run, sudo, env, execute, task
from datetime import datetime
env.hosts = ['andrewlorente.com']
apps = {
'bloge': ['bloge@andrewlorente.com'],
'andrewlorente': ['andrewlorente@andrewlorente.com'],
}
@task
def deploy(app):
if app not in apps.keys():
raise Exception("Unknown deploy target '{0}'".format(app))
release_id = datetime.now().strftime("%Y%m%d%H%M%S")
execute(build, app, release_id, hosts=apps[app])
execute(release, app, hosts=['alorente@andrewlorente.com'])
def build(app, release_id):
release_dir = "/u/apps/{0}/releases/{1}".format(app, release_id)
repo = "https://github.com/AndrewLorente/{0}.git".format(app)
run("git clone -q {0} {1}".format(repo, release_dir))
with cd(release_dir):
run("cabal update")
run("cabal install --constraint 'template-haskell installed' "
"--dependencies-only --force-reinstall")
run("cabal configure")
run("cabal build")
run("ln -nfs {0} /u/apps/{1}/current".format(release_dir, app))
def release(app):
sudo("initctl restart " + app)
|
<commit_before>from fabric.api import cd, run, sudo, env, execute
from datetime import datetime
env.hosts = ['andrewlorente.com']
apps = {
'bloge': ['bloge@andrewlorente.com'],
'andrewlorente': ['andrewlorente@andrewlorente.com'],
}
def deploy(app):
if app not in apps.keys():
raise Exception("Unknown deploy target '{0}'".format(app))
release_id = datetime.now().strftime("%Y%m%d%H%M%S")
execute(build, app, release_id, hosts=apps[app])
execute(release, app, hosts=['alorente@andrewlorente.com'])
def build(app, release_id):
release_dir = "/u/apps/{0}/releases/{1}".format(app, release_id)
repo = "https://github.com/AndrewLorente/{0}.git".format(app)
run("git clone -q {0} {1}".format(repo, release_dir))
with cd(release_dir):
run("cabal update")
run("cabal install --constraint 'template-haskell installed' "
"--dependencies-only --force-reinstall")
run("cabal configure")
run("cabal build")
run("ln -nfs {0} /u/apps/{1}/current".format(release_dir, app))
def release(app):
sudo("initctl restart " + app)
<commit_msg>Hide support functions from the public interface<commit_after>from fabric.api import cd, run, sudo, env, execute, task
from datetime import datetime
env.hosts = ['andrewlorente.com']
apps = {
'bloge': ['bloge@andrewlorente.com'],
'andrewlorente': ['andrewlorente@andrewlorente.com'],
}
@task
def deploy(app):
if app not in apps.keys():
raise Exception("Unknown deploy target '{0}'".format(app))
release_id = datetime.now().strftime("%Y%m%d%H%M%S")
execute(build, app, release_id, hosts=apps[app])
execute(release, app, hosts=['alorente@andrewlorente.com'])
def build(app, release_id):
release_dir = "/u/apps/{0}/releases/{1}".format(app, release_id)
repo = "https://github.com/AndrewLorente/{0}.git".format(app)
run("git clone -q {0} {1}".format(repo, release_dir))
with cd(release_dir):
run("cabal update")
run("cabal install --constraint 'template-haskell installed' "
"--dependencies-only --force-reinstall")
run("cabal configure")
run("cabal build")
run("ln -nfs {0} /u/apps/{1}/current".format(release_dir, app))
def release(app):
sudo("initctl restart " + app)
|
38c831d1ca49c209b315761f5b58793ff3639759
|
tests/test_months.py
|
tests/test_months.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_months
----------------------------------
Tests for `months` module.
"""
import unittest
from months import months
class TestMonths(unittest.TestCase):
def setUp(self):
pass
def test_something(self):
pass
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_months
----------------------------------
Tests for `months` module.
"""
import os
import sys
import datetime
import unittest
sys.path.append(os.path.join('.', 'months'))
sys.path.append(os.path.join('..', 'months'))
from months import Month
class TestMonths(unittest.TestCase):
def setUp(self):
self.datetime = datetime.datetime(2015, 4, 15)
self.date = self.datetime.date()
self.month = Month(2015, 4)
def test_repr(self):
self.assertEqual(repr(self.month), 'Month(2015, 4)')
def test_str(self):
self.assertEqual(str(self.month), '2015-04')
def test_month_name(self):
self.assertEqual(self.month.month_name, 'April')
def test_month_abbr(self):
self.assertEqual(self.month.month_abbr, 'Apr')
def test_full_display(self):
self.assertEqual(self.month.full_display, 'April 2015')
def test_abbr_display(self):
self.assertEqual(self.month.abbr_display, 'Apr 2015')
def test_from_datetime(self):
self.assertEqual(self.month, Month.from_date(self.datetime))
def test_from_date(self):
self.assertEqual(self.month, Month.from_date(self.date))
def test_add(self):
self.assertEqual(self.month + 1, Month(2015, 5))
def test_add_rollover(self):
self.assertEqual(self.month + 9, Month(2016, 1))
def test_sub(self):
self.assertEqual(self.month - 1, Month(2015, 3))
def test_sub_rollover(self):
self.assertEqual(self.month - 4, Month(2014, 12))
def test_start_date(self):
self.assertEqual(self.month.start_date, self.date.replace(day=1))
def test_end_date(self):
self.assertEqual(self.month.end_date, self.date.replace(day=30))
def test_range(self):
self.assertEqual(
self.month.range,
(self.date.replace(day=1), self.date.replace(day=30)))
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main()
|
Add tests for Month functionality
|
Add tests for Month functionality
|
Python
|
mit
|
kstark/months
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_months
----------------------------------
Tests for `months` module.
"""
import unittest
from months import months
class TestMonths(unittest.TestCase):
def setUp(self):
pass
def test_something(self):
pass
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main()
Add tests for Month functionality
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_months
----------------------------------
Tests for `months` module.
"""
import os
import sys
import datetime
import unittest
sys.path.append(os.path.join('.', 'months'))
sys.path.append(os.path.join('..', 'months'))
from months import Month
class TestMonths(unittest.TestCase):
def setUp(self):
self.datetime = datetime.datetime(2015, 4, 15)
self.date = self.datetime.date()
self.month = Month(2015, 4)
def test_repr(self):
self.assertEqual(repr(self.month), 'Month(2015, 4)')
def test_str(self):
self.assertEqual(str(self.month), '2015-04')
def test_month_name(self):
self.assertEqual(self.month.month_name, 'April')
def test_month_abbr(self):
self.assertEqual(self.month.month_abbr, 'Apr')
def test_full_display(self):
self.assertEqual(self.month.full_display, 'April 2015')
def test_abbr_display(self):
self.assertEqual(self.month.abbr_display, 'Apr 2015')
def test_from_datetime(self):
self.assertEqual(self.month, Month.from_date(self.datetime))
def test_from_date(self):
self.assertEqual(self.month, Month.from_date(self.date))
def test_add(self):
self.assertEqual(self.month + 1, Month(2015, 5))
def test_add_rollover(self):
self.assertEqual(self.month + 9, Month(2016, 1))
def test_sub(self):
self.assertEqual(self.month - 1, Month(2015, 3))
def test_sub_rollover(self):
self.assertEqual(self.month - 4, Month(2014, 12))
def test_start_date(self):
self.assertEqual(self.month.start_date, self.date.replace(day=1))
def test_end_date(self):
self.assertEqual(self.month.end_date, self.date.replace(day=30))
def test_range(self):
self.assertEqual(
self.month.range,
(self.date.replace(day=1), self.date.replace(day=30)))
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main()
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_months
----------------------------------
Tests for `months` module.
"""
import unittest
from months import months
class TestMonths(unittest.TestCase):
def setUp(self):
pass
def test_something(self):
pass
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main()
<commit_msg>Add tests for Month functionality<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_months
----------------------------------
Tests for `months` module.
"""
import os
import sys
import datetime
import unittest
sys.path.append(os.path.join('.', 'months'))
sys.path.append(os.path.join('..', 'months'))
from months import Month
class TestMonths(unittest.TestCase):
def setUp(self):
self.datetime = datetime.datetime(2015, 4, 15)
self.date = self.datetime.date()
self.month = Month(2015, 4)
def test_repr(self):
self.assertEqual(repr(self.month), 'Month(2015, 4)')
def test_str(self):
self.assertEqual(str(self.month), '2015-04')
def test_month_name(self):
self.assertEqual(self.month.month_name, 'April')
def test_month_abbr(self):
self.assertEqual(self.month.month_abbr, 'Apr')
def test_full_display(self):
self.assertEqual(self.month.full_display, 'April 2015')
def test_abbr_display(self):
self.assertEqual(self.month.abbr_display, 'Apr 2015')
def test_from_datetime(self):
self.assertEqual(self.month, Month.from_date(self.datetime))
def test_from_date(self):
self.assertEqual(self.month, Month.from_date(self.date))
def test_add(self):
self.assertEqual(self.month + 1, Month(2015, 5))
def test_add_rollover(self):
self.assertEqual(self.month + 9, Month(2016, 1))
def test_sub(self):
self.assertEqual(self.month - 1, Month(2015, 3))
def test_sub_rollover(self):
self.assertEqual(self.month - 4, Month(2014, 12))
def test_start_date(self):
self.assertEqual(self.month.start_date, self.date.replace(day=1))
def test_end_date(self):
self.assertEqual(self.month.end_date, self.date.replace(day=30))
def test_range(self):
self.assertEqual(
self.month.range,
(self.date.replace(day=1), self.date.replace(day=30)))
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_months
----------------------------------
Tests for `months` module.
"""
import unittest
from months import months
class TestMonths(unittest.TestCase):
def setUp(self):
pass
def test_something(self):
pass
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main()
Add tests for Month functionality
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_months
----------------------------------
Tests for `months` module.
"""
import os
import sys
import datetime
import unittest
sys.path.append(os.path.join('.', 'months'))
sys.path.append(os.path.join('..', 'months'))
from months import Month
class TestMonths(unittest.TestCase):
def setUp(self):
self.datetime = datetime.datetime(2015, 4, 15)
self.date = self.datetime.date()
self.month = Month(2015, 4)
def test_repr(self):
self.assertEqual(repr(self.month), 'Month(2015, 4)')
def test_str(self):
self.assertEqual(str(self.month), '2015-04')
def test_month_name(self):
self.assertEqual(self.month.month_name, 'April')
def test_month_abbr(self):
self.assertEqual(self.month.month_abbr, 'Apr')
def test_full_display(self):
self.assertEqual(self.month.full_display, 'April 2015')
def test_abbr_display(self):
self.assertEqual(self.month.abbr_display, 'Apr 2015')
def test_from_datetime(self):
self.assertEqual(self.month, Month.from_date(self.datetime))
def test_from_date(self):
self.assertEqual(self.month, Month.from_date(self.date))
def test_add(self):
self.assertEqual(self.month + 1, Month(2015, 5))
def test_add_rollover(self):
self.assertEqual(self.month + 9, Month(2016, 1))
def test_sub(self):
self.assertEqual(self.month - 1, Month(2015, 3))
def test_sub_rollover(self):
self.assertEqual(self.month - 4, Month(2014, 12))
def test_start_date(self):
self.assertEqual(self.month.start_date, self.date.replace(day=1))
def test_end_date(self):
self.assertEqual(self.month.end_date, self.date.replace(day=30))
def test_range(self):
self.assertEqual(
self.month.range,
(self.date.replace(day=1), self.date.replace(day=30)))
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main()
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_months
----------------------------------
Tests for `months` module.
"""
import unittest
from months import months
class TestMonths(unittest.TestCase):
def setUp(self):
pass
def test_something(self):
pass
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main()
<commit_msg>Add tests for Month functionality<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_months
----------------------------------
Tests for `months` module.
"""
import os
import sys
import datetime
import unittest
sys.path.append(os.path.join('.', 'months'))
sys.path.append(os.path.join('..', 'months'))
from months import Month
class TestMonths(unittest.TestCase):
def setUp(self):
self.datetime = datetime.datetime(2015, 4, 15)
self.date = self.datetime.date()
self.month = Month(2015, 4)
def test_repr(self):
self.assertEqual(repr(self.month), 'Month(2015, 4)')
def test_str(self):
self.assertEqual(str(self.month), '2015-04')
def test_month_name(self):
self.assertEqual(self.month.month_name, 'April')
def test_month_abbr(self):
self.assertEqual(self.month.month_abbr, 'Apr')
def test_full_display(self):
self.assertEqual(self.month.full_display, 'April 2015')
def test_abbr_display(self):
self.assertEqual(self.month.abbr_display, 'Apr 2015')
def test_from_datetime(self):
self.assertEqual(self.month, Month.from_date(self.datetime))
def test_from_date(self):
self.assertEqual(self.month, Month.from_date(self.date))
def test_add(self):
self.assertEqual(self.month + 1, Month(2015, 5))
def test_add_rollover(self):
self.assertEqual(self.month + 9, Month(2016, 1))
def test_sub(self):
self.assertEqual(self.month - 1, Month(2015, 3))
def test_sub_rollover(self):
self.assertEqual(self.month - 4, Month(2014, 12))
def test_start_date(self):
self.assertEqual(self.month.start_date, self.date.replace(day=1))
def test_end_date(self):
self.assertEqual(self.month.end_date, self.date.replace(day=30))
def test_range(self):
self.assertEqual(
self.month.range,
(self.date.replace(day=1), self.date.replace(day=30)))
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main()
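The rollover expectations above (April 2015 plus 9 months lands in January 2016, April 2015 minus 4 months in December 2014) follow from plain month arithmetic. A self-contained sketch of one way an implementation might compute it — illustrative only, not the months package's actual code:
def shift_month(year, month, delta):
    # Work with a zero-based month index so divmod handles year rollover,
    # including negative deltas, via floor division.
    index = year * 12 + (month - 1) + delta
    new_year, zero_based_month = divmod(index, 12)
    return new_year, zero_based_month + 1
assert shift_month(2015, 4, 9) == (2016, 1)    # matches test_add_rollover
assert shift_month(2015, 4, -4) == (2014, 12)  # matches test_sub_rollover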
|
784b165d67550cd159b05aabfd2872ebc746a9e2
|
pants/views.py
|
pants/views.py
|
from cornice import Service
from tokenlib.errors import Error as TokenError
callurl = Service(name='callurl', path='/call-url')
call = Service(name='call', path='/call/{token}')
def is_authenticated(request):
"""Validates that an user is authenticated and extracts its userid"""
request.validated['userid'] = 'n1k0';
def is_token_valid(request):
token = request.matchdict['token']
try:
token = request.token_manager.parse_token(token.encode())
request.validated['token'] = token
except TokenError as e:
request.errors.add('querystring', 'token', e.message)
@callurl.post(permission='create')
def generate_callurl(request):
"""
Generate a callurl based on user ID.
"""
token = request.token_manager.make_token({
"userid": request.validated['userid'],
})
call_url = '{root}/call/{token}'.format(root=request.application_url,
token=token)
return {'call-url': call_url}
@call.get(validators=[is_token_valid], renderer='templates/call.jinja2')
def display_app(request):
return request.validated['token']
|
from pyramid.security import Allow, Authenticated, authenticated_userid
from cornice import Service
from tokenlib.errors import Error as TokenError
callurl = Service(name='callurl', path='/call-url')
call = Service(name='call', path='/call/{token}')
def acl(request):
return [(Allow, Authenticated, 'create-callurl')]
def is_token_valid(request):
token = request.matchdict['token']
try:
token = request.token_manager.parse_token(token.encode())
request.validated['token'] = token
except TokenError as e:
request.errors.add('querystring', 'token', e.message)
@callurl.post(permission='create-callurl', acl=acl)
def generate_callurl(request):
"""
Generate a callurl based on user ID.
"""
userid = authenticated_userid(request)
token = request.token_manager.make_token({"userid": userid})
call_url = '{root}/call/{token}'.format(root=request.application_url,
token=token)
return {'call-url': call_url}
@call.get(validators=[is_token_valid], renderer='templates/call.jinja2')
def display_app(request):
return request.validated['token']
|
Implement ACL for call url creation
|
Implement ACL for call url creation
|
Python
|
mpl-2.0
|
ametaireau/pants-server,almet/pants-server
|
from cornice import Service
from tokenlib.errors import Error as TokenError
callurl = Service(name='callurl', path='/call-url')
call = Service(name='call', path='/call/{token}')
def is_authenticated(request):
"""Validates that an user is authenticated and extracts its userid"""
request.validated['userid'] = 'n1k0';
def is_token_valid(request):
token = request.matchdict['token']
try:
token = request.token_manager.parse_token(token.encode())
request.validated['token'] = token
except TokenError as e:
request.errors.add('querystring', 'token', e.message)
@callurl.post(permission='create')
def generate_callurl(request):
"""
Generate a callurl based on user ID.
"""
token = request.token_manager.make_token({
"userid": request.validated['userid'],
})
call_url = '{root}/call/{token}'.format(root=request.application_url,
token=token)
return {'call-url': call_url}
@call.get(validators=[is_token_valid], renderer='templates/call.jinja2')
def display_app(request):
return request.validated['token']
Implement ACL for call url creation
|
from pyramid.security import Allow, Authenticated, authenticated_userid
from cornice import Service
from tokenlib.errors import Error as TokenError
callurl = Service(name='callurl', path='/call-url')
call = Service(name='call', path='/call/{token}')
def acl(request):
return [(Allow, Authenticated, 'create-callurl')]
def is_token_valid(request):
token = request.matchdict['token']
try:
token = request.token_manager.parse_token(token.encode())
request.validated['token'] = token
except TokenError as e:
request.errors.add('querystring', 'token', e.message)
@callurl.post(permission='create-callurl', acl=acl)
def generate_callurl(request):
"""
Generate a callurl based on user ID.
"""
userid = authenticated_userid(request)
token = request.token_manager.make_token({"userid": userid})
call_url = '{root}/call/{token}'.format(root=request.application_url,
token=token)
return {'call-url': call_url}
@call.get(validators=[is_token_valid], renderer='templates/call.jinja2')
def display_app(request):
return request.validated['token']
|
<commit_before>from cornice import Service
from tokenlib.errors import Error as TokenError
callurl = Service(name='callurl', path='/call-url')
call = Service(name='call', path='/call/{token}')
def is_authenticated(request):
"""Validates that an user is authenticated and extracts its userid"""
request.validated['userid'] = 'n1k0';
def is_token_valid(request):
token = request.matchdict['token']
try:
token = request.token_manager.parse_token(token.encode())
request.validated['token'] = token
except TokenError as e:
request.errors.add('querystring', 'token', e.message)
@callurl.post(permission='create')
def generate_callurl(request):
"""
Generate a callurl based on user ID.
"""
token = request.token_manager.make_token({
"userid": request.validated['userid'],
})
call_url = '{root}/call/{token}'.format(root=request.application_url,
token=token)
return {'call-url': call_url}
@call.get(validators=[is_token_valid], renderer='templates/call.jinja2')
def display_app(request):
return request.validated['token']
<commit_msg>Implement ACL for call url creation<commit_after>
|
from pyramid.security import Allow, Authenticated, authenticated_userid
from cornice import Service
from tokenlib.errors import Error as TokenError
callurl = Service(name='callurl', path='/call-url')
call = Service(name='call', path='/call/{token}')
def acl(request):
return [(Allow, Authenticated, 'create-callurl')]
def is_token_valid(request):
token = request.matchdict['token']
try:
token = request.token_manager.parse_token(token.encode())
request.validated['token'] = token
except TokenError as e:
request.errors.add('querystring', 'token', e.message)
@callurl.post(permission='create-callurl', acl=acl)
def generate_callurl(request):
"""
Generate a callurl based on user ID.
"""
userid = authenticated_userid(request)
token = request.token_manager.make_token({"userid": userid})
call_url = '{root}/call/{token}'.format(root=request.application_url,
token=token)
return {'call-url': call_url}
@call.get(validators=[is_token_valid], renderer='templates/call.jinja2')
def display_app(request):
return request.validated['token']
|
from cornice import Service
from tokenlib.errors import Error as TokenError
callurl = Service(name='callurl', path='/call-url')
call = Service(name='call', path='/call/{token}')
def is_authenticated(request):
"""Validates that an user is authenticated and extracts its userid"""
request.validated['userid'] = 'n1k0';
def is_token_valid(request):
token = request.matchdict['token']
try:
token = request.token_manager.parse_token(token.encode())
request.validated['token'] = token
except TokenError as e:
request.errors.add('querystring', 'token', e.message)
@callurl.post(permission='create')
def generate_callurl(request):
"""
Generate a callurl based on user ID.
"""
token = request.token_manager.make_token({
"userid": request.validated['userid'],
})
call_url = '{root}/call/{token}'.format(root=request.application_url,
token=token)
return {'call-url': call_url}
@call.get(validators=[is_token_valid], renderer='templates/call.jinja2')
def display_app(request):
return request.validated['token']
Implement ACL for call url creation
from pyramid.security import Allow, Authenticated, authenticated_userid
from cornice import Service
from tokenlib.errors import Error as TokenError
callurl = Service(name='callurl', path='/call-url')
call = Service(name='call', path='/call/{token}')
def acl(request):
return [(Allow, Authenticated, 'create-callurl')]
def is_token_valid(request):
token = request.matchdict['token']
try:
token = request.token_manager.parse_token(token.encode())
request.validated['token'] = token
except TokenError as e:
request.errors.add('querystring', 'token', e.message)
@callurl.post(permission='create-callurl', acl=acl)
def generate_callurl(request):
"""
Generate a callurl based on user ID.
"""
userid = authenticated_userid(request)
token = request.token_manager.make_token({"userid": userid})
call_url = '{root}/call/{token}'.format(root=request.application_url,
token=token)
return {'call-url': call_url}
@call.get(validators=[is_token_valid], renderer='templates/call.jinja2')
def display_app(request):
return request.validated['token']
|
<commit_before>from cornice import Service
from tokenlib.errors import Error as TokenError
callurl = Service(name='callurl', path='/call-url')
call = Service(name='call', path='/call/{token}')
def is_authenticated(request):
"""Validates that an user is authenticated and extracts its userid"""
request.validated['userid'] = 'n1k0';
def is_token_valid(request):
token = request.matchdict['token']
try:
token = request.token_manager.parse_token(token.encode())
request.validated['token'] = token
except TokenError as e:
request.errors.add('querystring', 'token', e.message)
@callurl.post(permission='create')
def generate_callurl(request):
"""
Generate a callurl based on user ID.
"""
token = request.token_manager.make_token({
"userid": request.validated['userid'],
})
call_url = '{root}/call/{token}'.format(root=request.application_url,
token=token)
return {'call-url': call_url}
@call.get(validators=[is_token_valid], renderer='templates/call.jinja2')
def display_app(request):
return request.validated['token']
<commit_msg>Implement ACL for call url creation<commit_after>from pyramid.security import Allow, Authenticated, authenticated_userid
from cornice import Service
from tokenlib.errors import Error as TokenError
callurl = Service(name='callurl', path='/call-url')
call = Service(name='call', path='/call/{token}')
def acl(request):
return [(Allow, Authenticated, 'create-callurl')]
def is_token_valid(request):
token = request.matchdict['token']
try:
token = request.token_manager.parse_token(token.encode())
request.validated['token'] = token
except TokenError as e:
request.errors.add('querystring', 'token', e.message)
@callurl.post(permission='create-callurl', acl=acl)
def generate_callurl(request):
"""
Generate a callurl based on user ID.
"""
userid = authenticated_userid(request)
token = request.token_manager.make_token({"userid": userid})
call_url = '{root}/call/{token}'.format(root=request.application_url,
token=token)
return {'call-url': call_url}
@call.get(validators=[is_token_valid], renderer='templates/call.jinja2')
def display_app(request):
return request.validated['token']
|
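The commit above protects call-url creation with a per-request ACL and a named permission. Below is a minimal standalone sketch of that pattern, assuming a Pyramid application with an authentication policy already configured and the older Cornice convention (used in the record) of passing acl= to the view decorator; the service name, path and permission are illustrative only, not taken from the project.

from pyramid.security import Allow, Authenticated
from cornice import Service

notes = Service(name='notes', path='/notes')


def notes_acl(request):
    # Grant 'add-note' to any authenticated principal; everything else
    # falls through to Pyramid's default deny.
    return [(Allow, Authenticated, 'add-note')]


@notes.post(permission='add-note', acl=notes_acl)
def add_note(request):
    # The permission check has already run, so an authenticated user id
    # is guaranteed to be available here.
    return {'owner': request.authenticated_userid, 'status': 'created'}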
0a907442eee18d0b30ca4ad2c6a5ef1fabb90684
|
pelicanconf.py
|
pelicanconf.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
AUTHOR = u'Lex Toumbourou'
SITENAME = u'LexToumbourou.com'
SITEURL = 'http://lextoumbourou.com'
TIMEZONE = 'Australia/Melbourne'
DEFAULT_LANG = u'en'
ARTICLE_URL = 'blog/posts/{slug}/'
ARTICLE_SAVE_AS = 'blog/posts/{slug}/index.html'
# Feed generation is usually not desired when developing
FEED_DOMAIN = SITEURL
FEED_ATOM = 'atom.xml'
# Social widget
SOCIAL = (('You can add links in your config file', '#'),
('Another social link', '#'),)
DEFAULT_PAGINATION = None
THEME = "themes/lextoumbourou-theme"
STATIC_PATHS = ['images']
FILES_TO_COPY = (
('extra/robots.txt', 'robots.txt'),
('extra/favicon.ico', 'favicon.ico'),
)
DISQUS_SITENAME = 'lextoumbouroucom'
|
#!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
AUTHOR = u'Lex Toumbourou'
SITENAME = u'LexToumbourou.com'
SITEURL = 'http://lextoumbourou.com'
TIMEZONE = 'Australia/Melbourne'
DEFAULT_LANG = u'en'
ARTICLE_URL = 'blog/posts/{slug}/'
ARTICLE_SAVE_AS = 'blog/posts/{slug}/index.html'
# Feed generation is usually not desired when developing
FEED_DOMAIN = SITEURL
FEED_ATOM = 'atom.xml'
# Social widget
SOCIAL = (('You can add links in your config file', '#'),
('Another social link', '#'),)
DEFAULT_PAGINATION = None
THEME = "themes/lextoumbourou-theme"
STATIC_PATHS = ['images', 'extra']
EXTRA_PATH_METADATA = {
'extra/robots.txt': {'path': 'robots.txt'},
'extra/favicon.ico': {'path': 'favicon.ico'},
}
DISQUS_SITENAME = 'lextoumbouroucom'
|
Support static files via new Pelican API
|
Support static files via new Pelican API
|
Python
|
mit
|
lextoumbourou/lextoumbourou.github.io,lextoumbourou/lextoumbourou.github.io,lextoumbourou/lextoumbourou.github.io,lextoumbourou/lextoumbourou.github.io
|
#!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
AUTHOR = u'Lex Toumbourou'
SITENAME = u'LexToumbourou.com'
SITEURL = 'http://lextoumbourou.com'
TIMEZONE = 'Australia/Melbourne'
DEFAULT_LANG = u'en'
ARTICLE_URL = 'blog/posts/{slug}/'
ARTICLE_SAVE_AS = 'blog/posts/{slug}/index.html'
# Feed generation is usually not desired when developing
FEED_DOMAIN = SITEURL
FEED_ATOM = 'atom.xml'
# Social widget
SOCIAL = (('You can add links in your config file', '#'),
('Another social link', '#'),)
DEFAULT_PAGINATION = None
THEME = "themes/lextoumbourou-theme"
STATIC_PATHS = ['images']
FILES_TO_COPY = (
('extra/robots.txt', 'robots.txt'),
('extra/favicon.ico', 'favicon.ico'),
)
DISQUS_SITENAME = 'lextoumbouroucom'
Support static files via new Pelican API
|
#!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
AUTHOR = u'Lex Toumbourou'
SITENAME = u'LexToumbourou.com'
SITEURL = 'http://lextoumbourou.com'
TIMEZONE = 'Australia/Melbourne'
DEFAULT_LANG = u'en'
ARTICLE_URL = 'blog/posts/{slug}/'
ARTICLE_SAVE_AS = 'blog/posts/{slug}/index.html'
# Feed generation is usually not desired when developing
FEED_DOMAIN = SITEURL
FEED_ATOM = 'atom.xml'
# Social widget
SOCIAL = (('You can add links in your config file', '#'),
('Another social link', '#'),)
DEFAULT_PAGINATION = None
THEME = "themes/lextoumbourou-theme"
STATIC_PATHS = ['images', 'extra']
EXTRA_PATH_METADATA = {
'extra/robots.txt': {'path': 'robots.txt'},
'extra/favicon.ico': {'path': 'favicon.ico'},
}
DISQUS_SITENAME = 'lextoumbouroucom'
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
AUTHOR = u'Lex Toumbourou'
SITENAME = u'LexToumbourou.com'
SITEURL = 'http://lextoumbourou.com'
TIMEZONE = 'Australia/Melbourne'
DEFAULT_LANG = u'en'
ARTICLE_URL = 'blog/posts/{slug}/'
ARTICLE_SAVE_AS = 'blog/posts/{slug}/index.html'
# Feed generation is usually not desired when developing
FEED_DOMAIN = SITEURL
FEED_ATOM = 'atom.xml'
# Social widget
SOCIAL = (('You can add links in your config file', '#'),
('Another social link', '#'),)
DEFAULT_PAGINATION = None
THEME = "themes/lextoumbourou-theme"
STATIC_PATHS = ['images']
FILES_TO_COPY = (
('extra/robots.txt', 'robots.txt'),
('extra/favicon.ico', 'favicon.ico'),
)
DISQUS_SITENAME = 'lextoumbouroucom'
<commit_msg>Support static files via new Pelican API<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
AUTHOR = u'Lex Toumbourou'
SITENAME = u'LexToumbourou.com'
SITEURL = 'http://lextoumbourou.com'
TIMEZONE = 'Australia/Melbourne'
DEFAULT_LANG = u'en'
ARTICLE_URL = 'blog/posts/{slug}/'
ARTICLE_SAVE_AS = 'blog/posts/{slug}/index.html'
# Feed generation is usually not desired when developing
FEED_DOMAIN = SITEURL
FEED_ATOM = 'atom.xml'
# Social widget
SOCIAL = (('You can add links in your config file', '#'),
('Another social link', '#'),)
DEFAULT_PAGINATION = None
THEME = "themes/lextoumbourou-theme"
STATIC_PATHS = ['images', 'extra']
EXTRA_PATH_METADATA = {
'extra/robots.txt': {'path': 'robots.txt'},
'extra/favicon.ico': {'path': 'favicon.ico'},
}
DISQUS_SITENAME = 'lextoumbouroucom'
|
#!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
AUTHOR = u'Lex Toumbourou'
SITENAME = u'LexToumbourou.com'
SITEURL = 'http://lextoumbourou.com'
TIMEZONE = 'Australia/Melbourne'
DEFAULT_LANG = u'en'
ARTICLE_URL = 'blog/posts/{slug}/'
ARTICLE_SAVE_AS = 'blog/posts/{slug}/index.html'
# Feed generation is usually not desired when developing
FEED_DOMAIN = SITEURL
FEED_ATOM = 'atom.xml'
# Social widget
SOCIAL = (('You can add links in your config file', '#'),
('Another social link', '#'),)
DEFAULT_PAGINATION = None
THEME = "themes/lextoumbourou-theme"
STATIC_PATHS = ['images']
FILES_TO_COPY = (
('extra/robots.txt', 'robots.txt'),
('extra/favicon.ico', 'favicon.ico'),
)
DISQUS_SITENAME = 'lextoumbouroucom'
Support static files via new Pelican API#!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
AUTHOR = u'Lex Toumbourou'
SITENAME = u'LexToumbourou.com'
SITEURL = 'http://lextoumbourou.com'
TIMEZONE = 'Australia/Melbourne'
DEFAULT_LANG = u'en'
ARTICLE_URL = 'blog/posts/{slug}/'
ARTICLE_SAVE_AS = 'blog/posts/{slug}/index.html'
# Feed generation is usually not desired when developing
FEED_DOMAIN = SITEURL
FEED_ATOM = 'atom.xml'
# Social widget
SOCIAL = (('You can add links in your config file', '#'),
('Another social link', '#'),)
DEFAULT_PAGINATION = None
THEME = "themes/lextoumbourou-theme"
STATIC_PATHS = ['images', 'extra']
EXTRA_PATH_METADATA = {
'extra/robots.txt': {'path': 'robots.txt'},
'extra/favicon.ico': {'path': 'favicon.ico'},
}
DISQUS_SITENAME = 'lextoumbouroucom'
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
AUTHOR = u'Lex Toumbourou'
SITENAME = u'LexToumbourou.com'
SITEURL = 'http://lextoumbourou.com'
TIMEZONE = 'Australia/Melbourne'
DEFAULT_LANG = u'en'
ARTICLE_URL = 'blog/posts/{slug}/'
ARTICLE_SAVE_AS = 'blog/posts/{slug}/index.html'
# Feed generation is usually not desired when developing
FEED_DOMAIN = SITEURL
FEED_ATOM = 'atom.xml'
# Social widget
SOCIAL = (('You can add links in your config file', '#'),
('Another social link', '#'),)
DEFAULT_PAGINATION = None
THEME = "themes/lextoumbourou-theme"
STATIC_PATHS = ['images']
FILES_TO_COPY = (
('extra/robots.txt', 'robots.txt'),
('extra/favicon.ico', 'favicon.ico'),
)
DISQUS_SITENAME = 'lextoumbouroucom'
<commit_msg>Support static files via new Pelican API<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
AUTHOR = u'Lex Toumbourou'
SITENAME = u'LexToumbourou.com'
SITEURL = 'http://lextoumbourou.com'
TIMEZONE = 'Australia/Melbourne'
DEFAULT_LANG = u'en'
ARTICLE_URL = 'blog/posts/{slug}/'
ARTICLE_SAVE_AS = 'blog/posts/{slug}/index.html'
# Feed generation is usually not desired when developing
FEED_DOMAIN = SITEURL
FEED_ATOM = 'atom.xml'
# Social widget
SOCIAL = (('You can add links in your config file', '#'),
('Another social link', '#'),)
DEFAULT_PAGINATION = None
THEME = "themes/lextoumbourou-theme"
STATIC_PATHS = ['images', 'extra']
EXTRA_PATH_METADATA = {
'extra/robots.txt': {'path': 'robots.txt'},
'extra/favicon.ico': {'path': 'favicon.ico'},
}
DISQUS_SITENAME = 'lextoumbouroucom'
|
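The Pelican change above swaps the removed FILES_TO_COPY setting for STATIC_PATHS plus EXTRA_PATH_METADATA. A small configuration sketch of that newer API, assuming the extra files live under content/extra/; the CNAME entry is only an added illustration, not part of the site above.

# pelicanconf.py fragment (illustrative)
STATIC_PATHS = ['images', 'extra']

# Copy files out of content/extra/ and re-home them at the site root.
EXTRA_PATH_METADATA = {
    'extra/robots.txt': {'path': 'robots.txt'},
    'extra/favicon.ico': {'path': 'favicon.ico'},
    'extra/CNAME': {'path': 'CNAME'},
}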
e1155bcc12b8178475b5ff2e1485d6c9ab6f2ea5
|
debug_toolbar_user_panel/views.py
|
debug_toolbar_user_panel/views.py
|
from django.http import HttpResponseRedirect
from django.conf import settings
from django.contrib import auth
from django.template import RequestContext
from django.shortcuts import render_to_response, get_object_or_404
from django.contrib.auth.models import User
from django.views.decorators.http import require_POST
def content(request):
current = []
if request.user.is_authenticated():
for field in User._meta.fields:
if field.name == 'password':
continue
current.append(
(field.attname, getattr(request.user, field.attname))
)
return render_to_response('debug_toolbar_user_panel/content.html', {
'next': request.GET.get('next'),
'users': User.objects.order_by('-last_login')[:20],
'current': current,
}, context_instance=RequestContext(request))
@require_POST
def login(request, pk):
user = get_object_or_404(User, pk=pk)
# Hacky
user.backend = settings.AUTHENTICATION_BACKENDS[0]
auth.login(request, user)
return HttpResponseRedirect(request.POST.get('next', '/'))
|
from django.http import HttpResponseRedirect
from django.conf import settings
from django.contrib import auth
from django.template import RequestContext
from django.shortcuts import render_to_response, get_object_or_404
from django.contrib.auth.models import User
from django.views.decorators.http import require_POST
def content(request):
current = []
if request.user.is_authenticated():
for field in User._meta.fields:
if field.name == 'password':
continue
current.append(
(field.attname, getattr(request.user, field.attname))
)
return render_to_response('debug_toolbar_user_panel/content.html', {
'next': request.GET.get('next'),
'users': User.objects.order_by('-last_login')[:10],
'current': current,
}, context_instance=RequestContext(request))
@require_POST
def login(request, pk):
user = get_object_or_404(User, pk=pk)
# Hacky
user.backend = settings.AUTHENTICATION_BACKENDS[0]
auth.login(request, user)
return HttpResponseRedirect(request.POST.get('next', '/'))
|
Reduce the number of recent users we display.
|
Reduce the number of recent users we display.
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@playfire.com>
|
Python
|
bsd-3-clause
|
lamby/django-debug-toolbar-user-panel,playfire/django-debug-toolbar-user-panel,lamby/django-debug-toolbar-user-panel
|
from django.http import HttpResponseRedirect
from django.conf import settings
from django.contrib import auth
from django.template import RequestContext
from django.shortcuts import render_to_response, get_object_or_404
from django.contrib.auth.models import User
from django.views.decorators.http import require_POST
def content(request):
current = []
if request.user.is_authenticated():
for field in User._meta.fields:
if field.name == 'password':
continue
current.append(
(field.attname, getattr(request.user, field.attname))
)
return render_to_response('debug_toolbar_user_panel/content.html', {
'next': request.GET.get('next'),
'users': User.objects.order_by('-last_login')[:20],
'current': current,
}, context_instance=RequestContext(request))
@require_POST
def login(request, pk):
user = get_object_or_404(User, pk=pk)
# Hacky
user.backend = settings.AUTHENTICATION_BACKENDS[0]
auth.login(request, user)
return HttpResponseRedirect(request.POST.get('next', '/'))
Reduce the number of recent users we display.
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@playfire.com>
|
from django.http import HttpResponseRedirect
from django.conf import settings
from django.contrib import auth
from django.template import RequestContext
from django.shortcuts import render_to_response, get_object_or_404
from django.contrib.auth.models import User
from django.views.decorators.http import require_POST
def content(request):
current = []
if request.user.is_authenticated():
for field in User._meta.fields:
if field.name == 'password':
continue
current.append(
(field.attname, getattr(request.user, field.attname))
)
return render_to_response('debug_toolbar_user_panel/content.html', {
'next': request.GET.get('next'),
'users': User.objects.order_by('-last_login')[:10],
'current': current,
}, context_instance=RequestContext(request))
@require_POST
def login(request, pk):
user = get_object_or_404(User, pk=pk)
# Hacky
user.backend = settings.AUTHENTICATION_BACKENDS[0]
auth.login(request, user)
return HttpResponseRedirect(request.POST.get('next', '/'))
|
<commit_before>from django.http import HttpResponseRedirect
from django.conf import settings
from django.contrib import auth
from django.template import RequestContext
from django.shortcuts import render_to_response, get_object_or_404
from django.contrib.auth.models import User
from django.views.decorators.http import require_POST
def content(request):
current = []
if request.user.is_authenticated():
for field in User._meta.fields:
if field.name == 'password':
continue
current.append(
(field.attname, getattr(request.user, field.attname))
)
return render_to_response('debug_toolbar_user_panel/content.html', {
'next': request.GET.get('next'),
'users': User.objects.order_by('-last_login')[:20],
'current': current,
}, context_instance=RequestContext(request))
@require_POST
def login(request, pk):
user = get_object_or_404(User, pk=pk)
# Hacky
user.backend = settings.AUTHENTICATION_BACKENDS[0]
auth.login(request, user)
return HttpResponseRedirect(request.POST.get('next', '/'))
<commit_msg>Reduce the number of recent users we display.
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@playfire.com><commit_after>
|
from django.http import HttpResponseRedirect
from django.conf import settings
from django.contrib import auth
from django.template import RequestContext
from django.shortcuts import render_to_response, get_object_or_404
from django.contrib.auth.models import User
from django.views.decorators.http import require_POST
def content(request):
current = []
if request.user.is_authenticated():
for field in User._meta.fields:
if field.name == 'password':
continue
current.append(
(field.attname, getattr(request.user, field.attname))
)
return render_to_response('debug_toolbar_user_panel/content.html', {
'next': request.GET.get('next'),
'users': User.objects.order_by('-last_login')[:10],
'current': current,
}, context_instance=RequestContext(request))
@require_POST
def login(request, pk):
user = get_object_or_404(User, pk=pk)
# Hacky
user.backend = settings.AUTHENTICATION_BACKENDS[0]
auth.login(request, user)
return HttpResponseRedirect(request.POST.get('next', '/'))
|
from django.http import HttpResponseRedirect
from django.conf import settings
from django.contrib import auth
from django.template import RequestContext
from django.shortcuts import render_to_response, get_object_or_404
from django.contrib.auth.models import User
from django.views.decorators.http import require_POST
def content(request):
current = []
if request.user.is_authenticated():
for field in User._meta.fields:
if field.name == 'password':
continue
current.append(
(field.attname, getattr(request.user, field.attname))
)
return render_to_response('debug_toolbar_user_panel/content.html', {
'next': request.GET.get('next'),
'users': User.objects.order_by('-last_login')[:20],
'current': current,
}, context_instance=RequestContext(request))
@require_POST
def login(request, pk):
user = get_object_or_404(User, pk=pk)
# Hacky
user.backend = settings.AUTHENTICATION_BACKENDS[0]
auth.login(request, user)
return HttpResponseRedirect(request.POST.get('next', '/'))
Reduce the number of recent users we display.
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@playfire.com>from django.http import HttpResponseRedirect
from django.conf import settings
from django.contrib import auth
from django.template import RequestContext
from django.shortcuts import render_to_response, get_object_or_404
from django.contrib.auth.models import User
from django.views.decorators.http import require_POST
def content(request):
current = []
if request.user.is_authenticated():
for field in User._meta.fields:
if field.name == 'password':
continue
current.append(
(field.attname, getattr(request.user, field.attname))
)
return render_to_response('debug_toolbar_user_panel/content.html', {
'next': request.GET.get('next'),
'users': User.objects.order_by('-last_login')[:10],
'current': current,
}, context_instance=RequestContext(request))
@require_POST
def login(request, pk):
user = get_object_or_404(User, pk=pk)
# Hacky
user.backend = settings.AUTHENTICATION_BACKENDS[0]
auth.login(request, user)
return HttpResponseRedirect(request.POST.get('next', '/'))
|
<commit_before>from django.http import HttpResponseRedirect
from django.conf import settings
from django.contrib import auth
from django.template import RequestContext
from django.shortcuts import render_to_response, get_object_or_404
from django.contrib.auth.models import User
from django.views.decorators.http import require_POST
def content(request):
current = []
if request.user.is_authenticated():
for field in User._meta.fields:
if field.name == 'password':
continue
current.append(
(field.attname, getattr(request.user, field.attname))
)
return render_to_response('debug_toolbar_user_panel/content.html', {
'next': request.GET.get('next'),
'users': User.objects.order_by('-last_login')[:20],
'current': current,
}, context_instance=RequestContext(request))
@require_POST
def login(request, pk):
user = get_object_or_404(User, pk=pk)
# Hacky
user.backend = settings.AUTHENTICATION_BACKENDS[0]
auth.login(request, user)
return HttpResponseRedirect(request.POST.get('next', '/'))
<commit_msg>Reduce the number of recent users we display.
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@playfire.com><commit_after>from django.http import HttpResponseRedirect
from django.conf import settings
from django.contrib import auth
from django.template import RequestContext
from django.shortcuts import render_to_response, get_object_or_404
from django.contrib.auth.models import User
from django.views.decorators.http import require_POST
def content(request):
current = []
if request.user.is_authenticated():
for field in User._meta.fields:
if field.name == 'password':
continue
current.append(
(field.attname, getattr(request.user, field.attname))
)
return render_to_response('debug_toolbar_user_panel/content.html', {
'next': request.GET.get('next'),
'users': User.objects.order_by('-last_login')[:10],
'current': current,
}, context_instance=RequestContext(request))
@require_POST
def login(request, pk):
user = get_object_or_404(User, pk=pk)
# Hacky
user.backend = settings.AUTHENTICATION_BACKENDS[0]
auth.login(request, user)
return HttpResponseRedirect(request.POST.get('next', '/'))
|
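The panel change above simply shortens a hard-coded queryset slice from 20 to 10 recent users. One way to avoid the magic number, sketched here with a hypothetical setting name that the package does not define, is to read the limit from Django settings:

from django.conf import settings
from django.contrib.auth.models import User


def recent_users():
    # DEBUG_TOOLBAR_USER_PANEL_LIMIT is a hypothetical setting; fall back
    # to the panel's default of 10 when it is not configured.
    limit = getattr(settings, 'DEBUG_TOOLBAR_USER_PANEL_LIMIT', 10)
    return User.objects.order_by('-last_login')[:limit]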
d986f68c2490d276bec7f9511c567e591c70d2d3
|
corehq/ex-submodules/pillow_retry/management/commands/send_pillow_retry_queue_through_pillows.py
|
corehq/ex-submodules/pillow_retry/management/commands/send_pillow_retry_queue_through_pillows.py
|
from __future__ import absolute_import
from django.core.management.base import BaseCommand
from pillow_retry.models import PillowError
from corehq.apps.change_feed.producer import producer
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument('pillow')
def handle(self, pillow, **options):
self.pillow = pillow
for errors in self.get_next_errors():
for error in errors:
if error.change_object.metadata:
producer.send_change(
error.change_object.metadata.data_source_type,
error.change_object.metadata
)
def get_next_errors(self):
num_retrieved = 1
while num_retrieved > 0:
pillow_errors = (
PillowError.objects
.filter(pillow=self.pillow)
.order_by('date_created')
)[:1000]
num_retrieved = len(pillow_errors)
yield pillow_errors
doc_ids = [error.doc_id for error in pillow_errors]
PillowError.objects(doc_id__in=doc_ids).delete()
|
from __future__ import absolute_import
from datetime import datetime
from django.core.management.base import BaseCommand
from pillow_retry.models import PillowError
from corehq.apps.change_feed.producer import producer
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument('pillow')
def handle(self, pillow, **options):
self.pillow = pillow
for errors in self.get_next_errors():
for error in errors:
if error.change_object.metadata:
producer.send_change(
error.change_object.metadata.data_source_type,
error.change_object.metadata
)
def get_next_errors(self):
num_retrieved = 1
count = 0
while num_retrieved > 0:
pillow_errors = (
PillowError.objects
.filter(pillow=self.pillow)
.order_by('date_created')
)[:1000]
num_retrieved = len(pillow_errors)
yield pillow_errors
print("deleting")
doc_ids = [error.doc_id for error in pillow_errors]
PillowError.objects.filter(doc_id__in=doc_ids).delete()
count += num_retrieved
print(count)
print(datetime.utcnow())
|
Add some print statements for debugging.
|
Add some print statements for debugging.
|
Python
|
bsd-3-clause
|
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
|
from __future__ import absolute_import
from django.core.management.base import BaseCommand
from pillow_retry.models import PillowError
from corehq.apps.change_feed.producer import producer
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument('pillow')
def handle(self, pillow, **options):
self.pillow = pillow
for errors in self.get_next_errors():
for error in errors:
if error.change_object.metadata:
producer.send_change(
error.change_object.metadata.data_source_type,
error.change_object.metadata
)
def get_next_errors(self):
num_retrieved = 1
while num_retrieved > 0:
pillow_errors = (
PillowError.objects
.filter(pillow=self.pillow)
.order_by('date_created')
)[:1000]
num_retrieved = len(pillow_errors)
yield pillow_errors
doc_ids = [error.doc_id for error in pillow_errors]
PillowError.objects(doc_id__in=doc_ids).delete()
Add some print statements for debugging.
|
from __future__ import absolute_import
from datetime import datetime
from django.core.management.base import BaseCommand
from pillow_retry.models import PillowError
from corehq.apps.change_feed.producer import producer
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument('pillow')
def handle(self, pillow, **options):
self.pillow = pillow
for errors in self.get_next_errors():
for error in errors:
if error.change_object.metadata:
producer.send_change(
error.change_object.metadata.data_source_type,
error.change_object.metadata
)
def get_next_errors(self):
num_retrieved = 1
count = 0
while num_retrieved > 0:
pillow_errors = (
PillowError.objects
.filter(pillow=self.pillow)
.order_by('date_created')
)[:1000]
num_retrieved = len(pillow_errors)
yield pillow_errors
print("deleting")
doc_ids = [error.doc_id for error in pillow_errors]
PillowError.objects.filter(doc_id__in=doc_ids).delete()
count += num_retrieved
print(count)
print(datetime.utcnow())
|
<commit_before>from __future__ import absolute_import
from django.core.management.base import BaseCommand
from pillow_retry.models import PillowError
from corehq.apps.change_feed.producer import producer
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument('pillow')
def handle(self, pillow, **options):
self.pillow = pillow
for errors in self.get_next_errors():
for error in errors:
if error.change_object.metadata:
producer.send_change(
error.change_object.metadata.data_source_type,
error.change_object.metadata
)
def get_next_errors(self):
num_retrieved = 1
while num_retrieved > 0:
pillow_errors = (
PillowError.objects
.filter(pillow=self.pillow)
.order_by('date_created')
)[:1000]
num_retrieved = len(pillow_errors)
yield pillow_errors
doc_ids = [error.doc_id for error in pillow_errors]
PillowError.objects(doc_id__in=doc_ids).delete()
<commit_msg>Add some print statements for debugging.<commit_after>
|
from __future__ import absolute_import
from datetime import datetime
from django.core.management.base import BaseCommand
from pillow_retry.models import PillowError
from corehq.apps.change_feed.producer import producer
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument('pillow')
def handle(self, pillow, **options):
self.pillow = pillow
for errors in self.get_next_errors():
for error in errors:
if error.change_object.metadata:
producer.send_change(
error.change_object.metadata.data_source_type,
error.change_object.metadata
)
def get_next_errors(self):
num_retrieved = 1
count = 0
while num_retrieved > 0:
pillow_errors = (
PillowError.objects
.filter(pillow=self.pillow)
.order_by('date_created')
)[:1000]
num_retrieved = len(pillow_errors)
yield pillow_errors
print("deleting")
doc_ids = [error.doc_id for error in pillow_errors]
PillowError.objects.filter(doc_id__in=doc_ids).delete()
count += num_retrieved
print(count)
print(datetime.utcnow())
|
from __future__ import absolute_import
from django.core.management.base import BaseCommand
from pillow_retry.models import PillowError
from corehq.apps.change_feed.producer import producer
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument('pillow')
def handle(self, pillow, **options):
self.pillow = pillow
for errors in self.get_next_errors():
for error in errors:
if error.change_object.metadata:
producer.send_change(
error.change_object.metadata.data_source_type,
error.change_object.metadata
)
def get_next_errors(self):
num_retrieved = 1
while num_retrieved > 0:
pillow_errors = (
PillowError.objects
.filter(pillow=self.pillow)
.order_by('date_created')
)[:1000]
num_retrieved = len(pillow_errors)
yield pillow_errors
doc_ids = [error.doc_id for error in pillow_errors]
PillowError.objects(doc_id__in=doc_ids).delete()
Add some print statements for debugging.from __future__ import absolute_import
from datetime import datetime
from django.core.management.base import BaseCommand
from pillow_retry.models import PillowError
from corehq.apps.change_feed.producer import producer
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument('pillow')
def handle(self, pillow, **options):
self.pillow = pillow
for errors in self.get_next_errors():
for error in errors:
if error.change_object.metadata:
producer.send_change(
error.change_object.metadata.data_source_type,
error.change_object.metadata
)
def get_next_errors(self):
num_retrieved = 1
count = 0
while num_retrieved > 0:
pillow_errors = (
PillowError.objects
.filter(pillow=self.pillow)
.order_by('date_created')
)[:1000]
num_retrieved = len(pillow_errors)
yield pillow_errors
print("deleting")
doc_ids = [error.doc_id for error in pillow_errors]
PillowError.objects.filter(doc_id__in=doc_ids).delete()
count += num_retrieved
print(count)
print(datetime.utcnow())
|
<commit_before>from __future__ import absolute_import
from django.core.management.base import BaseCommand
from pillow_retry.models import PillowError
from corehq.apps.change_feed.producer import producer
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument('pillow')
def handle(self, pillow, **options):
self.pillow = pillow
for errors in self.get_next_errors():
for error in errors:
if error.change_object.metadata:
producer.send_change(
error.change_object.metadata.data_source_type,
error.change_object.metadata
)
def get_next_errors(self):
num_retrieved = 1
while num_retrieved > 0:
pillow_errors = (
PillowError.objects
.filter(pillow=self.pillow)
.order_by('date_created')
)[:1000]
num_retrieved = len(pillow_errors)
yield pillow_errors
doc_ids = [error.doc_id for error in pillow_errors]
PillowError.objects(doc_id__in=doc_ids).delete()
<commit_msg>Add some print statements for debugging.<commit_after>from __future__ import absolute_import
from datetime import datetime
from django.core.management.base import BaseCommand
from pillow_retry.models import PillowError
from corehq.apps.change_feed.producer import producer
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument('pillow')
def handle(self, pillow, **options):
self.pillow = pillow
for errors in self.get_next_errors():
for error in errors:
if error.change_object.metadata:
producer.send_change(
error.change_object.metadata.data_source_type,
error.change_object.metadata
)
def get_next_errors(self):
num_retrieved = 1
count = 0
while num_retrieved > 0:
pillow_errors = (
PillowError.objects
.filter(pillow=self.pillow)
.order_by('date_created')
)[:1000]
num_retrieved = len(pillow_errors)
yield pillow_errors
print("deleting")
doc_ids = [error.doc_id for error in pillow_errors]
PillowError.objects.filter(doc_id__in=doc_ids).delete()
count += num_retrieved
print(count)
print(datetime.utcnow())
|
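The management command above drains PillowError rows in slices of 1000, re-publishes each change, deletes the slice, and prints a running count. A generic sketch of that drain loop, assuming the supplied handler deletes (or otherwise excludes) every batch it receives; without that, the same head-of-queue rows would be returned forever.

from datetime import datetime


def drain_in_batches(queryset, handle_batch, batch_size=1000):
    # Repeatedly take the first `batch_size` rows; the loop only makes
    # progress because handle_batch is expected to remove what it is given.
    processed = 0
    while True:
        batch = list(queryset[:batch_size])
        if not batch:
            break
        handle_batch(batch)
        processed += len(batch)
        print(processed, datetime.utcnow())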
3cd90339c2a66b92c024ad9365cbaceace29531d
|
devproject/devproject/urls.py
|
devproject/devproject/urls.py
|
from django.conf.urls import include, static, url
from django.contrib import admin
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^flickr/', include('ditto.flickr.urls', namespace='flickr')),
url(r'^lastfm/', include('ditto.lastfm.urls', namespace='lastfm')),
url(r'^pinboard/', include('ditto.pinboard.urls', namespace='pinboard')),
url(r'^twitter/', include('ditto.twitter.urls', namespace='twitter')),
url(r'', include('ditto.core.urls', namespace='ditto')),
]
from django.conf import settings
if settings.DEBUG:
import debug_toolbar
urlpatterns += [
url(r'^__debug__/', include(debug_toolbar.urls)),
]
urlpatterns += \
static.static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
urlpatterns += \
static.static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
|
from django.conf.urls import include, static, url
from django.contrib import admin
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^flickr/', include('ditto.flickr.urls', namespace='flickr')),
url(r'^lastfm/', include('ditto.lastfm.urls', namespace='lastfm')),
url(r'^pinboard/', include('ditto.pinboard.urls', namespace='pinboard')),
url(r'^twitter/', include('ditto.twitter.urls', namespace='twitter')),
url(r'', include('ditto.core.urls', namespace='ditto')),
]
from django.conf import settings
if settings.DEBUG:
import debug_toolbar
urlpatterns += [
url(r'^__debug__/', include(debug_toolbar.urls)),
]
urlpatterns += \
static.static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
urlpatterns += \
static.static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
|
Fix admin URL import in devproject for Django 2.0
|
Fix admin URL import in devproject for Django 2.0
For #191
|
Python
|
mit
|
philgyford/django-ditto,philgyford/django-ditto,philgyford/django-ditto
|
from django.conf.urls import include, static, url
from django.contrib import admin
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^flickr/', include('ditto.flickr.urls', namespace='flickr')),
url(r'^lastfm/', include('ditto.lastfm.urls', namespace='lastfm')),
url(r'^pinboard/', include('ditto.pinboard.urls', namespace='pinboard')),
url(r'^twitter/', include('ditto.twitter.urls', namespace='twitter')),
url(r'', include('ditto.core.urls', namespace='ditto')),
]
from django.conf import settings
if settings.DEBUG:
import debug_toolbar
urlpatterns += [
url(r'^__debug__/', include(debug_toolbar.urls)),
]
urlpatterns += \
static.static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
urlpatterns += \
static.static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
Fix admin URL import in devproject for Django 2.0
For #191
|
from django.conf.urls import include, static, url
from django.contrib import admin
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^flickr/', include('ditto.flickr.urls', namespace='flickr')),
url(r'^lastfm/', include('ditto.lastfm.urls', namespace='lastfm')),
url(r'^pinboard/', include('ditto.pinboard.urls', namespace='pinboard')),
url(r'^twitter/', include('ditto.twitter.urls', namespace='twitter')),
url(r'', include('ditto.core.urls', namespace='ditto')),
]
from django.conf import settings
if settings.DEBUG:
import debug_toolbar
urlpatterns += [
url(r'^__debug__/', include(debug_toolbar.urls)),
]
urlpatterns += \
static.static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
urlpatterns += \
static.static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
|
<commit_before>from django.conf.urls import include, static, url
from django.contrib import admin
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^flickr/', include('ditto.flickr.urls', namespace='flickr')),
url(r'^lastfm/', include('ditto.lastfm.urls', namespace='lastfm')),
url(r'^pinboard/', include('ditto.pinboard.urls', namespace='pinboard')),
url(r'^twitter/', include('ditto.twitter.urls', namespace='twitter')),
url(r'', include('ditto.core.urls', namespace='ditto')),
]
from django.conf import settings
if settings.DEBUG:
import debug_toolbar
urlpatterns += [
url(r'^__debug__/', include(debug_toolbar.urls)),
]
urlpatterns += \
static.static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
urlpatterns += \
static.static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
<commit_msg>Fix admin URL import in devproject for Django 2.0
For #191<commit_after>
|
from django.conf.urls import include, static, url
from django.contrib import admin
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^flickr/', include('ditto.flickr.urls', namespace='flickr')),
url(r'^lastfm/', include('ditto.lastfm.urls', namespace='lastfm')),
url(r'^pinboard/', include('ditto.pinboard.urls', namespace='pinboard')),
url(r'^twitter/', include('ditto.twitter.urls', namespace='twitter')),
url(r'', include('ditto.core.urls', namespace='ditto')),
]
from django.conf import settings
if settings.DEBUG:
import debug_toolbar
urlpatterns += [
url(r'^__debug__/', include(debug_toolbar.urls)),
]
urlpatterns += \
static.static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
urlpatterns += \
static.static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
|
from django.conf.urls import include, static, url
from django.contrib import admin
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^flickr/', include('ditto.flickr.urls', namespace='flickr')),
url(r'^lastfm/', include('ditto.lastfm.urls', namespace='lastfm')),
url(r'^pinboard/', include('ditto.pinboard.urls', namespace='pinboard')),
url(r'^twitter/', include('ditto.twitter.urls', namespace='twitter')),
url(r'', include('ditto.core.urls', namespace='ditto')),
]
from django.conf import settings
if settings.DEBUG:
import debug_toolbar
urlpatterns += [
url(r'^__debug__/', include(debug_toolbar.urls)),
]
urlpatterns += \
static.static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
urlpatterns += \
static.static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
Fix admin URL import in devproject for Django 2.0
For #191from django.conf.urls import include, static, url
from django.contrib import admin
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^flickr/', include('ditto.flickr.urls', namespace='flickr')),
url(r'^lastfm/', include('ditto.lastfm.urls', namespace='lastfm')),
url(r'^pinboard/', include('ditto.pinboard.urls', namespace='pinboard')),
url(r'^twitter/', include('ditto.twitter.urls', namespace='twitter')),
url(r'', include('ditto.core.urls', namespace='ditto')),
]
from django.conf import settings
if settings.DEBUG:
import debug_toolbar
urlpatterns += [
url(r'^__debug__/', include(debug_toolbar.urls)),
]
urlpatterns += \
static.static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
urlpatterns += \
static.static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
|
<commit_before>from django.conf.urls import include, static, url
from django.contrib import admin
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^flickr/', include('ditto.flickr.urls', namespace='flickr')),
url(r'^lastfm/', include('ditto.lastfm.urls', namespace='lastfm')),
url(r'^pinboard/', include('ditto.pinboard.urls', namespace='pinboard')),
url(r'^twitter/', include('ditto.twitter.urls', namespace='twitter')),
url(r'', include('ditto.core.urls', namespace='ditto')),
]
from django.conf import settings
if settings.DEBUG:
import debug_toolbar
urlpatterns += [
url(r'^__debug__/', include(debug_toolbar.urls)),
]
urlpatterns += \
static.static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
urlpatterns += \
static.static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
<commit_msg>Fix admin URL import in devproject for Django 2.0
For #191<commit_after>from django.conf.urls import include, static, url
from django.contrib import admin
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^flickr/', include('ditto.flickr.urls', namespace='flickr')),
url(r'^lastfm/', include('ditto.lastfm.urls', namespace='lastfm')),
url(r'^pinboard/', include('ditto.pinboard.urls', namespace='pinboard')),
url(r'^twitter/', include('ditto.twitter.urls', namespace='twitter')),
url(r'', include('ditto.core.urls', namespace='ditto')),
]
from django.conf import settings
if settings.DEBUG:
import debug_toolbar
urlpatterns += [
url(r'^__debug__/', include(debug_toolbar.urls)),
]
urlpatterns += \
static.static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
urlpatterns += \
static.static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
|
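The fix above drops the include() wrapper around admin.site.urls, which Django 2.0 rejects with ImproperlyConfigured. The same idea written with Django 2.0's path() syntax, using a placeholder app include rather than the ditto packages:

from django.contrib import admin
from django.urls import include, path

urlpatterns = [
    # admin.site.urls is passed directly; wrapping it in include() stopped
    # working in Django 2.0.
    path('admin/', admin.site.urls),
    # A hypothetical app include shown for comparison.
    path('notes/', include('notes.urls')),
]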
c1a5c5394ff9838e01b32ff448e309893c5bdf7f
|
cmsplugin_iframe/migrations/0001_initial.py
|
cmsplugin_iframe/migrations/0001_initial.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('cms', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='IframePlugin',
fields=[
('cmsplugin_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='cms.CMSPlugin')),
('src', models.URLField()),
],
options={
'abstract': False,
},
bases=('cms.cmsplugin',),
),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('cms', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='IframePlugin',
fields=[
('cmsplugin_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='cms.CMSPlugin', on_delete=models.CASCADE)),
('src', models.URLField()),
],
options={
'abstract': False,
},
bases=('cms.cmsplugin',),
),
]
|
Add in on_delete clause to work with more modern versions of Django
|
Add in on_delete clause to work with more modern versions of Django
|
Python
|
mit
|
satyrius/cmsplugin-iframe,satyrius/cmsplugin-iframe
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('cms', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='IframePlugin',
fields=[
('cmsplugin_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='cms.CMSPlugin')),
('src', models.URLField()),
],
options={
'abstract': False,
},
bases=('cms.cmsplugin',),
),
]
Add in on_delete clause to work with more modern versions of Django
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('cms', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='IframePlugin',
fields=[
('cmsplugin_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='cms.CMSPlugin', on_delete=models.CASCADE)),
('src', models.URLField()),
],
options={
'abstract': False,
},
bases=('cms.cmsplugin',),
),
]
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('cms', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='IframePlugin',
fields=[
('cmsplugin_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='cms.CMSPlugin')),
('src', models.URLField()),
],
options={
'abstract': False,
},
bases=('cms.cmsplugin',),
),
]
<commit_msg>Add in on_delete clause to work with more modern versions of Django<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('cms', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='IframePlugin',
fields=[
('cmsplugin_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='cms.CMSPlugin', on_delete=models.CASCADE)),
('src', models.URLField()),
],
options={
'abstract': False,
},
bases=('cms.cmsplugin',),
),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('cms', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='IframePlugin',
fields=[
('cmsplugin_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='cms.CMSPlugin')),
('src', models.URLField()),
],
options={
'abstract': False,
},
bases=('cms.cmsplugin',),
),
]
Add in on_delete clause to work with more modern versions of Django# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('cms', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='IframePlugin',
fields=[
('cmsplugin_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='cms.CMSPlugin', on_delete=models.CASCADE)),
('src', models.URLField()),
],
options={
'abstract': False,
},
bases=('cms.cmsplugin',),
),
]
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('cms', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='IframePlugin',
fields=[
('cmsplugin_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='cms.CMSPlugin')),
('src', models.URLField()),
],
options={
'abstract': False,
},
bases=('cms.cmsplugin',),
),
]
<commit_msg>Add in on_delete clause to work with more modern versions of Django<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('cms', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='IframePlugin',
fields=[
('cmsplugin_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='cms.CMSPlugin', on_delete=models.CASCADE)),
('src', models.URLField()),
],
options={
'abstract': False,
},
bases=('cms.cmsplugin',),
),
]
|
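The migration tweak above adds the on_delete argument that Django 2.0 makes mandatory for every ForeignKey and OneToOneField. A hypothetical model showing the two most common choices:

from django.db import models


class Comment(models.Model):
    # Delete comments together with their parent post.
    post = models.ForeignKey('blog.Post', on_delete=models.CASCADE)
    # Keep the comment but clear the author when the user account goes away.
    author = models.ForeignKey(
        'auth.User', null=True, blank=True, on_delete=models.SET_NULL,
    )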
6325b0eebbe5c14284df4fa5398ffc678c3e0eca
|
posts/tests.py
|
posts/tests.py
|
from test_plus.test import TestCase
from posts.factories import PostFactory
class PostsTest(TestCase):
def test_get_list(self):
post = PostFactory()
post_list_url = self.reverse('post:list')
self.get_check_200(post_list_url)
self.assertResponseContains(post.title, html=False)
self.assertResponseContains(post.author.name, html=False)
write_url = self.reverse('post:create')
self.assertResponseContains(write_url, html=False)
def test_get_writing_page_with_login(self):
user = self.make_user('jelly jelly')
with self.login(username=user.username):
write_post_url = self.reverse('post:create')
self.get_check_200(write_post_url)
def test_get_writing_page_with_anonymous(self):
self.assertLoginRequired('post:create')
def test_post_writing(self):
user = self.make_user('jelly jelly')
data = {"title", "This is some "}
with self.login(username=user.username):
write_post_url = self.reverse('post:create')
self.get_check_200(write_post_url)
self.post('post:write', data=data)
|
from test_plus.test import TestCase
from posts.factories import PostFactory
class PostsTest(TestCase):
def test_get_list(self):
# given
post = PostFactory()
post_list_url = self.reverse('post:list')
# when
self.get_check_200(post_list_url)
# then
self.assertResponseContains(post.title, html=False)
self.assertResponseContains(post.author.name, html=False)
write_url = self.reverse('post:create')
self.assertResponseContains(write_url, html=False)
def test_get_writing_page_with_login(self):
# given
user = self.make_user('jelly jelly')
# when
with self.login(username=user.username):
# then
write_post_url = self.reverse('post:create')
self.get_check_200(write_post_url)
def test_get_writing_page_with_anonymous(self):
self.assertLoginRequired('post:create')
def test_post_writing(self):
# given
user = self.make_user('jelly jelly')
data = {"title", "This is some "}
# when
with self.login(username=user.username):
write_post_url = self.reverse('post:create')
# then
self.get_check_200(write_post_url)
self.post('post:write', data=data)
|
Add given, when, then comment
|
Add given, when, then comment
|
Python
|
mit
|
9XD/9XD,9XD/9XD,9XD/9XD,9XD/9XD
|
from test_plus.test import TestCase
from posts.factories import PostFactory
class PostsTest(TestCase):
def test_get_list(self):
post = PostFactory()
post_list_url = self.reverse('post:list')
self.get_check_200(post_list_url)
self.assertResponseContains(post.title, html=False)
self.assertResponseContains(post.author.name, html=False)
write_url = self.reverse('post:create')
self.assertResponseContains(write_url, html=False)
def test_get_writing_page_with_login(self):
user = self.make_user('jelly jelly')
with self.login(username=user.username):
write_post_url = self.reverse('post:create')
self.get_check_200(write_post_url)
def test_get_writing_page_with_anonymous(self):
self.assertLoginRequired('post:create')
def test_post_writing(self):
user = self.make_user('jelly jelly')
data = {"title", "This is some "}
with self.login(username=user.username):
write_post_url = self.reverse('post:create')
self.get_check_200(write_post_url)
self.post('post:write', data=data)
Add given, when, then comment
|
from test_plus.test import TestCase
from posts.factories import PostFactory
class PostsTest(TestCase):
def test_get_list(self):
# given
post = PostFactory()
post_list_url = self.reverse('post:list')
# when
self.get_check_200(post_list_url)
# then
self.assertResponseContains(post.title, html=False)
self.assertResponseContains(post.author.name, html=False)
write_url = self.reverse('post:create')
self.assertResponseContains(write_url, html=False)
def test_get_writing_page_with_login(self):
# given
user = self.make_user('jelly jelly')
# when
with self.login(username=user.username):
# then
write_post_url = self.reverse('post:create')
self.get_check_200(write_post_url)
def test_get_writing_page_with_anonymous(self):
self.assertLoginRequired('post:create')
def test_post_writing(self):
# given
user = self.make_user('jelly jelly')
data = {"title", "This is some "}
# when
with self.login(username=user.username):
write_post_url = self.reverse('post:create')
# then
self.get_check_200(write_post_url)
self.post('post:write', data=data)
|
<commit_before>from test_plus.test import TestCase
from posts.factories import PostFactory
class PostsTest(TestCase):
def test_get_list(self):
post = PostFactory()
post_list_url = self.reverse('post:list')
self.get_check_200(post_list_url)
self.assertResponseContains(post.title, html=False)
self.assertResponseContains(post.author.name, html=False)
write_url = self.reverse('post:create')
self.assertResponseContains(write_url, html=False)
def test_get_writing_page_with_login(self):
user = self.make_user('jelly jelly')
with self.login(username=user.username):
write_post_url = self.reverse('post:create')
self.get_check_200(write_post_url)
def test_get_writing_page_with_anonymous(self):
self.assertLoginRequired('post:create')
def test_post_writing(self):
user = self.make_user('jelly jelly')
data = {"title", "This is some "}
with self.login(username=user.username):
write_post_url = self.reverse('post:create')
self.get_check_200(write_post_url)
self.post('post:write', data=data)
<commit_msg>Add given, when, then comment<commit_after>
|
from test_plus.test import TestCase
from posts.factories import PostFactory
class PostsTest(TestCase):
def test_get_list(self):
# given
post = PostFactory()
post_list_url = self.reverse('post:list')
# when
self.get_check_200(post_list_url)
# then
self.assertResponseContains(post.title, html=False)
self.assertResponseContains(post.author.name, html=False)
write_url = self.reverse('post:create')
self.assertResponseContains(write_url, html=False)
def test_get_writing_page_with_login(self):
# given
user = self.make_user('jelly jelly')
# when
with self.login(username=user.username):
# then
write_post_url = self.reverse('post:create')
self.get_check_200(write_post_url)
def test_get_writing_page_with_anonymous(self):
self.assertLoginRequired('post:create')
def test_post_writing(self):
# given
user = self.make_user('jelly jelly')
data = {"title", "This is some "}
# when
with self.login(username=user.username):
write_post_url = self.reverse('post:create')
# then
self.get_check_200(write_post_url)
self.post('post:write', data=data)
|
from test_plus.test import TestCase
from posts.factories import PostFactory
class PostsTest(TestCase):
def test_get_list(self):
post = PostFactory()
post_list_url = self.reverse('post:list')
self.get_check_200(post_list_url)
self.assertResponseContains(post.title, html=False)
self.assertResponseContains(post.author.name, html=False)
write_url = self.reverse('post:create')
self.assertResponseContains(write_url, html=False)
def test_get_writing_page_with_login(self):
user = self.make_user('jelly jelly')
with self.login(username=user.username):
write_post_url = self.reverse('post:create')
self.get_check_200(write_post_url)
def test_get_writing_page_with_anonymous(self):
self.assertLoginRequired('post:create')
def test_post_writing(self):
user = self.make_user('jelly jelly')
data = {"title", "This is some "}
with self.login(username=user.username):
write_post_url = self.reverse('post:create')
self.get_check_200(write_post_url)
self.post('post:write', data=data)
Add given, when, then commentfrom test_plus.test import TestCase
from posts.factories import PostFactory
class PostsTest(TestCase):
def test_get_list(self):
# given
post = PostFactory()
post_list_url = self.reverse('post:list')
# when
self.get_check_200(post_list_url)
# then
self.assertResponseContains(post.title, html=False)
self.assertResponseContains(post.author.name, html=False)
write_url = self.reverse('post:create')
self.assertResponseContains(write_url, html=False)
def test_get_writing_page_with_login(self):
# given
user = self.make_user('jelly jelly')
# when
with self.login(username=user.username):
# then
write_post_url = self.reverse('post:create')
self.get_check_200(write_post_url)
def test_get_writing_page_with_anonymous(self):
self.assertLoginRequired('post:create')
def test_post_writing(self):
# given
user = self.make_user('jelly jelly')
data = {"title", "This is some "}
# when
with self.login(username=user.username):
write_post_url = self.reverse('post:create')
# then
self.get_check_200(write_post_url)
self.post('post:write', data=data)
|
<commit_before>from test_plus.test import TestCase
from posts.factories import PostFactory
class PostsTest(TestCase):
def test_get_list(self):
post = PostFactory()
post_list_url = self.reverse('post:list')
self.get_check_200(post_list_url)
self.assertResponseContains(post.title, html=False)
self.assertResponseContains(post.author.name, html=False)
write_url = self.reverse('post:create')
self.assertResponseContains(write_url, html=False)
def test_get_writing_page_with_login(self):
user = self.make_user('jelly jelly')
with self.login(username=user.username):
write_post_url = self.reverse('post:create')
self.get_check_200(write_post_url)
def test_get_writing_page_with_anonymous(self):
self.assertLoginRequired('post:create')
def test_post_writing(self):
user = self.make_user('jelly jelly')
data = {"title", "This is some "}
with self.login(username=user.username):
write_post_url = self.reverse('post:create')
self.get_check_200(write_post_url)
self.post('post:write', data=data)
<commit_msg>Add given, when, then comment<commit_after>from test_plus.test import TestCase
from posts.factories import PostFactory
class PostsTest(TestCase):
def test_get_list(self):
# given
post = PostFactory()
post_list_url = self.reverse('post:list')
# when
self.get_check_200(post_list_url)
# then
self.assertResponseContains(post.title, html=False)
self.assertResponseContains(post.author.name, html=False)
write_url = self.reverse('post:create')
self.assertResponseContains(write_url, html=False)
def test_get_writing_page_with_login(self):
# given
user = self.make_user('jelly jelly')
# when
with self.login(username=user.username):
# then
write_post_url = self.reverse('post:create')
self.get_check_200(write_post_url)
def test_get_writing_page_with_anonymous(self):
self.assertLoginRequired('post:create')
def test_post_writing(self):
# given
user = self.make_user('jelly jelly')
data = {"title", "This is some "}
# when
with self.login(username=user.username):
write_post_url = self.reverse('post:create')
# then
self.get_check_200(write_post_url)
self.post('post:write', data=data)
|
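The test changes above only add given/when/then comments around django-test-plus helpers. A self-contained sketch of the same layout, using a hypothetical 'profile:detail' URL name that the project above does not define:

from test_plus.test import TestCase


class ProfileTests(TestCase):

    def test_detail_requires_login(self):
        # given
        user = self.make_user('sample-user')
        # when / then
        self.assertLoginRequired('profile:detail', pk=user.pk)

    def test_detail_with_login(self):
        # given
        user = self.make_user('sample-user')
        # when
        with self.login(username=user.username):
            # then
            self.get_check_200('profile:detail', pk=user.pk)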
8958f8abb8798ff61af43199f0683c3e1c0ffcdd
|
checklisthq/main/models.py
|
checklisthq/main/models.py
|
from django.db import models
from django.contrib.auth.models import User
from taggit.managers import TaggableManager
class Checklist(models.Model):
title = models.CharField(max_length=512)
owner = models.ForeignKey(User)
content = models.TextField()
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True,auto_now_add=True)
deleted = models.BooleanField(default=False)
tags = TaggableManager()
# To add: type
def __unicode__(self):
return self.title
|
from django.db import models
from django.contrib.auth.models import User
from taggit.managers import TaggableManager
class Checklist(models.Model):
title = models.CharField(max_length=512)
owner = models.ForeignKey(User)
content = models.TextField()
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True,auto_now_add=True)
deleted = models.BooleanField(default=False)
tags = TaggableManager(blank=True)
# To add: type
def __unicode__(self):
return self.title
|
Make tags an optional field of Checklist
|
Make tags an optional field of Checklist
|
Python
|
agpl-3.0
|
checklisthq/checklisthq.com,checklisthq/checklisthq.com
|
from django.db import models
from django.contrib.auth.models import User
from taggit.managers import TaggableManager
class Checklist(models.Model):
title = models.CharField(max_length=512)
owner = models.ForeignKey(User)
content = models.TextField()
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True,auto_now_add=True)
deleted = models.BooleanField(default=False)
tags = TaggableManager()
# To add: type
def __unicode__(self):
return self.title
Make tags an optional field of Checklist
|
from django.db import models
from django.contrib.auth.models import User
from taggit.managers import TaggableManager
class Checklist(models.Model):
title = models.CharField(max_length=512)
owner = models.ForeignKey(User)
content = models.TextField()
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True,auto_now_add=True)
deleted = models.BooleanField(default=False)
tags = TaggableManager(blank=True)
# To add: type
def __unicode__(self):
return self.title
|
<commit_before>from django.db import models
from django.contrib.auth.models import User
from taggit.managers import TaggableManager
class Checklist(models.Model):
title = models.CharField(max_length=512)
owner = models.ForeignKey(User)
content = models.TextField()
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True,auto_now_add=True)
deleted = models.BooleanField(default=False)
tags = TaggableManager()
# To add: type
def __unicode__(self):
return self.title
<commit_msg>Make tags an optional field of Checklist<commit_after>
|
from django.db import models
from django.contrib.auth.models import User
from taggit.managers import TaggableManager
class Checklist(models.Model):
title = models.CharField(max_length=512)
owner = models.ForeignKey(User)
content = models.TextField()
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True,auto_now_add=True)
deleted = models.BooleanField(default=False)
tags = TaggableManager(blank=True)
# To add: type
def __unicode__(self):
return self.title
|
from django.db import models
from django.contrib.auth.models import User
from taggit.managers import TaggableManager
class Checklist(models.Model):
title = models.CharField(max_length=512)
owner = models.ForeignKey(User)
content = models.TextField()
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True,auto_now_add=True)
deleted = models.BooleanField(default=False)
tags = TaggableManager()
# To add: type
def __unicode__(self):
return self.title
Make tags an optional field of Checklist
from django.db import models
from django.contrib.auth.models import User
from taggit.managers import TaggableManager
class Checklist(models.Model):
title = models.CharField(max_length=512)
owner = models.ForeignKey(User)
content = models.TextField()
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True,auto_now_add=True)
deleted = models.BooleanField(default=False)
tags = TaggableManager(blank=True)
# To add: type
def __unicode__(self):
return self.title
|
<commit_before>from django.db import models
from django.contrib.auth.models import User
from taggit.managers import TaggableManager
class Checklist(models.Model):
title = models.CharField(max_length=512)
owner = models.ForeignKey(User)
content = models.TextField()
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True,auto_now_add=True)
deleted = models.BooleanField(default=False)
tags = TaggableManager()
# To add: type
def __unicode__(self):
return self.title
<commit_msg>Make tags an optional field of Checklist<commit_after>from django.db import models
from django.contrib.auth.models import User
from taggit.managers import TaggableManager
class Checklist(models.Model):
title = models.CharField(max_length=512)
owner = models.ForeignKey(User)
content = models.TextField()
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True,auto_now_add=True)
deleted = models.BooleanField(default=False)
tags = TaggableManager(blank=True)
# To add: type
def __unicode__(self):
return self.title
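Worth noting: TaggableManager(blank=True) only relaxes validation at the form layer; it does not change the underlying through-table. A small sketch of what the change enables, assuming the model above is importable as main.models.Checklist (the form class itself is hypothetical):

from django import forms
from main.models import Checklist


class ChecklistForm(forms.ModelForm):
    class Meta:
        model = Checklist
        fields = ['title', 'content', 'tags']

# With blank=True on the tags manager, a submission without tags can now validate:
#   form = ChecklistForm(data={'title': 'Release checklist', 'content': '1. tag the build'})
#   form.is_valid()  # True once the other required fields are satisfied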
|
964da6a3df622b5217596ac190ca46bc18942616
|
api/urls.py
|
api/urls.py
|
from django.conf.urls import url
from api import views
urlpatterns = [
url(r'^machines/(?P<serial>.+)/$', views.machine_detail),
url(r'^inventory/(?P<serial>.+)/$', views.machine_inventory),
url(r'^machines/$', views.machine_list),
url(r'^facts/(?P<serial>.+)/$', views.facts),
url(r'^conditions/(?P<serial>.+)/$', views.conditions),
url(r'^business_units/(?P<pk>.+)/$', views.business_unit),
url(r'^business_units/$', views.business_unit_list),
url(r'^machine_groups/(?P<pk>.+)/$', views.machine_group),
url(r'^machine_groups/$', views.machine_group_list),
]
|
from django.conf.urls import url
from api import views
urlpatterns = [
url(r'^machines/(?P<serial>.+)/inventory/$', views.machine_inventory),
url(r'^machines/(?P<serial>.+)/$', views.machine_detail),
url(r'^machines/$', views.machine_list),
url(r'^facts/(?P<serial>.+)/$', views.facts),
url(r'^conditions/(?P<serial>.+)/$', views.conditions),
url(r'^business_units/(?P<pk>.+)/$', views.business_unit),
url(r'^business_units/$', views.business_unit_list),
url(r'^machine_groups/(?P<pk>.+)/$', views.machine_group),
url(r'^machine_groups/$', views.machine_group_list),
]
|
Revert "Think this url is causing issues"
|
Revert "Think this url is causing issues"
This reverts commit ca4df2f1f23b5ff85b7c1685c6bbcd015f9789cf.
|
Python
|
apache-2.0
|
erikng/sal,chasetb/sal,chasetb/sal,sheagcraig/sal,sheagcraig/sal,salopensource/sal,erikng/sal,erikng/sal,salopensource/sal,chasetb/sal,sheagcraig/sal,salopensource/sal,salopensource/sal,sheagcraig/sal,chasetb/sal,erikng/sal
|
from django.conf.urls import url
from api import views
urlpatterns = [
url(r'^machines/(?P<serial>.+)/$', views.machine_detail),
url(r'^inventory/(?P<serial>.+)/$', views.machine_inventory),
url(r'^machines/$', views.machine_list),
url(r'^facts/(?P<serial>.+)/$', views.facts),
url(r'^conditions/(?P<serial>.+)/$', views.conditions),
url(r'^business_units/(?P<pk>.+)/$', views.business_unit),
url(r'^business_units/$', views.business_unit_list),
url(r'^machine_groups/(?P<pk>.+)/$', views.machine_group),
url(r'^machine_groups/$', views.machine_group_list),
]
Revert "Think this url is causing issues"
This reverts commit ca4df2f1f23b5ff85b7c1685c6bbcd015f9789cf.
|
from django.conf.urls import url
from api import views
urlpatterns = [
url(r'^machines/(?P<serial>.+)/inventory/$', views.machine_inventory),
url(r'^machines/(?P<serial>.+)/$', views.machine_detail),
url(r'^machines/$', views.machine_list),
url(r'^facts/(?P<serial>.+)/$', views.facts),
url(r'^conditions/(?P<serial>.+)/$', views.conditions),
url(r'^business_units/(?P<pk>.+)/$', views.business_unit),
url(r'^business_units/$', views.business_unit_list),
url(r'^machine_groups/(?P<pk>.+)/$', views.machine_group),
url(r'^machine_groups/$', views.machine_group_list),
]
|
<commit_before>from django.conf.urls import url
from api import views
urlpatterns = [
url(r'^machines/(?P<serial>.+)/$', views.machine_detail),
url(r'^inventory/(?P<serial>.+)/$', views.machine_inventory),
url(r'^machines/$', views.machine_list),
url(r'^facts/(?P<serial>.+)/$', views.facts),
url(r'^conditions/(?P<serial>.+)/$', views.conditions),
url(r'^business_units/(?P<pk>.+)/$', views.business_unit),
url(r'^business_units/$', views.business_unit_list),
url(r'^machine_groups/(?P<pk>.+)/$', views.machine_group),
url(r'^machine_groups/$', views.machine_group_list),
]
<commit_msg>Revert "Think this url is causing issues"
This reverts commit ca4df2f1f23b5ff85b7c1685c6bbcd015f9789cf.<commit_after>
|
from django.conf.urls import url
from api import views
urlpatterns = [
url(r'^machines/(?P<serial>.+)/inventory/$', views.machine_inventory),
url(r'^machines/(?P<serial>.+)/$', views.machine_detail),
url(r'^machines/$', views.machine_list),
url(r'^facts/(?P<serial>.+)/$', views.facts),
url(r'^conditions/(?P<serial>.+)/$', views.conditions),
url(r'^business_units/(?P<pk>.+)/$', views.business_unit),
url(r'^business_units/$', views.business_unit_list),
url(r'^machine_groups/(?P<pk>.+)/$', views.machine_group),
url(r'^machine_groups/$', views.machine_group_list),
]
|
from django.conf.urls import url
from api import views
urlpatterns = [
url(r'^machines/(?P<serial>.+)/$', views.machine_detail),
url(r'^inventory/(?P<serial>.+)/$', views.machine_inventory),
url(r'^machines/$', views.machine_list),
url(r'^facts/(?P<serial>.+)/$', views.facts),
url(r'^conditions/(?P<serial>.+)/$', views.conditions),
url(r'^business_units/(?P<pk>.+)/$', views.business_unit),
url(r'^business_units/$', views.business_unit_list),
url(r'^machine_groups/(?P<pk>.+)/$', views.machine_group),
url(r'^machine_groups/$', views.machine_group_list),
]
Revert "Think this url is causing issues"
This reverts commit ca4df2f1f23b5ff85b7c1685c6bbcd015f9789cf.
from django.conf.urls import url
from api import views
urlpatterns = [
url(r'^machines/(?P<serial>.+)/inventory/$', views.machine_inventory),
url(r'^machines/(?P<serial>.+)/$', views.machine_detail),
url(r'^machines/$', views.machine_list),
url(r'^facts/(?P<serial>.+)/$', views.facts),
url(r'^conditions/(?P<serial>.+)/$', views.conditions),
url(r'^business_units/(?P<pk>.+)/$', views.business_unit),
url(r'^business_units/$', views.business_unit_list),
url(r'^machine_groups/(?P<pk>.+)/$', views.machine_group),
url(r'^machine_groups/$', views.machine_group_list),
]
|
<commit_before>from django.conf.urls import url
from api import views
urlpatterns = [
url(r'^machines/(?P<serial>.+)/$', views.machine_detail),
url(r'^inventory/(?P<serial>.+)/$', views.machine_inventory),
url(r'^machines/$', views.machine_list),
url(r'^facts/(?P<serial>.+)/$', views.facts),
url(r'^conditions/(?P<serial>.+)/$', views.conditions),
url(r'^business_units/(?P<pk>.+)/$', views.business_unit),
url(r'^business_units/$', views.business_unit_list),
url(r'^machine_groups/(?P<pk>.+)/$', views.machine_group),
url(r'^machine_groups/$', views.machine_group_list),
]
<commit_msg>Revert "Think this url is causing issues"
This reverts commit ca4df2f1f23b5ff85b7c1685c6bbcd015f9789cf.<commit_after>from django.conf.urls import url
from api import views
urlpatterns = [
url(r'^machines/(?P<serial>.+)/inventory/$', views.machine_inventory),
url(r'^machines/(?P<serial>.+)/$', views.machine_detail),
url(r'^machines/$', views.machine_list),
url(r'^facts/(?P<serial>.+)/$', views.facts),
url(r'^conditions/(?P<serial>.+)/$', views.conditions),
url(r'^business_units/(?P<pk>.+)/$', views.business_unit),
url(r'^business_units/$', views.business_unit_list),
url(r'^machine_groups/(?P<pk>.+)/$', views.machine_group),
url(r'^machine_groups/$', views.machine_group_list),
]
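The ordering restored by this revert matters because (?P<serial>.+) is greedy and also matches slashes, so a detail pattern listed first would swallow /machines/<serial>/inventory/ requests. A quick, illustrative way to confirm the dispatch; this assumes the urlconf above is importable as api.urls, and the serial number is made up:

from django.urls import resolve  # older Django versions expose this as django.core.urlresolvers.resolve

match = resolve('/machines/C02ABC123/inventory/', urlconf='api.urls')
print(match.func.__name__)  # expected: machine_inventory, not machine_detail
print(match.kwargs)         # expected: {'serial': 'C02ABC123'}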
|
ff7239b915093c6915d05c15362c5e86341bd6cb
|
lib/dns_lookup.py
|
lib/dns_lookup.py
|
# -*- coding: utf-8 -*-
import network
BLOCK_SIZE = 256
PORT = 53
def listen(port):
if port is None:
port = PORT
print('Listening for data via DNS.')
l = network.get_listener('UDP', port)
print('Data Received:')
while 1:
data, addr = l.recvfrom(1500)
dns = network.parse_dns(data)
name = str(dns.get_q().get_qname())
dec = network.decode_data(name.rstrip('.'))
print(dec)
def send(server, port, data):
if port is None:
port = PORT
print('Sending data to {0} via DNS.'.format(server))
print('Data Sent:')
for n in range(0, len(data), BLOCK_SIZE):
block = data[n:n + BLOCK_SIZE]
print block
enc = network.encode_data(block)
network.send_dns_query(server, port, enc)
|
# -*- coding: utf-8 -*-
import network
BLOCK_SIZE = 256
PORT = 53
def listen(port):
if port is None:
port = PORT
print('Listening for data via DNS on port {0}.'.format(port))
l = network.get_listener('UDP', port)
print('Data Received:')
while 1:
data, addr = l.recvfrom(1500)
dns = network.parse_dns(data)
name = str(dns.get_q().get_qname())
dec = network.decode_data(name.rstrip('.'))
print(dec)
def send(server, port, data):
if port is None:
port = PORT
print('Sending data via DNS to {0} on port {1}.'.format(server, port))
print('Data Sent:')
for n in range(0, len(data), BLOCK_SIZE):
block = data[n:n + BLOCK_SIZE]
print(block)
enc = network.encode_data(block)
network.send_dns_query(server, port, enc)
|
Add port to logging. Fix print statement.
|
Add port to logging. Fix print statement.
|
Python
|
bsd-3-clause
|
averagesecurityguy/exfil
|
# -*- coding: utf-8 -*-
import network
BLOCK_SIZE = 256
PORT = 53
def listen(port):
if port is None:
port = PORT
print('Listening for data via DNS.')
l = network.get_listener('UDP', port)
print('Data Received:')
while 1:
data, addr = l.recvfrom(1500)
dns = network.parse_dns(data)
name = str(dns.get_q().get_qname())
dec = network.decode_data(name.rstrip('.'))
print(dec)
def send(server, port, data):
if port is None:
port = PORT
print('Sending data to {0} via DNS.'.format(server))
print('Data Sent:')
for n in range(0, len(data), BLOCK_SIZE):
block = data[n:n + BLOCK_SIZE]
print block
enc = network.encode_data(block)
network.send_dns_query(server, port, enc)
Add port to logging. Fix print statement.
|
# -*- coding: utf-8 -*-
import network
BLOCK_SIZE = 256
PORT = 53
def listen(port):
if port is None:
port = PORT
print('Listening for data via DNS on port {0}.'.format(port))
l = network.get_listener('UDP', port)
print('Data Received:')
while 1:
data, addr = l.recvfrom(1500)
dns = network.parse_dns(data)
name = str(dns.get_q().get_qname())
dec = network.decode_data(name.rstrip('.'))
print(dec)
def send(server, port, data):
if port is None:
port = PORT
print('Sending data via DNS to {0} on port {1}.'.format(server, port))
print('Data Sent:')
for n in range(0, len(data), BLOCK_SIZE):
block = data[n:n + BLOCK_SIZE]
print(block)
enc = network.encode_data(block)
network.send_dns_query(server, port, enc)
|
<commit_before># -*- coding: utf-8 -*-
import network
BLOCK_SIZE = 256
PORT = 53
def listen(port):
if port is None:
port = PORT
print('Listening for data via DNS.')
l = network.get_listener('UDP', port)
print('Data Received:')
while 1:
data, addr = l.recvfrom(1500)
dns = network.parse_dns(data)
name = str(dns.get_q().get_qname())
dec = network.decode_data(name.rstrip('.'))
print(dec)
def send(server, port, data):
if port is None:
port = PORT
print('Sending data to {0} via DNS.'.format(server))
print('Data Sent:')
for n in range(0, len(data), BLOCK_SIZE):
block = data[n:n + BLOCK_SIZE]
print block
enc = network.encode_data(block)
network.send_dns_query(server, port, enc)
<commit_msg>Add port to logging. Fix print statement.<commit_after>
|
# -*- coding: utf-8 -*-
import network
BLOCK_SIZE = 256
PORT = 53
def listen(port):
if port is None:
port = PORT
print('Listening for data via DNS on port {0}.'.format(port))
l = network.get_listener('UDP', port)
print('Data Received:')
while 1:
data, addr = l.recvfrom(1500)
dns = network.parse_dns(data)
name = str(dns.get_q().get_qname())
dec = network.decode_data(name.rstrip('.'))
print(dec)
def send(server, port, data):
if port is None:
port = PORT
print('Sending data via DNS to {0} on port {1}.'.format(server, port))
print('Data Sent:')
for n in range(0, len(data), BLOCK_SIZE):
block = data[n:n + BLOCK_SIZE]
print(block)
enc = network.encode_data(block)
network.send_dns_query(server, port, enc)
|
# -*- coding: utf-8 -*-
import network
BLOCK_SIZE = 256
PORT = 53
def listen(port):
if port is None:
port = PORT
print('Listening for data via DNS.')
l = network.get_listener('UDP', port)
print('Data Received:')
while 1:
data, addr = l.recvfrom(1500)
dns = network.parse_dns(data)
name = str(dns.get_q().get_qname())
dec = network.decode_data(name.rstrip('.'))
print(dec)
def send(server, port, data):
if port is None:
port = PORT
print('Sending data to {0} via DNS.'.format(server))
print('Data Sent:')
for n in range(0, len(data), BLOCK_SIZE):
block = data[n:n + BLOCK_SIZE]
print block
enc = network.encode_data(block)
network.send_dns_query(server, port, enc)
Add port to logging. Fix print statement.
# -*- coding: utf-8 -*-
import network
BLOCK_SIZE = 256
PORT = 53
def listen(port):
if port is None:
port = PORT
print('Listening for data via DNS on port {0}.'.format(port))
l = network.get_listener('UDP', port)
print('Data Received:')
while 1:
data, addr = l.recvfrom(1500)
dns = network.parse_dns(data)
name = str(dns.get_q().get_qname())
dec = network.decode_data(name.rstrip('.'))
print(dec)
def send(server, port, data):
if port is None:
port = PORT
print('Sending data via DNS to {0} on port {1}.'.format(server, port))
print('Data Sent:')
for n in range(0, len(data), BLOCK_SIZE):
block = data[n:n + BLOCK_SIZE]
print(block)
enc = network.encode_data(block)
network.send_dns_query(server, port, enc)
|
<commit_before># -*- coding: utf-8 -*-
import network
BLOCK_SIZE = 256
PORT = 53
def listen(port):
if port is None:
port = PORT
print('Listening for data via DNS.')
l = network.get_listener('UDP', port)
print('Data Received:')
while 1:
data, addr = l.recvfrom(1500)
dns = network.parse_dns(data)
name = str(dns.get_q().get_qname())
dec = network.decode_data(name.rstrip('.'))
print(dec)
def send(server, port, data):
if port is None:
port = PORT
print('Sending data to {0} via DNS.'.format(server))
print('Data Sent:')
for n in range(0, len(data), BLOCK_SIZE):
block = data[n:n + BLOCK_SIZE]
print block
enc = network.encode_data(block)
network.send_dns_query(server, port, enc)
<commit_msg>Add port to logging. Fix print statement.<commit_after># -*- coding: utf-8 -*-
import network
BLOCK_SIZE = 256
PORT = 53
def listen(port):
if port is None:
port = PORT
print('Listening for data via DNS on port {0}.'.format(port))
l = network.get_listener('UDP', port)
print('Data Received:')
while 1:
data, addr = l.recvfrom(1500)
dns = network.parse_dns(data)
name = str(dns.get_q().get_qname())
dec = network.decode_data(name.rstrip('.'))
print(dec)
def send(server, port, data):
if port is None:
port = PORT
print('Sending data via DNS to {0} on port {1}.'.format(server, port))
print('Data Sent:')
for n in range(0, len(data), BLOCK_SIZE):
block = data[n:n + BLOCK_SIZE]
print(block)
enc = network.encode_data(block)
network.send_dns_query(server, port, enc)
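For context, send splits the payload into BLOCK_SIZE-character chunks and encodes each one into a DNS query name, while listen reverses the process on received packets. The chunking step is plain Python and can be checked in isolation (the sample payload below is arbitrary):

BLOCK_SIZE = 256

data = 'A' * 600
blocks = [data[n:n + BLOCK_SIZE] for n in range(0, len(data), BLOCK_SIZE)]
print([len(b) for b in blocks])  # [256, 256, 88] -- the final block is simply shorter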
|
c286722965ce7f5ea9acc201aa9cf289cfe16105
|
openstackclient/tests/functional/common/test_availability_zone.py
|
openstackclient/tests/functional/common/test_availability_zone.py
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from openstackclient.tests.functional import base
class AvailabilityZoneTests(base.TestCase):
"""Functional tests for availability zone. """
HEADERS = ["'Zone Name'"]
# So far, all components have the same default availability zone name.
DEFAULT_AZ_NAME = 'nova'
def test_availability_zone_list(self):
opts = self.get_opts(self.HEADERS)
raw_output = self.openstack('availability zone list' + opts)
self.assertIn(self.DEFAULT_AZ_NAME, raw_output)
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
from openstackclient.tests.functional import base
class AvailabilityZoneTests(base.TestCase):
"""Functional tests for availability zone. """
def test_availability_zone_list(self):
cmd_output = json.loads(self.openstack(
'availability zone list -f json'))
zones = [x['Zone Name'] for x in cmd_output]
self.assertIn(
'internal',
zones
)
self.assertIn(
'nova',
zones
)
|
Refactor availability zone functional test
|
Refactor availability zone functional test
Using json format output in availability zone list functional test
Change-Id: I7098b1c3bee680e47e414dcb4fa272628cdec1eb
|
Python
|
apache-2.0
|
dtroyer/python-openstackclient,openstack/python-openstackclient,dtroyer/python-openstackclient,openstack/python-openstackclient
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from openstackclient.tests.functional import base
class AvailabilityZoneTests(base.TestCase):
"""Functional tests for availability zone. """
HEADERS = ["'Zone Name'"]
# So far, all components have the same default availability zone name.
DEFAULT_AZ_NAME = 'nova'
def test_availability_zone_list(self):
opts = self.get_opts(self.HEADERS)
raw_output = self.openstack('availability zone list' + opts)
self.assertIn(self.DEFAULT_AZ_NAME, raw_output)
Refactor availability zone functional test
Using json format output in availability zone list functional test
Change-Id: I7098b1c3bee680e47e414dcb4fa272628cdec1eb
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
from openstackclient.tests.functional import base
class AvailabilityZoneTests(base.TestCase):
"""Functional tests for availability zone. """
def test_availability_zone_list(self):
cmd_output = json.loads(self.openstack(
'availability zone list -f json'))
zones = [x['Zone Name'] for x in cmd_output]
self.assertIn(
'internal',
zones
)
self.assertIn(
'nova',
zones
)
|
<commit_before># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from openstackclient.tests.functional import base
class AvailabilityZoneTests(base.TestCase):
"""Functional tests for availability zone. """
HEADERS = ["'Zone Name'"]
# So far, all components have the same default availability zone name.
DEFAULT_AZ_NAME = 'nova'
def test_availability_zone_list(self):
opts = self.get_opts(self.HEADERS)
raw_output = self.openstack('availability zone list' + opts)
self.assertIn(self.DEFAULT_AZ_NAME, raw_output)
<commit_msg>Refactor availability zone functional test
Using json format output in availability zone list functional test
Change-Id: I7098b1c3bee680e47e414dcb4fa272628cdec1eb<commit_after>
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
from openstackclient.tests.functional import base
class AvailabilityZoneTests(base.TestCase):
"""Functional tests for availability zone. """
def test_availability_zone_list(self):
cmd_output = json.loads(self.openstack(
'availability zone list -f json'))
zones = [x['Zone Name'] for x in cmd_output]
self.assertIn(
'internal',
zones
)
self.assertIn(
'nova',
zones
)
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from openstackclient.tests.functional import base
class AvailabilityZoneTests(base.TestCase):
"""Functional tests for availability zone. """
HEADERS = ["'Zone Name'"]
# So far, all components have the same default availability zone name.
DEFAULT_AZ_NAME = 'nova'
def test_availability_zone_list(self):
opts = self.get_opts(self.HEADERS)
raw_output = self.openstack('availability zone list' + opts)
self.assertIn(self.DEFAULT_AZ_NAME, raw_output)
Refactor availability zone functional test
Using json format output in availability zone list functional test
Change-Id: I7098b1c3bee680e47e414dcb4fa272628cdec1eb
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
from openstackclient.tests.functional import base
class AvailabilityZoneTests(base.TestCase):
"""Functional tests for availability zone. """
def test_availability_zone_list(self):
cmd_output = json.loads(self.openstack(
'availability zone list -f json'))
zones = [x['Zone Name'] for x in cmd_output]
self.assertIn(
'internal',
zones
)
self.assertIn(
'nova',
zones
)
|
<commit_before># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from openstackclient.tests.functional import base
class AvailabilityZoneTests(base.TestCase):
"""Functional tests for availability zone. """
HEADERS = ["'Zone Name'"]
# So far, all components have the same default availability zone name.
DEFAULT_AZ_NAME = 'nova'
def test_availability_zone_list(self):
opts = self.get_opts(self.HEADERS)
raw_output = self.openstack('availability zone list' + opts)
self.assertIn(self.DEFAULT_AZ_NAME, raw_output)
<commit_msg>Refactor availability zone functional test
Using json format output in availability zone list functional test
Change-Id: I7098b1c3bee680e47e414dcb4fa272628cdec1eb<commit_after># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
from openstackclient.tests.functional import base
class AvailabilityZoneTests(base.TestCase):
"""Functional tests for availability zone. """
def test_availability_zone_list(self):
cmd_output = json.loads(self.openstack(
'availability zone list -f json'))
zones = [x['Zone Name'] for x in cmd_output]
self.assertIn(
'internal',
zones
)
self.assertIn(
'nova',
zones
)
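The refactor relies on the client's machine-readable output: -f json makes availability zone list emit a JSON array of rows, which is easier to assert on than scraping the default table. The parsing step on its own (the raw string below is a stand-in for the output of self.openstack):

import json

raw = '[{"Zone Name": "internal"}, {"Zone Name": "nova"}]'  # placeholder for the CLI output
zones = [row['Zone Name'] for row in json.loads(raw)]
assert 'internal' in zones and 'nova' in zones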
|
f4b3c2ca7d9fdf6bc96202d6c2ad3b16cb6fc3be
|
sedfitter/timer.py
|
sedfitter/timer.py
|
from __future__ import print_function, division
import time
import numpy as np
class Timer(object):
def __init__(self):
self.time1 = time.time()
self.n = 0
self.step = 1
print(" # Sources CPU time (sec) Sources/sec ")
print(" ----------------------------------------------")
def display(self, force=False):
self.n += 1
if np.mod(self.n, self.step) == 0:
self.time2 = time.time()
if self.time2 - self.time1 < 1.:
self.step *= 10
else:
print(" %7i %10.1f %7.2f" % (self.n, self.time2 - self.time1, self.n / (self.time2 - self.time1)))
elif force:
self.time2 = time.time()
print(" %7i %10.1f %7.2f" % (self.n, self.time2 - self.time1, self.n / (self.time2 - self.time1)))
|
from __future__ import print_function, division
import time
import numpy as np
class Timer(object):
def __init__(self):
self.time1 = time.time()
self.n = 0
self.step = 1
print(" # Sources CPU time (sec) Sources/sec ")
print(" ----------------------------------------------")
def display(self, force=False):
self.n += 1
if np.mod(self.n, self.step) == 0:
self.time2 = time.time()
if self.time2 - self.time1 < 1.:
self.step *= 10
else:
print(" %7i %10.1f %7.2f" % (self.n, self.time2 - self.time1, self.n / (self.time2 - self.time1)))
elif force:
self.time2 = time.time()
if self.time2 == self.time1:
print(" %7i %10.1f -------" % (self.n, self.time2 - self.time1))
else:
print(" %7i %10.1f %7.2f" % (self.n, self.time2 - self.time1, self.n / (self.time2 - self.time1)))
|
Fix division by zero error
|
Fix division by zero error
|
Python
|
bsd-2-clause
|
astrofrog/sedfitter
|
from __future__ import print_function, division
import time
import numpy as np
class Timer(object):
def __init__(self):
self.time1 = time.time()
self.n = 0
self.step = 1
print(" # Sources CPU time (sec) Sources/sec ")
print(" ----------------------------------------------")
def display(self, force=False):
self.n += 1
if np.mod(self.n, self.step) == 0:
self.time2 = time.time()
if self.time2 - self.time1 < 1.:
self.step *= 10
else:
print(" %7i %10.1f %7.2f" % (self.n, self.time2 - self.time1, self.n / (self.time2 - self.time1)))
elif force:
self.time2 = time.time()
print(" %7i %10.1f %7.2f" % (self.n, self.time2 - self.time1, self.n / (self.time2 - self.time1)))
Fix division by zero error
|
from __future__ import print_function, division
import time
import numpy as np
class Timer(object):
def __init__(self):
self.time1 = time.time()
self.n = 0
self.step = 1
print(" # Sources CPU time (sec) Sources/sec ")
print(" ----------------------------------------------")
def display(self, force=False):
self.n += 1
if np.mod(self.n, self.step) == 0:
self.time2 = time.time()
if self.time2 - self.time1 < 1.:
self.step *= 10
else:
print(" %7i %10.1f %7.2f" % (self.n, self.time2 - self.time1, self.n / (self.time2 - self.time1)))
elif force:
self.time2 = time.time()
if self.time2 == self.time1:
print(" %7i %10.1f -------" % (self.n, self.time2 - self.time1))
else:
print(" %7i %10.1f %7.2f" % (self.n, self.time2 - self.time1, self.n / (self.time2 - self.time1)))
|
<commit_before>from __future__ import print_function, division
import time
import numpy as np
class Timer(object):
def __init__(self):
self.time1 = time.time()
self.n = 0
self.step = 1
print(" # Sources CPU time (sec) Sources/sec ")
print(" ----------------------------------------------")
def display(self, force=False):
self.n += 1
if np.mod(self.n, self.step) == 0:
self.time2 = time.time()
if self.time2 - self.time1 < 1.:
self.step *= 10
else:
print(" %7i %10.1f %7.2f" % (self.n, self.time2 - self.time1, self.n / (self.time2 - self.time1)))
elif force:
self.time2 = time.time()
print(" %7i %10.1f %7.2f" % (self.n, self.time2 - self.time1, self.n / (self.time2 - self.time1)))
<commit_msg>Fix division by zero error<commit_after>
|
from __future__ import print_function, division
import time
import numpy as np
class Timer(object):
def __init__(self):
self.time1 = time.time()
self.n = 0
self.step = 1
print(" # Sources CPU time (sec) Sources/sec ")
print(" ----------------------------------------------")
def display(self, force=False):
self.n += 1
if np.mod(self.n, self.step) == 0:
self.time2 = time.time()
if self.time2 - self.time1 < 1.:
self.step *= 10
else:
print(" %7i %10.1f %7.2f" % (self.n, self.time2 - self.time1, self.n / (self.time2 - self.time1)))
elif force:
self.time2 = time.time()
if self.time2 == self.time1:
print(" %7i %10.1f -------" % (self.n, self.time2 - self.time1))
else:
print(" %7i %10.1f %7.2f" % (self.n, self.time2 - self.time1, self.n / (self.time2 - self.time1)))
|
from __future__ import print_function, division
import time
import numpy as np
class Timer(object):
def __init__(self):
self.time1 = time.time()
self.n = 0
self.step = 1
print(" # Sources CPU time (sec) Sources/sec ")
print(" ----------------------------------------------")
def display(self, force=False):
self.n += 1
if np.mod(self.n, self.step) == 0:
self.time2 = time.time()
if self.time2 - self.time1 < 1.:
self.step *= 10
else:
print(" %7i %10.1f %7.2f" % (self.n, self.time2 - self.time1, self.n / (self.time2 - self.time1)))
elif force:
self.time2 = time.time()
print(" %7i %10.1f %7.2f" % (self.n, self.time2 - self.time1, self.n / (self.time2 - self.time1)))
Fix division by zero error
from __future__ import print_function, division
import time
import numpy as np
class Timer(object):
def __init__(self):
self.time1 = time.time()
self.n = 0
self.step = 1
print(" # Sources CPU time (sec) Sources/sec ")
print(" ----------------------------------------------")
def display(self, force=False):
self.n += 1
if np.mod(self.n, self.step) == 0:
self.time2 = time.time()
if self.time2 - self.time1 < 1.:
self.step *= 10
else:
print(" %7i %10.1f %7.2f" % (self.n, self.time2 - self.time1, self.n / (self.time2 - self.time1)))
elif force:
self.time2 = time.time()
if self.time2 == self.time1:
print(" %7i %10.1f -------" % (self.n, self.time2 - self.time1))
else:
print(" %7i %10.1f %7.2f" % (self.n, self.time2 - self.time1, self.n / (self.time2 - self.time1)))
|
<commit_before>from __future__ import print_function, division
import time
import numpy as np
class Timer(object):
def __init__(self):
self.time1 = time.time()
self.n = 0
self.step = 1
print(" # Sources CPU time (sec) Sources/sec ")
print(" ----------------------------------------------")
def display(self, force=False):
self.n += 1
if np.mod(self.n, self.step) == 0:
self.time2 = time.time()
if self.time2 - self.time1 < 1.:
self.step *= 10
else:
print(" %7i %10.1f %7.2f" % (self.n, self.time2 - self.time1, self.n / (self.time2 - self.time1)))
elif force:
self.time2 = time.time()
print(" %7i %10.1f %7.2f" % (self.n, self.time2 - self.time1, self.n / (self.time2 - self.time1)))
<commit_msg>Fix division by zero error<commit_after>from __future__ import print_function, division
import time
import numpy as np
class Timer(object):
def __init__(self):
self.time1 = time.time()
self.n = 0
self.step = 1
print(" # Sources CPU time (sec) Sources/sec ")
print(" ----------------------------------------------")
def display(self, force=False):
self.n += 1
if np.mod(self.n, self.step) == 0:
self.time2 = time.time()
if self.time2 - self.time1 < 1.:
self.step *= 10
else:
print(" %7i %10.1f %7.2f" % (self.n, self.time2 - self.time1, self.n / (self.time2 - self.time1)))
elif force:
self.time2 = time.time()
if self.time2 == self.time1:
print(" %7i %10.1f -------" % (self.n, self.time2 - self.time1))
else:
print(" %7i %10.1f %7.2f" % (self.n, self.time2 - self.time1, self.n / (self.time2 - self.time1)))
|
8399253b159b96419bcc36de791804bf86152f4d
|
scripts/commandsocket.py
|
scripts/commandsocket.py
|
import RPi.GPIO as GPIO
import time
from socketIO_client import SocketIO, LoggingNamespace
socketIO = SocketIO('localhost:3000')
def onCommand(*args):
print(args)
while (True):
socketIO.on("commands", onCommand)
socketIO.wait(seconds=1)
socketIO.off("sequencePi")
|
import RPi.GPIO as GPIO
import time
from socketIO_client import SocketIO, LoggingNamespace
socketIO = SocketIO('localhost:3000')
def onCommand(*args):
print(args[0])
while (True):
socketIO.on("commands", onCommand)
socketIO.wait(seconds=1)
socketIO.off("sequencePi")
|
Print first element of array
|
Print first element of array
Print first element of array
|
Python
|
mit
|
willdavidc/piel,willdavidc/piel,willdavidc/piel,willdavidc/piel,willdavidc/piel
|
import RPi.GPIO as GPIO
import time
from socketIO_client import SocketIO, LoggingNamespace
socketIO = SocketIO('localhost:3000')
def onCommand(*args):
print(args)
while (True):
socketIO.on("commands", onCommand)
socketIO.wait(seconds=1)
socketIO.off("sequencePi")
Print first element of array
Print first element of array
|
import RPi.GPIO as GPIO
import time
from socketIO_client import SocketIO, LoggingNamespace
socketIO = SocketIO('localhost:3000')
def onCommand(*args):
print(args[0])
while (True):
socketIO.on("commands", onCommand)
socketIO.wait(seconds=1)
socketIO.off("sequencePi")
|
<commit_before>import RPi.GPIO as GPIO
import time
from socketIO_client import SocketIO, LoggingNamespace
socketIO = SocketIO('localhost:3000')
def onCommand(*args):
print(args)
while (True):
socketIO.on("commands", onCommand)
socketIO.wait(seconds=1)
socketIO.off("sequencePi")<commit_msg>Print first element of array
Print first element of array<commit_after>
|
import RPi.GPIO as GPIO
import time
from socketIO_client import SocketIO, LoggingNamespace
socketIO = SocketIO('localhost:3000')
def onCommand(*args):
print(args[0])
while (True):
socketIO.on("commands", onCommand)
socketIO.wait(seconds=1)
socketIO.off("sequencePi")
|
import RPi.GPIO as GPIO
import time
from socketIO_client import SocketIO, LoggingNamespace
socketIO = SocketIO('localhost:3000')
def onCommand(*args):
print(args)
while (True):
socketIO.on("commands", onCommand)
socketIO.wait(seconds=1)
socketIO.off("sequencePi")
Print first element of array
Print first element of array
import RPi.GPIO as GPIO
import time
from socketIO_client import SocketIO, LoggingNamespace
socketIO = SocketIO('localhost:3000')
def onCommand(*args):
print(args[0])
while (True):
socketIO.on("commands", onCommand)
socketIO.wait(seconds=1)
socketIO.off("sequencePi")
|
<commit_before>import RPi.GPIO as GPIO
import time
from socketIO_client import SocketIO, LoggingNamespace
socketIO = SocketIO('localhost:3000')
def onCommand(*args):
print(args)
while (True):
socketIO.on("commands", onCommand)
socketIO.wait(seconds=1)
socketIO.off("sequencePi")<commit_msg>Print first element of array
Print first element of array<commit_after>import RPi.GPIO as GPIO
import time
from socketIO_client import SocketIO, LoggingNamespace
socketIO = SocketIO('localhost:3000')
def onCommand(*args):
print(args[0])
while (True):
socketIO.on("commands", onCommand)
socketIO.wait(seconds=1)
socketIO.off("sequencePi")
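The reason args[0] is enough: socketIO_client passes the event payload(s) to the handler as positional arguments, so args is a tuple and its first element is the command itself. A standalone illustration (the event payloads are invented):

def on_command(*args):
    print(args)     # ('forward', {'speed': 3})  -- the whole tuple, as printed before the change
    print(args[0])  # 'forward'                  -- only the first payload, as printed after it

on_command('forward', {'speed': 3})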
|
2c2a5b4af2fa3fe4daff088810ced044ce73af0c
|
services/disqus.py
|
services/disqus.py
|
from oauthlib.oauth2.draft25 import utils
import foauth.providers
def token_uri(service, token, r):
params = [((u'access_token', token)), ((u'api_key', service.client_id))]
r.url = utils.add_params_to_uri(r.url, params)
return r
class Disqus(foauth.providers.OAuth2):
# General info about the provider
provider_url = 'http://disqus.com/'
docs_url = 'http://disqus.com/api/docs/'
category = 'Social'
# URLs to interact with the API
authorize_url = 'https://disqus.com/api/oauth/2.0/authorize/'
access_token_url = 'https://disqus.com/api/oauth/2.0/access_token/'
api_domain = 'disqus.com'
available_permissions = [
(None, 'read data on your behalf'),
('write', 'read and write data on your behalf'),
('admin', 'read and write data on your behalf and moderate your forums'),
]
permissions_widget = 'radio'
bearer_type = token_uri
def get_scope_string(self, scopes):
# Disqus doesn't follow the spec on this point
return ','.join(scopes)
def get_user_id(self, key):
r = self.api(key, self.api_domain, u'/api/3.0/users/details.json')
return r.json[u'response'][u'id']
|
from oauthlib.oauth2.draft25 import utils
import foauth.providers
def token_uri(service, token, r):
params = [((u'access_token', token)), ((u'api_key', service.client_id))]
r.url = utils.add_params_to_uri(r.url, params)
return r
class Disqus(foauth.providers.OAuth2):
# General info about the provider
provider_url = 'http://disqus.com/'
docs_url = 'http://disqus.com/api/docs/'
category = 'Social'
# URLs to interact with the API
authorize_url = 'https://disqus.com/api/oauth/2.0/authorize/'
access_token_url = 'https://disqus.com/api/oauth/2.0/access_token/'
api_domain = 'disqus.com'
available_permissions = [
(None, 'access your contact info'),
('write', 'access your contact info and add comments'),
('admin', 'access your contact info, and comments and moderate your forums'),
]
permissions_widget = 'radio'
bearer_type = token_uri
def get_scope_string(self, scopes):
# Disqus doesn't follow the spec on this point
return ','.join(scopes)
def get_user_id(self, key):
r = self.api(key, self.api_domain, u'/api/3.0/users/details.json')
return r.json[u'response'][u'id']
|
Reword the permissions for Disqus
|
Reword the permissions for Disqus
|
Python
|
bsd-3-clause
|
foauth/foauth.org,foauth/foauth.org,foauth/foauth.org,foauth/oauth-proxy
|
from oauthlib.oauth2.draft25 import utils
import foauth.providers
def token_uri(service, token, r):
params = [((u'access_token', token)), ((u'api_key', service.client_id))]
r.url = utils.add_params_to_uri(r.url, params)
return r
class Disqus(foauth.providers.OAuth2):
# General info about the provider
provider_url = 'http://disqus.com/'
docs_url = 'http://disqus.com/api/docs/'
category = 'Social'
# URLs to interact with the API
authorize_url = 'https://disqus.com/api/oauth/2.0/authorize/'
access_token_url = 'https://disqus.com/api/oauth/2.0/access_token/'
api_domain = 'disqus.com'
available_permissions = [
(None, 'read data on your behalf'),
('write', 'read and write data on your behalf'),
('admin', 'read and write data on your behalf and moderate your forums'),
]
permissions_widget = 'radio'
bearer_type = token_uri
def get_scope_string(self, scopes):
# Disqus doesn't follow the spec on this point
return ','.join(scopes)
def get_user_id(self, key):
r = self.api(key, self.api_domain, u'/api/3.0/users/details.json')
return r.json[u'response'][u'id']
Reword the permissions for Disqus
|
from oauthlib.oauth2.draft25 import utils
import foauth.providers
def token_uri(service, token, r):
params = [((u'access_token', token)), ((u'api_key', service.client_id))]
r.url = utils.add_params_to_uri(r.url, params)
return r
class Disqus(foauth.providers.OAuth2):
# General info about the provider
provider_url = 'http://disqus.com/'
docs_url = 'http://disqus.com/api/docs/'
category = 'Social'
# URLs to interact with the API
authorize_url = 'https://disqus.com/api/oauth/2.0/authorize/'
access_token_url = 'https://disqus.com/api/oauth/2.0/access_token/'
api_domain = 'disqus.com'
available_permissions = [
(None, 'access your contact info'),
('write', 'access your contact info and add comments'),
('admin', 'access your contact info, and comments and moderate your forums'),
]
permissions_widget = 'radio'
bearer_type = token_uri
def get_scope_string(self, scopes):
# Disqus doesn't follow the spec on this point
return ','.join(scopes)
def get_user_id(self, key):
r = self.api(key, self.api_domain, u'/api/3.0/users/details.json')
return r.json[u'response'][u'id']
|
<commit_before>from oauthlib.oauth2.draft25 import utils
import foauth.providers
def token_uri(service, token, r):
params = [((u'access_token', token)), ((u'api_key', service.client_id))]
r.url = utils.add_params_to_uri(r.url, params)
return r
class Disqus(foauth.providers.OAuth2):
# General info about the provider
provider_url = 'http://disqus.com/'
docs_url = 'http://disqus.com/api/docs/'
category = 'Social'
# URLs to interact with the API
authorize_url = 'https://disqus.com/api/oauth/2.0/authorize/'
access_token_url = 'https://disqus.com/api/oauth/2.0/access_token/'
api_domain = 'disqus.com'
available_permissions = [
(None, 'read data on your behalf'),
('write', 'read and write data on your behalf'),
('admin', 'read and write data on your behalf and moderate your forums'),
]
permissions_widget = 'radio'
bearer_type = token_uri
def get_scope_string(self, scopes):
# Disqus doesn't follow the spec on this point
return ','.join(scopes)
def get_user_id(self, key):
r = self.api(key, self.api_domain, u'/api/3.0/users/details.json')
return r.json[u'response'][u'id']
<commit_msg>Reword the permissions for Disqus<commit_after>
|
from oauthlib.oauth2.draft25 import utils
import foauth.providers
def token_uri(service, token, r):
params = [((u'access_token', token)), ((u'api_key', service.client_id))]
r.url = utils.add_params_to_uri(r.url, params)
return r
class Disqus(foauth.providers.OAuth2):
# General info about the provider
provider_url = 'http://disqus.com/'
docs_url = 'http://disqus.com/api/docs/'
category = 'Social'
# URLs to interact with the API
authorize_url = 'https://disqus.com/api/oauth/2.0/authorize/'
access_token_url = 'https://disqus.com/api/oauth/2.0/access_token/'
api_domain = 'disqus.com'
available_permissions = [
(None, 'access your contact info'),
('write', 'access your contact info and add comments'),
('admin', 'access your contact info, and comments and moderate your forums'),
]
permissions_widget = 'radio'
bearer_type = token_uri
def get_scope_string(self, scopes):
# Disqus doesn't follow the spec on this point
return ','.join(scopes)
def get_user_id(self, key):
r = self.api(key, self.api_domain, u'/api/3.0/users/details.json')
return r.json[u'response'][u'id']
|
from oauthlib.oauth2.draft25 import utils
import foauth.providers
def token_uri(service, token, r):
params = [((u'access_token', token)), ((u'api_key', service.client_id))]
r.url = utils.add_params_to_uri(r.url, params)
return r
class Disqus(foauth.providers.OAuth2):
# General info about the provider
provider_url = 'http://disqus.com/'
docs_url = 'http://disqus.com/api/docs/'
category = 'Social'
# URLs to interact with the API
authorize_url = 'https://disqus.com/api/oauth/2.0/authorize/'
access_token_url = 'https://disqus.com/api/oauth/2.0/access_token/'
api_domain = 'disqus.com'
available_permissions = [
(None, 'read data on your behalf'),
('write', 'read and write data on your behalf'),
('admin', 'read and write data on your behalf and moderate your forums'),
]
permissions_widget = 'radio'
bearer_type = token_uri
def get_scope_string(self, scopes):
# Disqus doesn't follow the spec on this point
return ','.join(scopes)
def get_user_id(self, key):
r = self.api(key, self.api_domain, u'/api/3.0/users/details.json')
return r.json[u'response'][u'id']
Reword the permissions for Disqus
from oauthlib.oauth2.draft25 import utils
import foauth.providers
def token_uri(service, token, r):
params = [((u'access_token', token)), ((u'api_key', service.client_id))]
r.url = utils.add_params_to_uri(r.url, params)
return r
class Disqus(foauth.providers.OAuth2):
# General info about the provider
provider_url = 'http://disqus.com/'
docs_url = 'http://disqus.com/api/docs/'
category = 'Social'
# URLs to interact with the API
authorize_url = 'https://disqus.com/api/oauth/2.0/authorize/'
access_token_url = 'https://disqus.com/api/oauth/2.0/access_token/'
api_domain = 'disqus.com'
available_permissions = [
(None, 'access your contact info'),
('write', 'access your contact info and add comments'),
('admin', 'access your contact info, and comments and moderate your forums'),
]
permissions_widget = 'radio'
bearer_type = token_uri
def get_scope_string(self, scopes):
# Disqus doesn't follow the spec on this point
return ','.join(scopes)
def get_user_id(self, key):
r = self.api(key, self.api_domain, u'/api/3.0/users/details.json')
return r.json[u'response'][u'id']
|
<commit_before>from oauthlib.oauth2.draft25 import utils
import foauth.providers
def token_uri(service, token, r):
params = [((u'access_token', token)), ((u'api_key', service.client_id))]
r.url = utils.add_params_to_uri(r.url, params)
return r
class Disqus(foauth.providers.OAuth2):
# General info about the provider
provider_url = 'http://disqus.com/'
docs_url = 'http://disqus.com/api/docs/'
category = 'Social'
# URLs to interact with the API
authorize_url = 'https://disqus.com/api/oauth/2.0/authorize/'
access_token_url = 'https://disqus.com/api/oauth/2.0/access_token/'
api_domain = 'disqus.com'
available_permissions = [
(None, 'read data on your behalf'),
('write', 'read and write data on your behalf'),
('admin', 'read and write data on your behalf and moderate your forums'),
]
permissions_widget = 'radio'
bearer_type = token_uri
def get_scope_string(self, scopes):
# Disqus doesn't follow the spec on this point
return ','.join(scopes)
def get_user_id(self, key):
r = self.api(key, self.api_domain, u'/api/3.0/users/details.json')
return r.json[u'response'][u'id']
<commit_msg>Reword the permissions for Disqus<commit_after>from oauthlib.oauth2.draft25 import utils
import foauth.providers
def token_uri(service, token, r):
params = [((u'access_token', token)), ((u'api_key', service.client_id))]
r.url = utils.add_params_to_uri(r.url, params)
return r
class Disqus(foauth.providers.OAuth2):
# General info about the provider
provider_url = 'http://disqus.com/'
docs_url = 'http://disqus.com/api/docs/'
category = 'Social'
# URLs to interact with the API
authorize_url = 'https://disqus.com/api/oauth/2.0/authorize/'
access_token_url = 'https://disqus.com/api/oauth/2.0/access_token/'
api_domain = 'disqus.com'
available_permissions = [
(None, 'access your contact info'),
('write', 'access your contact info and add comments'),
('admin', 'access your contact info, and comments and moderate your forums'),
]
permissions_widget = 'radio'
bearer_type = token_uri
def get_scope_string(self, scopes):
# Disqus doesn't follow the spec on this point
return ','.join(scopes)
def get_user_id(self, key):
r = self.api(key, self.api_domain, u'/api/3.0/users/details.json')
return r.json[u'response'][u'id']
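The token_uri bearer hook above authenticates by query string rather than an Authorization header. Roughly what it does to an outgoing request URL; the token and client id are placeholders, and newer oauthlib releases expose add_params_to_uri from oauthlib.common instead of the draft25 module:

from oauthlib.oauth2.draft25 import utils

url = 'https://disqus.com/api/3.0/users/details.json'
params = [(u'access_token', u'TOKEN'), (u'api_key', u'CLIENT_ID')]
print(utils.add_params_to_uri(url, params))
# https://disqus.com/api/3.0/users/details.json?access_token=TOKEN&api_key=CLIENT_ID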
|
0e1384ab777a2d7e30036ccc7d8ed0e17093f4e1
|
src/ggrc_basic_permissions/roles/ProgramAuditReader.py
|
src/ggrc_basic_permissions/roles/ProgramAuditReader.py
|
# Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: anze@reciprocitylabs.com
# Maintained By: anze@reciprocitylabs.com
scope = "AuditImplied"
description = """
A user with the ProgramReader role for a private program will also have this
role in the audit context for any audit created for that program.
"""
permissions = {
"read": [
"Request",
"ControlAssessment",
"Issue",
"DocumentationResponse",
"InterviewResponse",
"PopulationSampleResponse",
"Audit",
"AuditObject",
"Meeting",
"ObjectDocument",
"ObjectPerson",
"Relationship",
"Document",
"Meeting",
"UserRole",
"Context",
],
"create": [
"DocumentationResponse",
"InterviewResponse",
"Response",
"ObjectDocument",
"ObjectPerson",
"Relationship",
"Document"
],
"view_object_page": [
"__GGRC_ALL__"
],
"update": [],
"delete": [
"ObjectDocument",
"ObjectPerson",
"Relationship"
]
}
|
# Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: anze@reciprocitylabs.com
# Maintained By: anze@reciprocitylabs.com
scope = "AuditImplied"
description = """
A user with the ProgramReader role for a private program will also have this
role in the audit context for any audit created for that program.
"""
permissions = {
"read": [
"Request",
"ControlAssessment",
"Issue",
"DocumentationResponse",
"InterviewResponse",
"PopulationSampleResponse",
"Audit",
"AuditObject",
"Meeting",
"ObjectDocument",
"ObjectPerson",
"Relationship",
"Document",
"Meeting",
"UserRole",
"Context",
],
"create": [],
"view_object_page": [
"__GGRC_ALL__"
],
"update": [],
"delete": []
}
|
Remove CD permissions for program audit reader
|
Remove CD permissions for program audit reader
|
Python
|
apache-2.0
|
NejcZupec/ggrc-core,j0gurt/ggrc-core,prasannav7/ggrc-core,AleksNeStu/ggrc-core,kr41/ggrc-core,andrei-karalionak/ggrc-core,prasannav7/ggrc-core,jmakov/ggrc-core,j0gurt/ggrc-core,AleksNeStu/ggrc-core,NejcZupec/ggrc-core,VinnieJohns/ggrc-core,andrei-karalionak/ggrc-core,jmakov/ggrc-core,j0gurt/ggrc-core,VinnieJohns/ggrc-core,prasannav7/ggrc-core,kr41/ggrc-core,kr41/ggrc-core,NejcZupec/ggrc-core,jmakov/ggrc-core,selahssea/ggrc-core,selahssea/ggrc-core,edofic/ggrc-core,selahssea/ggrc-core,josthkko/ggrc-core,plamut/ggrc-core,jmakov/ggrc-core,edofic/ggrc-core,NejcZupec/ggrc-core,AleksNeStu/ggrc-core,selahssea/ggrc-core,VinnieJohns/ggrc-core,VinnieJohns/ggrc-core,josthkko/ggrc-core,josthkko/ggrc-core,plamut/ggrc-core,AleksNeStu/ggrc-core,j0gurt/ggrc-core,edofic/ggrc-core,jmakov/ggrc-core,andrei-karalionak/ggrc-core,josthkko/ggrc-core,kr41/ggrc-core,prasannav7/ggrc-core,plamut/ggrc-core,edofic/ggrc-core,plamut/ggrc-core,andrei-karalionak/ggrc-core
|
# Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: anze@reciprocitylabs.com
# Maintained By: anze@reciprocitylabs.com
scope = "AuditImplied"
description = """
A user with the ProgramReader role for a private program will also have this
role in the audit context for any audit created for that program.
"""
permissions = {
"read": [
"Request",
"ControlAssessment",
"Issue",
"DocumentationResponse",
"InterviewResponse",
"PopulationSampleResponse",
"Audit",
"AuditObject",
"Meeting",
"ObjectDocument",
"ObjectPerson",
"Relationship",
"Document",
"Meeting",
"UserRole",
"Context",
],
"create": [
"DocumentationResponse",
"InterviewResponse",
"Response",
"ObjectDocument",
"ObjectPerson",
"Relationship",
"Document"
],
"view_object_page": [
"__GGRC_ALL__"
],
"update": [],
"delete": [
"ObjectDocument",
"ObjectPerson",
"Relationship"
]
}
Remove CD permissions for program audit reader
|
# Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: anze@reciprocitylabs.com
# Maintained By: anze@reciprocitylabs.com
scope = "AuditImplied"
description = """
A user with the ProgramReader role for a private program will also have this
role in the audit context for any audit created for that program.
"""
permissions = {
"read": [
"Request",
"ControlAssessment",
"Issue",
"DocumentationResponse",
"InterviewResponse",
"PopulationSampleResponse",
"Audit",
"AuditObject",
"Meeting",
"ObjectDocument",
"ObjectPerson",
"Relationship",
"Document",
"Meeting",
"UserRole",
"Context",
],
"create": [],
"view_object_page": [
"__GGRC_ALL__"
],
"update": [],
"delete": []
}
|
<commit_before># Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: anze@reciprocitylabs.com
# Maintained By: anze@reciprocitylabs.com
scope = "AuditImplied"
description = """
A user with the ProgramReader role for a private program will also have this
role in the audit context for any audit created for that program.
"""
permissions = {
"read": [
"Request",
"ControlAssessment",
"Issue",
"DocumentationResponse",
"InterviewResponse",
"PopulationSampleResponse",
"Audit",
"AuditObject",
"Meeting",
"ObjectDocument",
"ObjectPerson",
"Relationship",
"Document",
"Meeting",
"UserRole",
"Context",
],
"create": [
"DocumentationResponse",
"InterviewResponse",
"Response",
"ObjectDocument",
"ObjectPerson",
"Relationship",
"Document"
],
"view_object_page": [
"__GGRC_ALL__"
],
"update": [],
"delete": [
"ObjectDocument",
"ObjectPerson",
"Relationship"
]
}
<commit_msg>Remove CD permissions for program audit reader<commit_after>
|
# Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: anze@reciprocitylabs.com
# Maintained By: anze@reciprocitylabs.com
scope = "AuditImplied"
description = """
A user with the ProgramReader role for a private program will also have this
role in the audit context for any audit created for that program.
"""
permissions = {
"read": [
"Request",
"ControlAssessment",
"Issue",
"DocumentationResponse",
"InterviewResponse",
"PopulationSampleResponse",
"Audit",
"AuditObject",
"Meeting",
"ObjectDocument",
"ObjectPerson",
"Relationship",
"Document",
"Meeting",
"UserRole",
"Context",
],
"create": [],
"view_object_page": [
"__GGRC_ALL__"
],
"update": [],
"delete": []
}
|
# Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: anze@reciprocitylabs.com
# Maintained By: anze@reciprocitylabs.com
scope = "AuditImplied"
description = """
A user with the ProgramReader role for a private program will also have this
role in the audit context for any audit created for that program.
"""
permissions = {
"read": [
"Request",
"ControlAssessment",
"Issue",
"DocumentationResponse",
"InterviewResponse",
"PopulationSampleResponse",
"Audit",
"AuditObject",
"Meeting",
"ObjectDocument",
"ObjectPerson",
"Relationship",
"Document",
"Meeting",
"UserRole",
"Context",
],
"create": [
"DocumentationResponse",
"InterviewResponse",
"Response",
"ObjectDocument",
"ObjectPerson",
"Relationship",
"Document"
],
"view_object_page": [
"__GGRC_ALL__"
],
"update": [],
"delete": [
"ObjectDocument",
"ObjectPerson",
"Relationship"
]
}
Remove CD permissions for program audit reader# Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: anze@reciprocitylabs.com
# Maintained By: anze@reciprocitylabs.com
scope = "AuditImplied"
description = """
A user with the ProgramReader role for a private program will also have this
role in the audit context for any audit created for that program.
"""
permissions = {
"read": [
"Request",
"ControlAssessment",
"Issue",
"DocumentationResponse",
"InterviewResponse",
"PopulationSampleResponse",
"Audit",
"AuditObject",
"Meeting",
"ObjectDocument",
"ObjectPerson",
"Relationship",
"Document",
"Meeting",
"UserRole",
"Context",
],
"create": [],
"view_object_page": [
"__GGRC_ALL__"
],
"update": [],
"delete": []
}
|
<commit_before># Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: anze@reciprocitylabs.com
# Maintained By: anze@reciprocitylabs.com
scope = "AuditImplied"
description = """
A user with the ProgramReader role for a private program will also have this
role in the audit context for any audit created for that program.
"""
permissions = {
"read": [
"Request",
"ControlAssessment",
"Issue",
"DocumentationResponse",
"InterviewResponse",
"PopulationSampleResponse",
"Audit",
"AuditObject",
"Meeting",
"ObjectDocument",
"ObjectPerson",
"Relationship",
"Document",
"Meeting",
"UserRole",
"Context",
],
"create": [
"DocumentationResponse",
"InterviewResponse",
"Response",
"ObjectDocument",
"ObjectPerson",
"Relationship",
"Document"
],
"view_object_page": [
"__GGRC_ALL__"
],
"update": [],
"delete": [
"ObjectDocument",
"ObjectPerson",
"Relationship"
]
}
<commit_msg>Remove CD permissions for program audit reader<commit_after># Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: anze@reciprocitylabs.com
# Maintained By: anze@reciprocitylabs.com
scope = "AuditImplied"
description = """
A user with the ProgramReader role for a private program will also have this
role in the audit context for any audit created for that program.
"""
permissions = {
"read": [
"Request",
"ControlAssessment",
"Issue",
"DocumentationResponse",
"InterviewResponse",
"PopulationSampleResponse",
"Audit",
"AuditObject",
"Meeting",
"ObjectDocument",
"ObjectPerson",
"Relationship",
"Document",
"Meeting",
"UserRole",
"Context",
],
"create": [],
"view_object_page": [
"__GGRC_ALL__"
],
"update": [],
"delete": []
}
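For illustration only: a minimal sketch of how a declarative permission map like the one above could be consulted. The is_allowed helper and the trimmed example dict are made up here, not part of the GGRC codebase, whose real permission engine is more involved; the sketch only shows why emptying the "create" and "delete" lists removes those rights for the program audit reader.
def is_allowed(permissions, action, resource_type):
    # An action is permitted when its list names the resource type or the
    # "__GGRC_ALL__" wildcard; an empty list therefore permits nothing.
    allowed = permissions.get(action, [])
    return "__GGRC_ALL__" in allowed or resource_type in allowed

program_audit_reader = {
    "read": ["Audit", "Request", "Document"],
    "create": [],
    "update": [],
    "delete": [],
}

assert is_allowed(program_audit_reader, "read", "Audit")
assert not is_allowed(program_audit_reader, "create", "DocumentationResponse")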
|
3a4e371d5d148d0171756776deffc7e5adf40197
|
ava/text_to_speech/__init__.py
|
ava/text_to_speech/__init__.py
|
from ..queues import QueueTtS
from ..components import _BaseComponent
from gtts import gTTS
from playsound import playsound
import os
class TextToSpeech(_BaseComponent):
def __init__(self):
super().__init__()
self.queue_tts = QueueTtS()
def run(self):
sentence = self.queue_tts.get()
print('To say out loud : {}'.format(sentence))
# TODO change the language to match user's settings
tts = gTTS(text=sentence, lang='en')
tts.save("tts.mp3")
playsound("tts.mp3")
os.remove("tts.mp3")
self.queue_tts.task_done()
|
from ..queues import QueueTtS
from ..components import _BaseComponent
from gtts import gTTS
from .playsound import playsound
import os
class TextToSpeech(_BaseComponent):
def __init__(self):
super().__init__()
self.queue_tts = QueueTtS()
def run(self):
sentence = self.queue_tts.get()
print('To say out loud : {}'.format(sentence))
# TODO change the language to match user's settings
tts = gTTS(text=sentence, lang='en')
tts.save("tts.mp3")
playsound("tts.mp3")
os.remove("tts.mp3")
self.queue_tts.task_done()
|
Add use of local playsound.py
|
Add use of local playsound.py
|
Python
|
mit
|
ava-project/AVA
|
from ..queues import QueueTtS
from ..components import _BaseComponent
from gtts import gTTS
from playsound import playsound
import os
class TextToSpeech(_BaseComponent):
def __init__(self):
super().__init__()
self.queue_tts = QueueTtS()
def run(self):
sentence = self.queue_tts.get()
print('To say out loud : {}'.format(sentence))
# TODO change the language to match user's settings
tts = gTTS(text=sentence, lang='en')
tts.save("tts.mp3")
playsound("tts.mp3")
os.remove("tts.mp3")
self.queue_tts.task_done()
Add use of local playsound.py
|
from ..queues import QueueTtS
from ..components import _BaseComponent
from gtts import gTTS
from .playsound import playsound
import os
class TextToSpeech(_BaseComponent):
def __init__(self):
super().__init__()
self.queue_tts = QueueTtS()
def run(self):
sentence = self.queue_tts.get()
print('To say out loud : {}'.format(sentence))
# TODO change the language to match user's settings
tts = gTTS(text=sentence, lang='en')
tts.save("tts.mp3")
playsound("tts.mp3")
os.remove("tts.mp3")
self.queue_tts.task_done()
|
<commit_before>from ..queues import QueueTtS
from ..components import _BaseComponent
from gtts import gTTS
from playsound import playsound
import os
class TextToSpeech(_BaseComponent):
def __init__(self):
super().__init__()
self.queue_tts = QueueTtS()
def run(self):
sentence = self.queue_tts.get()
print('To say out loud : {}'.format(sentence))
# TODO change the language to match user's settings
tts = gTTS(text=sentence, lang='en')
tts.save("tts.mp3")
playsound("tts.mp3")
os.remove("tts.mp3")
self.queue_tts.task_done()
<commit_msg>Add use of local playsound.py<commit_after>
|
from ..queues import QueueTtS
from ..components import _BaseComponent
from gtts import gTTS
from .playsound import playsound
import os
class TextToSpeech(_BaseComponent):
def __init__(self):
super().__init__()
self.queue_tts = QueueTtS()
def run(self):
sentence = self.queue_tts.get()
print('To say out loud : {}'.format(sentence))
# TODO change the language to match user's settings
tts = gTTS(text=sentence, lang='en')
tts.save("tts.mp3")
playsound("tts.mp3")
os.remove("tts.mp3")
self.queue_tts.task_done()
|
from ..queues import QueueTtS
from ..components import _BaseComponent
from gtts import gTTS
from playsound import playsound
import os
class TextToSpeech(_BaseComponent):
def __init__(self):
super().__init__()
self.queue_tts = QueueTtS()
def run(self):
sentence = self.queue_tts.get()
print('To say out loud : {}'.format(sentence))
# TODO change the language to match user's settings
tts = gTTS(text=sentence, lang='en')
tts.save("tts.mp3")
playsound("tts.mp3")
os.remove("tts.mp3")
self.queue_tts.task_done()
Add use of local playsound.pyfrom ..queues import QueueTtS
from ..components import _BaseComponent
from gtts import gTTS
from .playsound import playsound
import os
class TextToSpeech(_BaseComponent):
def __init__(self):
super().__init__()
self.queue_tts = QueueTtS()
def run(self):
sentence = self.queue_tts.get()
print('To say out loud : {}'.format(sentence))
# TODO change the language to match user's settings
tts = gTTS(text=sentence, lang='en')
tts.save("tts.mp3")
playsound("tts.mp3")
os.remove("tts.mp3")
self.queue_tts.task_done()
|
<commit_before>from ..queues import QueueTtS
from ..components import _BaseComponent
from gtts import gTTS
from playsound import playsound
import os
class TextToSpeech(_BaseComponent):
def __init__(self):
super().__init__()
self.queue_tts = QueueTtS()
def run(self):
sentence = self.queue_tts.get()
print('To say out loud : {}'.format(sentence))
# TODO change the language to match user's settings
tts = gTTS(text=sentence, lang='en')
tts.save("tts.mp3")
playsound("tts.mp3")
os.remove("tts.mp3")
self.queue_tts.task_done()
<commit_msg>Add use of local playsound.py<commit_after>from ..queues import QueueTtS
from ..components import _BaseComponent
from gtts import gTTS
from .playsound import playsound
import os
class TextToSpeech(_BaseComponent):
def __init__(self):
super().__init__()
self.queue_tts = QueueTtS()
def run(self):
sentence = self.queue_tts.get()
print('To say out loud : {}'.format(sentence))
# TODO change the language to match user's settings
tts = gTTS(text=sentence, lang='en')
tts.save("tts.mp3")
playsound("tts.mp3")
os.remove("tts.mp3")
self.queue_tts.task_done()
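A rough usage sketch for the TextToSpeech worker above, assuming QueueTtS follows the standard queue.Queue interface implied by the get() and task_done() calls; the sentence and the way AVA actually wires its components together are illustrative guesses.
from ava.text_to_speech import TextToSpeech  # package path taken from the file location above

tts = TextToSpeech()
# A producer elsewhere in AVA would enqueue the text to speak; put() is
# assumed from the queue-like get()/task_done() usage in run().
tts.queue_tts.put("Hello, this is AVA speaking.")
tts.run()  # blocks on get(), synthesises speech with gTTS, plays and then deletes tts.mp3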
|
be8b36b141d32372e9c08e73cbbe4620d86effac
|
chainer/ya/utils/range_logger.py
|
chainer/ya/utils/range_logger.py
|
import logging
logger = logging.getLogger()
logger.setLevel(getattr(logging, 'INFO'))
logger.addHandler(logging.StreamHandler())
class rangelog:
def __init__(self, name):
self.name = name
def __enter__(self):
logger.info("--> Start: {}".format(self.name))
return logger
def __exit__(self, *args):
logger.info("<-- End: {}".format(self.name))
|
import logging
class rangelog:
logger = None
@classmethod
def set_logger(cls, logger=None):
if logger is None:
cls.logger = logging.getLogger()
cls.logger.setLevel(getattr(logging, 'INFO'))
cls.logger.addHandler(logging.StreamHandler())
elif isinstance(logger, logging.Logger):
cls.logger = logger
def __init__(self, name):
if rangelog.logger is None:
rangelog.set_logger()
self.name = name
def __enter__(self):
rangelog.logger.info("--> Start: {}".format(self.name))
return rangelog.logger
def __exit__(self, *args):
rangelog.logger.info("<-- End: {}".format(self.name))
|
Change to use class variable as logger
|
Change to use class variable as logger
|
Python
|
mit
|
yasuyuky/chainer-ya-utils
|
import logging
logger = logging.getLogger()
logger.setLevel(getattr(logging, 'INFO'))
logger.addHandler(logging.StreamHandler())
class rangelog:
def __init__(self, name):
self.name = name
def __enter__(self):
logger.info("--> Start: {}".format(self.name))
return logger
def __exit__(self, *args):
logger.info("<-- End: {}".format(self.name))
Change to use class variable as logger
|
import logging
class rangelog:
logger = None
@classmethod
def set_logger(cls, logger=None):
if logger is None:
cls.logger = logging.getLogger()
cls.logger.setLevel(getattr(logging, 'INFO'))
cls.logger.addHandler(logging.StreamHandler())
elif isinstance(logger, logging.Logger):
cls.logger = logger
def __init__(self, name):
if rangelog.logger is None:
rangelog.set_logger()
self.name = name
def __enter__(self):
rangelog.logger.info("--> Start: {}".format(self.name))
return rangelog.logger
def __exit__(self, *args):
rangelog.logger.info("<-- End: {}".format(self.name))
|
<commit_before>import logging
logger = logging.getLogger()
logger.setLevel(getattr(logging, 'INFO'))
logger.addHandler(logging.StreamHandler())
class rangelog:
def __init__(self, name):
self.name = name
def __enter__(self):
logger.info("--> Start: {}".format(self.name))
return logger
def __exit__(self, *args):
logger.info("<-- End: {}".format(self.name))
<commit_msg>Change to use class variable as logger<commit_after>
|
import logging
class rangelog:
logger = None
@classmethod
def set_logger(cls, logger=None):
if logger is None:
cls.logger = logging.getLogger()
cls.logger.setLevel(getattr(logging, 'INFO'))
cls.logger.addHandler(logging.StreamHandler())
elif isinstance(logger, logging.Logger):
cls.logger = logger
def __init__(self, name):
if rangelog.logger is None:
rangelog.set_logger()
self.name = name
def __enter__(self):
rangelog.logger.info("--> Start: {}".format(self.name))
return rangelog.logger
def __exit__(self, *args):
rangelog.logger.info("<-- End: {}".format(self.name))
|
import logging
logger = logging.getLogger()
logger.setLevel(getattr(logging, 'INFO'))
logger.addHandler(logging.StreamHandler())
class rangelog:
def __init__(self, name):
self.name = name
def __enter__(self):
logger.info("--> Start: {}".format(self.name))
return logger
def __exit__(self, *args):
logger.info("<-- End: {}".format(self.name))
Change to use class variable as loggerimport logging
class rangelog:
logger = None
@classmethod
def set_logger(cls, logger=None):
if logger is None:
cls.logger = logging.getLogger()
cls.logger.setLevel(getattr(logging, 'INFO'))
cls.logger.addHandler(logging.StreamHandler())
elif isinstance(logger, logging.Logger):
cls.logger = logger
def __init__(self, name):
if rangelog.logger is None:
rangelog.set_logger()
self.name = name
def __enter__(self):
rangelog.logger.info("--> Start: {}".format(self.name))
return rangelog.logger
def __exit__(self, *args):
rangelog.logger.info("<-- End: {}".format(self.name))
|
<commit_before>import logging
logger = logging.getLogger()
logger.setLevel(getattr(logging, 'INFO'))
logger.addHandler(logging.StreamHandler())
class rangelog:
def __init__(self, name):
self.name = name
def __enter__(self):
logger.info("--> Start: {}".format(self.name))
return logger
def __exit__(self, *args):
logger.info("<-- End: {}".format(self.name))
<commit_msg>Change to use class variable as logger<commit_after>import logging
class rangelog:
logger = None
@classmethod
def set_logger(cls, logger=None):
if logger is None:
cls.logger = logging.getLogger()
cls.logger.setLevel(getattr(logging, 'INFO'))
cls.logger.addHandler(logging.StreamHandler())
elif isinstance(logger, logging.Logger):
cls.logger = logger
def __init__(self, name):
if rangelog.logger is None:
rangelog.set_logger()
self.name = name
def __enter__(self):
rangelog.logger.info("--> Start: {}".format(self.name))
return rangelog.logger
def __exit__(self, *args):
rangelog.logger.info("<-- End: {}".format(self.name))
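A short usage sketch for the refactored rangelog above. The "my_app" logger name is invented, and the import path is only inferred from the file layout; without set_logger() the first rangelog instance falls back to a lazily built root logger at INFO level, matching the old module-level setup.
import logging

from ya.utils.range_logger import rangelog  # module path assumed from chainer/ya/utils/range_logger.py

logging.basicConfig(level=logging.INFO)      # make INFO records visible
app_logger = logging.getLogger("my_app")     # hypothetical application logger

# Route the Start/End markers through the application logger instead of
# letting set_logger() build the root-logger fallback lazily.
rangelog.set_logger(app_logger)

with rangelog("load dataset") as log:
    log.info("inside the range")             # emitted between the Start and End lines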
|
f26202f688f7612971e35b0ae33a2f961a117876
|
select_multiple_field/widgets.py
|
select_multiple_field/widgets.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.forms import widgets
from django.forms.util import flatatt
from django.utils.safestring import mark_safe
class SelectMultipleField(widgets.SelectMultiple):
"""Multiple select widget ready for jQuery multiselect.js"""
allow_multiple_selected = True
def render(self, name, value, attrs=None, choices=()):
rendered_attrs = {'class': 'select-multiple-field'}
rendered_attrs.update(attrs)
if value is None:
value = []
final_attrs = self.build_attrs(rendered_attrs, name=name)
# output = [u'<select multiple="multiple"%s>' % flatatt(final_attrs)]
output = ['<select multiple="multiple"{0}>'.format(flatatt(final_attrs))]
options = self.render_options(choices, value)
if options:
output.append(options)
output.append('</select>')
return mark_safe('\n'.join(output))
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.forms import widgets
from django.forms.util import flatatt
from django.utils.safestring import mark_safe
try:
from django.utils.html import format_html
except ImportError:
def format_html(format_string, *args, **kwargs):
return format_string.format(*args, **kwargs)
class SelectMultipleField(widgets.SelectMultiple):
"""Multiple select widget ready for jQuery multiselect.js"""
allow_multiple_selected = True
def render(self, name, value, attrs=None, choices=()):
rendered_attrs = {'class': 'select-multiple-field'}
rendered_attrs.update(attrs)
if value is None:
value = []
final_attrs = self.build_attrs(rendered_attrs, name=name)
# output = [u'<select multiple="multiple"%s>' % flatatt(final_attrs)]
output = [format_html('<select multiple="multiple"{0}>',
flatatt(final_attrs))]
options = self.render_options(choices, value)
if options:
output.append(options)
output.append('</select>')
return mark_safe('\n'.join(output))
|
Use format_html if it is available, fallback for dj 1.4
|
Use format_html if it is available, fallback for dj 1.4
|
Python
|
bsd-3-clause
|
kelvinwong-ca/django-select-multiple-field,kelvinwong-ca/django-select-multiple-field,kelvinwong-ca/django-select-multiple-field
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.forms import widgets
from django.forms.util import flatatt
from django.utils.safestring import mark_safe
class SelectMultipleField(widgets.SelectMultiple):
"""Multiple select widget ready for jQuery multiselect.js"""
allow_multiple_selected = True
def render(self, name, value, attrs=None, choices=()):
rendered_attrs = {'class': 'select-multiple-field'}
rendered_attrs.update(attrs)
if value is None:
value = []
final_attrs = self.build_attrs(rendered_attrs, name=name)
# output = [u'<select multiple="multiple"%s>' % flatatt(final_attrs)]
output = ['<select multiple="multiple"{0}>'.format(flatatt(final_attrs))]
options = self.render_options(choices, value)
if options:
output.append(options)
output.append('</select>')
return mark_safe('\n'.join(output))
Use format_html if it is available, fallback for dj 1.4
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.forms import widgets
from django.forms.util import flatatt
from django.utils.safestring import mark_safe
try:
from django.utils.html import format_html
except ImportError:
def format_html(format_string, *args, **kwargs):
return format_string.format(*args, **kwargs)
class SelectMultipleField(widgets.SelectMultiple):
"""Multiple select widget ready for jQuery multiselect.js"""
allow_multiple_selected = True
def render(self, name, value, attrs=None, choices=()):
rendered_attrs = {'class': 'select-multiple-field'}
rendered_attrs.update(attrs)
if value is None:
value = []
final_attrs = self.build_attrs(rendered_attrs, name=name)
# output = [u'<select multiple="multiple"%s>' % flatatt(final_attrs)]
output = [format_html('<select multiple="multiple"{0}>',
flatatt(final_attrs))]
options = self.render_options(choices, value)
if options:
output.append(options)
output.append('</select>')
return mark_safe('\n'.join(output))
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.forms import widgets
from django.forms.util import flatatt
from django.utils.safestring import mark_safe
class SelectMultipleField(widgets.SelectMultiple):
"""Multiple select widget ready for jQuery multiselect.js"""
allow_multiple_selected = True
def render(self, name, value, attrs=None, choices=()):
rendered_attrs = {'class': 'select-multiple-field'}
rendered_attrs.update(attrs)
if value is None:
value = []
final_attrs = self.build_attrs(rendered_attrs, name=name)
# output = [u'<select multiple="multiple"%s>' % flatatt(final_attrs)]
output = ['<select multiple="multiple"{0}>'.format(flatatt(final_attrs))]
options = self.render_options(choices, value)
if options:
output.append(options)
output.append('</select>')
return mark_safe('\n'.join(output))
<commit_msg>Use format_html if it is available, fallback for dj 1.4<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.forms import widgets
from django.forms.util import flatatt
from django.utils.safestring import mark_safe
try:
from django.utils.html import format_html
except ImportError:
def format_html(format_string, *args, **kwargs):
return format_string.format(*args, **kwargs)
class SelectMultipleField(widgets.SelectMultiple):
"""Multiple select widget ready for jQuery multiselect.js"""
allow_multiple_selected = True
def render(self, name, value, attrs=None, choices=()):
rendered_attrs = {'class': 'select-multiple-field'}
rendered_attrs.update(attrs)
if value is None:
value = []
final_attrs = self.build_attrs(rendered_attrs, name=name)
# output = [u'<select multiple="multiple"%s>' % flatatt(final_attrs)]
output = [format_html('<select multiple="multiple"{0}>',
flatatt(final_attrs))]
options = self.render_options(choices, value)
if options:
output.append(options)
output.append('</select>')
return mark_safe('\n'.join(output))
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.forms import widgets
from django.forms.util import flatatt
from django.utils.safestring import mark_safe
class SelectMultipleField(widgets.SelectMultiple):
"""Multiple select widget ready for jQuery multiselect.js"""
allow_multiple_selected = True
def render(self, name, value, attrs=None, choices=()):
rendered_attrs = {'class': 'select-multiple-field'}
rendered_attrs.update(attrs)
if value is None:
value = []
final_attrs = self.build_attrs(rendered_attrs, name=name)
# output = [u'<select multiple="multiple"%s>' % flatatt(final_attrs)]
output = ['<select multiple="multiple"{0}>'.format(flatatt(final_attrs))]
options = self.render_options(choices, value)
if options:
output.append(options)
output.append('</select>')
return mark_safe('\n'.join(output))
Use format_html if it is available, fallback for dj 1.4# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.forms import widgets
from django.forms.util import flatatt
from django.utils.safestring import mark_safe
try:
from django.utils.html import format_html
except ImportError:
def format_html(format_string, *args, **kwargs):
return format_string.format(*args, **kwargs)
class SelectMultipleField(widgets.SelectMultiple):
"""Multiple select widget ready for jQuery multiselect.js"""
allow_multiple_selected = True
def render(self, name, value, attrs=None, choices=()):
rendered_attrs = {'class': 'select-multiple-field'}
rendered_attrs.update(attrs)
if value is None:
value = []
final_attrs = self.build_attrs(rendered_attrs, name=name)
# output = [u'<select multiple="multiple"%s>' % flatatt(final_attrs)]
output = [format_html('<select multiple="multiple"{0}>',
flatatt(final_attrs))]
options = self.render_options(choices, value)
if options:
output.append(options)
output.append('</select>')
return mark_safe('\n'.join(output))
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.forms import widgets
from django.forms.util import flatatt
from django.utils.safestring import mark_safe
class SelectMultipleField(widgets.SelectMultiple):
"""Multiple select widget ready for jQuery multiselect.js"""
allow_multiple_selected = True
def render(self, name, value, attrs=None, choices=()):
rendered_attrs = {'class': 'select-multiple-field'}
rendered_attrs.update(attrs)
if value is None:
value = []
final_attrs = self.build_attrs(rendered_attrs, name=name)
# output = [u'<select multiple="multiple"%s>' % flatatt(final_attrs)]
output = ['<select multiple="multiple"{0}>'.format(flatatt(final_attrs))]
options = self.render_options(choices, value)
if options:
output.append(options)
output.append('</select>')
return mark_safe('\n'.join(output))
<commit_msg>Use format_html if it is available, fallback for dj 1.4<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.forms import widgets
from django.forms.util import flatatt
from django.utils.safestring import mark_safe
try:
from django.utils.html import format_html
except ImportError:
def format_html(format_string, *args, **kwargs):
return format_string.format(*args, **kwargs)
class SelectMultipleField(widgets.SelectMultiple):
"""Multiple select widget ready for jQuery multiselect.js"""
allow_multiple_selected = True
def render(self, name, value, attrs=None, choices=()):
rendered_attrs = {'class': 'select-multiple-field'}
rendered_attrs.update(attrs)
if value is None:
value = []
final_attrs = self.build_attrs(rendered_attrs, name=name)
# output = [u'<select multiple="multiple"%s>' % flatatt(final_attrs)]
output = [format_html('<select multiple="multiple"{0}>',
flatatt(final_attrs))]
options = self.render_options(choices, value)
if options:
output.append(options)
output.append('</select>')
return mark_safe('\n'.join(output))
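The compatibility shim above degrades gracefully on Django 1.4, where django.utils.html.format_html does not yet exist. A standalone sketch of the same pattern; note that the fallback is a plain str.format() and, unlike the real format_html (Django 1.5 and later), does not HTML-escape its arguments, which is the price of 1.4 support.
try:
    from django.utils.html import format_html   # Django >= 1.5, escapes arguments
except ImportError:
    def format_html(format_string, *args, **kwargs):
        # Django 1.4 fallback: no escaping, just interpolation.
        return format_string.format(*args, **kwargs)

print(format_html('<select multiple="multiple"{0}>', ' name="tags"'))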
|
1a00149b13a771ee18033a1abf1a3c30526b3d81
|
signac/__init__.py
|
signac/__init__.py
|
"""
signac aids in the management, access and analysis of large-scale
computational investigations.
The framework provides a simple data model, which helps to organize
data production and post-processing as well as distribution among collaboratos.
"""
# The VERSION string represents the actual (development) version of the
# package.
VERSION = '0.1.7'
# The VERSION_TUPLE is used to identify whether signac projects, are
# required to be updated and can therefore lag behind the actual version.
VERSION_TUPLE = 0, 1, 7
from .common.host import get_db
__all__ = ['get_db']
|
"""
signac aids in the management, access and analysis of large-scale
computational investigations.
The framework provides a simple data model, which helps to organize
data production and post-processing as well as distribution among collaboratos.
"""
# The VERSION string represents the actual (development) version of the
# package.
VERSION = '0.1.7'
# The VERSION_TUPLE is used to identify whether signac projects, are
# required to be updated and can therefore lag behind the actual version.
VERSION_TUPLE = 0, 1, 7
__all__ = ['VERSION', 'VERSION_TUPLE']
|
Remove everything but the VERSION constants from global namespace.
|
Remove everything but the VERSION constants from global namespace.
|
Python
|
bsd-3-clause
|
csadorf/signac,csadorf/signac
|
"""
signac aids in the management, access and analysis of large-scale
computational investigations.
The framework provides a simple data model, which helps to organize
data production and post-processing as well as distribution among collaboratos.
"""
# The VERSION string represents the actual (development) version of the
# package.
VERSION = '0.1.7'
# The VERSION_TUPLE is used to identify whether signac projects, are
# required to be updated and can therefore lag behind the actual version.
VERSION_TUPLE = 0, 1, 7
from .common.host import get_db
__all__ = ['get_db']
Remove everything but the VERSION constants from global namespace.
|
"""
signac aids in the management, access and analysis of large-scale
computational investigations.
The framework provides a simple data model, which helps to organize
data production and post-processing as well as distribution among collaboratos.
"""
# The VERSION string represents the actual (development) version of the
# package.
VERSION = '0.1.7'
# The VERSION_TUPLE is used to identify whether signac projects, are
# required to be updated and can therefore lag behind the actual version.
VERSION_TUPLE = 0, 1, 7
__all__ = ['VERSION', 'VERSION_TUPLE']
|
<commit_before>"""
signac aids in the management, access and analysis of large-scale
computational investigations.
The framework provides a simple data model, which helps to organize
data production and post-processing as well as distribution among collaboratos.
"""
# The VERSION string represents the actual (development) version of the
# package.
VERSION = '0.1.7'
# The VERSION_TUPLE is used to identify whether signac projects, are
# required to be updated and can therefore lag behind the actual version.
VERSION_TUPLE = 0, 1, 7
from .common.host import get_db
__all__ = ['get_db']
<commit_msg>Remove everything but the VERSION constants from global namespace.<commit_after>
|
"""
signac aids in the management, access and analysis of large-scale
computational investigations.
The framework provides a simple data model, which helps to organize
data production and post-processing as well as distribution among collaboratos.
"""
# The VERSION string represents the actual (development) version of the
# package.
VERSION = '0.1.7'
# The VERSION_TUPLE is used to identify whether signac projects, are
# required to be updated and can therefore lag behind the actual version.
VERSION_TUPLE = 0, 1, 7
__all__ = ['VERSION', 'VERSION_TUPLE']
|
"""
signac aids in the management, access and analysis of large-scale
computational investigations.
The framework provides a simple data model, which helps to organize
data production and post-processing as well as distribution among collaboratos.
"""
# The VERSION string represents the actual (development) version of the
# package.
VERSION = '0.1.7'
# The VERSION_TUPLE is used to identify whether signac projects, are
# required to be updated and can therefore lag behind the actual version.
VERSION_TUPLE = 0, 1, 7
from .common.host import get_db
__all__ = ['get_db']
Remove everything but the VERSION constants from global namespace."""
signac aids in the management, access and analysis of large-scale
computational investigations.
The framework provides a simple data model, which helps to organize
data production and post-processing as well as distribution among collaboratos.
"""
# The VERSION string represents the actual (development) version of the
# package.
VERSION = '0.1.7'
# The VERSION_TUPLE is used to identify whether signac projects, are
# required to be updated and can therefore lag behind the actual version.
VERSION_TUPLE = 0, 1, 7
__all__ = ['VERSION', 'VERSION_TUPLE']
|
<commit_before>"""
signac aids in the management, access and analysis of large-scale
computational investigations.
The framework provides a simple data model, which helps to organize
data production and post-processing as well as distribution among collaboratos.
"""
# The VERSION string represents the actual (development) version of the
# package.
VERSION = '0.1.7'
# The VERSION_TUPLE is used to identify whether signac projects, are
# required to be updated and can therefore lag behind the actual version.
VERSION_TUPLE = 0, 1, 7
from .common.host import get_db
__all__ = ['get_db']
<commit_msg>Remove everything but the VERSION constants from global namespace.<commit_after>"""
signac aids in the management, access and analysis of large-scale
computational investigations.
The framework provides a simple data model, which helps to organize
data production and post-processing as well as distribution among collaboratos.
"""
# The VERSION string represents the actual (development) version of the
# package.
VERSION = '0.1.7'
# The VERSION_TUPLE is used to identify whether signac projects, are
# required to be updated and can therefore lag behind the actual version.
VERSION_TUPLE = 0, 1, 7
__all__ = ['VERSION', 'VERSION_TUPLE']
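A tiny sketch of the compatibility check the VERSION_TUPLE comment above alludes to; the threshold is arbitrary. Code that previously relied on "from signac import get_db" would now import it from signac.common.host, where the removed line pointed.
import signac

# Tuples compare element-wise, which is why a tuple is kept next to the
# human-readable VERSION string.
if signac.VERSION_TUPLE < (0, 1, 7):
    raise RuntimeError("project requires signac >= 0.1.7, found " + signac.VERSION)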
|
f7a590e97715bf055928e8510c3822cc9b8272e8
|
changes/api/project_commit_builds.py
|
changes/api/project_commit_builds.py
|
from __future__ import absolute_import, division, unicode_literals
from sqlalchemy.orm import joinedload, contains_eager
from changes.api.base import APIView
from changes.models import Build, Project, Revision, Source
class ProjectCommitBuildsAPIView(APIView):
def get(self, project_id, commit_id):
project = Project.get(project_id)
if not project:
return '', 404
repo = project.repository
revision = Revision.query.filter(
Revision.repository_id == repo.id,
Revision.sha == commit_id,
).join(Revision.author).first()
if not revision:
return '', 404
build_query = Build.query.options(
joinedload('author'),
contains_eager('source').joinedload('revision'),
).join(
Source, Build.source_id == Source.id,
).filter(
Build.project_id == project.id,
Source.revision_sha == revision.sha,
Source.patch == None, # NOQA
).order_by(Build.date_created.desc())
return self.paginate(build_query)
|
from __future__ import absolute_import, division, unicode_literals
from sqlalchemy.orm import joinedload, contains_eager
from changes.api.base import APIView
from changes.models import Build, Project, Revision, Source
class ProjectCommitBuildsAPIView(APIView):
def get(self, project_id, commit_id):
project = Project.get(project_id)
if not project:
return '', 404
repo = project.repository
revision = Revision.query.filter(
Revision.repository_id == repo.id,
Revision.sha == commit_id,
).outerjoin(Revision.author).first()
if not revision:
return '', 404
build_query = Build.query.options(
joinedload('author'),
contains_eager('source').joinedload('revision'),
).join(
Source, Build.source_id == Source.id,
).filter(
Build.project_id == project.id,
Source.revision_sha == revision.sha,
Source.patch == None, # NOQA
).order_by(Build.date_created.desc())
return self.paginate(build_query)
|
Switch revision query to outer join
|
Switch revision query to outer join
|
Python
|
apache-2.0
|
dropbox/changes,bowlofstew/changes,wfxiang08/changes,wfxiang08/changes,dropbox/changes,bowlofstew/changes,wfxiang08/changes,dropbox/changes,bowlofstew/changes,dropbox/changes,bowlofstew/changes,wfxiang08/changes
|
from __future__ import absolute_import, division, unicode_literals
from sqlalchemy.orm import joinedload, contains_eager
from changes.api.base import APIView
from changes.models import Build, Project, Revision, Source
class ProjectCommitBuildsAPIView(APIView):
def get(self, project_id, commit_id):
project = Project.get(project_id)
if not project:
return '', 404
repo = project.repository
revision = Revision.query.filter(
Revision.repository_id == repo.id,
Revision.sha == commit_id,
).join(Revision.author).first()
if not revision:
return '', 404
build_query = Build.query.options(
joinedload('author'),
contains_eager('source').joinedload('revision'),
).join(
Source, Build.source_id == Source.id,
).filter(
Build.project_id == project.id,
Source.revision_sha == revision.sha,
Source.patch == None, # NOQA
).order_by(Build.date_created.desc())
return self.paginate(build_query)
Switch revision query to outer join
|
from __future__ import absolute_import, division, unicode_literals
from sqlalchemy.orm import joinedload, contains_eager
from changes.api.base import APIView
from changes.models import Build, Project, Revision, Source
class ProjectCommitBuildsAPIView(APIView):
def get(self, project_id, commit_id):
project = Project.get(project_id)
if not project:
return '', 404
repo = project.repository
revision = Revision.query.filter(
Revision.repository_id == repo.id,
Revision.sha == commit_id,
).outerjoin(Revision.author).first()
if not revision:
return '', 404
build_query = Build.query.options(
joinedload('author'),
contains_eager('source').joinedload('revision'),
).join(
Source, Build.source_id == Source.id,
).filter(
Build.project_id == project.id,
Source.revision_sha == revision.sha,
Source.patch == None, # NOQA
).order_by(Build.date_created.desc())
return self.paginate(build_query)
|
<commit_before>from __future__ import absolute_import, division, unicode_literals
from sqlalchemy.orm import joinedload, contains_eager
from changes.api.base import APIView
from changes.models import Build, Project, Revision, Source
class ProjectCommitBuildsAPIView(APIView):
def get(self, project_id, commit_id):
project = Project.get(project_id)
if not project:
return '', 404
repo = project.repository
revision = Revision.query.filter(
Revision.repository_id == repo.id,
Revision.sha == commit_id,
).join(Revision.author).first()
if not revision:
return '', 404
build_query = Build.query.options(
joinedload('author'),
contains_eager('source').joinedload('revision'),
).join(
Source, Build.source_id == Source.id,
).filter(
Build.project_id == project.id,
Source.revision_sha == revision.sha,
Source.patch == None, # NOQA
).order_by(Build.date_created.desc())
return self.paginate(build_query)
<commit_msg>Switch revision query to outer join<commit_after>
|
from __future__ import absolute_import, division, unicode_literals
from sqlalchemy.orm import joinedload, contains_eager
from changes.api.base import APIView
from changes.models import Build, Project, Revision, Source
class ProjectCommitBuildsAPIView(APIView):
def get(self, project_id, commit_id):
project = Project.get(project_id)
if not project:
return '', 404
repo = project.repository
revision = Revision.query.filter(
Revision.repository_id == repo.id,
Revision.sha == commit_id,
).outerjoin(Revision.author).first()
if not revision:
return '', 404
build_query = Build.query.options(
joinedload('author'),
contains_eager('source').joinedload('revision'),
).join(
Source, Build.source_id == Source.id,
).filter(
Build.project_id == project.id,
Source.revision_sha == revision.sha,
Source.patch == None, # NOQA
).order_by(Build.date_created.desc())
return self.paginate(build_query)
|
from __future__ import absolute_import, division, unicode_literals
from sqlalchemy.orm import joinedload, contains_eager
from changes.api.base import APIView
from changes.models import Build, Project, Revision, Source
class ProjectCommitBuildsAPIView(APIView):
def get(self, project_id, commit_id):
project = Project.get(project_id)
if not project:
return '', 404
repo = project.repository
revision = Revision.query.filter(
Revision.repository_id == repo.id,
Revision.sha == commit_id,
).join(Revision.author).first()
if not revision:
return '', 404
build_query = Build.query.options(
joinedload('author'),
contains_eager('source').joinedload('revision'),
).join(
Source, Build.source_id == Source.id,
).filter(
Build.project_id == project.id,
Source.revision_sha == revision.sha,
Source.patch == None, # NOQA
).order_by(Build.date_created.desc())
return self.paginate(build_query)
Switch revision query to outer joinfrom __future__ import absolute_import, division, unicode_literals
from sqlalchemy.orm import joinedload, contains_eager
from changes.api.base import APIView
from changes.models import Build, Project, Revision, Source
class ProjectCommitBuildsAPIView(APIView):
def get(self, project_id, commit_id):
project = Project.get(project_id)
if not project:
return '', 404
repo = project.repository
revision = Revision.query.filter(
Revision.repository_id == repo.id,
Revision.sha == commit_id,
).outerjoin(Revision.author).first()
if not revision:
return '', 404
build_query = Build.query.options(
joinedload('author'),
contains_eager('source').joinedload('revision'),
).join(
Source, Build.source_id == Source.id,
).filter(
Build.project_id == project.id,
Source.revision_sha == revision.sha,
Source.patch == None, # NOQA
).order_by(Build.date_created.desc())
return self.paginate(build_query)
|
<commit_before>from __future__ import absolute_import, division, unicode_literals
from sqlalchemy.orm import joinedload, contains_eager
from changes.api.base import APIView
from changes.models import Build, Project, Revision, Source
class ProjectCommitBuildsAPIView(APIView):
def get(self, project_id, commit_id):
project = Project.get(project_id)
if not project:
return '', 404
repo = project.repository
revision = Revision.query.filter(
Revision.repository_id == repo.id,
Revision.sha == commit_id,
).join(Revision.author).first()
if not revision:
return '', 404
build_query = Build.query.options(
joinedload('author'),
contains_eager('source').joinedload('revision'),
).join(
Source, Build.source_id == Source.id,
).filter(
Build.project_id == project.id,
Source.revision_sha == revision.sha,
Source.patch == None, # NOQA
).order_by(Build.date_created.desc())
return self.paginate(build_query)
<commit_msg>Switch revision query to outer join<commit_after>from __future__ import absolute_import, division, unicode_literals
from sqlalchemy.orm import joinedload, contains_eager
from changes.api.base import APIView
from changes.models import Build, Project, Revision, Source
class ProjectCommitBuildsAPIView(APIView):
def get(self, project_id, commit_id):
project = Project.get(project_id)
if not project:
return '', 404
repo = project.repository
revision = Revision.query.filter(
Revision.repository_id == repo.id,
Revision.sha == commit_id,
).outerjoin(Revision.author).first()
if not revision:
return '', 404
build_query = Build.query.options(
joinedload('author'),
contains_eager('source').joinedload('revision'),
).join(
Source, Build.source_id == Source.id,
).filter(
Build.project_id == project.id,
Source.revision_sha == revision.sha,
Source.patch == None, # NOQA
).order_by(Build.date_created.desc())
return self.paginate(build_query)
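The reason for the join-to-outerjoin switch above: an inner join on Revision.author drops revisions whose author foreign key is NULL, so a commit with no recorded author would not be found and the view would fall through to the 404. A generic SQLAlchemy sketch of the difference, using toy models rather than the real changes schema and written against SQLAlchemy 1.4-style imports, which are newer than what changes itself used.
from sqlalchemy import Column, ForeignKey, Integer, String, create_engine
from sqlalchemy.orm import Session, declarative_base, relationship

Base = declarative_base()

class Author(Base):
    __tablename__ = "author"
    id = Column(Integer, primary_key=True)

class Revision(Base):
    __tablename__ = "revision"
    id = Column(Integer, primary_key=True)
    sha = Column(String)
    author_id = Column(Integer, ForeignKey("author.id"), nullable=True)
    author = relationship(Author)

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add(Revision(sha="abc123", author_id=None))  # authorless commit
    session.commit()
    inner = session.query(Revision).join(Revision.author).filter(Revision.sha == "abc123").first()
    outer = session.query(Revision).outerjoin(Revision.author).filter(Revision.sha == "abc123").first()
    print(inner)  # None: the inner join discards the row, hence the old 404
    print(outer)  # <Revision ...>: found despite the missing author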
|
8baff5cb627ed55f748123d536273c4a4e648d77
|
obelisk-cardiograph.py
|
obelisk-cardiograph.py
|
#!/usr/bin/env python
"""
obelisk-cardiograph
Monitor obelisk servers' heartbeat.
Author: Noel Maersk <veox ta wemakethings tod net>
Based on "Pubsub envelope subscriber" example from zguide
Author: Guillaume Aubert (gaubert) <guillaume(dot)aubert(at)gmail(dot)com>
"""
import zmq
def main():
""" main method """
serverip = '79.98.29.93'
serverport = '9092'
ctx = zmq.Context()
s = ctx.socket(zmq.SUB)
s.connect('tcp://' + serverip + ':' + serverport)
s.setsockopt(zmq.SUBSCRIBE, b'') # subscribe to everything
print("Entering main loop.")
while True:
reply = s.recv()
reply = reply[::-1] # obelisk sent little-endian
data = ':'.join(hex(x)[2:] for x in reply)
print(serverip, data)
# We never get here but clean up anyhow
s.close()
ctx.term()
if __name__ == "__main__":
main()
|
#!/usr/bin/env python
"""
obelisk-cardiograph
Monitor obelisk servers' heartbeat.
Author: Noel Maersk <veox ta wemakethings tod net>
Based on "Pubsub envelope subscriber" example from zguide
Author: Guillaume Aubert (gaubert) <guillaume(dot)aubert(at)gmail(dot)com>
"""
import zmq
def main():
""" main method """
servers = ['preacher.veox.pw:9092']
context = zmq.Context()
s = context.socket(zmq.SUB)
s.connect('tcp://' + servers[0])
s.setsockopt(zmq.SUBSCRIBE, b'') # subscribe to everything
print("Entering main loop.")
while True:
reply = s.recv()
reply = reply[::-1] # obelisk sent little-endian
data = ':'.join(hex(x)[2:] for x in reply)
print(servers[0], data)
# We never get here but clean up anyhow
s.close()
context.term()
if __name__ == "__main__":
main()
|
Put servers in a list, use FQDNs if available, rename context var.
|
Put servers in a list, use FQDNs if available, rename context var.
|
Python
|
agpl-3.0
|
veox/obelisk-cardiograph
|
#!/usr/bin/env python
"""
obelisk-cardiograph
Monitor obelisk servers' heartbeat.
Author: Noel Maersk <veox ta wemakethings tod net>
Based on "Pubsub envelope subscriber" example from zguide
Author: Guillaume Aubert (gaubert) <guillaume(dot)aubert(at)gmail(dot)com>
"""
import zmq
def main():
""" main method """
serverip = '79.98.29.93'
serverport = '9092'
ctx = zmq.Context()
s = ctx.socket(zmq.SUB)
s.connect('tcp://' + serverip + ':' + serverport)
s.setsockopt(zmq.SUBSCRIBE, b'') # subscribe to everything
print("Entering main loop.")
while True:
reply = s.recv()
reply = reply[::-1] # obelisk sent little-endian
data = ':'.join(hex(x)[2:] for x in reply)
print(serverip, data)
# We never get here but clean up anyhow
s.close()
ctx.term()
if __name__ == "__main__":
main()
Put servers in a list, use FQDNs if available, rename context var.
|
#!/usr/bin/env python
"""
obelisk-cardiograph
Monitor obelisk servers' heartbeat.
Author: Noel Maersk <veox ta wemakethings tod net>
Based on "Pubsub envelope subscriber" example from zguide
Author: Guillaume Aubert (gaubert) <guillaume(dot)aubert(at)gmail(dot)com>
"""
import zmq
def main():
""" main method """
servers = ['preacher.veox.pw:9092']
context = zmq.Context()
s = context.socket(zmq.SUB)
s.connect('tcp://' + servers[0])
s.setsockopt(zmq.SUBSCRIBE, b'') # subscribe to everything
print("Entering main loop.")
while True:
reply = s.recv()
reply = reply[::-1] # obelisk sent little-endian
data = ':'.join(hex(x)[2:] for x in reply)
print(servers[0], data)
# We never get here but clean up anyhow
s.close()
context.term()
if __name__ == "__main__":
main()
|
<commit_before>#!/usr/bin/env python
"""
obelisk-cardiograph
Monitor obelisk servers' heartbeat.
Author: Noel Maersk <veox ta wemakethings tod net>
Based on "Pubsub envelope subscriber" example from zguide
Author: Guillaume Aubert (gaubert) <guillaume(dot)aubert(at)gmail(dot)com>
"""
import zmq
def main():
""" main method """
serverip = '79.98.29.93'
serverport = '9092'
ctx = zmq.Context()
s = ctx.socket(zmq.SUB)
s.connect('tcp://' + serverip + ':' + serverport)
s.setsockopt(zmq.SUBSCRIBE, b'') # subscribe to everything
print("Entering main loop.")
while True:
reply = s.recv()
reply = reply[::-1] # obelisk sent little-endian
data = ':'.join(hex(x)[2:] for x in reply)
print(serverip, data)
# We never get here but clean up anyhow
s.close()
ctx.term()
if __name__ == "__main__":
main()
<commit_msg>Put servers in a list, use FQDNs if available, rename context var.<commit_after>
|
#!/usr/bin/env python
"""
obelisk-cardiograph
Monitor obelisk servers' heartbeat.
Author: Noel Maersk <veox ta wemakethings tod net>
Based on "Pubsub envelope subscriber" example from zguide
Author: Guillaume Aubert (gaubert) <guillaume(dot)aubert(at)gmail(dot)com>
"""
import zmq
def main():
""" main method """
servers = ['preacher.veox.pw:9092']
context = zmq.Context()
s = context.socket(zmq.SUB)
s.connect('tcp://' + servers[0])
s.setsockopt(zmq.SUBSCRIBE, b'') # subscribe to everything
print("Entering main loop.")
while True:
reply = s.recv()
reply = reply[::-1] # obelisk sent little-endian
data = ':'.join(hex(x)[2:] for x in reply)
print(servers[0], data)
# We never get here but clean up anyhow
s.close()
context.term()
if __name__ == "__main__":
main()
|
#!/usr/bin/env python
"""
obelisk-cardiograph
Monitor obelisk servers' heartbeat.
Author: Noel Maersk <veox ta wemakethings tod net>
Based on "Pubsub envelope subscriber" example from zguide
Author: Guillaume Aubert (gaubert) <guillaume(dot)aubert(at)gmail(dot)com>
"""
import zmq
def main():
""" main method """
serverip = '79.98.29.93'
serverport = '9092'
ctx = zmq.Context()
s = ctx.socket(zmq.SUB)
s.connect('tcp://' + serverip + ':' + serverport)
s.setsockopt(zmq.SUBSCRIBE, b'') # subscribe to everything
print("Entering main loop.")
while True:
reply = s.recv()
reply = reply[::-1] # obelisk sent little-endian
data = ':'.join(hex(x)[2:] for x in reply)
print(serverip, data)
# We never get here but clean up anyhow
s.close()
ctx.term()
if __name__ == "__main__":
main()
Put servers in a list, use FQDNs if available, rename context var.#!/usr/bin/env python
"""
obelisk-cardiograph
Monitor obelisk servers' heartbeat.
Author: Noel Maersk <veox ta wemakethings tod net>
Based on "Pubsub envelope subscriber" example from zguide
Author: Guillaume Aubert (gaubert) <guillaume(dot)aubert(at)gmail(dot)com>
"""
import zmq
def main():
""" main method """
servers = ['preacher.veox.pw:9092']
context = zmq.Context()
s = context.socket(zmq.SUB)
s.connect('tcp://' + servers[0])
s.setsockopt(zmq.SUBSCRIBE, b'') # subscribe to everything
print("Entering main loop.")
while True:
reply = s.recv()
reply = reply[::-1] # obelisk sent little-endian
data = ':'.join(hex(x)[2:] for x in reply)
print(servers[0], data)
# We never get here but clean up anyhow
s.close()
context.term()
if __name__ == "__main__":
main()
|
<commit_before>#!/usr/bin/env python
"""
obelisk-cardiograph
Monitor obelisk servers' heartbeat.
Author: Noel Maersk <veox ta wemakethings tod net>
Based on "Pubsub envelope subscriber" example from zguide
Author: Guillaume Aubert (gaubert) <guillaume(dot)aubert(at)gmail(dot)com>
"""
import zmq
def main():
""" main method """
serverip = '79.98.29.93'
serverport = '9092'
ctx = zmq.Context()
s = ctx.socket(zmq.SUB)
s.connect('tcp://' + serverip + ':' + serverport)
s.setsockopt(zmq.SUBSCRIBE, b'') # subscribe to everything
print("Entering main loop.")
while True:
reply = s.recv()
reply = reply[::-1] # obelisk sent little-endian
data = ':'.join(hex(x)[2:] for x in reply)
print(serverip, data)
# We never get here but clean up anyhow
s.close()
ctx.term()
if __name__ == "__main__":
main()
<commit_msg>Put servers in a list, use FQDNs if available, rename context var.<commit_after>#!/usr/bin/env python
"""
obelisk-cardiograph
Monitor obelisk servers' heartbeat.
Author: Noel Maersk <veox ta wemakethings tod net>
Based on "Pubsub envelope subscriber" example from zguide
Author: Guillaume Aubert (gaubert) <guillaume(dot)aubert(at)gmail(dot)com>
"""
import zmq
def main():
""" main method """
servers = ['preacher.veox.pw:9092']
context = zmq.Context()
s = context.socket(zmq.SUB)
s.connect('tcp://' + servers[0])
s.setsockopt(zmq.SUBSCRIBE, b'') # subscribe to everything
print("Entering main loop.")
while True:
reply = s.recv()
reply = reply[::-1] # obelisk sent little-endian
data = ':'.join(hex(x)[2:] for x in reply)
print(servers[0], data)
# We never get here but clean up anyhow
s.close()
context.term()
if __name__ == "__main__":
main()
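The servers list above still only reads its first entry. A small sketch of the likely next step, connecting the one SUB socket to every listed endpoint, since ZeroMQ allows repeated connect() calls on a socket; the second hostname is made up, and with a single socket the origin of each heartbeat can no longer be told apart, so per-server reporting would need one socket per server plus a zmq.Poller.
import zmq

servers = ['preacher.veox.pw:9092', 'obelisk.example.org:9092']  # second entry is hypothetical

context = zmq.Context()
s = context.socket(zmq.SUB)
s.setsockopt(zmq.SUBSCRIBE, b'')    # subscribe to everything
for server in servers:
    s.connect('tcp://' + server)    # one SUB socket may connect to many publishers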
|
def22d2cfc4dc589344412750f1a6760c4643a93
|
common/djangoapps/dark_lang/models.py
|
common/djangoapps/dark_lang/models.py
|
"""
Models for the dark-launching languages
"""
from django.db import models
from config_models.models import ConfigurationModel
class DarkLangConfig(ConfigurationModel):
"""
Configuration for the dark_lang django app
"""
released_languages = models.TextField(
blank=True,
help_text="A comma-separated list of language codes to release to the public."
)
@property
def released_languages_list(self):
"""
``released_languages`` as a list of language codes.
Example: ['it', 'de-at', 'es', 'pt-br']
"""
if not self.released_languages.strip(): # pylint: disable=no-member
return []
languages = [lang.strip() for lang in self.released_languages.split(',')] # pylint: disable=no-member
# Put in alphabetical order
languages.sort()
return languages
|
"""
Models for the dark-launching languages
"""
from django.db import models
from config_models.models import ConfigurationModel
class DarkLangConfig(ConfigurationModel):
"""
Configuration for the dark_lang django app
"""
released_languages = models.TextField(
blank=True,
help_text="A comma-separated list of language codes to release to the public."
)
@property
def released_languages_list(self):
"""
``released_languages`` as a list of language codes.
Example: ['it', 'de-at', 'es', 'pt-br']
"""
if not self.released_languages.strip(): # pylint: disable=no-member
return []
languages = [lang.lower().strip() for lang in self.released_languages.split(',')] # pylint: disable=no-member
# Put in alphabetical order
languages.sort()
return languages
|
Store released dark_lang codes as all lower-case
|
Store released dark_lang codes as all lower-case
|
Python
|
agpl-3.0
|
SivilTaram/edx-platform,Shrhawk/edx-platform,mushtaqak/edx-platform,utecuy/edx-platform,xingyepei/edx-platform,nttks/edx-platform,fly19890211/edx-platform,ESOedX/edx-platform,fintech-circle/edx-platform,mitocw/edx-platform,halvertoluke/edx-platform,xinjiguaike/edx-platform,kursitet/edx-platform,a-parhom/edx-platform,proversity-org/edx-platform,jazkarta/edx-platform,ferabra/edx-platform,B-MOOC/edx-platform,shubhdev/edxOnBaadal,tanmaykm/edx-platform,itsjeyd/edx-platform,Edraak/circleci-edx-platform,edry/edx-platform,IndonesiaX/edx-platform,kmoocdev2/edx-platform,msegado/edx-platform,zadgroup/edx-platform,nikolas/edx-platform,xuxiao19910803/edx,franosincic/edx-platform,SravanthiSinha/edx-platform,shubhdev/edxOnBaadal,devs1991/test_edx_docmode,Softmotions/edx-platform,MakeHer/edx-platform,vasyarv/edx-platform,appliedx/edx-platform,openfun/edx-platform,raccoongang/edx-platform,stvstnfrd/edx-platform,shashank971/edx-platform,halvertoluke/edx-platform,zadgroup/edx-platform,Semi-global/edx-platform,xuxiao19910803/edx-platform,B-MOOC/edx-platform,chauhanhardik/populo,bigdatauniversity/edx-platform,bitifirefly/edx-platform,fintech-circle/edx-platform,kursitet/edx-platform,jbassen/edx-platform,proversity-org/edx-platform,xuxiao19910803/edx,jbzdak/edx-platform,don-github/edx-platform,atsolakid/edx-platform,jbzdak/edx-platform,ahmadiga/min_edx,angelapper/edx-platform,simbs/edx-platform,nanolearningllc/edx-platform-cypress-2,shabab12/edx-platform,Edraak/circleci-edx-platform,romain-li/edx-platform,teltek/edx-platform,bigdatauniversity/edx-platform,playm2mboy/edx-platform,etzhou/edx-platform,Livit/Livit.Learn.EdX,cognitiveclass/edx-platform,franosincic/edx-platform,cognitiveclass/edx-platform,hastexo/edx-platform,AkA84/edx-platform,stvstnfrd/edx-platform,zubair-arbi/edx-platform,synergeticsedx/deployment-wipro,gsehub/edx-platform,ahmadiga/min_edx,jolyonb/edx-platform,cognitiveclass/edx-platform,Lektorium-LLC/edx-platform,prarthitm/edxplatform,miptliot/edx-platform,cecep-edu/edx-platform,jjmiranda/edx-platform,AkA84/edx-platform,chudaol/edx-platform,Endika/edx-platform,deepsrijit1105/edx-platform,edx/edx-platform,pomegranited/edx-platform,shurihell/testasia,ubc/edx-platform,zhenzhai/edx-platform,cpennington/edx-platform,shashank971/edx-platform,edry/edx-platform,inares/edx-platform,bigdatauniversity/edx-platform,RPI-OPENEDX/edx-platform,louyihua/edx-platform,jonathan-beard/edx-platform,chrisndodge/edx-platform,msegado/edx-platform,jzoldak/edx-platform,philanthropy-u/edx-platform,JCBarahona/edX,Edraak/circleci-edx-platform,longmen21/edx-platform,bitifirefly/edx-platform,devs1991/test_edx_docmode,IndonesiaX/edx-platform,lduarte1991/edx-platform,simbs/edx-platform,appliedx/edx-platform,Endika/edx-platform,edry/edx-platform,devs1991/test_edx_docmode,edx/edx-platform,Semi-global/edx-platform,tiagochiavericosta/edx-platform,ahmadiga/min_edx,jazkarta/edx-platform,Ayub-Khan/edx-platform,adoosii/edx-platform,antoviaque/edx-platform,Edraak/circleci-edx-platform,nikolas/edx-platform,ak2703/edx-platform,knehez/edx-platform,adoosii/edx-platform,ubc/edx-platform,Shrhawk/edx-platform,appliedx/edx-platform,itsjeyd/edx-platform,chauhanhardik/populo,jazztpt/edx-platform,pepeportela/edx-platform,hamzehd/edx-platform,TeachAtTUM/edx-platform,shubhdev/edxOnBaadal,pomegranited/edx-platform,simbs/edx-platform,shashank971/edx-platform,nanolearningllc/edx-platform-cypress-2,Shrhawk/edx-platform,doismellburning/edx-platform,jbassen/edx-platform,arifsetiawan/edx-platform,waheedahmed/edx-platform,chudaol/edx-platform,cognitiveclass/ed
x-platform,jzoldak/edx-platform,IONISx/edx-platform,devs1991/test_edx_docmode,ahmedaljazzar/edx-platform,ahmadio/edx-platform,ZLLab-Mooc/edx-platform,zhenzhai/edx-platform,appsembler/edx-platform,motion2015/edx-platform,wwj718/edx-platform,SivilTaram/edx-platform,rismalrv/edx-platform,alexthered/kienhoc-platform,CourseTalk/edx-platform,CredoReference/edx-platform,mushtaqak/edx-platform,synergeticsedx/deployment-wipro,Edraak/edraak-platform,iivic/BoiseStateX,RPI-OPENEDX/edx-platform,nanolearningllc/edx-platform-cypress,J861449197/edx-platform,Kalyzee/edx-platform,SivilTaram/edx-platform,mcgachey/edx-platform,a-parhom/edx-platform,jzoldak/edx-platform,lduarte1991/edx-platform,iivic/BoiseStateX,philanthropy-u/edx-platform,wwj718/edx-platform,ahmadio/edx-platform,marcore/edx-platform,JioEducation/edx-platform,Edraak/edx-platform,hamzehd/edx-platform,analyseuc3m/ANALYSE-v1,shubhdev/edxOnBaadal,EDUlib/edx-platform,BehavioralInsightsTeam/edx-platform,gymnasium/edx-platform,don-github/edx-platform,Edraak/edraak-platform,cognitiveclass/edx-platform,deepsrijit1105/edx-platform,alexthered/kienhoc-platform,synergeticsedx/deployment-wipro,kursitet/edx-platform,prarthitm/edxplatform,jbzdak/edx-platform,defance/edx-platform,jamiefolsom/edx-platform,analyseuc3m/ANALYSE-v1,appsembler/edx-platform,doismellburning/edx-platform,miptliot/edx-platform,fly19890211/edx-platform,zhenzhai/edx-platform,jbassen/edx-platform,4eek/edx-platform,utecuy/edx-platform,alu042/edx-platform,B-MOOC/edx-platform,hamzehd/edx-platform,arifsetiawan/edx-platform,pepeportela/edx-platform,jamesblunt/edx-platform,romain-li/edx-platform,doismellburning/edx-platform,nanolearningllc/edx-platform-cypress,amir-qayyum-khan/edx-platform,ahmedaljazzar/edx-platform,rismalrv/edx-platform,chauhanhardik/populo,CourseTalk/edx-platform,SravanthiSinha/edx-platform,MakeHer/edx-platform,tanmaykm/edx-platform,chudaol/edx-platform,tiagochiavericosta/edx-platform,appliedx/edx-platform,marcore/edx-platform,edx-solutions/edx-platform,ZLLab-Mooc/edx-platform,cecep-edu/edx-platform,martynovp/edx-platform,atsolakid/edx-platform,gymnasium/edx-platform,arbrandes/edx-platform,xingyepei/edx-platform,knehez/edx-platform,UOMx/edx-platform,bigdatauniversity/edx-platform,Lektorium-LLC/edx-platform,halvertoluke/edx-platform,Livit/Livit.Learn.EdX,IndonesiaX/edx-platform,AkA84/edx-platform,mbareta/edx-platform-ft,bitifirefly/edx-platform,procangroup/edx-platform,jazkarta/edx-platform,jazztpt/edx-platform,mcgachey/edx-platform,angelapper/edx-platform,antoviaque/edx-platform,ahmadiga/min_edx,cpennington/edx-platform,jamesblunt/edx-platform,fintech-circle/edx-platform,fly19890211/edx-platform,marcore/edx-platform,philanthropy-u/edx-platform,shurihell/testasia,nikolas/edx-platform,playm2mboy/edx-platform,ZLLab-Mooc/edx-platform,jolyonb/edx-platform,tiagochiavericosta/edx-platform,tanmaykm/edx-platform,4eek/edx-platform,ferabra/edx-platform,rismalrv/edx-platform,jazztpt/edx-platform,antoviaque/edx-platform,romain-li/edx-platform,hastexo/edx-platform,vasyarv/edx-platform,jamiefolsom/edx-platform,longmen21/edx-platform,utecuy/edx-platform,jamiefolsom/edx-platform,appsembler/edx-platform,chauhanhardik/populo_2,ZLLab-Mooc/edx-platform,leansoft/edx-platform,zubair-arbi/edx-platform,louyihua/edx-platform,J861449197/edx-platform,Softmotions/edx-platform,chauhanhardik/populo_2,arifsetiawan/edx-platform,romain-li/edx-platform,angelapper/edx-platform,shubhdev/edxOnBaadal,polimediaupv/edx-platform,franosincic/edx-platform,CredoReference/edx-platform,proversity-org/edx-platform,waheedahme
d/edx-platform,jjmiranda/edx-platform,xuxiao19910803/edx-platform,eduNEXT/edunext-platform,doganov/edx-platform,miptliot/edx-platform,longmen21/edx-platform,gsehub/edx-platform,fintech-circle/edx-platform,xuxiao19910803/edx,naresh21/synergetics-edx-platform,zubair-arbi/edx-platform,a-parhom/edx-platform,JioEducation/edx-platform,nanolearningllc/edx-platform-cypress-2,AkA84/edx-platform,jjmiranda/edx-platform,ESOedX/edx-platform,nttks/edx-platform,MakeHer/edx-platform,bitifirefly/edx-platform,zubair-arbi/edx-platform,pabloborrego93/edx-platform,iivic/BoiseStateX,zerobatu/edx-platform,ovnicraft/edx-platform,martynovp/edx-platform,halvertoluke/edx-platform,knehez/edx-platform,chrisndodge/edx-platform,leansoft/edx-platform,pomegranited/edx-platform,naresh21/synergetics-edx-platform,deepsrijit1105/edx-platform,naresh21/synergetics-edx-platform,inares/edx-platform,xinjiguaike/edx-platform,nikolas/edx-platform,jamiefolsom/edx-platform,edry/edx-platform,Ayub-Khan/edx-platform,benpatterson/edx-platform,Edraak/edx-platform,edry/edx-platform,mushtaqak/edx-platform,nagyistoce/edx-platform,nagyistoce/edx-platform,jonathan-beard/edx-platform,pabloborrego93/edx-platform,nikolas/edx-platform,romain-li/edx-platform,mushtaqak/edx-platform,eduNEXT/edx-platform,ubc/edx-platform,zofuthan/edx-platform,eduNEXT/edunext-platform,procangroup/edx-platform,zofuthan/edx-platform,etzhou/edx-platform,ahmadio/edx-platform,mcgachey/edx-platform,nanolearningllc/edx-platform-cypress,xingyepei/edx-platform,pomegranited/edx-platform,SravanthiSinha/edx-platform,tiagochiavericosta/edx-platform,chauhanhardik/populo_2,nttks/edx-platform,chudaol/edx-platform,procangroup/edx-platform,martynovp/edx-platform,ahmadiga/min_edx,zadgroup/edx-platform,cecep-edu/edx-platform,SravanthiSinha/edx-platform,BehavioralInsightsTeam/edx-platform,adoosii/edx-platform,Livit/Livit.Learn.EdX,louyihua/edx-platform,philanthropy-u/edx-platform,msegado/edx-platform,zofuthan/edx-platform,inares/edx-platform,SivilTaram/edx-platform,alu042/edx-platform,Softmotions/edx-platform,ampax/edx-platform,openfun/edx-platform,UOMx/edx-platform,waheedahmed/edx-platform,TeachAtTUM/edx-platform,jazkarta/edx-platform,devs1991/test_edx_docmode,edx-solutions/edx-platform,atsolakid/edx-platform,caesar2164/edx-platform,UOMx/edx-platform,naresh21/synergetics-edx-platform,kxliugang/edx-platform,motion2015/edx-platform,teltek/edx-platform,vasyarv/edx-platform,analyseuc3m/ANALYSE-v1,xingyepei/edx-platform,nttks/edx-platform,inares/edx-platform,pepeportela/edx-platform,Softmotions/edx-platform,eduNEXT/edunext-platform,vikas1885/test1,cecep-edu/edx-platform,mitocw/edx-platform,caesar2164/edx-platform,synergeticsedx/deployment-wipro,10clouds/edx-platform,CredoReference/edx-platform,martynovp/edx-platform,openfun/edx-platform,kursitet/edx-platform,EDUlib/edx-platform,Ayub-Khan/edx-platform,jzoldak/edx-platform,hastexo/edx-platform,stvstnfrd/edx-platform,benpatterson/edx-platform,mahendra-r/edx-platform,Semi-global/edx-platform,Semi-global/edx-platform,J861449197/edx-platform,ahmadio/edx-platform,alu042/edx-platform,martynovp/edx-platform,JCBarahona/edX,kxliugang/edx-platform,chand3040/cloud_that,edx-solutions/edx-platform,ak2703/edx-platform,leansoft/edx-platform,cpennington/edx-platform,doismellburning/edx-platform,RPI-OPENEDX/edx-platform,pabloborrego93/edx-platform,Semi-global/edx-platform,etzhou/edx-platform,chand3040/cloud_that,mjirayu/sit_academy,jamesblunt/edx-platform,UOMx/edx-platform,benpatterson/edx-platform,xinjiguaike/edx-platform,polimediaupv/edx-platform,ZLLab-Mooc/edx-
platform,leansoft/edx-platform,procangroup/edx-platform,devs1991/test_edx_docmode,ferabra/edx-platform,IONISx/edx-platform,lduarte1991/edx-platform,don-github/edx-platform,Edraak/edx-platform,a-parhom/edx-platform,appliedx/edx-platform,Stanford-Online/edx-platform,kxliugang/edx-platform,mjirayu/sit_academy,chauhanhardik/populo,4eek/edx-platform,longmen21/edx-platform,ak2703/edx-platform,doganov/edx-platform,J861449197/edx-platform,ak2703/edx-platform,mushtaqak/edx-platform,Ayub-Khan/edx-platform,Shrhawk/edx-platform,jbzdak/edx-platform,nttks/edx-platform,tiagochiavericosta/edx-platform,motion2015/edx-platform,BehavioralInsightsTeam/edx-platform,nanolearningllc/edx-platform-cypress,EDUlib/edx-platform,defance/edx-platform,Stanford-Online/edx-platform,eduNEXT/edunext-platform,zerobatu/edx-platform,arifsetiawan/edx-platform,pabloborrego93/edx-platform,solashirai/edx-platform,JCBarahona/edX,wwj718/edx-platform,franosincic/edx-platform,itsjeyd/edx-platform,adoosii/edx-platform,shabab12/edx-platform,vasyarv/edx-platform,xuxiao19910803/edx,fly19890211/edx-platform,mcgachey/edx-platform,vikas1885/test1,Edraak/edx-platform,ubc/edx-platform,analyseuc3m/ANALYSE-v1,mbareta/edx-platform-ft,TeachAtTUM/edx-platform,defance/edx-platform,mjirayu/sit_academy,mcgachey/edx-platform,solashirai/edx-platform,kmoocdev2/edx-platform,xuxiao19910803/edx-platform,proversity-org/edx-platform,Kalyzee/edx-platform,ak2703/edx-platform,longmen21/edx-platform,vikas1885/test1,simbs/edx-platform,pepeportela/edx-platform,gsehub/edx-platform,IndonesiaX/edx-platform,shabab12/edx-platform,kxliugang/edx-platform,shurihell/testasia,raccoongang/edx-platform,EDUlib/edx-platform,xinjiguaike/edx-platform,JCBarahona/edX,CredoReference/edx-platform,xuxiao19910803/edx-platform,amir-qayyum-khan/edx-platform,vasyarv/edx-platform,mjirayu/sit_academy,knehez/edx-platform,hamzehd/edx-platform,4eek/edx-platform,arbrandes/edx-platform,motion2015/edx-platform,JioEducation/edx-platform,shashank971/edx-platform,chrisndodge/edx-platform,zadgroup/edx-platform,solashirai/edx-platform,nanolearningllc/edx-platform-cypress-2,bitifirefly/edx-platform,jamesblunt/edx-platform,benpatterson/edx-platform,chrisndodge/edx-platform,teltek/edx-platform,devs1991/test_edx_docmode,eduNEXT/edx-platform,mjirayu/sit_academy,marcore/edx-platform,jolyonb/edx-platform,10clouds/edx-platform,Edraak/edraak-platform,hamzehd/edx-platform,Lektorium-LLC/edx-platform,louyihua/edx-platform,xinjiguaike/edx-platform,zofuthan/edx-platform,rismalrv/edx-platform,fly19890211/edx-platform,jonathan-beard/edx-platform,tanmaykm/edx-platform,mbareta/edx-platform-ft,kmoocdev2/edx-platform,mitocw/edx-platform,Stanford-Online/edx-platform,gymnasium/edx-platform,jolyonb/edx-platform,arifsetiawan/edx-platform,gymnasium/edx-platform,jbzdak/edx-platform,doganov/edx-platform,ovnicraft/edx-platform,TeachAtTUM/edx-platform,xingyepei/edx-platform,IndonesiaX/edx-platform,J861449197/edx-platform,10clouds/edx-platform,ovnicraft/edx-platform,jonathan-beard/edx-platform,zerobatu/edx-platform,atsolakid/edx-platform,vikas1885/test1,adoosii/edx-platform,CourseTalk/edx-platform,simbs/edx-platform,RPI-OPENEDX/edx-platform,ESOedX/edx-platform,chauhanhardik/populo_2,atsolakid/edx-platform,arbrandes/edx-platform,JCBarahona/edX,SravanthiSinha/edx-platform,chauhanhardik/populo_2,arbrandes/edx-platform,alexthered/kienhoc-platform,stvstnfrd/edx-platform,shashank971/edx-platform,nanolearningllc/edx-platform-cypress-2,utecuy/edx-platform,MakeHer/edx-platform,ferabra/edx-platform,utecuy/edx-platform,waheedahmed/edx-platform
,IONISx/edx-platform,msegado/edx-platform,chand3040/cloud_that,AkA84/edx-platform,vikas1885/test1,kmoocdev2/edx-platform,nanolearningllc/edx-platform-cypress,JioEducation/edx-platform,zadgroup/edx-platform,wwj718/edx-platform,Shrhawk/edx-platform,defance/edx-platform,ampax/edx-platform,raccoongang/edx-platform,edx/edx-platform,edx/edx-platform,itsjeyd/edx-platform,solashirai/edx-platform,solashirai/edx-platform,iivic/BoiseStateX,IONISx/edx-platform,10clouds/edx-platform,doganov/edx-platform,CourseTalk/edx-platform,ferabra/edx-platform,benpatterson/edx-platform,ahmadio/edx-platform,zerobatu/edx-platform,polimediaupv/edx-platform,zofuthan/edx-platform,appsembler/edx-platform,Endika/edx-platform,shabab12/edx-platform,chand3040/cloud_that,deepsrijit1105/edx-platform,prarthitm/edxplatform,Kalyzee/edx-platform,amir-qayyum-khan/edx-platform,ahmedaljazzar/edx-platform,xuxiao19910803/edx,mahendra-r/edx-platform,Kalyzee/edx-platform,Endika/edx-platform,don-github/edx-platform,Lektorium-LLC/edx-platform,zhenzhai/edx-platform,zhenzhai/edx-platform,chudaol/edx-platform,playm2mboy/edx-platform,bigdatauniversity/edx-platform,openfun/edx-platform,hastexo/edx-platform,eduNEXT/edx-platform,nagyistoce/edx-platform,Kalyzee/edx-platform,Softmotions/edx-platform,knehez/edx-platform,chand3040/cloud_that,miptliot/edx-platform,gsehub/edx-platform,SivilTaram/edx-platform,Livit/Livit.Learn.EdX,doismellburning/edx-platform,playm2mboy/edx-platform,prarthitm/edxplatform,zerobatu/edx-platform,doganov/edx-platform,kxliugang/edx-platform,jbassen/edx-platform,RPI-OPENEDX/edx-platform,eduNEXT/edx-platform,wwj718/edx-platform,BehavioralInsightsTeam/edx-platform,lduarte1991/edx-platform,etzhou/edx-platform,Ayub-Khan/edx-platform,jazkarta/edx-platform,Edraak/edx-platform,ovnicraft/edx-platform,angelapper/edx-platform,pomegranited/edx-platform,waheedahmed/edx-platform,mahendra-r/edx-platform,cecep-edu/edx-platform,inares/edx-platform,chauhanhardik/populo,raccoongang/edx-platform,nagyistoce/edx-platform,shurihell/testasia,B-MOOC/edx-platform,jazztpt/edx-platform,mahendra-r/edx-platform,ubc/edx-platform,playm2mboy/edx-platform,B-MOOC/edx-platform,jamiefolsom/edx-platform,edx-solutions/edx-platform,halvertoluke/edx-platform,jbassen/edx-platform,jazztpt/edx-platform,amir-qayyum-khan/edx-platform,cpennington/edx-platform,mitocw/edx-platform,msegado/edx-platform,Stanford-Online/edx-platform,ahmedaljazzar/edx-platform,don-github/edx-platform,shurihell/testasia,leansoft/edx-platform,polimediaupv/edx-platform,MakeHer/edx-platform,IONISx/edx-platform,antoviaque/edx-platform,jamesblunt/edx-platform,polimediaupv/edx-platform,4eek/edx-platform,iivic/BoiseStateX,franosincic/edx-platform,ampax/edx-platform,ESOedX/edx-platform,jjmiranda/edx-platform,alexthered/kienhoc-platform,zubair-arbi/edx-platform,alu042/edx-platform,openfun/edx-platform,etzhou/edx-platform,jonathan-beard/edx-platform,ampax/edx-platform,nagyistoce/edx-platform,devs1991/test_edx_docmode,kmoocdev2/edx-platform,caesar2164/edx-platform,Edraak/edraak-platform,kursitet/edx-platform,caesar2164/edx-platform,Edraak/circleci-edx-platform,teltek/edx-platform,xuxiao19910803/edx-platform,motion2015/edx-platform,ovnicraft/edx-platform,rismalrv/edx-platform,mahendra-r/edx-platform,mbareta/edx-platform-ft,alexthered/kienhoc-platform
|
"""
Models for the dark-launching languages
"""
from django.db import models
from config_models.models import ConfigurationModel
class DarkLangConfig(ConfigurationModel):
"""
Configuration for the dark_lang django app
"""
released_languages = models.TextField(
blank=True,
help_text="A comma-separated list of language codes to release to the public."
)
@property
def released_languages_list(self):
"""
``released_languages`` as a list of language codes.
Example: ['it', 'de-at', 'es', 'pt-br']
"""
if not self.released_languages.strip(): # pylint: disable=no-member
return []
languages = [lang.strip() for lang in self.released_languages.split(',')] # pylint: disable=no-member
# Put in alphabetical order
languages.sort()
return languages
Store released dark_lang codes as all lower-case
|
"""
Models for the dark-launching languages
"""
from django.db import models
from config_models.models import ConfigurationModel
class DarkLangConfig(ConfigurationModel):
"""
Configuration for the dark_lang django app
"""
released_languages = models.TextField(
blank=True,
help_text="A comma-separated list of language codes to release to the public."
)
@property
def released_languages_list(self):
"""
``released_languages`` as a list of language codes.
Example: ['it', 'de-at', 'es', 'pt-br']
"""
if not self.released_languages.strip(): # pylint: disable=no-member
return []
languages = [lang.lower().strip() for lang in self.released_languages.split(',')] # pylint: disable=no-member
# Put in alphabetical order
languages.sort()
return languages
|
<commit_before>"""
Models for the dark-launching languages
"""
from django.db import models
from config_models.models import ConfigurationModel
class DarkLangConfig(ConfigurationModel):
"""
Configuration for the dark_lang django app
"""
released_languages = models.TextField(
blank=True,
help_text="A comma-separated list of language codes to release to the public."
)
@property
def released_languages_list(self):
"""
``released_languages`` as a list of language codes.
Example: ['it', 'de-at', 'es', 'pt-br']
"""
if not self.released_languages.strip(): # pylint: disable=no-member
return []
languages = [lang.strip() for lang in self.released_languages.split(',')] # pylint: disable=no-member
# Put in alphabetical order
languages.sort()
return languages
<commit_msg>Store released dark_lang codes as all lower-case<commit_after>
|
"""
Models for the dark-launching languages
"""
from django.db import models
from config_models.models import ConfigurationModel
class DarkLangConfig(ConfigurationModel):
"""
Configuration for the dark_lang django app
"""
released_languages = models.TextField(
blank=True,
help_text="A comma-separated list of language codes to release to the public."
)
@property
def released_languages_list(self):
"""
``released_languages`` as a list of language codes.
Example: ['it', 'de-at', 'es', 'pt-br']
"""
if not self.released_languages.strip(): # pylint: disable=no-member
return []
languages = [lang.lower().strip() for lang in self.released_languages.split(',')] # pylint: disable=no-member
# Put in alphabetical order
languages.sort()
return languages
|
"""
Models for the dark-launching languages
"""
from django.db import models
from config_models.models import ConfigurationModel
class DarkLangConfig(ConfigurationModel):
"""
Configuration for the dark_lang django app
"""
released_languages = models.TextField(
blank=True,
help_text="A comma-separated list of language codes to release to the public."
)
@property
def released_languages_list(self):
"""
``released_languages`` as a list of language codes.
Example: ['it', 'de-at', 'es', 'pt-br']
"""
if not self.released_languages.strip(): # pylint: disable=no-member
return []
languages = [lang.strip() for lang in self.released_languages.split(',')] # pylint: disable=no-member
# Put in alphabetical order
languages.sort()
return languages
Store released dark_lang codes as all lower-case"""
Models for the dark-launching languages
"""
from django.db import models
from config_models.models import ConfigurationModel
class DarkLangConfig(ConfigurationModel):
"""
Configuration for the dark_lang django app
"""
released_languages = models.TextField(
blank=True,
help_text="A comma-separated list of language codes to release to the public."
)
@property
def released_languages_list(self):
"""
``released_languages`` as a list of language codes.
Example: ['it', 'de-at', 'es', 'pt-br']
"""
if not self.released_languages.strip(): # pylint: disable=no-member
return []
languages = [lang.lower().strip() for lang in self.released_languages.split(',')] # pylint: disable=no-member
# Put in alphabetical order
languages.sort()
return languages
|
<commit_before>"""
Models for the dark-launching languages
"""
from django.db import models
from config_models.models import ConfigurationModel
class DarkLangConfig(ConfigurationModel):
"""
Configuration for the dark_lang django app
"""
released_languages = models.TextField(
blank=True,
help_text="A comma-separated list of language codes to release to the public."
)
@property
def released_languages_list(self):
"""
``released_languages`` as a list of language codes.
Example: ['it', 'de-at', 'es', 'pt-br']
"""
if not self.released_languages.strip(): # pylint: disable=no-member
return []
languages = [lang.strip() for lang in self.released_languages.split(',')] # pylint: disable=no-member
# Put in alphabetical order
languages.sort()
return languages
<commit_msg>Store released dark_lang codes as all lower-case<commit_after>"""
Models for the dark-launching languages
"""
from django.db import models
from config_models.models import ConfigurationModel
class DarkLangConfig(ConfigurationModel):
"""
Configuration for the dark_lang django app
"""
released_languages = models.TextField(
blank=True,
help_text="A comma-separated list of language codes to release to the public."
)
@property
def released_languages_list(self):
"""
``released_languages`` as a list of language codes.
Example: ['it', 'de-at', 'es', 'pt-br']
"""
if not self.released_languages.strip(): # pylint: disable=no-member
return []
languages = [lang.lower().strip() for lang in self.released_languages.split(',')] # pylint: disable=no-member
# Put in alphabetical order
languages.sort()
return languages
|
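Editorial aside (not part of the dataset row above): the dark_lang commit recorded here lower-cases each released language code before sorting. A minimal, framework-free sketch of that parsing step; the helper name is hypothetical, it is not the Django property itself.

    def parse_released_languages(raw):
        # Mirrors the committed logic: empty config yields [], otherwise
        # strip whitespace, lower-case each code, then sort alphabetically.
        if not raw.strip():
            return []
        languages = [lang.lower().strip() for lang in raw.split(',')]
        languages.sort()
        return languages

    assert parse_released_languages("") == []
    assert parse_released_languages(" PT-BR, it , DE-AT") == ['de-at', 'it', 'pt-br']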
584b707fe83a49264c95b7cfa6fd84cfcce96a52
|
csunplugged/utils/group_lessons_by_age.py
|
csunplugged/utils/group_lessons_by_age.py
|
"""Return ordered groups of lessons."""
from collections import OrderedDict
def group_lessons_by_age(lessons):
"""Return ordered groups of lessons.
Lessons are grouped by the lesson minimum age and maximum ages,
and then order by number.
Returns:
A ordered dictionary of grouped lessons.
The key is a tuple of the minimum age and maximum ages for
the lessons.
The value for a key is a sorted list of lessons.
The dictionary is ordered by minimum age, then maximum age.
"""
grouped_lessons = OrderedDict()
lessons = lessons.order_by("min_age", "max_age", "number")
for lesson in lessons:
if (lesson.min_age, lesson.max_age) in grouped_lessons:
grouped_lessons[(lesson.min_age, lesson.max_age)].append(lesson)
else:
grouped_lessons[(lesson.min_age, lesson.max_age)] = [lesson]
return grouped_lessons
|
"""Return ordered groups of lessons."""
from collections import OrderedDict
def group_lessons_by_age(lessons):
"""Return ordered groups of lessons.
Lessons are grouped by the lesson minimum age and maximum ages,
and then order by number.
Args:
lessons: QuerySet of Lesson objects (QuerySet).
Returns:
A ordered dictionary of grouped lessons.
The key is a tuple of the minimum age and maximum ages for
the lessons.
The value for a key is a sorted list of lessons.
The dictionary is ordered by minimum age, then maximum age.
"""
grouped_lessons = OrderedDict()
lessons = lessons.order_by("min_age", "max_age", "number")
for lesson in lessons:
if (lesson.min_age, lesson.max_age) in grouped_lessons:
grouped_lessons[(lesson.min_age, lesson.max_age)].append(lesson)
else:
grouped_lessons[(lesson.min_age, lesson.max_age)] = [lesson]
return grouped_lessons
|
Add missing args docstring details
|
Add missing args docstring details
|
Python
|
mit
|
uccser/cs-unplugged,uccser/cs-unplugged,uccser/cs-unplugged,uccser/cs-unplugged
|
"""Return ordered groups of lessons."""
from collections import OrderedDict
def group_lessons_by_age(lessons):
"""Return ordered groups of lessons.
Lessons are grouped by the lesson minimum age and maximum ages,
and then order by number.
Returns:
A ordered dictionary of grouped lessons.
The key is a tuple of the minimum age and maximum ages for
the lessons.
The value for a key is a sorted list of lessons.
The dictionary is ordered by minimum age, then maximum age.
"""
grouped_lessons = OrderedDict()
lessons = lessons.order_by("min_age", "max_age", "number")
for lesson in lessons:
if (lesson.min_age, lesson.max_age) in grouped_lessons:
grouped_lessons[(lesson.min_age, lesson.max_age)].append(lesson)
else:
grouped_lessons[(lesson.min_age, lesson.max_age)] = [lesson]
return grouped_lessons
Add missing args docstring details
|
"""Return ordered groups of lessons."""
from collections import OrderedDict
def group_lessons_by_age(lessons):
"""Return ordered groups of lessons.
Lessons are grouped by the lesson minimum age and maximum ages,
and then order by number.
Args:
lessons: QuerySet of Lesson objects (QuerySet).
Returns:
A ordered dictionary of grouped lessons.
The key is a tuple of the minimum age and maximum ages for
the lessons.
The value for a key is a sorted list of lessons.
The dictionary is ordered by minimum age, then maximum age.
"""
grouped_lessons = OrderedDict()
lessons = lessons.order_by("min_age", "max_age", "number")
for lesson in lessons:
if (lesson.min_age, lesson.max_age) in grouped_lessons:
grouped_lessons[(lesson.min_age, lesson.max_age)].append(lesson)
else:
grouped_lessons[(lesson.min_age, lesson.max_age)] = [lesson]
return grouped_lessons
|
<commit_before>"""Return ordered groups of lessons."""
from collections import OrderedDict
def group_lessons_by_age(lessons):
"""Return ordered groups of lessons.
Lessons are grouped by the lesson minimum age and maximum ages,
and then order by number.
Returns:
A ordered dictionary of grouped lessons.
The key is a tuple of the minimum age and maximum ages for
the lessons.
The value for a key is a sorted list of lessons.
The dictionary is ordered by minimum age, then maximum age.
"""
grouped_lessons = OrderedDict()
lessons = lessons.order_by("min_age", "max_age", "number")
for lesson in lessons:
if (lesson.min_age, lesson.max_age) in grouped_lessons:
grouped_lessons[(lesson.min_age, lesson.max_age)].append(lesson)
else:
grouped_lessons[(lesson.min_age, lesson.max_age)] = [lesson]
return grouped_lessons
<commit_msg>Add missing args docstring details<commit_after>
|
"""Return ordered groups of lessons."""
from collections import OrderedDict
def group_lessons_by_age(lessons):
"""Return ordered groups of lessons.
Lessons are grouped by the lesson minimum age and maximum ages,
and then order by number.
Args:
lessons: QuerySet of Lesson objects (QuerySet).
Returns:
A ordered dictionary of grouped lessons.
The key is a tuple of the minimum age and maximum ages for
the lessons.
The value for a key is a sorted list of lessons.
The dictionary is ordered by minimum age, then maximum age.
"""
grouped_lessons = OrderedDict()
lessons = lessons.order_by("min_age", "max_age", "number")
for lesson in lessons:
if (lesson.min_age, lesson.max_age) in grouped_lessons:
grouped_lessons[(lesson.min_age, lesson.max_age)].append(lesson)
else:
grouped_lessons[(lesson.min_age, lesson.max_age)] = [lesson]
return grouped_lessons
|
"""Return ordered groups of lessons."""
from collections import OrderedDict
def group_lessons_by_age(lessons):
"""Return ordered groups of lessons.
Lessons are grouped by the lesson minimum age and maximum ages,
and then order by number.
Returns:
A ordered dictionary of grouped lessons.
The key is a tuple of the minimum age and maximum ages for
the lessons.
The value for a key is a sorted list of lessons.
The dictionary is ordered by minimum age, then maximum age.
"""
grouped_lessons = OrderedDict()
lessons = lessons.order_by("min_age", "max_age", "number")
for lesson in lessons:
if (lesson.min_age, lesson.max_age) in grouped_lessons:
grouped_lessons[(lesson.min_age, lesson.max_age)].append(lesson)
else:
grouped_lessons[(lesson.min_age, lesson.max_age)] = [lesson]
return grouped_lessons
Add missing args docstring details"""Return ordered groups of lessons."""
from collections import OrderedDict
def group_lessons_by_age(lessons):
"""Return ordered groups of lessons.
Lessons are grouped by the lesson minimum age and maximum ages,
and then order by number.
Args:
lessons: QuerySet of Lesson objects (QuerySet).
Returns:
A ordered dictionary of grouped lessons.
The key is a tuple of the minimum age and maximum ages for
the lessons.
The value for a key is a sorted list of lessons.
The dictionary is ordered by minimum age, then maximum age.
"""
grouped_lessons = OrderedDict()
lessons = lessons.order_by("min_age", "max_age", "number")
for lesson in lessons:
if (lesson.min_age, lesson.max_age) in grouped_lessons:
grouped_lessons[(lesson.min_age, lesson.max_age)].append(lesson)
else:
grouped_lessons[(lesson.min_age, lesson.max_age)] = [lesson]
return grouped_lessons
|
<commit_before>"""Return ordered groups of lessons."""
from collections import OrderedDict
def group_lessons_by_age(lessons):
"""Return ordered groups of lessons.
Lessons are grouped by the lesson minimum age and maximum ages,
and then order by number.
Returns:
A ordered dictionary of grouped lessons.
The key is a tuple of the minimum age and maximum ages for
the lessons.
The value for a key is a sorted list of lessons.
The dictionary is ordered by minimum age, then maximum age.
"""
grouped_lessons = OrderedDict()
lessons = lessons.order_by("min_age", "max_age", "number")
for lesson in lessons:
if (lesson.min_age, lesson.max_age) in grouped_lessons:
grouped_lessons[(lesson.min_age, lesson.max_age)].append(lesson)
else:
grouped_lessons[(lesson.min_age, lesson.max_age)] = [lesson]
return grouped_lessons
<commit_msg>Add missing args docstring details<commit_after>"""Return ordered groups of lessons."""
from collections import OrderedDict
def group_lessons_by_age(lessons):
"""Return ordered groups of lessons.
Lessons are grouped by the lesson minimum age and maximum ages,
and then order by number.
Args:
lessons: QuerySet of Lesson objects (QuerySet).
Returns:
A ordered dictionary of grouped lessons.
The key is a tuple of the minimum age and maximum ages for
the lessons.
The value for a key is a sorted list of lessons.
The dictionary is ordered by minimum age, then maximum age.
"""
grouped_lessons = OrderedDict()
lessons = lessons.order_by("min_age", "max_age", "number")
for lesson in lessons:
if (lesson.min_age, lesson.max_age) in grouped_lessons:
grouped_lessons[(lesson.min_age, lesson.max_age)].append(lesson)
else:
grouped_lessons[(lesson.min_age, lesson.max_age)] = [lesson]
return grouped_lessons
|
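Editorial aside (not part of the dataset row above): the cs-unplugged commit only documents the `lessons` argument, but the grouping behaviour it describes can be sketched without a Django QuerySet; `FakeLesson` below is a hypothetical stand-in for the Lesson model.

    from collections import OrderedDict, namedtuple

    FakeLesson = namedtuple("FakeLesson", ["min_age", "max_age", "number"])

    def group_lessons_by_age(lessons):
        # Same idea as the documented function: order by age range then number,
        # then bucket lessons under their (min_age, max_age) key.
        grouped = OrderedDict()
        for lesson in sorted(lessons, key=lambda l: (l.min_age, l.max_age, l.number)):
            grouped.setdefault((lesson.min_age, lesson.max_age), []).append(lesson)
        return grouped

    lessons = [FakeLesson(8, 10, 1), FakeLesson(5, 7, 2), FakeLesson(5, 7, 1)]
    print(list(group_lessons_by_age(lessons)))  # [(5, 7), (8, 10)]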
a73cc6d6ad8460d492b29db60df2c0e8eaff932e
|
openerp_conventions.py
|
openerp_conventions.py
|
# -*- coding: utf-8 -*-
"""OpenERP community addons standard plugin for flake8"""
from __future__ import absolute_import
import common_checker
from common_checker.base_checker import BaseChecker
# When OpenERP version 8 API will be frozen
# We wille be able to do version toggle here
import v7
__version__ = '0.0.1'
class OpenERPConventionsChecker(object):
"""Check OpenERP conventions
It will call the function 'visit(root_node)' for all checker instances
registered in BaseCheckerMeta
"""
name = 'OpenERP convention'
version = __version__
def __init__(self, tree, filename):
"""Constructor
:param tree: root ast.node of current module
:param filename: current module filename
"""
self.tree = tree if tree else ()
self.filename = filename
self.checks = BaseChecker._checks
def run(self):
"""Run the checks"""
return self.check_tree(self.tree)
def check_tree(self, tree_root):
"""Apply all checks registered in BaseCheckerMeta on root ast.node
:param tree_root: Root ast node of the namespace
:returns: yeld list of errors codes
"""
for check in self.checks:
check.visit(tree_root)
for error in check.errors:
yield error
|
# -*- coding: utf-8 -*-
"""OpenERP community addons standard plugin for flake8"""
from __future__ import absolute_import
import common_checker
from common_checker.base_checker import BaseChecker
# When OpenERP version 8 API will be frozen
# We wille be able to do version toggle here
import v7
__version__ = '0.0.1'
class OpenERPConventionsChecker(object):
"""Check OpenERP conventions
It will call the function 'visit(root_node)' for all checker instances
registered in BaseCheckerMeta
"""
name = 'OpenERP convention'
version = __version__
def __init__(self, tree, filename):
"""Constructor
:param tree: root ast.node of current module
:param filename: current module filename
"""
self.tree = tree if tree else ()
self.filename = filename
self.checks = BaseChecker._checks
def run(self):
"""Run the checks"""
return self.check_tree(self.tree)
def check_tree(self, tree_root):
"""Apply all checks registered in BaseCheckerMeta on root ast.node
:param tree_root: Root ast node of the namespace
:returns: yeld list of errors codes
"""
for check in self.checks:
check.set_filename(self.filename)
check.visit(tree_root)
for error in check.errors:
yield error
|
Improve BaseChecker class by using __metaclass__ keyword + add a filename setter
|
Improve BaseChecker class by using __metaclass__ keyword + add a filename setter
|
Python
|
mit
|
nbessi/openerp-conventions
|
# -*- coding: utf-8 -*-
"""OpenERP community addons standard plugin for flake8"""
from __future__ import absolute_import
import common_checker
from common_checker.base_checker import BaseChecker
# When OpenERP version 8 API will be frozen
# We wille be able to do version toggle here
import v7
__version__ = '0.0.1'
class OpenERPConventionsChecker(object):
"""Check OpenERP conventions
It will call the function 'visit(root_node)' for all checker instances
registered in BaseCheckerMeta
"""
name = 'OpenERP convention'
version = __version__
def __init__(self, tree, filename):
"""Constructor
:param tree: root ast.node of current module
:param filename: current module filename
"""
self.tree = tree if tree else ()
self.filename = filename
self.checks = BaseChecker._checks
def run(self):
"""Run the checks"""
return self.check_tree(self.tree)
def check_tree(self, tree_root):
"""Apply all checks registered in BaseCheckerMeta on root ast.node
:param tree_root: Root ast node of the namespace
:returns: yeld list of errors codes
"""
for check in self.checks:
check.visit(tree_root)
for error in check.errors:
yield error
Improve BaseChecker class by using __metaclass__ keyword + add a filename setter
|
# -*- coding: utf-8 -*-
"""OpenERP community addons standard plugin for flake8"""
from __future__ import absolute_import
import common_checker
from common_checker.base_checker import BaseChecker
# When OpenERP version 8 API will be frozen
# We wille be able to do version toggle here
import v7
__version__ = '0.0.1'
class OpenERPConventionsChecker(object):
"""Check OpenERP conventions
It will call the function 'visit(root_node)' for all checker instances
registered in BaseCheckerMeta
"""
name = 'OpenERP convention'
version = __version__
def __init__(self, tree, filename):
"""Constructor
:param tree: root ast.node of current module
:param filename: current module filename
"""
self.tree = tree if tree else ()
self.filename = filename
self.checks = BaseChecker._checks
def run(self):
"""Run the checks"""
return self.check_tree(self.tree)
def check_tree(self, tree_root):
"""Apply all checks registered in BaseCheckerMeta on root ast.node
:param tree_root: Root ast node of the namespace
:returns: yeld list of errors codes
"""
for check in self.checks:
check.set_filename(self.filename)
check.visit(tree_root)
for error in check.errors:
yield error
|
<commit_before># -*- coding: utf-8 -*-
"""OpenERP community addons standard plugin for flake8"""
from __future__ import absolute_import
import common_checker
from common_checker.base_checker import BaseChecker
# When OpenERP version 8 API will be frozen
# We wille be able to do version toggle here
import v7
__version__ = '0.0.1'
class OpenERPConventionsChecker(object):
"""Check OpenERP conventions
It will call the function 'visit(root_node)' for all checker instances
registered in BaseCheckerMeta
"""
name = 'OpenERP convention'
version = __version__
def __init__(self, tree, filename):
"""Constructor
:param tree: root ast.node of current module
:param filename: current module filename
"""
self.tree = tree if tree else ()
self.filename = filename
self.checks = BaseChecker._checks
def run(self):
"""Run the checks"""
return self.check_tree(self.tree)
def check_tree(self, tree_root):
"""Apply all checks registered in BaseCheckerMeta on root ast.node
:param tree_root: Root ast node of the namespace
:returns: yeld list of errors codes
"""
for check in self.checks:
check.visit(tree_root)
for error in check.errors:
yield error
<commit_msg>Improve BaseChecker class by using __metaclass__ keyword + add a filename setter<commit_after>
|
# -*- coding: utf-8 -*-
"""OpenERP community addons standard plugin for flake8"""
from __future__ import absolute_import
import common_checker
from common_checker.base_checker import BaseChecker
# When OpenERP version 8 API will be frozen
# We wille be able to do version toggle here
import v7
__version__ = '0.0.1'
class OpenERPConventionsChecker(object):
"""Check OpenERP conventions
It will call the function 'visit(root_node)' for all checker instances
registered in BaseCheckerMeta
"""
name = 'OpenERP convention'
version = __version__
def __init__(self, tree, filename):
"""Constructor
:param tree: root ast.node of current module
:param filename: current module filename
"""
self.tree = tree if tree else ()
self.filename = filename
self.checks = BaseChecker._checks
def run(self):
"""Run the checks"""
return self.check_tree(self.tree)
def check_tree(self, tree_root):
"""Apply all checks registered in BaseCheckerMeta on root ast.node
:param tree_root: Root ast node of the namespace
:returns: yeld list of errors codes
"""
for check in self.checks:
check.set_filename(self.filename)
check.visit(tree_root)
for error in check.errors:
yield error
|
# -*- coding: utf-8 -*-
"""OpenERP community addons standard plugin for flake8"""
from __future__ import absolute_import
import common_checker
from common_checker.base_checker import BaseChecker
# When OpenERP version 8 API will be frozen
# We wille be able to do version toggle here
import v7
__version__ = '0.0.1'
class OpenERPConventionsChecker(object):
"""Check OpenERP conventions
It will call the function 'visit(root_node)' for all checker instances
registered in BaseCheckerMeta
"""
name = 'OpenERP convention'
version = __version__
def __init__(self, tree, filename):
"""Constructor
:param tree: root ast.node of current module
:param filename: current module filename
"""
self.tree = tree if tree else ()
self.filename = filename
self.checks = BaseChecker._checks
def run(self):
"""Run the checks"""
return self.check_tree(self.tree)
def check_tree(self, tree_root):
"""Apply all checks registered in BaseCheckerMeta on root ast.node
:param tree_root: Root ast node of the namespace
:returns: yeld list of errors codes
"""
for check in self.checks:
check.visit(tree_root)
for error in check.errors:
yield error
Improve BaseChecker class by using __metaclass__ keyword + add a filename setter# -*- coding: utf-8 -*-
"""OpenERP community addons standard plugin for flake8"""
from __future__ import absolute_import
import common_checker
from common_checker.base_checker import BaseChecker
# When OpenERP version 8 API will be frozen
# We wille be able to do version toggle here
import v7
__version__ = '0.0.1'
class OpenERPConventionsChecker(object):
"""Check OpenERP conventions
It will call the function 'visit(root_node)' for all checker instances
registered in BaseCheckerMeta
"""
name = 'OpenERP convention'
version = __version__
def __init__(self, tree, filename):
"""Constructor
:param tree: root ast.node of current module
:param filename: current module filename
"""
self.tree = tree if tree else ()
self.filename = filename
self.checks = BaseChecker._checks
def run(self):
"""Run the checks"""
return self.check_tree(self.tree)
def check_tree(self, tree_root):
"""Apply all checks registered in BaseCheckerMeta on root ast.node
:param tree_root: Root ast node of the namespace
:returns: yeld list of errors codes
"""
for check in self.checks:
check.set_filename(self.filename)
check.visit(tree_root)
for error in check.errors:
yield error
|
<commit_before># -*- coding: utf-8 -*-
"""OpenERP community addons standard plugin for flake8"""
from __future__ import absolute_import
import common_checker
from common_checker.base_checker import BaseChecker
# When OpenERP version 8 API will be frozen
# We wille be able to do version toggle here
import v7
__version__ = '0.0.1'
class OpenERPConventionsChecker(object):
"""Check OpenERP conventions
It will call the function 'visit(root_node)' for all checker instances
registered in BaseCheckerMeta
"""
name = 'OpenERP convention'
version = __version__
def __init__(self, tree, filename):
"""Constructor
:param tree: root ast.node of current module
:param filename: current module filename
"""
self.tree = tree if tree else ()
self.filename = filename
self.checks = BaseChecker._checks
def run(self):
"""Run the checks"""
return self.check_tree(self.tree)
def check_tree(self, tree_root):
"""Apply all checks registered in BaseCheckerMeta on root ast.node
:param tree_root: Root ast node of the namespace
:returns: yeld list of errors codes
"""
for check in self.checks:
check.visit(tree_root)
for error in check.errors:
yield error
<commit_msg>Improve BaseChecker class by using __metaclass__ keyword + add a filename setter<commit_after># -*- coding: utf-8 -*-
"""OpenERP community addons standard plugin for flake8"""
from __future__ import absolute_import
import common_checker
from common_checker.base_checker import BaseChecker
# When OpenERP version 8 API will be frozen
# We wille be able to do version toggle here
import v7
__version__ = '0.0.1'
class OpenERPConventionsChecker(object):
"""Check OpenERP conventions
It will call the function 'visit(root_node)' for all checker instances
registered in BaseCheckerMeta
"""
name = 'OpenERP convention'
version = __version__
def __init__(self, tree, filename):
"""Constructor
:param tree: root ast.node of current module
:param filename: current module filename
"""
self.tree = tree if tree else ()
self.filename = filename
self.checks = BaseChecker._checks
def run(self):
"""Run the checks"""
return self.check_tree(self.tree)
def check_tree(self, tree_root):
"""Apply all checks registered in BaseCheckerMeta on root ast.node
:param tree_root: Root ast node of the namespace
:returns: yeld list of errors codes
"""
for check in self.checks:
check.set_filename(self.filename)
check.visit(tree_root)
for error in check.errors:
yield error
|
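Editorial aside (not part of the dataset row above): the openerp-conventions commit threads the module filename into each checker via set_filename() before visiting the tree. A self-contained, hypothetical checker (not one of the real common_checker classes) showing how that hook fits the visit/errors protocol used by the run loop:

    import ast

    class DemoChecker(object):
        # Hypothetical checker used only to illustrate the set_filename() hook.
        def __init__(self):
            self.errors = []
            self.filename = '<unknown>'

        def set_filename(self, filename):
            # The hook added by the commit: lets error messages reference the file.
            self.filename = filename

        def visit(self, tree_root):
            for node in ast.walk(tree_root):
                if isinstance(node, ast.FunctionDef) and ast.get_docstring(node) is None:
                    msg = 'O001 missing docstring in %s' % self.filename
                    self.errors.append((node.lineno, node.col_offset, msg, type(self)))

    checker = DemoChecker()
    checker.set_filename('example.py')
    checker.visit(ast.parse('def f():\n    pass\n'))
    print(checker.errors)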
3b7b15db24ac738c143e3d2d38c740500ac73fd0
|
jinja2_time/jinja2_time.py
|
jinja2_time/jinja2_time.py
|
# -*- coding: utf-8 -*-
import arrow
from jinja2 import nodes
from jinja2.ext import Extension
class TimeExtension(Extension):
tags = set(['now'])
def __init__(self, environment):
super(TimeExtension, self).__init__(environment)
# add the defaults to the environment
environment.extend(
date_format='%Y-%m-%d',
)
def _now(self, timezone, date_format):
date_format = date_format or self.environment.date_format
return arrow.now(timezone).strftime(date_format)
def parse(self, parser):
lineno = next(parser.stream).lineno
args = [parser.parse_expression()]
if parser.stream.skip_if('comma'):
args.append(parser.parse_expression())
else:
args.append(nodes.Const(None))
call = self.call_method('_now', args, lineno=lineno)
return nodes.Output([call], lineno=lineno)
|
# -*- coding: utf-8 -*-
import arrow
from jinja2 import nodes
from jinja2.ext import Extension
class TimeExtension(Extension):
tags = set(['now'])
def __init__(self, environment):
super(TimeExtension, self).__init__(environment)
# add the defaults to the environment
environment.extend(
datetime_format='%Y-%m-%d',
)
def _now(self, timezone, datetime_format):
datetime_format = datetime_format or self.environment.datetime_format
return arrow.now(timezone).strftime(datetime_format)
def parse(self, parser):
lineno = next(parser.stream).lineno
args = [parser.parse_expression()]
if parser.stream.skip_if('comma'):
args.append(parser.parse_expression())
else:
args.append(nodes.Const(None))
call = self.call_method('_now', args, lineno=lineno)
return nodes.Output([call], lineno=lineno)
|
Change environment attribute name to datetime_format
|
Change environment attribute name to datetime_format
|
Python
|
mit
|
hackebrot/jinja2-time
|
# -*- coding: utf-8 -*-
import arrow
from jinja2 import nodes
from jinja2.ext import Extension
class TimeExtension(Extension):
tags = set(['now'])
def __init__(self, environment):
super(TimeExtension, self).__init__(environment)
# add the defaults to the environment
environment.extend(
date_format='%Y-%m-%d',
)
def _now(self, timezone, date_format):
date_format = date_format or self.environment.date_format
return arrow.now(timezone).strftime(date_format)
def parse(self, parser):
lineno = next(parser.stream).lineno
args = [parser.parse_expression()]
if parser.stream.skip_if('comma'):
args.append(parser.parse_expression())
else:
args.append(nodes.Const(None))
call = self.call_method('_now', args, lineno=lineno)
return nodes.Output([call], lineno=lineno)
Change environment attribute name to datetime_format
|
# -*- coding: utf-8 -*-
import arrow
from jinja2 import nodes
from jinja2.ext import Extension
class TimeExtension(Extension):
tags = set(['now'])
def __init__(self, environment):
super(TimeExtension, self).__init__(environment)
# add the defaults to the environment
environment.extend(
datetime_format='%Y-%m-%d',
)
def _now(self, timezone, datetime_format):
datetime_format = datetime_format or self.environment.datetime_format
return arrow.now(timezone).strftime(datetime_format)
def parse(self, parser):
lineno = next(parser.stream).lineno
args = [parser.parse_expression()]
if parser.stream.skip_if('comma'):
args.append(parser.parse_expression())
else:
args.append(nodes.Const(None))
call = self.call_method('_now', args, lineno=lineno)
return nodes.Output([call], lineno=lineno)
|
<commit_before># -*- coding: utf-8 -*-
import arrow
from jinja2 import nodes
from jinja2.ext import Extension
class TimeExtension(Extension):
tags = set(['now'])
def __init__(self, environment):
super(TimeExtension, self).__init__(environment)
# add the defaults to the environment
environment.extend(
date_format='%Y-%m-%d',
)
def _now(self, timezone, date_format):
date_format = date_format or self.environment.date_format
return arrow.now(timezone).strftime(date_format)
def parse(self, parser):
lineno = next(parser.stream).lineno
args = [parser.parse_expression()]
if parser.stream.skip_if('comma'):
args.append(parser.parse_expression())
else:
args.append(nodes.Const(None))
call = self.call_method('_now', args, lineno=lineno)
return nodes.Output([call], lineno=lineno)
<commit_msg>Change environment attribute name to datetime_format<commit_after>
|
# -*- coding: utf-8 -*-
import arrow
from jinja2 import nodes
from jinja2.ext import Extension
class TimeExtension(Extension):
tags = set(['now'])
def __init__(self, environment):
super(TimeExtension, self).__init__(environment)
# add the defaults to the environment
environment.extend(
datetime_format='%Y-%m-%d',
)
def _now(self, timezone, datetime_format):
datetime_format = datetime_format or self.environment.datetime_format
return arrow.now(timezone).strftime(datetime_format)
def parse(self, parser):
lineno = next(parser.stream).lineno
args = [parser.parse_expression()]
if parser.stream.skip_if('comma'):
args.append(parser.parse_expression())
else:
args.append(nodes.Const(None))
call = self.call_method('_now', args, lineno=lineno)
return nodes.Output([call], lineno=lineno)
|
# -*- coding: utf-8 -*-
import arrow
from jinja2 import nodes
from jinja2.ext import Extension
class TimeExtension(Extension):
tags = set(['now'])
def __init__(self, environment):
super(TimeExtension, self).__init__(environment)
# add the defaults to the environment
environment.extend(
date_format='%Y-%m-%d',
)
def _now(self, timezone, date_format):
date_format = date_format or self.environment.date_format
return arrow.now(timezone).strftime(date_format)
def parse(self, parser):
lineno = next(parser.stream).lineno
args = [parser.parse_expression()]
if parser.stream.skip_if('comma'):
args.append(parser.parse_expression())
else:
args.append(nodes.Const(None))
call = self.call_method('_now', args, lineno=lineno)
return nodes.Output([call], lineno=lineno)
Change environment attribute name to datetime_format# -*- coding: utf-8 -*-
import arrow
from jinja2 import nodes
from jinja2.ext import Extension
class TimeExtension(Extension):
tags = set(['now'])
def __init__(self, environment):
super(TimeExtension, self).__init__(environment)
# add the defaults to the environment
environment.extend(
datetime_format='%Y-%m-%d',
)
def _now(self, timezone, datetime_format):
datetime_format = datetime_format or self.environment.datetime_format
return arrow.now(timezone).strftime(datetime_format)
def parse(self, parser):
lineno = next(parser.stream).lineno
args = [parser.parse_expression()]
if parser.stream.skip_if('comma'):
args.append(parser.parse_expression())
else:
args.append(nodes.Const(None))
call = self.call_method('_now', args, lineno=lineno)
return nodes.Output([call], lineno=lineno)
|
<commit_before># -*- coding: utf-8 -*-
import arrow
from jinja2 import nodes
from jinja2.ext import Extension
class TimeExtension(Extension):
tags = set(['now'])
def __init__(self, environment):
super(TimeExtension, self).__init__(environment)
# add the defaults to the environment
environment.extend(
date_format='%Y-%m-%d',
)
def _now(self, timezone, date_format):
date_format = date_format or self.environment.date_format
return arrow.now(timezone).strftime(date_format)
def parse(self, parser):
lineno = next(parser.stream).lineno
args = [parser.parse_expression()]
if parser.stream.skip_if('comma'):
args.append(parser.parse_expression())
else:
args.append(nodes.Const(None))
call = self.call_method('_now', args, lineno=lineno)
return nodes.Output([call], lineno=lineno)
<commit_msg>Change environment attribute name to datetime_format<commit_after># -*- coding: utf-8 -*-
import arrow
from jinja2 import nodes
from jinja2.ext import Extension
class TimeExtension(Extension):
tags = set(['now'])
def __init__(self, environment):
super(TimeExtension, self).__init__(environment)
# add the defaults to the environment
environment.extend(
datetime_format='%Y-%m-%d',
)
def _now(self, timezone, datetime_format):
datetime_format = datetime_format or self.environment.datetime_format
return arrow.now(timezone).strftime(datetime_format)
def parse(self, parser):
lineno = next(parser.stream).lineno
args = [parser.parse_expression()]
if parser.stream.skip_if('comma'):
args.append(parser.parse_expression())
else:
args.append(nodes.Const(None))
call = self.call_method('_now', args, lineno=lineno)
return nodes.Output([call], lineno=lineno)
|
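Editorial aside (not part of the dataset row above): after the jinja2-time commit, the format default lives on the environment as datetime_format. A small usage sketch, assuming the module layout shown in the row (jinja2_time/jinja2_time.py) and that jinja2 and arrow are installed:

    from jinja2 import Environment
    from jinja2_time.jinja2_time import TimeExtension

    env = Environment(extensions=[TimeExtension])
    print(env.datetime_format)               # '%Y-%m-%d', the default set by the extension
    env.datetime_format = '%Y-%m-%d %H:%M'   # the attribute renamed by this commit
    print(env.from_string("{% now 'UTC' %}").render())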
c5fc667a6d50677936d8ae457734562d207a034b
|
bluesky/tests/test_vertical_integration.py
|
bluesky/tests/test_vertical_integration.py
|
from metadatastore.utils.testing import mds_setup, mds_teardown
from dataportal import DataBroker as db
from bluesky.examples import *
from bluesky.standard_config import RE
def setup():
mds_setup()
def teardown():
mds_teardown()
def test_scan_and_get_data():
uid = RE(stepscan(det, motor), group='foo', beamline_id='testing',
config={})
hdr = db[uid]
ev = db.fetch_events(hdr)
if __name__ == '__main__':
import nose
nose.runmodule(argv=['-s', '--with-doctest'], exit=False)
|
from metadatastore.utils.testing import mds_setup, mds_teardown
from dataportal import DataBroker as db
from bluesky.examples import stepscan, det, motor
from bluesky.standard_config import gs
def setup():
mds_setup()
def teardown():
mds_teardown()
def test_scan_and_get_data():
uid = gs.RE(stepscan(det, motor), group='foo', beamline_id='testing',
config={})
hdr = db[uid]
db.fetch_events(hdr)
if __name__ == '__main__':
import nose
nose.runmodule(argv=['-s', '--with-doctest'], exit=False)
|
Update test after RE -> gs.RE change.
|
TST: Update test after RE -> gs.RE change.
|
Python
|
bsd-3-clause
|
sameera2004/bluesky,ericdill/bluesky,klauer/bluesky,klauer/bluesky,ericdill/bluesky,dchabot/bluesky,sameera2004/bluesky,dchabot/bluesky
|
from metadatastore.utils.testing import mds_setup, mds_teardown
from dataportal import DataBroker as db
from bluesky.examples import *
from bluesky.standard_config import RE
def setup():
mds_setup()
def teardown():
mds_teardown()
def test_scan_and_get_data():
uid = RE(stepscan(det, motor), group='foo', beamline_id='testing',
config={})
hdr = db[uid]
ev = db.fetch_events(hdr)
if __name__ == '__main__':
import nose
nose.runmodule(argv=['-s', '--with-doctest'], exit=False)
TST: Update test after RE -> gs.RE change.
|
from metadatastore.utils.testing import mds_setup, mds_teardown
from dataportal import DataBroker as db
from bluesky.examples import stepscan, det, motor
from bluesky.standard_config import gs
def setup():
mds_setup()
def teardown():
mds_teardown()
def test_scan_and_get_data():
uid = gs.RE(stepscan(det, motor), group='foo', beamline_id='testing',
config={})
hdr = db[uid]
db.fetch_events(hdr)
if __name__ == '__main__':
import nose
nose.runmodule(argv=['-s', '--with-doctest'], exit=False)
|
<commit_before>
from metadatastore.utils.testing import mds_setup, mds_teardown
from dataportal import DataBroker as db
from bluesky.examples import *
from bluesky.standard_config import RE
def setup():
mds_setup()
def teardown():
mds_teardown()
def test_scan_and_get_data():
uid = RE(stepscan(det, motor), group='foo', beamline_id='testing',
config={})
hdr = db[uid]
ev = db.fetch_events(hdr)
if __name__ == '__main__':
import nose
nose.runmodule(argv=['-s', '--with-doctest'], exit=False)
<commit_msg>TST: Update test after RE -> gs.RE change.<commit_after>
|
from metadatastore.utils.testing import mds_setup, mds_teardown
from dataportal import DataBroker as db
from bluesky.examples import stepscan, det, motor
from bluesky.standard_config import gs
def setup():
mds_setup()
def teardown():
mds_teardown()
def test_scan_and_get_data():
uid = gs.RE(stepscan(det, motor), group='foo', beamline_id='testing',
config={})
hdr = db[uid]
db.fetch_events(hdr)
if __name__ == '__main__':
import nose
nose.runmodule(argv=['-s', '--with-doctest'], exit=False)
|
from metadatastore.utils.testing import mds_setup, mds_teardown
from dataportal import DataBroker as db
from bluesky.examples import *
from bluesky.standard_config import RE
def setup():
mds_setup()
def teardown():
mds_teardown()
def test_scan_and_get_data():
uid = RE(stepscan(det, motor), group='foo', beamline_id='testing',
config={})
hdr = db[uid]
ev = db.fetch_events(hdr)
if __name__ == '__main__':
import nose
nose.runmodule(argv=['-s', '--with-doctest'], exit=False)
TST: Update test after RE -> gs.RE change.
from metadatastore.utils.testing import mds_setup, mds_teardown
from dataportal import DataBroker as db
from bluesky.examples import stepscan, det, motor
from bluesky.standard_config import gs
def setup():
mds_setup()
def teardown():
mds_teardown()
def test_scan_and_get_data():
uid = gs.RE(stepscan(det, motor), group='foo', beamline_id='testing',
config={})
hdr = db[uid]
db.fetch_events(hdr)
if __name__ == '__main__':
import nose
nose.runmodule(argv=['-s', '--with-doctest'], exit=False)
|
<commit_before>
from metadatastore.utils.testing import mds_setup, mds_teardown
from dataportal import DataBroker as db
from bluesky.examples import *
from bluesky.standard_config import RE
def setup():
mds_setup()
def teardown():
mds_teardown()
def test_scan_and_get_data():
uid = RE(stepscan(det, motor), group='foo', beamline_id='testing',
config={})
hdr = db[uid]
ev = db.fetch_events(hdr)
if __name__ == '__main__':
import nose
nose.runmodule(argv=['-s', '--with-doctest'], exit=False)
<commit_msg>TST: Update test after RE -> gs.RE change.<commit_after>
from metadatastore.utils.testing import mds_setup, mds_teardown
from dataportal import DataBroker as db
from bluesky.examples import stepscan, det, motor
from bluesky.standard_config import gs
def setup():
mds_setup()
def teardown():
mds_teardown()
def test_scan_and_get_data():
uid = gs.RE(stepscan(det, motor), group='foo', beamline_id='testing',
config={})
hdr = db[uid]
db.fetch_events(hdr)
if __name__ == '__main__':
import nose
nose.runmodule(argv=['-s', '--with-doctest'], exit=False)
|
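Editorial aside (not part of the dataset row above): the bluesky commit is a mechanical rename from RE to gs.RE. Reusing only the calls already shown in the row, the same test arranged with a pytest fixture instead of nose-style module setup/teardown could look like this:

    import pytest
    from metadatastore.utils.testing import mds_setup, mds_teardown
    from dataportal import DataBroker as db
    from bluesky.examples import stepscan, det, motor
    from bluesky.standard_config import gs

    @pytest.fixture(autouse=True)
    def metadatastore():
        # Replaces the module-level setup()/teardown() pair used by nose.
        mds_setup()
        yield
        mds_teardown()

    def test_scan_and_get_data():
        uid = gs.RE(stepscan(det, motor), group='foo', beamline_id='testing', config={})
        hdr = db[uid]
        db.fetch_events(hdr)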
310016762927dd9796109712be1c59ce0c1a658c
|
runcurldrop.py
|
runcurldrop.py
|
#!/usr/bin/env python
import os
import tornado
from curldrop import StreamHandler, config
from contextlib import closing
import sqlite3
schema = '''drop table if exists files;
create table files (
id integer primary key autoincrement,
file_id text not null,
timestamp integer not null,
ip text not null,
originalname text not null
);'''
if not os.path.isfile(config['DATABASE']):
with closing(sqlite3.connect(config['DATABASE'])) as db:
db.cursor().executescript(schema)
db.commit()
if not os.path.isdir(config['UPLOADDIR']):
os.makedirs(config['UPLOADDIR'])
application = tornado.web.Application([
(r"/(.*)", StreamHandler),
])
server = tornado.httpserver.HTTPServer(application,
max_buffer_size=config["SERVERBUFF"])
server.listen(config["PORT"])
tornado.ioloop.IOLoop.instance().start()
|
#!/usr/bin/env python
import os
import tornado
from curldrop import StreamHandler, config
from contextlib import closing
import sqlite3
schema = '''drop table if exists files;
create table files (
id integer primary key autoincrement,
file_id text not null,
timestamp integer not null,
ip text not null,
originalname text not null
);'''
if not os.path.isfile(config['DATABASE']):
with closing(sqlite3.connect(config['DATABASE'])) as db:
db.cursor().executescript(schema)
db.commit()
if not os.path.isdir(config['UPLOADDIR']):
os.makedirs(config['UPLOADDIR'])
application = tornado.web.Application([
(r"/list_files", FileListHandler),
(r"/(.*)", StreamHandler),
])
server = tornado.httpserver.HTTPServer(application,
max_buffer_size=config["SERVERBUFF"])
server.listen(config["PORT"])
tornado.ioloop.IOLoop.instance().start()
|
Add handler to main app
|
Add handler to main app
|
Python
|
mit
|
Xarthisius/curldrop
|
#!/usr/bin/env python
import os
import tornado
from curldrop import StreamHandler, config
from contextlib import closing
import sqlite3
schema = '''drop table if exists files;
create table files (
id integer primary key autoincrement,
file_id text not null,
timestamp integer not null,
ip text not null,
originalname text not null
);'''
if not os.path.isfile(config['DATABASE']):
with closing(sqlite3.connect(config['DATABASE'])) as db:
db.cursor().executescript(schema)
db.commit()
if not os.path.isdir(config['UPLOADDIR']):
os.makedirs(config['UPLOADDIR'])
application = tornado.web.Application([
(r"/(.*)", StreamHandler),
])
server = tornado.httpserver.HTTPServer(application,
max_buffer_size=config["SERVERBUFF"])
server.listen(config["PORT"])
tornado.ioloop.IOLoop.instance().start()
Add handler to main app
|
#!/usr/bin/env python
import os
import tornado
from curldrop import StreamHandler, config
from contextlib import closing
import sqlite3
schema = '''drop table if exists files;
create table files (
id integer primary key autoincrement,
file_id text not null,
timestamp integer not null,
ip text not null,
originalname text not null
);'''
if not os.path.isfile(config['DATABASE']):
with closing(sqlite3.connect(config['DATABASE'])) as db:
db.cursor().executescript(schema)
db.commit()
if not os.path.isdir(config['UPLOADDIR']):
os.makedirs(config['UPLOADDIR'])
application = tornado.web.Application([
(r"/list_files", FileListHandler),
(r"/(.*)", StreamHandler),
])
server = tornado.httpserver.HTTPServer(application,
max_buffer_size=config["SERVERBUFF"])
server.listen(config["PORT"])
tornado.ioloop.IOLoop.instance().start()
|
<commit_before>#!/usr/bin/env python
import os
import tornado
from curldrop import StreamHandler, config
from contextlib import closing
import sqlite3
schema = '''drop table if exists files;
create table files (
id integer primary key autoincrement,
file_id text not null,
timestamp integer not null,
ip text not null,
originalname text not null
);'''
if not os.path.isfile(config['DATABASE']):
with closing(sqlite3.connect(config['DATABASE'])) as db:
db.cursor().executescript(schema)
db.commit()
if not os.path.isdir(config['UPLOADDIR']):
os.makedirs(config['UPLOADDIR'])
application = tornado.web.Application([
(r"/(.*)", StreamHandler),
])
server = tornado.httpserver.HTTPServer(application,
max_buffer_size=config["SERVERBUFF"])
server.listen(config["PORT"])
tornado.ioloop.IOLoop.instance().start()
<commit_msg>Add handler to main app<commit_after>
|
#!/usr/bin/env python
import os
import tornado
from curldrop import StreamHandler, config
from contextlib import closing
import sqlite3
schema = '''drop table if exists files;
create table files (
id integer primary key autoincrement,
file_id text not null,
timestamp integer not null,
ip text not null,
originalname text not null
);'''
if not os.path.isfile(config['DATABASE']):
with closing(sqlite3.connect(config['DATABASE'])) as db:
db.cursor().executescript(schema)
db.commit()
if not os.path.isdir(config['UPLOADDIR']):
os.makedirs(config['UPLOADDIR'])
application = tornado.web.Application([
(r"/list_files", FileListHandler),
(r"/(.*)", StreamHandler),
])
server = tornado.httpserver.HTTPServer(application,
max_buffer_size=config["SERVERBUFF"])
server.listen(config["PORT"])
tornado.ioloop.IOLoop.instance().start()
|
#!/usr/bin/env python
import os
import tornado
from curldrop import StreamHandler, config
from contextlib import closing
import sqlite3
schema = '''drop table if exists files;
create table files (
id integer primary key autoincrement,
file_id text not null,
timestamp integer not null,
ip text not null,
originalname text not null
);'''
if not os.path.isfile(config['DATABASE']):
with closing(sqlite3.connect(config['DATABASE'])) as db:
db.cursor().executescript(schema)
db.commit()
if not os.path.isdir(config['UPLOADDIR']):
os.makedirs(config['UPLOADDIR'])
application = tornado.web.Application([
(r"/(.*)", StreamHandler),
])
server = tornado.httpserver.HTTPServer(application,
max_buffer_size=config["SERVERBUFF"])
server.listen(config["PORT"])
tornado.ioloop.IOLoop.instance().start()
Add handler to main app#!/usr/bin/env python
import os
import tornado
from curldrop import StreamHandler, config
from contextlib import closing
import sqlite3
schema = '''drop table if exists files;
create table files (
id integer primary key autoincrement,
file_id text not null,
timestamp integer not null,
ip text not null,
originalname text not null
);'''
if not os.path.isfile(config['DATABASE']):
with closing(sqlite3.connect(config['DATABASE'])) as db:
db.cursor().executescript(schema)
db.commit()
if not os.path.isdir(config['UPLOADDIR']):
os.makedirs(config['UPLOADDIR'])
application = tornado.web.Application([
(r"/list_files", FileListHandler),
(r"/(.*)", StreamHandler),
])
server = tornado.httpserver.HTTPServer(application,
max_buffer_size=config["SERVERBUFF"])
server.listen(config["PORT"])
tornado.ioloop.IOLoop.instance().start()
|
<commit_before>#!/usr/bin/env python
import os
import tornado
from curldrop import StreamHandler, config
from contextlib import closing
import sqlite3
schema = '''drop table if exists files;
create table files (
id integer primary key autoincrement,
file_id text not null,
timestamp integer not null,
ip text not null,
originalname text not null
);'''
if not os.path.isfile(config['DATABASE']):
with closing(sqlite3.connect(config['DATABASE'])) as db:
db.cursor().executescript(schema)
db.commit()
if not os.path.isdir(config['UPLOADDIR']):
os.makedirs(config['UPLOADDIR'])
application = tornado.web.Application([
(r"/(.*)", StreamHandler),
])
server = tornado.httpserver.HTTPServer(application,
max_buffer_size=config["SERVERBUFF"])
server.listen(config["PORT"])
tornado.ioloop.IOLoop.instance().start()
<commit_msg>Add handler to main app<commit_after>#!/usr/bin/env python
import os
import tornado
from curldrop import StreamHandler, config
from contextlib import closing
import sqlite3
schema = '''drop table if exists files;
create table files (
id integer primary key autoincrement,
file_id text not null,
timestamp integer not null,
ip text not null,
originalname text not null
);'''
if not os.path.isfile(config['DATABASE']):
with closing(sqlite3.connect(config['DATABASE'])) as db:
db.cursor().executescript(schema)
db.commit()
if not os.path.isdir(config['UPLOADDIR']):
os.makedirs(config['UPLOADDIR'])
application = tornado.web.Application([
(r"/list_files", FileListHandler),
(r"/(.*)", StreamHandler),
])
server = tornado.httpserver.HTTPServer(application,
max_buffer_size=config["SERVERBUFF"])
server.listen(config["PORT"])
tornado.ioloop.IOLoop.instance().start()
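
The new route table above maps "/list_files" to a FileListHandler, but that class is neither defined nor imported in the snippet, so the server would fail with a NameError as written. A minimal sketch of such a handler is shown below; the class name comes from the route, while the response shape and the direct sqlite access are assumptions based on the schema in this record, not part of the original commit.

# Hypothetical FileListHandler sketch -- not part of the original commit.
# It assumes the sqlite schema above (files: id, file_id, timestamp, ip,
# originalname) and a config dict with a 'DATABASE' key, as in curldrop.
import json
import sqlite3
from contextlib import closing

import tornado.web

config = {'DATABASE': 'curldrop.db'}  # placeholder; the real value comes from curldrop's config


class FileListHandler(tornado.web.RequestHandler):
    def get(self):
        # Read every stored upload and return it as a JSON list.
        with closing(sqlite3.connect(config['DATABASE'])) as db:
            rows = db.execute(
                'select file_id, timestamp, ip, originalname from files').fetchall()
        files = [{'file_id': r[0], 'timestamp': r[1], 'ip': r[2], 'originalname': r[3]}
                 for r in rows]
        self.set_header('Content-Type', 'application/json')
        self.write(json.dumps(files))

For the application to start, the handler would also need to be importable, e.g. from curldrop import StreamHandler, FileListHandler, config, assuming it lives in the same module as StreamHandler.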
|
a25d12d7d3eab68dff1d65382543ad93fb8a22bd
|
mint/rest/api/models/__init__.py
|
mint/rest/api/models/__init__.py
|
from mint.rest.modellib import Model
from mint.rest.modellib import fields
class RbuilderStatus(Model):
version = fields.CharField()
conaryVersion = fields.CharField()
products = fields.UrlField('products', None)
users = fields.UrlField('users', None)
from mint.rest.api.models.members import *
from mint.rest.api.models.users import *
from mint.rest.api.models.products import *
from mint.rest.api.models.productversions import *
from mint.rest.api.models.images import *
from mint.rest.api.models.repos import *
|
from mint.rest.modellib import Model
from mint.rest.modellib import fields
class RMCUrlField(fields.CalculatedField):
def getValue(self, controller, request, class_, parent, value):
return request.getHostWithProtocol() + '/catalog'
class RbuilderStatus(Model):
id = fields.AbsoluteUrlField(isAttribute=True)
version = fields.CharField()
conaryVersion = fields.CharField()
rmcService = RMCUrlField()
products = fields.UrlField('products', None)
users = fields.UrlField('users', None)
def get_absolute_url(self):
return '',
from mint.rest.api.models.members import *
from mint.rest.api.models.users import *
from mint.rest.api.models.products import *
from mint.rest.api.models.productversions import *
from mint.rest.api.models.images import *
from mint.rest.api.models.repos import *
|
Add extra fields to status info page
|
Add extra fields to status info page
|
Python
|
apache-2.0
|
sassoftware/mint,sassoftware/mint,sassoftware/mint,sassoftware/mint,sassoftware/mint
|
from mint.rest.modellib import Model
from mint.rest.modellib import fields
class RbuilderStatus(Model):
version = fields.CharField()
conaryVersion = fields.CharField()
products = fields.UrlField('products', None)
users = fields.UrlField('users', None)
from mint.rest.api.models.members import *
from mint.rest.api.models.users import *
from mint.rest.api.models.products import *
from mint.rest.api.models.productversions import *
from mint.rest.api.models.images import *
from mint.rest.api.models.repos import *
Add extra fields to status info page
|
from mint.rest.modellib import Model
from mint.rest.modellib import fields
class RMCUrlField(fields.CalculatedField):
def getValue(self, controller, request, class_, parent, value):
return request.getHostWithProtocol() + '/catalog'
class RbuilderStatus(Model):
id = fields.AbsoluteUrlField(isAttribute=True)
version = fields.CharField()
conaryVersion = fields.CharField()
rmcService = RMCUrlField()
products = fields.UrlField('products', None)
users = fields.UrlField('users', None)
def get_absolute_url(self):
return '',
from mint.rest.api.models.members import *
from mint.rest.api.models.users import *
from mint.rest.api.models.products import *
from mint.rest.api.models.productversions import *
from mint.rest.api.models.images import *
from mint.rest.api.models.repos import *
|
<commit_before>from mint.rest.modellib import Model
from mint.rest.modellib import fields
class RbuilderStatus(Model):
version = fields.CharField()
conaryVersion = fields.CharField()
products = fields.UrlField('products', None)
users = fields.UrlField('users', None)
from mint.rest.api.models.members import *
from mint.rest.api.models.users import *
from mint.rest.api.models.products import *
from mint.rest.api.models.productversions import *
from mint.rest.api.models.images import *
from mint.rest.api.models.repos import *
<commit_msg>Add extra fields to status info page<commit_after>
|
from mint.rest.modellib import Model
from mint.rest.modellib import fields
class RMCUrlField(fields.CalculatedField):
def getValue(self, controller, request, class_, parent, value):
return request.getHostWithProtocol() + '/catalog'
class RbuilderStatus(Model):
id = fields.AbsoluteUrlField(isAttribute=True)
version = fields.CharField()
conaryVersion = fields.CharField()
rmcService = RMCUrlField()
products = fields.UrlField('products', None)
users = fields.UrlField('users', None)
def get_absolute_url(self):
return '',
from mint.rest.api.models.members import *
from mint.rest.api.models.users import *
from mint.rest.api.models.products import *
from mint.rest.api.models.productversions import *
from mint.rest.api.models.images import *
from mint.rest.api.models.repos import *
|
from mint.rest.modellib import Model
from mint.rest.modellib import fields
class RbuilderStatus(Model):
version = fields.CharField()
conaryVersion = fields.CharField()
products = fields.UrlField('products', None)
users = fields.UrlField('users', None)
from mint.rest.api.models.members import *
from mint.rest.api.models.users import *
from mint.rest.api.models.products import *
from mint.rest.api.models.productversions import *
from mint.rest.api.models.images import *
from mint.rest.api.models.repos import *
Add extra fields to status info pagefrom mint.rest.modellib import Model
from mint.rest.modellib import fields
class RMCUrlField(fields.CalculatedField):
def getValue(self, controller, request, class_, parent, value):
return request.getHostWithProtocol() + '/catalog'
class RbuilderStatus(Model):
id = fields.AbsoluteUrlField(isAttribute=True)
version = fields.CharField()
conaryVersion = fields.CharField()
rmcService = RMCUrlField()
products = fields.UrlField('products', None)
users = fields.UrlField('users', None)
def get_absolute_url(self):
return '',
from mint.rest.api.models.members import *
from mint.rest.api.models.users import *
from mint.rest.api.models.products import *
from mint.rest.api.models.productversions import *
from mint.rest.api.models.images import *
from mint.rest.api.models.repos import *
|
<commit_before>from mint.rest.modellib import Model
from mint.rest.modellib import fields
class RbuilderStatus(Model):
version = fields.CharField()
conaryVersion = fields.CharField()
products = fields.UrlField('products', None)
users = fields.UrlField('users', None)
from mint.rest.api.models.members import *
from mint.rest.api.models.users import *
from mint.rest.api.models.products import *
from mint.rest.api.models.productversions import *
from mint.rest.api.models.images import *
from mint.rest.api.models.repos import *
<commit_msg>Add extra fields to status info page<commit_after>from mint.rest.modellib import Model
from mint.rest.modellib import fields
class RMCUrlField(fields.CalculatedField):
def getValue(self, controller, request, class_, parent, value):
return request.getHostWithProtocol() + '/catalog'
class RbuilderStatus(Model):
id = fields.AbsoluteUrlField(isAttribute=True)
version = fields.CharField()
conaryVersion = fields.CharField()
rmcService = RMCUrlField()
products = fields.UrlField('products', None)
users = fields.UrlField('users', None)
def get_absolute_url(self):
return '',
from mint.rest.api.models.members import *
from mint.rest.api.models.users import *
from mint.rest.api.models.products import *
from mint.rest.api.models.productversions import *
from mint.rest.api.models.images import *
from mint.rest.api.models.repos import *
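
The substantive addition in this commit is RMCUrlField, a field whose value is computed from the incoming request (host plus '/catalog') rather than stored on the model. Because mint.rest.modellib is not shown in this record, the snippet below is only a simplified, self-contained illustration of that calculated-field idea using stand-in classes; none of these names are the library's actual API.

# Stand-in sketch of a request-derived ("calculated") field, loosely modelled
# on the RMCUrlField added above. These classes are illustrative only.
class FakeRequest(object):
    def __init__(self, host_with_protocol):
        self._host = host_with_protocol

    def getHostWithProtocol(self):
        return self._host


class CalculatedField(object):
    def getValue(self, request):
        raise NotImplementedError


class RMCUrlField(CalculatedField):
    def getValue(self, request):
        # Build the catalog URL from whichever host the client actually used.
        return request.getHostWithProtocol() + '/catalog'


print(RMCUrlField().getValue(FakeRequest('https://rbuilder.example.com')))
# -> https://rbuilder.example.com/catalog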
|
6f103bd78f188c2a090c6dd522884c361e85d832
|
cyder/cydhcp/validation.py
|
cyder/cydhcp/validation.py
|
# encoding: utf-8
from django.core.exceptions import ValidationError
import re
ERROR_TOO_LONG = 'MAC address is too long'
mac_pattern = re.compile(r'^([0-9a-f]{2}:){5}[0-9a-f]{2}$')
def validate_mac(mac):
if mac == ERROR_TOO_LONG:
raise ValidationError(ERROR_TOO_LONG)
elif mac == '00:00:00:00:00:00':
raise ValidationError('Invalid MAC address—to disable DHCP for this '
'interface, uncheck "Enable DHCP"')
elif not mac_pattern.match(mac):
raise ValidationError('Invalid MAC address')
def validate_system_static_ctnr(system, static):
if system.ctnr not in static.domain.ctnr_set.all():
raise ValidationError("System's container must match static "
"interface's domain's containers.")
def validate_system_dynamic_ctnr(system, dynamic):
if system.ctnr not in dynamic.range.domain.ctnr_set.all():
raise ValidationError("System's container must match dynamic "
"interface's range's domain's containers.")
|
# encoding: utf-8
from django.core.exceptions import ValidationError
import re
ERROR_TOO_LONG = 'MAC address is too long'
mac_pattern = re.compile(r'^([0-9a-f]{2}:){5}[0-9a-f]{2}$')
def validate_mac(mac):
if mac == ERROR_TOO_LONG:
raise ValidationError(ERROR_TOO_LONG)
elif mac == '00:00:00:00:00:00':
raise ValidationError('Invalid MAC address—to disable DHCP for this '
'interface, uncheck "Enable DHCP"')
elif not mac_pattern.match(mac):
raise ValidationError('Invalid MAC address')
def validate_system_static_ctnr(system, static):
if system.ctnr not in static.domain.ctnr_set.all():
raise ValidationError("System's container must match static "
"interface's domain's containers.")
def validate_system_dynamic_ctnr(system, dynamic):
if system.ctnr not in dynamic.range.ctnr_set.all():
raise ValidationError("System's container must match dynamic "
"interface's range's containers.")
|
Validate dynamic interface's range's container, not dynamic interface's range's domain's container.
|
Validate dynamic interface's range's container, not dynamic interface's range's domain's container.
|
Python
|
bsd-3-clause
|
drkitty/cyder,akeym/cyder,OSU-Net/cyder,akeym/cyder,OSU-Net/cyder,akeym/cyder,akeym/cyder,drkitty/cyder,drkitty/cyder,OSU-Net/cyder,drkitty/cyder,OSU-Net/cyder
|
# encoding: utf-8
from django.core.exceptions import ValidationError
import re
ERROR_TOO_LONG = 'MAC address is too long'
mac_pattern = re.compile(r'^([0-9a-f]{2}:){5}[0-9a-f]{2}$')
def validate_mac(mac):
if mac == ERROR_TOO_LONG:
raise ValidationError(ERROR_TOO_LONG)
elif mac == '00:00:00:00:00:00':
raise ValidationError('Invalid MAC address—to disable DHCP for this '
'interface, uncheck "Enable DHCP"')
elif not mac_pattern.match(mac):
raise ValidationError('Invalid MAC address')
def validate_system_static_ctnr(system, static):
if system.ctnr not in static.domain.ctnr_set.all():
raise ValidationError("System's container must match static "
"interface's domain's containers.")
def validate_system_dynamic_ctnr(system, dynamic):
if system.ctnr not in dynamic.range.domain.ctnr_set.all():
raise ValidationError("System's container must match dynamic "
"interface's range's domain's containers.")
Validate dynamic interface's range's container, not dynamic interface's range's domain's container.
|
# encoding: utf-8
from django.core.exceptions import ValidationError
import re
ERROR_TOO_LONG = 'MAC address is too long'
mac_pattern = re.compile(r'^([0-9a-f]{2}:){5}[0-9a-f]{2}$')
def validate_mac(mac):
if mac == ERROR_TOO_LONG:
raise ValidationError(ERROR_TOO_LONG)
elif mac == '00:00:00:00:00:00':
raise ValidationError('Invalid MAC address—to disable DHCP for this '
'interface, uncheck "Enable DHCP"')
elif not mac_pattern.match(mac):
raise ValidationError('Invalid MAC address')
def validate_system_static_ctnr(system, static):
if system.ctnr not in static.domain.ctnr_set.all():
raise ValidationError("System's container must match static "
"interface's domain's containers.")
def validate_system_dynamic_ctnr(system, dynamic):
if system.ctnr not in dynamic.range.ctnr_set.all():
raise ValidationError("System's container must match dynamic "
"interface's range's containers.")
|
<commit_before># encoding: utf-8
from django.core.exceptions import ValidationError
import re
ERROR_TOO_LONG = 'MAC address is too long'
mac_pattern = re.compile(r'^([0-9a-f]{2}:){5}[0-9a-f]{2}$')
def validate_mac(mac):
if mac == ERROR_TOO_LONG:
raise ValidationError(ERROR_TOO_LONG)
elif mac == '00:00:00:00:00:00':
raise ValidationError('Invalid MAC address—to disable DHCP for this '
'interface, uncheck "Enable DHCP"')
elif not mac_pattern.match(mac):
raise ValidationError('Invalid MAC address')
def validate_system_static_ctnr(system, static):
if system.ctnr not in static.domain.ctnr_set.all():
raise ValidationError("System's container must match static "
"interface's domain's containers.")
def validate_system_dynamic_ctnr(system, dynamic):
if system.ctnr not in dynamic.range.domain.ctnr_set.all():
raise ValidationError("System's container must match dynamic "
"interface's range's domain's containers.")
<commit_msg>Validate dynamic interface's range's container, not dynamic interface's range's domain's container.<commit_after>
|
# encoding: utf-8
from django.core.exceptions import ValidationError
import re
ERROR_TOO_LONG = 'MAC address is too long'
mac_pattern = re.compile(r'^([0-9a-f]{2}:){5}[0-9a-f]{2}$')
def validate_mac(mac):
if mac == ERROR_TOO_LONG:
raise ValidationError(ERROR_TOO_LONG)
elif mac == '00:00:00:00:00:00':
raise ValidationError('Invalid MAC address—to disable DHCP for this '
'interface, uncheck "Enable DHCP"')
elif not mac_pattern.match(mac):
raise ValidationError('Invalid MAC address')
def validate_system_static_ctnr(system, static):
if system.ctnr not in static.domain.ctnr_set.all():
raise ValidationError("System's container must match static "
"interface's domain's containers.")
def validate_system_dynamic_ctnr(system, dynamic):
if system.ctnr not in dynamic.range.ctnr_set.all():
raise ValidationError("System's container must match dynamic "
"interface's range's containers.")
|
# encoding: utf-8
from django.core.exceptions import ValidationError
import re
ERROR_TOO_LONG = 'MAC address is too long'
mac_pattern = re.compile(r'^([0-9a-f]{2}:){5}[0-9a-f]{2}$')
def validate_mac(mac):
if mac == ERROR_TOO_LONG:
raise ValidationError(ERROR_TOO_LONG)
elif mac == '00:00:00:00:00:00':
raise ValidationError('Invalid MAC address—to disable DHCP for this '
'interface, uncheck "Enable DHCP"')
elif not mac_pattern.match(mac):
raise ValidationError('Invalid MAC address')
def validate_system_static_ctnr(system, static):
if system.ctnr not in static.domain.ctnr_set.all():
raise ValidationError("System's container must match static "
"interface's domain's containers.")
def validate_system_dynamic_ctnr(system, dynamic):
if system.ctnr not in dynamic.range.domain.ctnr_set.all():
raise ValidationError("System's container must match dynamic "
"interface's range's domain's containers.")
Validate dynamic interface's range's container, not dynamic interface's range's domain's container.# encoding: utf-8
from django.core.exceptions import ValidationError
import re
ERROR_TOO_LONG = 'MAC address is too long'
mac_pattern = re.compile(r'^([0-9a-f]{2}:){5}[0-9a-f]{2}$')
def validate_mac(mac):
if mac == ERROR_TOO_LONG:
raise ValidationError(ERROR_TOO_LONG)
elif mac == '00:00:00:00:00:00':
raise ValidationError('Invalid MAC address—to disable DHCP for this '
'interface, uncheck "Enable DHCP"')
elif not mac_pattern.match(mac):
raise ValidationError('Invalid MAC address')
def validate_system_static_ctnr(system, static):
if system.ctnr not in static.domain.ctnr_set.all():
raise ValidationError("System's container must match static "
"interface's domain's containers.")
def validate_system_dynamic_ctnr(system, dynamic):
if system.ctnr not in dynamic.range.ctnr_set.all():
raise ValidationError("System's container must match dynamic "
"interface's range's containers.")
|
<commit_before># encoding: utf-8
from django.core.exceptions import ValidationError
import re
ERROR_TOO_LONG = 'MAC address is too long'
mac_pattern = re.compile(r'^([0-9a-f]{2}:){5}[0-9a-f]{2}$')
def validate_mac(mac):
if mac == ERROR_TOO_LONG:
raise ValidationError(ERROR_TOO_LONG)
elif mac == '00:00:00:00:00:00':
raise ValidationError('Invalid MAC address—to disable DHCP for this '
'interface, uncheck "Enable DHCP"')
elif not mac_pattern.match(mac):
raise ValidationError('Invalid MAC address')
def validate_system_static_ctnr(system, static):
if system.ctnr not in static.domain.ctnr_set.all():
raise ValidationError("System's container must match static "
"interface's domain's containers.")
def validate_system_dynamic_ctnr(system, dynamic):
if system.ctnr not in dynamic.range.domain.ctnr_set.all():
raise ValidationError("System's container must match dynamic "
"interface's range's domain's containers.")
<commit_msg>Validate dynamic interface's range's container, not dynamic interface's range's domain's container.<commit_after># encoding: utf-8
from django.core.exceptions import ValidationError
import re
ERROR_TOO_LONG = 'MAC address is too long'
mac_pattern = re.compile(r'^([0-9a-f]{2}:){5}[0-9a-f]{2}$')
def validate_mac(mac):
if mac == ERROR_TOO_LONG:
raise ValidationError(ERROR_TOO_LONG)
elif mac == '00:00:00:00:00:00':
raise ValidationError('Invalid MAC address—to disable DHCP for this '
'interface, uncheck "Enable DHCP"')
elif not mac_pattern.match(mac):
raise ValidationError('Invalid MAC address')
def validate_system_static_ctnr(system, static):
if system.ctnr not in static.domain.ctnr_set.all():
raise ValidationError("System's container must match static "
"interface's domain's containers.")
def validate_system_dynamic_ctnr(system, dynamic):
if system.ctnr not in dynamic.range.ctnr_set.all():
raise ValidationError("System's container must match dynamic "
"interface's range's containers.")
|
9d796a4fe8f6c4b38eb1428d4d43f1edc041c1cd
|
dlchainer/__init__.py
|
dlchainer/__init__.py
|
#-*- coding: utf-8 -*-
from .dA import dA
|
#-*- coding: utf-8 -*-
from .dA import dA
from .SdA import SdAClassifier, SdARegressor
|
Add importing SdA in init script.
|
Add importing SdA in init script.
|
Python
|
mit
|
duonys/deep-learning-chainer
|
#-*- coding: utf-8 -*-
from .dA import dA
Add importing SdA in init script.
|
#-*- coding: utf-8 -*-
from .dA import dA
from .SdA import SdAClassifier, SdARegressor
|
<commit_before>#-*- coding: utf-8 -*-
from .dA import dA
<commit_msg>Add importing SdA in init script.<commit_after>
|
#-*- coding: utf-8 -*-
from .dA import dA
from .SdA import SdAClassifier, SdARegressor
|
#-*- coding: utf-8 -*-
from .dA import dA
Add importing SdA in init script.#-*- coding: utf-8 -*-
from .dA import dA
from .SdA import SdAClassifier, SdARegressor
|
<commit_before>#-*- coding: utf-8 -*-
from .dA import dA
<commit_msg>Add importing SdA in init script.<commit_after>#-*- coding: utf-8 -*-
from .dA import dA
from .SdA import SdAClassifier, SdARegressor
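
With the extra import line, the stacked-denoising-autoencoder classes become reachable from the package root alongside dA. A one-line sanity check, assuming the dlchainer package from this repository is installed:

# Assumes the dlchainer package shown in this record is importable.
from dlchainer import dA, SdAClassifier, SdARegressor
print(dA, SdAClassifier, SdARegressor)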
|
d011f0279f868e56b0a36bb672f432ca2bfc2b35
|
mlbgame/league.py
|
mlbgame/league.py
|
#!/usr/bin/env python
"""Module that is used for getting information
about the (MLB) league and the teams in it.
"""
import mlbgame.data
import lxml.etree as etree
def get_league_object():
"""Returns the xml object corresponding to the league
Only designed for internal use"""
# get data
data = mlbgame.data.get_properties()
# return league object
return etree.parse(data).getroot().find("leagues").find("league")
def league_info():
"""Returns a dictionary of league information"""
league = get_league_object()
output = {}
for x in league.attrib:
output[x] = league.attrib[x]
return output
|
#!/usr/bin/env python
"""Module that is used for getting information
about the (MLB) league and the teams in it.
"""
import mlbgame.data
import lxml.etree as etree
def get_league_object():
"""Returns the xml object corresponding to the league
Only designed for internal use"""
# get data
data = mlbgame.data.get_properties()
# return league object
return etree.parse(data).getroot().find("leagues").find("league")
def league_info():
"""Returns a dictionary of league information"""
league = get_league_object()
output = {}
for x in league.attrib:
output[x] = league.attrib[x]
return output
def team_info():
"""Returns a list of team information dictionaries"""
teams = get_league_object().find("teams").findall("team")
output = []
for team in teams:
info = {}
for x in team.attrib:
info[x] = team.attrib[x]
output.append(info)
return output
|
Add function to parse team info
|
Add function to parse team info
|
Python
|
mit
|
zachpanz88/mlbgame,panzarino/mlbgame
|
#!/usr/bin/env python
"""Module that is used for getting information
about the (MLB) league and the teams in it.
"""
import mlbgame.data
import lxml.etree as etree
def get_league_object():
"""Returns the xml object corresponding to the league
Only designed for internal use"""
# get data
data = mlbgame.data.get_properties()
# return league object
return etree.parse(data).getroot().find("leagues").find("league")
def league_info():
"""Returns a dictionary of league information"""
league = get_league_object()
output = {}
for x in league.attrib:
output[x] = league.attrib[x]
return outputAdd function to parse team info
|
#!/usr/bin/env python
"""Module that is used for getting information
about the (MLB) league and the teams in it.
"""
import mlbgame.data
import lxml.etree as etree
def get_league_object():
"""Returns the xml object corresponding to the league
Only designed for internal use"""
# get data
data = mlbgame.data.get_properties()
# return league object
return etree.parse(data).getroot().find("leagues").find("league")
def league_info():
"""Returns a dictionary of league information"""
league = get_league_object()
output = {}
for x in league.attrib:
output[x] = league.attrib[x]
return output
def team_info():
"""Returns a list of team information dictionaries"""
teams = get_league_object().find("teams").findall("team")
output = []
for team in teams:
info = {}
for x in team.attrib:
info[x] = team.attrib[x]
output.append(info)
return output
|
<commit_before>#!/usr/bin/env python
"""Module that is used for getting information
about the (MLB) league and the teams in it.
"""
import mlbgame.data
import lxml.etree as etree
def get_league_object():
"""Returns the xml object corresponding to the league
Only designed for internal use"""
# get data
data = mlbgame.data.get_properties()
# return league object
return etree.parse(data).getroot().find("leagues").find("league")
def league_info():
"""Returns a dictionary of league information"""
league = get_league_object()
output = {}
for x in league.attrib:
output[x] = league.attrib[x]
return output<commit_msg>Add function to parse team info<commit_after>
|
#!/usr/bin/env python
"""Module that is used for getting information
about the (MLB) league and the teams in it.
"""
import mlbgame.data
import lxml.etree as etree
def get_league_object():
"""Returns the xml object corresponding to the league
Only designed for internal use"""
# get data
data = mlbgame.data.get_properties()
# return league object
return etree.parse(data).getroot().find("leagues").find("league")
def league_info():
"""Returns a dictionary of league information"""
league = get_league_object()
output = {}
for x in league.attrib:
output[x] = league.attrib[x]
return output
def team_info():
"""Returns a list of team information dictionaries"""
teams = get_league_object().find("teams").findall("team")
output = []
for team in teams:
info = {}
for x in team.attrib:
info[x] = team.attrib[x]
output.append(info)
return output
|
#!/usr/bin/env python
"""Module that is used for getting information
about the (MLB) league and the teams in it.
"""
import mlbgame.data
import lxml.etree as etree
def get_league_object():
"""Returns the xml object corresponding to the league
Only designed for internal use"""
# get data
data = mlbgame.data.get_properties()
# return league object
return etree.parse(data).getroot().find("leagues").find("league")
def league_info():
"""Returns a dictionary of league information"""
league = get_league_object()
output = {}
for x in league.attrib:
output[x] = league.attrib[x]
return outputAdd function to parse team info#!/usr/bin/env python
"""Module that is used for getting information
about the (MLB) league and the teams in it.
"""
import mlbgame.data
import lxml.etree as etree
def get_league_object():
"""Returns the xml object corresponding to the league
Only designed for internal use"""
# get data
data = mlbgame.data.get_properties()
# return league object
return etree.parse(data).getroot().find("leagues").find("league")
def league_info():
"""Returns a dictionary of league information"""
league = get_league_object()
output = {}
for x in league.attrib:
output[x] = league.attrib[x]
return output
def team_info():
"""Returns a list of team information dictionaries"""
teams = get_league_object().find("teams").findall("team")
output = []
for team in teams:
info = {}
for x in team.attrib:
info[x] = team.attrib[x]
output.append(info)
return output
|
<commit_before>#!/usr/bin/env python
"""Module that is used for getting information
about the (MLB) league and the teams in it.
"""
import mlbgame.data
import lxml.etree as etree
def get_league_object():
"""Returns the xml object corresponding to the league
Only designed for internal use"""
# get data
data = mlbgame.data.get_properties()
# return league object
return etree.parse(data).getroot().find("leagues").find("league")
def league_info():
"""Returns a dictionary of league information"""
league = get_league_object()
output = {}
for x in league.attrib:
output[x] = league.attrib[x]
return output<commit_msg>Add function to parse team info<commit_after>#!/usr/bin/env python
"""Module that is used for getting information
about the (MLB) league and the teams in it.
"""
import mlbgame.data
import lxml.etree as etree
def get_league_object():
"""Returns the xml object corresponding to the league
Only designed for internal use"""
# get data
data = mlbgame.data.get_properties()
# return league object
return etree.parse(data).getroot().find("leagues").find("league")
def league_info():
"""Returns a dictionary of league information"""
league = get_league_object()
output = {}
for x in league.attrib:
output[x] = league.attrib[x]
return output
def team_info():
"""Returns a list of team information dictionaries"""
teams = get_league_object().find("teams").findall("team")
output = []
for team in teams:
info = {}
for x in team.attrib:
info[x] = team.attrib[x]
output.append(info)
return output
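
team_info() simply copies every attribute of each <team> element into a dict and collects the dicts in a list. The same pattern can be exercised without mlbgame's bundled properties file by parsing an inline XML string, as below; the element layout mirrors what the function expects, but the attribute names and values are invented for illustration.

# The attribute-copying pattern from team_info(), run against an inline XML
# document instead of mlbgame's properties file (team data is made up).
import lxml.etree as etree

xml = b"""
<leagues>
  <league name="MLB">
    <teams>
      <team code="nya" name="New York Yankees" league="AL"/>
      <team code="bos" name="Boston Red Sox" league="AL"/>
    </teams>
  </league>
</leagues>
"""

league = etree.fromstring(xml).find("league")
teams = league.find("teams").findall("team")

output = []
for team in teams:
    info = {}
    for x in team.attrib:
        info[x] = team.attrib[x]
    output.append(info)

print(output)
# [{'code': 'nya', 'name': 'New York Yankees', 'league': 'AL'}, ...]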
|
164ac322110407ec3ab7b9dc8b6675a405efa6a9
|
pymantic/__init__.py
|
pymantic/__init__.py
|
#
from rdflib.plugin import register
from rdflib.serializer import Serializer
from rdflib.parser import Parser
import re
# Fix rdflib's ntriples parser
from rdflib.plugins.parsers import ntriples
ntriples.litinfo = r'(?:@([a-z]+(?:-[a-zA-Z0-9]+)*)|\^\^' + ntriples.uriref + r')?'
ntriples.r_literal = re.compile(ntriples.literal + ntriples.litinfo)
register('nt', Serializer, 'pymantic.serializers', 'NTSerializer')
register('nq', Serializer, 'pymantic.serializers', 'NQSerializer')
register('nq', Parser, 'pymantic.parsers', 'NQParser')
content_type_to_rdflib_format = {
'text/plain': 'nt',
'text/x-nquads': 'nq',
'application/rdf+xml': 'xml',
'text/turtle': 'turtle',
}
rdflib_format_to_content_type = dict((value, key) for key, value in\
content_type_to_rdflib_format.iteritems())
|
#
from rdflib.plugin import register
from rdflib.serializer import Serializer
from rdflib.parser import Parser
import re
# Fix rdflib's ntriples parser
from rdflib.plugins.parsers import ntriples
ntriples.litinfo = r'(?:@([a-z]+(?:-[a-zA-Z0-9]+)*)|\^\^' + ntriples.uriref + r')?'
ntriples.r_literal = re.compile(ntriples.literal + ntriples.litinfo)
register('nt', Serializer, 'pymantic.serializers', 'NTSerializer')
register('nq', Serializer, 'pymantic.serializers', 'NQSerializer')
register('nq', Parser, 'pymantic.parsers', 'NQParser')
content_type_to_rdflib_format = {
'text/plain': 'nt',
'text/x-nquads': 'nq',
'application/rdf+xml': 'xml',
'text/turtle': 'turtle',
'text/rdf+n3': 'n3',
}
rdflib_format_to_content_type = dict((value, key) for key, value in\
content_type_to_rdflib_format.iteritems())
|
Expand rdflib to content-type mapping.
|
Expand rdflib to content-type mapping.
|
Python
|
bsd-3-clause
|
igor-kim/blazegraph-python,SYSTAP/blazegraph-python,blazegraph/blazegraph-python,SYSTAP/blazegraph-python,blazegraph/blazegraph-python,igor-kim/blazegraph-python,syapse/pymantic
|
#
from rdflib.plugin import register
from rdflib.serializer import Serializer
from rdflib.parser import Parser
import re
# Fix rdflib's ntriples parser
from rdflib.plugins.parsers import ntriples
ntriples.litinfo = r'(?:@([a-z]+(?:-[a-zA-Z0-9]+)*)|\^\^' + ntriples.uriref + r')?'
ntriples.r_literal = re.compile(ntriples.literal + ntriples.litinfo)
register('nt', Serializer, 'pymantic.serializers', 'NTSerializer')
register('nq', Serializer, 'pymantic.serializers', 'NQSerializer')
register('nq', Parser, 'pymantic.parsers', 'NQParser')
content_type_to_rdflib_format = {
'text/plain': 'nt',
'text/x-nquads': 'nq',
'application/rdf+xml': 'xml',
'text/turtle': 'turtle',
}
rdflib_format_to_content_type = dict((value, key) for key, value in\
content_type_to_rdflib_format.iteritems())
Expand rdflib to content-type mapping.
|
#
from rdflib.plugin import register
from rdflib.serializer import Serializer
from rdflib.parser import Parser
import re
# Fix rdflib's ntriples parser
from rdflib.plugins.parsers import ntriples
ntriples.litinfo = r'(?:@([a-z]+(?:-[a-zA-Z0-9]+)*)|\^\^' + ntriples.uriref + r')?'
ntriples.r_literal = re.compile(ntriples.literal + ntriples.litinfo)
register('nt', Serializer, 'pymantic.serializers', 'NTSerializer')
register('nq', Serializer, 'pymantic.serializers', 'NQSerializer')
register('nq', Parser, 'pymantic.parsers', 'NQParser')
content_type_to_rdflib_format = {
'text/plain': 'nt',
'text/x-nquads': 'nq',
'application/rdf+xml': 'xml',
'text/turtle': 'turtle',
'text/rdf+n3': 'n3',
}
rdflib_format_to_content_type = dict((value, key) for key, value in\
content_type_to_rdflib_format.iteritems())
|
<commit_before>#
from rdflib.plugin import register
from rdflib.serializer import Serializer
from rdflib.parser import Parser
import re
# Fix rdflib's ntriples parser
from rdflib.plugins.parsers import ntriples
ntriples.litinfo = r'(?:@([a-z]+(?:-[a-zA-Z0-9]+)*)|\^\^' + ntriples.uriref + r')?'
ntriples.r_literal = re.compile(ntriples.literal + ntriples.litinfo)
register('nt', Serializer, 'pymantic.serializers', 'NTSerializer')
register('nq', Serializer, 'pymantic.serializers', 'NQSerializer')
register('nq', Parser, 'pymantic.parsers', 'NQParser')
content_type_to_rdflib_format = {
'text/plain': 'nt',
'text/x-nquads': 'nq',
'application/rdf+xml': 'xml',
'text/turtle': 'turtle',
}
rdflib_format_to_content_type = dict((value, key) for key, value in\
content_type_to_rdflib_format.iteritems())
<commit_msg>Expand rdflib to content-type mapping.<commit_after>
|
#
from rdflib.plugin import register
from rdflib.serializer import Serializer
from rdflib.parser import Parser
import re
# Fix rdflib's ntriples parser
from rdflib.plugins.parsers import ntriples
ntriples.litinfo = r'(?:@([a-z]+(?:-[a-zA-Z0-9]+)*)|\^\^' + ntriples.uriref + r')?'
ntriples.r_literal = re.compile(ntriples.literal + ntriples.litinfo)
register('nt', Serializer, 'pymantic.serializers', 'NTSerializer')
register('nq', Serializer, 'pymantic.serializers', 'NQSerializer')
register('nq', Parser, 'pymantic.parsers', 'NQParser')
content_type_to_rdflib_format = {
'text/plain': 'nt',
'text/x-nquads': 'nq',
'application/rdf+xml': 'xml',
'text/turtle': 'turtle',
'text/rdf+n3': 'n3',
}
rdflib_format_to_content_type = dict((value, key) for key, value in\
content_type_to_rdflib_format.iteritems())
|
#
from rdflib.plugin import register
from rdflib.serializer import Serializer
from rdflib.parser import Parser
import re
# Fix rdflib's ntriples parser
from rdflib.plugins.parsers import ntriples
ntriples.litinfo = r'(?:@([a-z]+(?:-[a-zA-Z0-9]+)*)|\^\^' + ntriples.uriref + r')?'
ntriples.r_literal = re.compile(ntriples.literal + ntriples.litinfo)
register('nt', Serializer, 'pymantic.serializers', 'NTSerializer')
register('nq', Serializer, 'pymantic.serializers', 'NQSerializer')
register('nq', Parser, 'pymantic.parsers', 'NQParser')
content_type_to_rdflib_format = {
'text/plain': 'nt',
'text/x-nquads': 'nq',
'application/rdf+xml': 'xml',
'text/turtle': 'turtle',
}
rdflib_format_to_content_type = dict((value, key) for key, value in\
content_type_to_rdflib_format.iteritems())
Expand rdflib to content-type mapping.#
from rdflib.plugin import register
from rdflib.serializer import Serializer
from rdflib.parser import Parser
import re
# Fix rdflib's ntriples parser
from rdflib.plugins.parsers import ntriples
ntriples.litinfo = r'(?:@([a-z]+(?:-[a-zA-Z0-9]+)*)|\^\^' + ntriples.uriref + r')?'
ntriples.r_literal = re.compile(ntriples.literal + ntriples.litinfo)
register('nt', Serializer, 'pymantic.serializers', 'NTSerializer')
register('nq', Serializer, 'pymantic.serializers', 'NQSerializer')
register('nq', Parser, 'pymantic.parsers', 'NQParser')
content_type_to_rdflib_format = {
'text/plain': 'nt',
'text/x-nquads': 'nq',
'application/rdf+xml': 'xml',
'text/turtle': 'turtle',
'text/rdf+n3': 'n3',
}
rdflib_format_to_content_type = dict((value, key) for key, value in\
content_type_to_rdflib_format.iteritems())
|
<commit_before>#
from rdflib.plugin import register
from rdflib.serializer import Serializer
from rdflib.parser import Parser
import re
# Fix rdflib's ntriples parser
from rdflib.plugins.parsers import ntriples
ntriples.litinfo = r'(?:@([a-z]+(?:-[a-zA-Z0-9]+)*)|\^\^' + ntriples.uriref + r')?'
ntriples.r_literal = re.compile(ntriples.literal + ntriples.litinfo)
register('nt', Serializer, 'pymantic.serializers', 'NTSerializer')
register('nq', Serializer, 'pymantic.serializers', 'NQSerializer')
register('nq', Parser, 'pymantic.parsers', 'NQParser')
content_type_to_rdflib_format = {
'text/plain': 'nt',
'text/x-nquads': 'nq',
'application/rdf+xml': 'xml',
'text/turtle': 'turtle',
}
rdflib_format_to_content_type = dict((value, key) for key, value in\
content_type_to_rdflib_format.iteritems())
<commit_msg>Expand rdflib to content-type mapping.<commit_after>#
from rdflib.plugin import register
from rdflib.serializer import Serializer
from rdflib.parser import Parser
import re
# Fix rdflib's ntriples parser
from rdflib.plugins.parsers import ntriples
ntriples.litinfo = r'(?:@([a-z]+(?:-[a-zA-Z0-9]+)*)|\^\^' + ntriples.uriref + r')?'
ntriples.r_literal = re.compile(ntriples.literal + ntriples.litinfo)
register('nt', Serializer, 'pymantic.serializers', 'NTSerializer')
register('nq', Serializer, 'pymantic.serializers', 'NQSerializer')
register('nq', Parser, 'pymantic.parsers', 'NQParser')
content_type_to_rdflib_format = {
'text/plain': 'nt',
'text/x-nquads': 'nq',
'application/rdf+xml': 'xml',
'text/turtle': 'turtle',
'text/rdf+n3': 'n3',
}
rdflib_format_to_content_type = dict((value, key) for key, value in\
content_type_to_rdflib_format.iteritems())
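
The last two lines of the module build the reverse lookup by inverting the dict, so each rdflib format name maps back to exactly one content type. The record's code uses iteritems(), which only exists on Python 2; a Python 3 rendition of the same inversion, with the dict contents copied from the commit, looks like this:

# Python 3 version of the mapping and its inverse, mirroring the commit above.
content_type_to_rdflib_format = {
    'text/plain': 'nt',
    'text/x-nquads': 'nq',
    'application/rdf+xml': 'xml',
    'text/turtle': 'turtle',
    'text/rdf+n3': 'n3',     # the entry this commit adds
}

# dict.items() replaces the Python 2-only iteritems().
rdflib_format_to_content_type = {
    fmt: ctype for ctype, fmt in content_type_to_rdflib_format.items()
}

print(rdflib_format_to_content_type['n3'])   # -> text/rdf+n3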
|
a0fe1cb563a6aff55744def8e43a3af8b0d672cc
|
python/web_socket.py
|
python/web_socket.py
|
#!/bin/python
import urllib.request
import json
class RESTfulApi:
"""
Generic REST API call
"""
def __init__(self):
"""
Constructor
"""
pass
def request(self, url):
"""
Web request
:param: url: The url link
:return JSON object
"""
req = urllib.request.Request(url, None, headers={
"Content-Type": "application/x-www-form-urlencoded",
"Accept": "*/*",
"User-Agent": "curl/7.24.0 (x86_64-apple-darwin12.0)"})
res = urllib.request.urlopen(req)
res = json.loads(res.read().decode('utf8'))
return res
|
#!/bin/python
try:
import urllib.request as urlrequest
except ImportError:
import urllib as urlrequest
import json
class RESTfulApi:
"""
Generic REST API call
"""
def __init__(self):
"""
Constructor
"""
pass
def request(self, url):
"""
Web request
:param: url: The url link
:return JSON object
"""
req = urlrequest.Request(url, None, headers={
"Content-Type": "application/x-www-form-urlencoded",
"Accept": "*/*",
"User-Agent": "curl/7.24.0 (x86_64-apple-darwin12.0)"})
res = urlrequest.urlopen(req)
res = json.loads(res.read().decode('utf8'))
return res
|
Support python 2 and 3 compatibility
|
Support python 2 and 3 compatibility
|
Python
|
apache-2.0
|
Aurora-Team/BitcoinExchangeFH
|
#!/bin/python
import urllib.request
import json
class RESTfulApi:
"""
Generic REST API call
"""
def __init__(self):
"""
Constructor
"""
pass
def request(self, url):
"""
Web request
:param: url: The url link
:return JSON object
"""
req = urllib.request.Request(url, None, headers={
"Content-Type": "application/x-www-form-urlencoded",
"Accept": "*/*",
"User-Agent": "curl/7.24.0 (x86_64-apple-darwin12.0)"})
res = urllib.request.urlopen(req)
res = json.loads(res.read().decode('utf8'))
return res
Support python 2 and 3 compatibility
|
#!/bin/python
try:
import urllib.request as urlrequest
except ImportError:
import urllib as urlrequest
import json
class RESTfulApi:
"""
Generic REST API call
"""
def __init__(self):
"""
Constructor
"""
pass
def request(self, url):
"""
Web request
:param: url: The url link
:return JSON object
"""
req = urlrequest.Request(url, None, headers={
"Content-Type": "application/x-www-form-urlencoded",
"Accept": "*/*",
"User-Agent": "curl/7.24.0 (x86_64-apple-darwin12.0)"})
res = urlrequest.urlopen(req)
res = json.loads(res.read().decode('utf8'))
return res
|
<commit_before>#!/bin/python
import urllib.request
import json
class RESTfulApi:
"""
Generic REST API call
"""
def __init__(self):
"""
Constructor
"""
pass
def request(self, url):
"""
Web request
:param: url: The url link
:return JSON object
"""
req = urllib.request.Request(url, None, headers={
"Content-Type": "application/x-www-form-urlencoded",
"Accept": "*/*",
"User-Agent": "curl/7.24.0 (x86_64-apple-darwin12.0)"})
res = urllib.request.urlopen(req)
res = json.loads(res.read().decode('utf8'))
return res
<commit_msg>Support python 2 and 3 compatibility<commit_after>
|
#!/bin/python
try:
import urllib.request as urlrequest
except ImportError:
import urllib as urlrequest
import json
class RESTfulApi:
"""
Generic REST API call
"""
def __init__(self):
"""
Constructor
"""
pass
def request(self, url):
"""
Web request
:param: url: The url link
:return JSON object
"""
req = urlrequest.Request(url, None, headers={
"Content-Type": "application/x-www-form-urlencoded",
"Accept": "*/*",
"User-Agent": "curl/7.24.0 (x86_64-apple-darwin12.0)"})
res = urlrequest.urlopen(req)
res = json.loads(res.read().decode('utf8'))
return res
|
#!/bin/python
import urllib.request
import json
class RESTfulApi:
"""
Generic REST API call
"""
def __init__(self):
"""
Constructor
"""
pass
def request(self, url):
"""
Web request
:param: url: The url link
:return JSON object
"""
req = urllib.request.Request(url, None, headers={
"Content-Type": "application/x-www-form-urlencoded",
"Accept": "*/*",
"User-Agent": "curl/7.24.0 (x86_64-apple-darwin12.0)"})
res = urllib.request.urlopen(req)
res = json.loads(res.read().decode('utf8'))
return res
Support python 2 and 3 compatibility#!/bin/python
try:
import urllib.request as urlrequest
except ImportError:
import urllib as urlrequest
import json
class RESTfulApi:
"""
Generic REST API call
"""
def __init__(self):
"""
Constructor
"""
pass
def request(self, url):
"""
Web request
:param: url: The url link
:return JSON object
"""
req = urlrequest.Request(url, None, headers={
"Content-Type": "application/x-www-form-urlencoded",
"Accept": "*/*",
"User-Agent": "curl/7.24.0 (x86_64-apple-darwin12.0)"})
res = urlrequest.urlopen(req)
res = json.loads(res.read().decode('utf8'))
return res
|
<commit_before>#!/bin/python
import urllib.request
import json
class RESTfulApi:
"""
Generic REST API call
"""
def __init__(self):
"""
Constructor
"""
pass
def request(self, url):
"""
Web request
:param: url: The url link
:return JSON object
"""
req = urllib.request.Request(url, None, headers={
"Content-Type": "application/x-www-form-urlencoded",
"Accept": "*/*",
"User-Agent": "curl/7.24.0 (x86_64-apple-darwin12.0)"})
res = urllib.request.urlopen(req)
res = json.loads(res.read().decode('utf8'))
return res
<commit_msg>Support python 2 and 3 compatibility<commit_after>#!/bin/python
try:
import urllib.request as urlrequest
except ImportError:
import urllib as urlrequest
import json
class RESTfulApi:
"""
Generic REST API call
"""
def __init__(self):
"""
Constructor
"""
pass
def request(self, url):
"""
Web request
:param: url: The url link
:return JSON object
"""
req = urlrequest.Request(url, None, headers={
"Content-Type": "application/x-www-form-urlencoded",
"Accept": "*/*",
"User-Agent": "curl/7.24.0 (x86_64-apple-darwin12.0)"})
res = urlrequest.urlopen(req)
res = json.loads(res.read().decode('utf8'))
return res
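
The try/except import keeps a single code path for both interpreters: on Python 3 urllib.request is bound to urlrequest, and on Python 2 the ImportError fallback binds the top-level urllib module instead. One caveat worth noting: plain urllib on Python 2 has no Request class (that lives in urllib2), so the fallback branch is unlikely to behave identically; that detail is outside this record. A typical call site, with a placeholder URL that is assumed to return JSON:

# Example use of the RESTfulApi class above (assumes the python/web_socket.py
# module layout from this record). The endpoint URL is a placeholder.
from web_socket import RESTfulApi

api = RESTfulApi()
ticker = api.request('https://api.example.com/ticker')   # hypothetical JSON endpoint
print(ticker)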
|
558a44643b37e82f5b77038c34e826f38dcb6358
|
qsimcirq/_version.py
|
qsimcirq/_version.py
|
"""The version number defined here is read automatically in setup.py."""
__version__ = "0.11.1"
|
"""The version number defined here is read automatically in setup.py."""
__version__ = "0.11.2.dev20220104"
|
Update to dev version 2022-01-04
|
Update to dev version 2022-01-04
|
Python
|
apache-2.0
|
quantumlib/qsim,quantumlib/qsim,quantumlib/qsim,quantumlib/qsim
|
"""The version number defined here is read automatically in setup.py."""
__version__ = "0.11.1"
Update to dev version 2022-01-04
|
"""The version number defined here is read automatically in setup.py."""
__version__ = "0.11.2.dev20220104"
|
<commit_before>"""The version number defined here is read automatically in setup.py."""
__version__ = "0.11.1"
<commit_msg>Update to dev version 2022-01-04<commit_after>
|
"""The version number defined here is read automatically in setup.py."""
__version__ = "0.11.2.dev20220104"
|
"""The version number defined here is read automatically in setup.py."""
__version__ = "0.11.1"
Update to dev version 2022-01-04"""The version number defined here is read automatically in setup.py."""
__version__ = "0.11.2.dev20220104"
|
<commit_before>"""The version number defined here is read automatically in setup.py."""
__version__ = "0.11.1"
<commit_msg>Update to dev version 2022-01-04<commit_after>"""The version number defined here is read automatically in setup.py."""
__version__ = "0.11.2.dev20220104"
|
be4aad346f25f7daf0ba8e61b083f9e15e8f6b6a
|
luigi/tasks/release/utils/generic.py
|
luigi/tasks/release/utils/generic.py
|
# -*- coding: utf-8 -*-
"""
Copyright [2009-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
def file_pattern(name):
if name == 'all':
return '.*.csv'
return name + '.*.csv'
|
# -*- coding: utf-8 -*-
"""
Copyright [2009-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
def file_pattern(name):
"""
Given a database name this will produce a pattern for pgloader to match
files from that database. If the name is 'all' then a pattern that matches
all files will be created. The file pattern is quoted using '#'.
"""
pattern = name
if name == 'all':
pattern = '.'
return '~#{pattern}*.csv#'.format(pattern=pattern)
|
Make file pattern quote the name
|
Make file pattern quote the name
Might not be the right place for it, but this does need to be done.
|
Python
|
apache-2.0
|
RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline
|
# -*- coding: utf-8 -*-
"""
Copyright [2009-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
def file_pattern(name):
if name == 'all':
return '.*.csv'
return name + '.*.csv'
Make file pattern quote the name
Might not be the right place for it, but this does need to be done.
|
# -*- coding: utf-8 -*-
"""
Copyright [2009-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
def file_pattern(name):
"""
Given a database name this will produce a pattern for pgloader to match
files from that database. If the name is 'all' then a pattern that matches
all files will be created. The file pattern is quoted using '#'.
"""
pattern = name
if name == 'all':
pattern = '.'
return '~#{pattern}*.csv#'.format(pattern=pattern)
|
<commit_before># -*- coding: utf-8 -*-
"""
Copyright [2009-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
def file_pattern(name):
if name == 'all':
return '.*.csv'
return name + '.*.csv'
<commit_msg>Make file pattern quote the name
Might not be the right place for it, but this does need to be done.<commit_after>
|
# -*- coding: utf-8 -*-
"""
Copyright [2009-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
def file_pattern(name):
"""
Given a database name this will produce a pattern for pgloader to match
files from that database. If the name is 'all' then a pattern that matches
all files will be created. The file pattern is quoted using '#'.
"""
pattern = name
if name == 'all':
pattern = '.'
return '~#{pattern}*.csv#'.format(pattern=pattern)
|
# -*- coding: utf-8 -*-
"""
Copyright [2009-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
def file_pattern(name):
if name == 'all':
return '.*.csv'
return name + '.*.csv'
Make file pattern quote the name
Might not be the right place for it, but this does need to be done.# -*- coding: utf-8 -*-
"""
Copyright [2009-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
def file_pattern(name):
"""
Given a database name this will produce a pattern for pgloader to match
files from that database. If the name is 'all' then a pattern that matches
all files will be created. The file pattern is quoted using '#'.
"""
pattern = name
if name == 'all':
pattern = '.'
return '~#{pattern}*.csv#'.format(pattern=pattern)
|
<commit_before># -*- coding: utf-8 -*-
"""
Copyright [2009-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
def file_pattern(name):
if name == 'all':
return '.*.csv'
return name + '.*.csv'
<commit_msg>Make file pattern quote the name
Might not be the right place for it, but this does need to be done.<commit_after># -*- coding: utf-8 -*-
"""
Copyright [2009-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
def file_pattern(name):
"""
Given a database name this will produce a pattern for pgloader to match
files from that database. If the name is 'all' then a pattern that matches
all files will be created. The file pattern is quoted using '#'.
"""
pattern = name
if name == 'all':
pattern = '.'
return '~#{pattern}*.csv#'.format(pattern=pattern)
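
The '~#...#' wrapper appears to be pgloader's quoted regular-expression syntax, with '#' chosen as the delimiter, which is presumably what the commit message means by quoting the name. Copying the new function into a quick check makes the generated patterns visible; the database name 'ensembl' is just an arbitrary example.

# Quick check of the patterns produced by the new file_pattern(), copied from
# the commit above; 'ensembl' is an arbitrary example database name.
def file_pattern(name):
    pattern = name
    if name == 'all':
        pattern = '.'
    return '~#{pattern}*.csv#'.format(pattern=pattern)


print(file_pattern('all'))      # -> ~#.*.csv#
print(file_pattern('ensembl'))  # -> ~#ensembl*.csv#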
|
b959783f7c8db26df03760bb03227ab49f1975ba
|
pywikibot/families/wikitech_family.py
|
pywikibot/families/wikitech_family.py
|
# -*- coding: utf-8 -*-
__version__ = '$Id$'
from pywikibot import family
# The Wikitech family
class Family(family.Family):
def __init__(self):
super(Family, self).__init__()
self.name = 'wikitech'
self.langs = {
'en': 'wikitech.wikimedia.org',
}
def version(self, code):
return '1.21wmf8'
def scriptpath(self, code):
return ''
|
# -*- coding: utf-8 -*-
__version__ = '$Id$'
from pywikibot import family
# The Wikitech family
class Family(family.Family):
def __init__(self):
super(Family, self).__init__()
self.name = 'wikitech'
self.langs = {
'en': 'wikitech.wikimedia.org',
}
def version(self, code):
return '1.21wmf8'
|
Remove override of default scriptpath
|
Remove override of default scriptpath
git-svn-id: 9a050473c2aca1e14f53d73349e19b938c2cf203@11370 6a7f98fc-eeb0-4dc1-a6e2-c2c589a08aa6
|
Python
|
mit
|
legoktm/pywikipedia-rewrite
|
# -*- coding: utf-8 -*-
__version__ = '$Id$'
from pywikibot import family
# The Wikitech family
class Family(family.Family):
def __init__(self):
super(Family, self).__init__()
self.name = 'wikitech'
self.langs = {
'en': 'wikitech.wikimedia.org',
}
def version(self, code):
return '1.21wmf8'
def scriptpath(self, code):
return ''
Remove override of default scriptpath
git-svn-id: 9a050473c2aca1e14f53d73349e19b938c2cf203@11370 6a7f98fc-eeb0-4dc1-a6e2-c2c589a08aa6
|
# -*- coding: utf-8 -*-
__version__ = '$Id$'
from pywikibot import family
# The Wikitech family
class Family(family.Family):
def __init__(self):
super(Family, self).__init__()
self.name = 'wikitech'
self.langs = {
'en': 'wikitech.wikimedia.org',
}
def version(self, code):
return '1.21wmf8'
|
<commit_before># -*- coding: utf-8 -*-
__version__ = '$Id$'
from pywikibot import family
# The Wikitech family
class Family(family.Family):
def __init__(self):
super(Family, self).__init__()
self.name = 'wikitech'
self.langs = {
'en': 'wikitech.wikimedia.org',
}
def version(self, code):
return '1.21wmf8'
def scriptpath(self, code):
return ''
<commit_msg>Remove override of default scriptpath
git-svn-id: 9a050473c2aca1e14f53d73349e19b938c2cf203@11370 6a7f98fc-eeb0-4dc1-a6e2-c2c589a08aa6<commit_after>
|
# -*- coding: utf-8 -*-
__version__ = '$Id$'
from pywikibot import family
# The Wikitech family
class Family(family.Family):
def __init__(self):
super(Family, self).__init__()
self.name = 'wikitech'
self.langs = {
'en': 'wikitech.wikimedia.org',
}
def version(self, code):
return '1.21wmf8'
|
# -*- coding: utf-8 -*-
__version__ = '$Id$'
from pywikibot import family
# The Wikitech family
class Family(family.Family):
def __init__(self):
super(Family, self).__init__()
self.name = 'wikitech'
self.langs = {
'en': 'wikitech.wikimedia.org',
}
def version(self, code):
return '1.21wmf8'
def scriptpath(self, code):
return ''
Remove override of default scriptpath
git-svn-id: 9a050473c2aca1e14f53d73349e19b938c2cf203@11370 6a7f98fc-eeb0-4dc1-a6e2-c2c589a08aa6# -*- coding: utf-8 -*-
__version__ = '$Id$'
from pywikibot import family
# The Wikitech family
class Family(family.Family):
def __init__(self):
super(Family, self).__init__()
self.name = 'wikitech'
self.langs = {
'en': 'wikitech.wikimedia.org',
}
def version(self, code):
return '1.21wmf8'
|
<commit_before># -*- coding: utf-8 -*-
__version__ = '$Id$'
from pywikibot import family
# The Wikitech family
class Family(family.Family):
def __init__(self):
super(Family, self).__init__()
self.name = 'wikitech'
self.langs = {
'en': 'wikitech.wikimedia.org',
}
def version(self, code):
return '1.21wmf8'
def scriptpath(self, code):
return ''
<commit_msg>Remove override of default scriptpath
git-svn-id: 9a050473c2aca1e14f53d73349e19b938c2cf203@11370 6a7f98fc-eeb0-4dc1-a6e2-c2c589a08aa6<commit_after># -*- coding: utf-8 -*-
__version__ = '$Id$'
from pywikibot import family
# The Wikitech family
class Family(family.Family):
def __init__(self):
super(Family, self).__init__()
self.name = 'wikitech'
self.langs = {
'en': 'wikitech.wikimedia.org',
}
def version(self, code):
return '1.21wmf8'
|
a757ee7cff8f90ddf8cddb859e9924821a948f37
|
steve/_version.py
|
steve/_version.py
|
#######################################################################
# This file is part of steve.
#
# Copyright (C) 2012, 2013 Will Kahn-Greene
# Licensed under the Simplified BSD License. See LICENSE for full
# license.
#######################################################################
# See http://www.python.org/dev/peps/pep-0386/
# Examples:
# * 1.0.dev
# * 1.0a2
# * 1.0b2
# * 1.0
__version__ = '0.1'
__releasedate__ = '20130113'
|
#######################################################################
# This file is part of steve.
#
# Copyright (C) 2012, 2013 Will Kahn-Greene
# Licensed under the Simplified BSD License. See LICENSE for full
# license.
#######################################################################
# See http://www.python.org/dev/peps/pep-0386/
# Examples:
# * 1.0.dev
# * 1.0a2
# * 1.0b2
# * 1.0
__version__ = '0.2.dev'
__releasedate__ = ''
|
Fix version back to .dev
|
Fix version back to .dev
|
Python
|
bsd-2-clause
|
pyvideo/steve,CarlFK/steve,CarlFK/steve,willkg/steve,willkg/steve,pyvideo/steve,willkg/steve,pyvideo/steve,CarlFK/steve
|
#######################################################################
# This file is part of steve.
#
# Copyright (C) 2012, 2013 Will Kahn-Greene
# Licensed under the Simplified BSD License. See LICENSE for full
# license.
#######################################################################
# See http://www.python.org/dev/peps/pep-0386/
# Examples:
# * 1.0.dev
# * 1.0a2
# * 1.0b2
# * 1.0
__version__ = '0.1'
__releasedate__ = '20130113'
Fix version back to .dev
|
#######################################################################
# This file is part of steve.
#
# Copyright (C) 2012, 2013 Will Kahn-Greene
# Licensed under the Simplified BSD License. See LICENSE for full
# license.
#######################################################################
# See http://www.python.org/dev/peps/pep-0386/
# Examples:
# * 1.0.dev
# * 1.0a2
# * 1.0b2
# * 1.0
__version__ = '0.2.dev'
__releasedate__ = ''
|
<commit_before>#######################################################################
# This file is part of steve.
#
# Copyright (C) 2012, 2013 Will Kahn-Greene
# Licensed under the Simplified BSD License. See LICENSE for full
# license.
#######################################################################
# See http://www.python.org/dev/peps/pep-0386/
# Examples:
# * 1.0.dev
# * 1.0a2
# * 1.0b2
# * 1.0
__version__ = '0.1'
__releasedate__ = '20130113'
<commit_msg>Fix version back to .dev<commit_after>
|
#######################################################################
# This file is part of steve.
#
# Copyright (C) 2012, 2013 Will Kahn-Greene
# Licensed under the Simplified BSD License. See LICENSE for full
# license.
#######################################################################
# See http://www.python.org/dev/peps/pep-0386/
# Examples:
# * 1.0.dev
# * 1.0a2
# * 1.0b2
# * 1.0
__version__ = '0.2.dev'
__releasedate__ = ''
|
#######################################################################
# This file is part of steve.
#
# Copyright (C) 2012, 2013 Will Kahn-Greene
# Licensed under the Simplified BSD License. See LICENSE for full
# license.
#######################################################################
# See http://www.python.org/dev/peps/pep-0386/
# Examples:
# * 1.0.dev
# * 1.0a2
# * 1.0b2
# * 1.0
__version__ = '0.1'
__releasedate__ = '20130113'
Fix version back to .dev#######################################################################
# This file is part of steve.
#
# Copyright (C) 2012, 2013 Will Kahn-Greene
# Licensed under the Simplified BSD License. See LICENSE for full
# license.
#######################################################################
# See http://www.python.org/dev/peps/pep-0386/
# Examples:
# * 1.0.dev
# * 1.0a2
# * 1.0b2
# * 1.0
__version__ = '0.2.dev'
__releasedate__ = ''
|
<commit_before>#######################################################################
# This file is part of steve.
#
# Copyright (C) 2012, 2013 Will Kahn-Greene
# Licensed under the Simplified BSD License. See LICENSE for full
# license.
#######################################################################
# See http://www.python.org/dev/peps/pep-0386/
# Examples:
# * 1.0.dev
# * 1.0a2
# * 1.0b2
# * 1.0
__version__ = '0.1'
__releasedate__ = '20130113'
<commit_msg>Fix version back to .dev<commit_after>#######################################################################
# This file is part of steve.
#
# Copyright (C) 2012, 2013 Will Kahn-Greene
# Licensed under the Simplified BSD License. See LICENSE for full
# license.
#######################################################################
# See http://www.python.org/dev/peps/pep-0386/
# Examples:
# * 1.0.dev
# * 1.0a2
# * 1.0b2
# * 1.0
__version__ = '0.2.dev'
__releasedate__ = ''
|
f5c4581ad27dab25caac0669b9b01af922ebc57c
|
keystoneclient/v2_0/extensions.py
|
keystoneclient/v2_0/extensions.py
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from keystoneclient import base
class Extension(base.Resource):
"""Represents an Identity API extension."""
def __repr__(self):
return "<Extension %s>" % self._info
class ExtensionManager(base.ManagerWithFind):
"""Manager class for listing Identity API extensions."""
resource_class = Extension
def list(self):
"""List all available extentions."""
return self._list('/extensions', 'extensions')
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from keystoneclient import base
class Extension(base.Resource):
"""Represents an Identity API extension."""
def __repr__(self):
return "<Extension %s>" % self._info
class ExtensionManager(base.ManagerWithFind):
"""Manager class for listing Identity API extensions."""
resource_class = Extension
def list(self):
"""List all available extensions."""
return self._list('/extensions', 'extensions')
|
Fix a misspelling in a comment
|
Fix a misspelling in a comment
Change-Id: If049cef6def1249fd0f70e16385aa7a4167edbde
|
Python
|
apache-2.0
|
alexpilotti/python-keystoneclient,jamielennox/python-keystoneclient,sdpp/python-keystoneclient,alexpilotti/python-keystoneclient,jamielennox/python-keystoneclient,magic0704/python-keystoneclient,darren-wang/ksc,sdpp/python-keystoneclient,darren-wang/ksc,ging/python-keystoneclient,klmitch/python-keystoneclient,ging/python-keystoneclient,Mercador/python-keystoneclient,jamielennox/python-keystoneclient,magic0704/python-keystoneclient,Mercador/python-keystoneclient,klmitch/python-keystoneclient
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from keystoneclient import base
class Extension(base.Resource):
"""Represents an Identity API extension."""
def __repr__(self):
return "<Extension %s>" % self._info
class ExtensionManager(base.ManagerWithFind):
"""Manager class for listing Identity API extensions."""
resource_class = Extension
def list(self):
"""List all available extentions."""
return self._list('/extensions', 'extensions')
Fix a misspelling in a comment
Change-Id: If049cef6def1249fd0f70e16385aa7a4167edbde
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from keystoneclient import base
class Extension(base.Resource):
"""Represents an Identity API extension."""
def __repr__(self):
return "<Extension %s>" % self._info
class ExtensionManager(base.ManagerWithFind):
"""Manager class for listing Identity API extensions."""
resource_class = Extension
def list(self):
"""List all available extensions."""
return self._list('/extensions', 'extensions')
|
<commit_before># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from keystoneclient import base
class Extension(base.Resource):
"""Represents an Identity API extension."""
def __repr__(self):
return "<Extension %s>" % self._info
class ExtensionManager(base.ManagerWithFind):
"""Manager class for listing Identity API extensions."""
resource_class = Extension
def list(self):
"""List all available extentions."""
return self._list('/extensions', 'extensions')
<commit_msg>Fix a misspelling in a comment
Change-Id: If049cef6def1249fd0f70e16385aa7a4167edbde<commit_after>
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from keystoneclient import base
class Extension(base.Resource):
"""Represents an Identity API extension."""
def __repr__(self):
return "<Extension %s>" % self._info
class ExtensionManager(base.ManagerWithFind):
"""Manager class for listing Identity API extensions."""
resource_class = Extension
def list(self):
"""List all available extensions."""
return self._list('/extensions', 'extensions')
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from keystoneclient import base
class Extension(base.Resource):
"""Represents an Identity API extension."""
def __repr__(self):
return "<Extension %s>" % self._info
class ExtensionManager(base.ManagerWithFind):
"""Manager class for listing Identity API extensions."""
resource_class = Extension
def list(self):
"""List all available extentions."""
return self._list('/extensions', 'extensions')
Fix a misspelling in a comment
Change-Id: If049cef6def1249fd0f70e16385aa7a4167edbde# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from keystoneclient import base
class Extension(base.Resource):
"""Represents an Identity API extension."""
def __repr__(self):
return "<Extension %s>" % self._info
class ExtensionManager(base.ManagerWithFind):
"""Manager class for listing Identity API extensions."""
resource_class = Extension
def list(self):
"""List all available extensions."""
return self._list('/extensions', 'extensions')
|
<commit_before># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from keystoneclient import base
class Extension(base.Resource):
"""Represents an Identity API extension."""
def __repr__(self):
return "<Extension %s>" % self._info
class ExtensionManager(base.ManagerWithFind):
"""Manager class for listing Identity API extensions."""
resource_class = Extension
def list(self):
"""List all available extentions."""
return self._list('/extensions', 'extensions')
<commit_msg>Fix a misspelling in a comment
Change-Id: If049cef6def1249fd0f70e16385aa7a4167edbde<commit_after># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from keystoneclient import base
class Extension(base.Resource):
"""Represents an Identity API extension."""
def __repr__(self):
return "<Extension %s>" % self._info
class ExtensionManager(base.ManagerWithFind):
"""Manager class for listing Identity API extensions."""
resource_class = Extension
def list(self):
"""List all available extensions."""
return self._list('/extensions', 'extensions')
|
3d5d52f7d529183bd56da43df2503a53fe3b6fc8
|
oauth2/_compat.py
|
oauth2/_compat.py
|
try:
TEXT = unicode
except NameError: #pragma NO COVER Py3k
TEXT = str
STRING_TYPES = (str, bytes)
else: #pragma NO COVER Python2
STRING_TYPES = (unicode, bytes)
def u(x, encoding='ascii'):
if isinstance(x, TEXT): #pragma NO COVER
return x
try:
return x.decode(encoding)
except AttributeError: #pragma NO COVER
raise ValueError('WTF: %s' % x)
try:
import urlparse
except ImportError: #pragma NO COVER Py3k
from urllib.parse import parse_qs
from urllib.parse import parse_qsl
from urllib.parse import quote
from urllib.parse import unquote
from urllib.parse import unquote_to_bytes
from urllib.parse import urlencode
from urllib.parse import urlparse
from urllib.parse import urlunparse
else: #pragma NO COVER Python2
from urlparse import parse_qs
from urlparse import parse_qsl
from urllib import quote
from urllib import unquote
from urllib import urlencode
from urlparse import urlparse
from urlparse import urlunparse
unquote_to_bytes = unquote
|
try:
TEXT = unicode
except NameError: #pragma NO COVER Py3k
TEXT = str
STRING_TYPES = (str, bytes)
def b(x, encoding='ascii'):
return bytes(x, encoding)
else: #pragma NO COVER Python2
STRING_TYPES = (unicode, bytes)
def b(x, encoding='ascii'):
if isinstance(x, unicode):
x = x.encode(encoding)
return x
def u(x, encoding='ascii'):
if isinstance(x, TEXT): #pragma NO COVER
return x
try:
return x.decode(encoding)
except AttributeError: #pragma NO COVER
raise ValueError('WTF: %s' % x)
try:
import urlparse
except ImportError: #pragma NO COVER Py3k
from urllib.parse import parse_qs
from urllib.parse import parse_qsl
from urllib.parse import quote
from urllib.parse import unquote
from urllib.parse import unquote_to_bytes
from urllib.parse import urlencode
from urllib.parse import urlparse
from urllib.parse import urlunparse
else: #pragma NO COVER Python2
from urlparse import parse_qs
from urlparse import parse_qsl
from urllib import quote
from urllib import unquote
from urllib import urlencode
from urlparse import urlparse
from urlparse import urlunparse
unquote_to_bytes = unquote
|
Add a 'b()' utility for forcing encoding to bytes.
|
Add a 'b()' utility for forcing encoding to bytes.
In Python2, the 'bytes()' builtin doesn't take an encoding argument.
|
Python
|
mit
|
CentricWebEstate/python-oauth2,squirro/python-oauth2,arthurian/python-oauth2,CestusMagnus/python-oauth2,joestump/python-oauth2,jackiekazil/python-oauth2,simplegeo/python-oauth2,edworboys/python-oauth2
|
try:
TEXT = unicode
except NameError: #pragma NO COVER Py3k
TEXT = str
STRING_TYPES = (str, bytes)
else: #pragma NO COVER Python2
STRING_TYPES = (unicode, bytes)
def u(x, encoding='ascii'):
if isinstance(x, TEXT): #pragma NO COVER
return x
try:
return x.decode(encoding)
except AttributeError: #pragma NO COVER
raise ValueError('WTF: %s' % x)
try:
import urlparse
except ImportError: #pragma NO COVER Py3k
from urllib.parse import parse_qs
from urllib.parse import parse_qsl
from urllib.parse import quote
from urllib.parse import unquote
from urllib.parse import unquote_to_bytes
from urllib.parse import urlencode
from urllib.parse import urlparse
from urllib.parse import urlunparse
else: #pragma NO COVER Python2
from urlparse import parse_qs
from urlparse import parse_qsl
from urllib import quote
from urllib import unquote
from urllib import urlencode
from urlparse import urlparse
from urlparse import urlunparse
unquote_to_bytes = unquote
Add a 'b()' utility for forcing encoding to bytes.
In Python2, the 'bytes()' builtin doesn't take an encoding argument.
|
try:
TEXT = unicode
except NameError: #pragma NO COVER Py3k
TEXT = str
STRING_TYPES = (str, bytes)
def b(x, encoding='ascii'):
return bytes(x, encoding)
else: #pragma NO COVER Python2
STRING_TYPES = (unicode, bytes)
def b(x, encoding='ascii'):
if isinstance(x, unicode):
x = x.encode(encoding)
return x
def u(x, encoding='ascii'):
if isinstance(x, TEXT): #pragma NO COVER
return x
try:
return x.decode(encoding)
except AttributeError: #pragma NO COVER
raise ValueError('WTF: %s' % x)
try:
import urlparse
except ImportError: #pragma NO COVER Py3k
from urllib.parse import parse_qs
from urllib.parse import parse_qsl
from urllib.parse import quote
from urllib.parse import unquote
from urllib.parse import unquote_to_bytes
from urllib.parse import urlencode
from urllib.parse import urlparse
from urllib.parse import urlunparse
else: #pragma NO COVER Python2
from urlparse import parse_qs
from urlparse import parse_qsl
from urllib import quote
from urllib import unquote
from urllib import urlencode
from urlparse import urlparse
from urlparse import urlunparse
unquote_to_bytes = unquote
|
<commit_before>try:
TEXT = unicode
except NameError: #pragma NO COVER Py3k
TEXT = str
STRING_TYPES = (str, bytes)
else: #pragma NO COVER Python2
STRING_TYPES = (unicode, bytes)
def u(x, encoding='ascii'):
if isinstance(x, TEXT): #pragma NO COVER
return x
try:
return x.decode(encoding)
except AttributeError: #pragma NO COVER
raise ValueError('WTF: %s' % x)
try:
import urlparse
except ImportError: #pragma NO COVER Py3k
from urllib.parse import parse_qs
from urllib.parse import parse_qsl
from urllib.parse import quote
from urllib.parse import unquote
from urllib.parse import unquote_to_bytes
from urllib.parse import urlencode
from urllib.parse import urlparse
from urllib.parse import urlunparse
else: #pragma NO COVER Python2
from urlparse import parse_qs
from urlparse import parse_qsl
from urllib import quote
from urllib import unquote
from urllib import urlencode
from urlparse import urlparse
from urlparse import urlunparse
unquote_to_bytes = unquote
<commit_msg>Add a 'b()' utility for forcing encoding to bytes.
In Python2, the 'bytes()' builtin doesn't take an encoding argument.<commit_after>
|
try:
TEXT = unicode
except NameError: #pragma NO COVER Py3k
TEXT = str
STRING_TYPES = (str, bytes)
def b(x, encoding='ascii'):
return bytes(x, encoding)
else: #pragma NO COVER Python2
STRING_TYPES = (unicode, bytes)
def b(x, encoding='ascii'):
if isinstance(x, unicode):
x = x.encode(encoding)
return x
def u(x, encoding='ascii'):
if isinstance(x, TEXT): #pragma NO COVER
return x
try:
return x.decode(encoding)
except AttributeError: #pragma NO COVER
raise ValueError('WTF: %s' % x)
try:
import urlparse
except ImportError: #pragma NO COVER Py3k
from urllib.parse import parse_qs
from urllib.parse import parse_qsl
from urllib.parse import quote
from urllib.parse import unquote
from urllib.parse import unquote_to_bytes
from urllib.parse import urlencode
from urllib.parse import urlparse
from urllib.parse import urlunparse
else: #pragma NO COVER Python2
from urlparse import parse_qs
from urlparse import parse_qsl
from urllib import quote
from urllib import unquote
from urllib import urlencode
from urlparse import urlparse
from urlparse import urlunparse
unquote_to_bytes = unquote
|
try:
TEXT = unicode
except NameError: #pragma NO COVER Py3k
TEXT = str
STRING_TYPES = (str, bytes)
else: #pragma NO COVER Python2
STRING_TYPES = (unicode, bytes)
def u(x, encoding='ascii'):
if isinstance(x, TEXT): #pragma NO COVER
return x
try:
return x.decode(encoding)
except AttributeError: #pragma NO COVER
raise ValueError('WTF: %s' % x)
try:
import urlparse
except ImportError: #pragma NO COVER Py3k
from urllib.parse import parse_qs
from urllib.parse import parse_qsl
from urllib.parse import quote
from urllib.parse import unquote
from urllib.parse import unquote_to_bytes
from urllib.parse import urlencode
from urllib.parse import urlparse
from urllib.parse import urlunparse
else: #pragma NO COVER Python2
from urlparse import parse_qs
from urlparse import parse_qsl
from urllib import quote
from urllib import unquote
from urllib import urlencode
from urlparse import urlparse
from urlparse import urlunparse
unquote_to_bytes = unquote
Add a 'b()' utility for forcing encoding to bytes.
In Python2, the 'bytes()' builtin doesn't take an encoding argument.try:
TEXT = unicode
except NameError: #pragma NO COVER Py3k
TEXT = str
STRING_TYPES = (str, bytes)
def b(x, encoding='ascii'):
return bytes(x, encoding)
else: #pragma NO COVER Python2
STRING_TYPES = (unicode, bytes)
def b(x, encoding='ascii'):
if isinstance(x, unicode):
x = x.encode(encoding)
return x
def u(x, encoding='ascii'):
if isinstance(x, TEXT): #pragma NO COVER
return x
try:
return x.decode(encoding)
except AttributeError: #pragma NO COVER
raise ValueError('WTF: %s' % x)
try:
import urlparse
except ImportError: #pragma NO COVER Py3k
from urllib.parse import parse_qs
from urllib.parse import parse_qsl
from urllib.parse import quote
from urllib.parse import unquote
from urllib.parse import unquote_to_bytes
from urllib.parse import urlencode
from urllib.parse import urlparse
from urllib.parse import urlunparse
else: #pragma NO COVER Python2
from urlparse import parse_qs
from urlparse import parse_qsl
from urllib import quote
from urllib import unquote
from urllib import urlencode
from urlparse import urlparse
from urlparse import urlunparse
unquote_to_bytes = unquote
|
<commit_before>try:
TEXT = unicode
except NameError: #pragma NO COVER Py3k
TEXT = str
STRING_TYPES = (str, bytes)
else: #pragma NO COVER Python2
STRING_TYPES = (unicode, bytes)
def u(x, encoding='ascii'):
if isinstance(x, TEXT): #pragma NO COVER
return x
try:
return x.decode(encoding)
except AttributeError: #pragma NO COVER
raise ValueError('WTF: %s' % x)
try:
import urlparse
except ImportError: #pragma NO COVER Py3k
from urllib.parse import parse_qs
from urllib.parse import parse_qsl
from urllib.parse import quote
from urllib.parse import unquote
from urllib.parse import unquote_to_bytes
from urllib.parse import urlencode
from urllib.parse import urlparse
from urllib.parse import urlunparse
else: #pragma NO COVER Python2
from urlparse import parse_qs
from urlparse import parse_qsl
from urllib import quote
from urllib import unquote
from urllib import urlencode
from urlparse import urlparse
from urlparse import urlunparse
unquote_to_bytes = unquote
<commit_msg>Add a 'b()' utility for forcing encoding to bytes.
In Python2, the 'bytes()' builtin doesn't take an encoding argument.<commit_after>try:
TEXT = unicode
except NameError: #pragma NO COVER Py3k
TEXT = str
STRING_TYPES = (str, bytes)
def b(x, encoding='ascii'):
return bytes(x, encoding)
else: #pragma NO COVER Python2
STRING_TYPES = (unicode, bytes)
def b(x, encoding='ascii'):
if isinstance(x, unicode):
x = x.encode(encoding)
return x
def u(x, encoding='ascii'):
if isinstance(x, TEXT): #pragma NO COVER
return x
try:
return x.decode(encoding)
except AttributeError: #pragma NO COVER
raise ValueError('WTF: %s' % x)
try:
import urlparse
except ImportError: #pragma NO COVER Py3k
from urllib.parse import parse_qs
from urllib.parse import parse_qsl
from urllib.parse import quote
from urllib.parse import unquote
from urllib.parse import unquote_to_bytes
from urllib.parse import urlencode
from urllib.parse import urlparse
from urllib.parse import urlunparse
else: #pragma NO COVER Python2
from urlparse import parse_qs
from urlparse import parse_qsl
from urllib import quote
from urllib import unquote
from urllib import urlencode
from urlparse import urlparse
from urlparse import urlunparse
unquote_to_bytes = unquote
|
35a0bfaf499029fa54d33d6ea712e255cc41e1de
|
core/migrations/0003_set_homepage.py
|
core/migrations/0003_set_homepage.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
def set_homepage(apps, schema_editor):
Site = apps.get_model('wagtailcore.Site')
HomePage = apps.get_model("core", "HomePage")
homepage = HomePage.objects.get(slug="home")
# Create default site
Site.objects.create(
hostname='localhost',
root_page_id=homepage.id,
is_default_site=True
)
class Migration(migrations.Migration):
dependencies = [
('core', '0002_create_home_page'),
]
operations = [
migrations.RunPython(set_homepage),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
def set_homepage(apps, schema_editor):
Site = apps.get_model('wagtailcore.Site')
HomePage = apps.get_model("core", "HomePage")
homepage = HomePage.objects.get(slug="home")
Site.objects.filter(hostname='localhost').delete()
# Create default site
Site.objects.get_or_create(
hostname='localhost',
root_page_id=homepage.id,
is_default_site=True
)
class Migration(migrations.Migration):
dependencies = [
('core', '0002_create_home_page'),
]
operations = [
migrations.RunPython(set_homepage),
]
|
Delete existing localhost entry for site.
|
Delete existing localhost entry for site.
|
Python
|
mit
|
albertoconnor/website,albertoconnor/website,OpenCanada/website,OpenCanada/website,OpenCanada/website,albertoconnor/website,albertoconnor/website,OpenCanada/website
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
def set_homepage(apps, schema_editor):
Site = apps.get_model('wagtailcore.Site')
HomePage = apps.get_model("core", "HomePage")
homepage = HomePage.objects.get(slug="home")
# Create default site
Site.objects.create(
hostname='localhost',
root_page_id=homepage.id,
is_default_site=True
)
class Migration(migrations.Migration):
dependencies = [
('core', '0002_create_home_page'),
]
operations = [
migrations.RunPython(set_homepage),
]
Delete existing localhost entry for site.
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
def set_homepage(apps, schema_editor):
Site = apps.get_model('wagtailcore.Site')
HomePage = apps.get_model("core", "HomePage")
homepage = HomePage.objects.get(slug="home")
Site.objects.filter(hostname='localhost').delete()
# Create default site
Site.objects.get_or_create(
hostname='localhost',
root_page_id=homepage.id,
is_default_site=True
)
class Migration(migrations.Migration):
dependencies = [
('core', '0002_create_home_page'),
]
operations = [
migrations.RunPython(set_homepage),
]
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
def set_homepage(apps, schema_editor):
Site = apps.get_model('wagtailcore.Site')
HomePage = apps.get_model("core", "HomePage")
homepage = HomePage.objects.get(slug="home")
# Create default site
Site.objects.create(
hostname='localhost',
root_page_id=homepage.id,
is_default_site=True
)
class Migration(migrations.Migration):
dependencies = [
('core', '0002_create_home_page'),
]
operations = [
migrations.RunPython(set_homepage),
]
<commit_msg>Delete existing localhost entry for site.<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
def set_homepage(apps, schema_editor):
Site = apps.get_model('wagtailcore.Site')
HomePage = apps.get_model("core", "HomePage")
homepage = HomePage.objects.get(slug="home")
Site.objects.filter(hostname='localhost').delete()
# Create default site
Site.objects.get_or_create(
hostname='localhost',
root_page_id=homepage.id,
is_default_site=True
)
class Migration(migrations.Migration):
dependencies = [
('core', '0002_create_home_page'),
]
operations = [
migrations.RunPython(set_homepage),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
def set_homepage(apps, schema_editor):
Site = apps.get_model('wagtailcore.Site')
HomePage = apps.get_model("core", "HomePage")
homepage = HomePage.objects.get(slug="home")
# Create default site
Site.objects.create(
hostname='localhost',
root_page_id=homepage.id,
is_default_site=True
)
class Migration(migrations.Migration):
dependencies = [
('core', '0002_create_home_page'),
]
operations = [
migrations.RunPython(set_homepage),
]
Delete existing localhost entry for site.# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
def set_homepage(apps, schema_editor):
Site = apps.get_model('wagtailcore.Site')
HomePage = apps.get_model("core", "HomePage")
homepage = HomePage.objects.get(slug="home")
Site.objects.filter(hostname='localhost').delete()
# Create default site
Site.objects.get_or_create(
hostname='localhost',
root_page_id=homepage.id,
is_default_site=True
)
class Migration(migrations.Migration):
dependencies = [
('core', '0002_create_home_page'),
]
operations = [
migrations.RunPython(set_homepage),
]
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
def set_homepage(apps, schema_editor):
Site = apps.get_model('wagtailcore.Site')
HomePage = apps.get_model("core", "HomePage")
homepage = HomePage.objects.get(slug="home")
# Create default site
Site.objects.create(
hostname='localhost',
root_page_id=homepage.id,
is_default_site=True
)
class Migration(migrations.Migration):
dependencies = [
('core', '0002_create_home_page'),
]
operations = [
migrations.RunPython(set_homepage),
]
<commit_msg>Delete existing localhost entry for site.<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
def set_homepage(apps, schema_editor):
Site = apps.get_model('wagtailcore.Site')
HomePage = apps.get_model("core", "HomePage")
homepage = HomePage.objects.get(slug="home")
Site.objects.filter(hostname='localhost').delete()
# Create default site
Site.objects.get_or_create(
hostname='localhost',
root_page_id=homepage.id,
is_default_site=True
)
class Migration(migrations.Migration):
dependencies = [
('core', '0002_create_home_page'),
]
operations = [
migrations.RunPython(set_homepage),
]
|
74b94564583c2cc5de50bb86be048afe3b0ca67e
|
links/maker/serializers.py
|
links/maker/serializers.py
|
from rest_framework import serializers
from maker.models import Maker
class RegistrationRequestSerializer(serializers.Serializer):
email = serializers.EmailField(required=True)
password = serializers.CharField(required=True)
first_name = serializers.CharField(required=True)
last_name = serializers.CharField(required=True)
class AuthenticationResponseSerializer(serializers.Serializer):
token = serializers.CharField()
class AuthenticationRequestSerializer(serializers.Serializer):
identifier = serializers.CharField(required=True)
password = serializers.CharField(required=True)
class ResetPasswordRequestSerializer(serializers.Serializer):
email = serializers.EmailField(required=True)
class ChangePasswordSerializer(serializers.Serializer):
new_password = serializers.CharField(required=True)
confirm_password = serializers.CharField(required=True)
def validate(self, attrs):
if attrs['new_password'] != attrs['confirm_password']:
raise serializers.ValidationError("'new_password' and 'confirm_password' do not match")
return attrs
class EmailChangeRequestSerializer(serializers.Serializer):
new_email = serializers.EmailField(required=True)
class MakerSerializer(serializers.ModelSerializer):
class Meta:
model = Maker
fields = ('identifier', 'first_name', 'last_name', 'email',
'photo_url', 'bio', 'joined')
read_only_fields = ('identifier', 'email', 'joined',)
|
from rest_framework import serializers
from maker.models import Maker
class RegistrationRequestSerializer(serializers.Serializer):
email = serializers.EmailField(required=True)
password = serializers.CharField(required=True)
first_name = serializers.CharField(required=True)
last_name = serializers.CharField(required=True)
class AuthenticationResponseSerializer(serializers.Serializer):
token = serializers.CharField()
class AuthenticationRequestSerializer(serializers.Serializer):
identifier = serializers.CharField(required=True)
password = serializers.CharField(required=True)
class ResetPasswordRequestSerializer(serializers.Serializer):
email = serializers.EmailField(required=True)
class ChangePasswordSerializer(serializers.Serializer):
new_password = serializers.CharField(required=True)
confirm_password = serializers.CharField(required=True)
def validate(self, attrs):
if attrs['new_password'] != attrs['confirm_password']:
raise serializers.ValidationError("'new_password' and 'confirm_password' do not match")
return attrs
class EmailChangeRequestSerializer(serializers.Serializer):
new_email = serializers.EmailField(required=True)
class EmailChangeProcessSerializer(serializers.Serializer):
token = serializers.CharField(required=True)
class MakerSerializer(serializers.ModelSerializer):
class Meta:
model = Maker
fields = ('identifier', 'first_name', 'last_name', 'email',
'photo_url', 'bio', 'joined')
read_only_fields = ('identifier', 'email', 'joined',)
|
Add email change process serializer
|
Add email change process serializer
|
Python
|
mit
|
projectweekend/Links-API,projectweekend/Links-API
|
from rest_framework import serializers
from maker.models import Maker
class RegistrationRequestSerializer(serializers.Serializer):
email = serializers.EmailField(required=True)
password = serializers.CharField(required=True)
first_name = serializers.CharField(required=True)
last_name = serializers.CharField(required=True)
class AuthenticationResponseSerializer(serializers.Serializer):
token = serializers.CharField()
class AuthenticationRequestSerializer(serializers.Serializer):
identifier = serializers.CharField(required=True)
password = serializers.CharField(required=True)
class ResetPasswordRequestSerializer(serializers.Serializer):
email = serializers.EmailField(required=True)
class ChangePasswordSerializer(serializers.Serializer):
new_password = serializers.CharField(required=True)
confirm_password = serializers.CharField(required=True)
def validate(self, attrs):
if attrs['new_password'] != attrs['confirm_password']:
raise serializers.ValidationError("'new_password' and 'confirm_password' do not match")
return attrs
class EmailChangeRequestSerializer(serializers.Serializer):
new_email = serializers.EmailField(required=True)
class MakerSerializer(serializers.ModelSerializer):
class Meta:
model = Maker
fields = ('identifier', 'first_name', 'last_name', 'email',
'photo_url', 'bio', 'joined')
read_only_fields = ('identifier', 'email', 'joined',)
Add email change process serializer
|
from rest_framework import serializers
from maker.models import Maker
class RegistrationRequestSerializer(serializers.Serializer):
email = serializers.EmailField(required=True)
password = serializers.CharField(required=True)
first_name = serializers.CharField(required=True)
last_name = serializers.CharField(required=True)
class AuthenticationResponseSerializer(serializers.Serializer):
token = serializers.CharField()
class AuthenticationRequestSerializer(serializers.Serializer):
identifier = serializers.CharField(required=True)
password = serializers.CharField(required=True)
class ResetPasswordRequestSerializer(serializers.Serializer):
email = serializers.EmailField(required=True)
class ChangePasswordSerializer(serializers.Serializer):
new_password = serializers.CharField(required=True)
confirm_password = serializers.CharField(required=True)
def validate(self, attrs):
if attrs['new_password'] != attrs['confirm_password']:
raise serializers.ValidationError("'new_password' and 'confirm_password' do not match")
return attrs
class EmailChangeRequestSerializer(serializers.Serializer):
new_email = serializers.EmailField(required=True)
class EmailChangeProcessSerializer(serializers.Serializer):
token = serializers.CharField(required=True)
class MakerSerializer(serializers.ModelSerializer):
class Meta:
model = Maker
fields = ('identifier', 'first_name', 'last_name', 'email',
'photo_url', 'bio', 'joined')
read_only_fields = ('identifier', 'email', 'joined',)
|
<commit_before>from rest_framework import serializers
from maker.models import Maker
class RegistrationRequestSerializer(serializers.Serializer):
email = serializers.EmailField(required=True)
password = serializers.CharField(required=True)
first_name = serializers.CharField(required=True)
last_name = serializers.CharField(required=True)
class AuthenticationResponseSerializer(serializers.Serializer):
token = serializers.CharField()
class AuthenticationRequestSerializer(serializers.Serializer):
identifier = serializers.CharField(required=True)
password = serializers.CharField(required=True)
class ResetPasswordRequestSerializer(serializers.Serializer):
email = serializers.EmailField(required=True)
class ChangePasswordSerializer(serializers.Serializer):
new_password = serializers.CharField(required=True)
confirm_password = serializers.CharField(required=True)
def validate(self, attrs):
if attrs['new_password'] != attrs['confirm_password']:
raise serializers.ValidationError("'new_password' and 'confirm_password' do not match")
return attrs
class EmailChangeRequestSerializer(serializers.Serializer):
new_email = serializers.EmailField(required=True)
class MakerSerializer(serializers.ModelSerializer):
class Meta:
model = Maker
fields = ('identifier', 'first_name', 'last_name', 'email',
'photo_url', 'bio', 'joined')
read_only_fields = ('identifier', 'email', 'joined',)
<commit_msg>Add email change process serializer<commit_after>
|
from rest_framework import serializers
from maker.models import Maker
class RegistrationRequestSerializer(serializers.Serializer):
email = serializers.EmailField(required=True)
password = serializers.CharField(required=True)
first_name = serializers.CharField(required=True)
last_name = serializers.CharField(required=True)
class AuthenticationResponseSerializer(serializers.Serializer):
token = serializers.CharField()
class AuthenticationRequestSerializer(serializers.Serializer):
identifier = serializers.CharField(required=True)
password = serializers.CharField(required=True)
class ResetPasswordRequestSerializer(serializers.Serializer):
email = serializers.EmailField(required=True)
class ChangePasswordSerializer(serializers.Serializer):
new_password = serializers.CharField(required=True)
confirm_password = serializers.CharField(required=True)
def validate(self, attrs):
if attrs['new_password'] != attrs['confirm_password']:
raise serializers.ValidationError("'new_password' and 'confirm_password' do not match")
return attrs
class EmailChangeRequestSerializer(serializers.Serializer):
new_email = serializers.EmailField(required=True)
class EmailChangeProcessSerializer(serializers.Serializer):
token = serializers.CharField(required=True)
class MakerSerializer(serializers.ModelSerializer):
class Meta:
model = Maker
fields = ('identifier', 'first_name', 'last_name', 'email',
'photo_url', 'bio', 'joined')
read_only_fields = ('identifier', 'email', 'joined',)
|
from rest_framework import serializers
from maker.models import Maker
class RegistrationRequestSerializer(serializers.Serializer):
email = serializers.EmailField(required=True)
password = serializers.CharField(required=True)
first_name = serializers.CharField(required=True)
last_name = serializers.CharField(required=True)
class AuthenticationResponseSerializer(serializers.Serializer):
token = serializers.CharField()
class AuthenticationRequestSerializer(serializers.Serializer):
identifier = serializers.CharField(required=True)
password = serializers.CharField(required=True)
class ResetPasswordRequestSerializer(serializers.Serializer):
email = serializers.EmailField(required=True)
class ChangePasswordSerializer(serializers.Serializer):
new_password = serializers.CharField(required=True)
confirm_password = serializers.CharField(required=True)
def validate(self, attrs):
if attrs['new_password'] != attrs['confirm_password']:
raise serializers.ValidationError("'new_password' and 'confirm_password' do not match")
return attrs
class EmailChangeRequestSerializer(serializers.Serializer):
new_email = serializers.EmailField(required=True)
class MakerSerializer(serializers.ModelSerializer):
class Meta:
model = Maker
fields = ('identifier', 'first_name', 'last_name', 'email',
'photo_url', 'bio', 'joined')
read_only_fields = ('identifier', 'email', 'joined',)
Add email change process serializerfrom rest_framework import serializers
from maker.models import Maker
class RegistrationRequestSerializer(serializers.Serializer):
email = serializers.EmailField(required=True)
password = serializers.CharField(required=True)
first_name = serializers.CharField(required=True)
last_name = serializers.CharField(required=True)
class AuthenticationResponseSerializer(serializers.Serializer):
token = serializers.CharField()
class AuthenticationRequestSerializer(serializers.Serializer):
identifier = serializers.CharField(required=True)
password = serializers.CharField(required=True)
class ResetPasswordRequestSerializer(serializers.Serializer):
email = serializers.EmailField(required=True)
class ChangePasswordSerializer(serializers.Serializer):
new_password = serializers.CharField(required=True)
confirm_password = serializers.CharField(required=True)
def validate(self, attrs):
if attrs['new_password'] != attrs['confirm_password']:
raise serializers.ValidationError("'new_password' and 'confirm_password' do not match")
return attrs
class EmailChangeRequestSerializer(serializers.Serializer):
new_email = serializers.EmailField(required=True)
class EmailChangeProcessSerializer(serializers.Serializer):
token = serializers.CharField(required=True)
class MakerSerializer(serializers.ModelSerializer):
class Meta:
model = Maker
fields = ('identifier', 'first_name', 'last_name', 'email',
'photo_url', 'bio', 'joined')
read_only_fields = ('identifier', 'email', 'joined',)
|
<commit_before>from rest_framework import serializers
from maker.models import Maker
class RegistrationRequestSerializer(serializers.Serializer):
email = serializers.EmailField(required=True)
password = serializers.CharField(required=True)
first_name = serializers.CharField(required=True)
last_name = serializers.CharField(required=True)
class AuthenticationResponseSerializer(serializers.Serializer):
token = serializers.CharField()
class AuthenticationRequestSerializer(serializers.Serializer):
identifier = serializers.CharField(required=True)
password = serializers.CharField(required=True)
class ResetPasswordRequestSerializer(serializers.Serializer):
email = serializers.EmailField(required=True)
class ChangePasswordSerializer(serializers.Serializer):
new_password = serializers.CharField(required=True)
confirm_password = serializers.CharField(required=True)
def validate(self, attrs):
if attrs['new_password'] != attrs['confirm_password']:
raise serializers.ValidationError("'new_password' and 'confirm_password' do not match")
return attrs
class EmailChangeRequestSerializer(serializers.Serializer):
new_email = serializers.EmailField(required=True)
class MakerSerializer(serializers.ModelSerializer):
class Meta:
model = Maker
fields = ('identifier', 'first_name', 'last_name', 'email',
'photo_url', 'bio', 'joined')
read_only_fields = ('identifier', 'email', 'joined',)
<commit_msg>Add email change process serializer<commit_after>from rest_framework import serializers
from maker.models import Maker
class RegistrationRequestSerializer(serializers.Serializer):
email = serializers.EmailField(required=True)
password = serializers.CharField(required=True)
first_name = serializers.CharField(required=True)
last_name = serializers.CharField(required=True)
class AuthenticationResponseSerializer(serializers.Serializer):
token = serializers.CharField()
class AuthenticationRequestSerializer(serializers.Serializer):
identifier = serializers.CharField(required=True)
password = serializers.CharField(required=True)
class ResetPasswordRequestSerializer(serializers.Serializer):
email = serializers.EmailField(required=True)
class ChangePasswordSerializer(serializers.Serializer):
new_password = serializers.CharField(required=True)
confirm_password = serializers.CharField(required=True)
def validate(self, attrs):
if attrs['new_password'] != attrs['confirm_password']:
raise serializers.ValidationError("'new_password' and 'confirm_password' do not match")
return attrs
class EmailChangeRequestSerializer(serializers.Serializer):
new_email = serializers.EmailField(required=True)
class EmailChangeProcessSerializer(serializers.Serializer):
token = serializers.CharField(required=True)
class MakerSerializer(serializers.ModelSerializer):
class Meta:
model = Maker
fields = ('identifier', 'first_name', 'last_name', 'email',
'photo_url', 'bio', 'joined')
read_only_fields = ('identifier', 'email', 'joined',)
|
7d6580f2eb0e142a7ff7c77e6fc1d75f2a3d71b3
|
isort/pylama_isort.py
|
isort/pylama_isort.py
|
import os
import sys
from contextlib import contextmanager
from typing import Any, Dict, List
from pylama.lint import Linter as BaseLinter
from . import api
@contextmanager
def supress_stdout():
stdout = sys.stdout
with open(os.devnull, "w") as devnull:
sys.stdout = devnull
yield
sys.stdout = stdout
class Linter(BaseLinter):
def allow(self, path: str) -> bool:
"""Determine if this path should be linted."""
return path.endswith(".py")
def run(self, path: str, **meta: Any) -> List[Dict[str, Any]]:
"""Lint the file. Return an array of error dicts if appropriate."""
with supress_stdout():
if not api.check_file(path):
return [
{"lnum": 0, "col": 0, "text": "Incorrectly sorted imports.", "type": "ISORT"}
]
else:
return []
|
import os
import sys
from contextlib import contextmanager
from typing import Any, Dict, List
from pylama.lint import Linter as BaseLinter
from isort.exceptions import FileSkipped
from . import api
@contextmanager
def supress_stdout():
stdout = sys.stdout
with open(os.devnull, "w") as devnull:
sys.stdout = devnull
yield
sys.stdout = stdout
class Linter(BaseLinter):
def allow(self, path: str) -> bool:
"""Determine if this path should be linted."""
return path.endswith(".py")
def run(self, path: str, **meta: Any) -> List[Dict[str, Any]]:
"""Lint the file. Return an array of error dicts if appropriate."""
with supress_stdout():
try:
if not api.check_file(path, disregard_skip=False):
return [
{
"lnum": 0,
"col": 0,
"text": "Incorrectly sorted imports.",
"type": "ISORT",
}
]
except FileSkipped:
pass
return []
|
Fix pylama integration to work with file skip comments
|
Fix pylama integration to work with file skip comments
|
Python
|
mit
|
PyCQA/isort,PyCQA/isort
|
import os
import sys
from contextlib import contextmanager
from typing import Any, Dict, List
from pylama.lint import Linter as BaseLinter
from . import api
@contextmanager
def supress_stdout():
stdout = sys.stdout
with open(os.devnull, "w") as devnull:
sys.stdout = devnull
yield
sys.stdout = stdout
class Linter(BaseLinter):
def allow(self, path: str) -> bool:
"""Determine if this path should be linted."""
return path.endswith(".py")
def run(self, path: str, **meta: Any) -> List[Dict[str, Any]]:
"""Lint the file. Return an array of error dicts if appropriate."""
with supress_stdout():
if not api.check_file(path):
return [
{"lnum": 0, "col": 0, "text": "Incorrectly sorted imports.", "type": "ISORT"}
]
else:
return []
Fix pylama integration to work with file skip comments
|
import os
import sys
from contextlib import contextmanager
from typing import Any, Dict, List
from pylama.lint import Linter as BaseLinter
from isort.exceptions import FileSkipped
from . import api
@contextmanager
def supress_stdout():
stdout = sys.stdout
with open(os.devnull, "w") as devnull:
sys.stdout = devnull
yield
sys.stdout = stdout
class Linter(BaseLinter):
def allow(self, path: str) -> bool:
"""Determine if this path should be linted."""
return path.endswith(".py")
def run(self, path: str, **meta: Any) -> List[Dict[str, Any]]:
"""Lint the file. Return an array of error dicts if appropriate."""
with supress_stdout():
try:
if not api.check_file(path, disregard_skip=False):
return [
{
"lnum": 0,
"col": 0,
"text": "Incorrectly sorted imports.",
"type": "ISORT",
}
]
except FileSkipped:
pass
return []
|
<commit_before>import os
import sys
from contextlib import contextmanager
from typing import Any, Dict, List
from pylama.lint import Linter as BaseLinter
from . import api
@contextmanager
def supress_stdout():
stdout = sys.stdout
with open(os.devnull, "w") as devnull:
sys.stdout = devnull
yield
sys.stdout = stdout
class Linter(BaseLinter):
def allow(self, path: str) -> bool:
"""Determine if this path should be linted."""
return path.endswith(".py")
def run(self, path: str, **meta: Any) -> List[Dict[str, Any]]:
"""Lint the file. Return an array of error dicts if appropriate."""
with supress_stdout():
if not api.check_file(path):
return [
{"lnum": 0, "col": 0, "text": "Incorrectly sorted imports.", "type": "ISORT"}
]
else:
return []
<commit_msg>Fix pylama integration to work with file skip comments<commit_after>
|
import os
import sys
from contextlib import contextmanager
from typing import Any, Dict, List
from pylama.lint import Linter as BaseLinter
from isort.exceptions import FileSkipped
from . import api
@contextmanager
def supress_stdout():
stdout = sys.stdout
with open(os.devnull, "w") as devnull:
sys.stdout = devnull
yield
sys.stdout = stdout
class Linter(BaseLinter):
def allow(self, path: str) -> bool:
"""Determine if this path should be linted."""
return path.endswith(".py")
def run(self, path: str, **meta: Any) -> List[Dict[str, Any]]:
"""Lint the file. Return an array of error dicts if appropriate."""
with supress_stdout():
try:
if not api.check_file(path, disregard_skip=False):
return [
{
"lnum": 0,
"col": 0,
"text": "Incorrectly sorted imports.",
"type": "ISORT",
}
]
except FileSkipped:
pass
return []
|
import os
import sys
from contextlib import contextmanager
from typing import Any, Dict, List
from pylama.lint import Linter as BaseLinter
from . import api
@contextmanager
def supress_stdout():
stdout = sys.stdout
with open(os.devnull, "w") as devnull:
sys.stdout = devnull
yield
sys.stdout = stdout
class Linter(BaseLinter):
def allow(self, path: str) -> bool:
"""Determine if this path should be linted."""
return path.endswith(".py")
def run(self, path: str, **meta: Any) -> List[Dict[str, Any]]:
"""Lint the file. Return an array of error dicts if appropriate."""
with supress_stdout():
if not api.check_file(path):
return [
{"lnum": 0, "col": 0, "text": "Incorrectly sorted imports.", "type": "ISORT"}
]
else:
return []
Fix pylama integration to work with file skip commentsimport os
import sys
from contextlib import contextmanager
from typing import Any, Dict, List
from pylama.lint import Linter as BaseLinter
from isort.exceptions import FileSkipped
from . import api
@contextmanager
def supress_stdout():
stdout = sys.stdout
with open(os.devnull, "w") as devnull:
sys.stdout = devnull
yield
sys.stdout = stdout
class Linter(BaseLinter):
def allow(self, path: str) -> bool:
"""Determine if this path should be linted."""
return path.endswith(".py")
def run(self, path: str, **meta: Any) -> List[Dict[str, Any]]:
"""Lint the file. Return an array of error dicts if appropriate."""
with supress_stdout():
try:
if not api.check_file(path, disregard_skip=False):
return [
{
"lnum": 0,
"col": 0,
"text": "Incorrectly sorted imports.",
"type": "ISORT",
}
]
except FileSkipped:
pass
return []
|
<commit_before>import os
import sys
from contextlib import contextmanager
from typing import Any, Dict, List
from pylama.lint import Linter as BaseLinter
from . import api
@contextmanager
def supress_stdout():
stdout = sys.stdout
with open(os.devnull, "w") as devnull:
sys.stdout = devnull
yield
sys.stdout = stdout
class Linter(BaseLinter):
def allow(self, path: str) -> bool:
"""Determine if this path should be linted."""
return path.endswith(".py")
def run(self, path: str, **meta: Any) -> List[Dict[str, Any]]:
"""Lint the file. Return an array of error dicts if appropriate."""
with supress_stdout():
if not api.check_file(path):
return [
{"lnum": 0, "col": 0, "text": "Incorrectly sorted imports.", "type": "ISORT"}
]
else:
return []
<commit_msg>Fix pylama integration to work with file skip comments<commit_after>import os
import sys
from contextlib import contextmanager
from typing import Any, Dict, List
from pylama.lint import Linter as BaseLinter
from isort.exceptions import FileSkipped
from . import api
@contextmanager
def supress_stdout():
stdout = sys.stdout
with open(os.devnull, "w") as devnull:
sys.stdout = devnull
yield
sys.stdout = stdout
class Linter(BaseLinter):
def allow(self, path: str) -> bool:
"""Determine if this path should be linted."""
return path.endswith(".py")
def run(self, path: str, **meta: Any) -> List[Dict[str, Any]]:
"""Lint the file. Return an array of error dicts if appropriate."""
with supress_stdout():
try:
if not api.check_file(path, disregard_skip=False):
return [
{
"lnum": 0,
"col": 0,
"text": "Incorrectly sorted imports.",
"type": "ISORT",
}
]
except FileSkipped:
pass
return []
|
e7999bd8afa05854aac25cc5f16fd8555031aa5b
|
ci/run_all_spiders.py
|
ci/run_all_spiders.py
|
from scrapy.utils.project import get_project_settings
from scrapy.crawler import CrawlerProcess
if __name__ == '__main__':
settings = get_project_settings()
settings.set('LOG_FILE', 'all_spiders.log')
settings.set('LOG_LEVEL', 'ERROR')
settings.set('TELNETCONSOLE_ENABLED', False)
settings.set('FEED_URI', 'output.ndgeojson')
settings.set('FEED_FORMAT', 'ndgeojson')
settings.get('ITEM_PIPELINES')['locations.pipelines.ApplySpiderNamePipeline'] = 100
process = CrawlerProcess(settings)
for spider_name in process.spider_loader.list():
process.crawl(spider_name)
process.start()
|
from scrapy.utils.project import get_project_settings
from scrapy.crawler import CrawlerProcess
from scrapy import signals
if __name__ == '__main__':
settings = get_project_settings()
settings.set('LOG_FILE', 'all_spiders.log')
settings.set('LOG_LEVEL', 'ERROR')
settings.set('TELNETCONSOLE_ENABLED', False)
settings.set('FEED_URI', 'output.ndgeojson')
settings.set('FEED_FORMAT', 'ndgeojson')
settings.get('ITEM_PIPELINES')['locations.pipelines.ApplySpiderNamePipeline'] = 100
def spider_opened(spider):
print("Spider %s opened" % spider.name)
def spider_closed(spider):
print("Spider %s closed (%s) after %0.1f sec, %d items" % (
spider.name,
spider.crawler.stats.get_value('finish_reason'),
(spider.crawler.stats.get_value('finish_time') -
spider.crawler.stats.get_value('start_time')).total_seconds(),
spider.crawler.stats.get_value('item_scraped_count'),
))
process = CrawlerProcess(settings)
for spider_name in process.spider_loader.list():
crawler = process.create_crawler(spider_name)
crawler.signals.connect(spider_closed, signals.spider_closed)
crawler.signals.connect(spider_opened, signals.spider_opened)
process.crawl(crawler)
process.start()
|
Print some stats in the crawler
|
Print some stats in the crawler
|
Python
|
mit
|
iandees/all-the-places,iandees/all-the-places,iandees/all-the-places
|
from scrapy.utils.project import get_project_settings
from scrapy.crawler import CrawlerProcess
if __name__ == '__main__':
settings = get_project_settings()
settings.set('LOG_FILE', 'all_spiders.log')
settings.set('LOG_LEVEL', 'ERROR')
settings.set('TELNETCONSOLE_ENABLED', False)
settings.set('FEED_URI', 'output.ndgeojson')
settings.set('FEED_FORMAT', 'ndgeojson')
settings.get('ITEM_PIPELINES')['locations.pipelines.ApplySpiderNamePipeline'] = 100
process = CrawlerProcess(settings)
for spider_name in process.spider_loader.list():
process.crawl(spider_name)
process.start()
Print some stats in the crawler
|
from scrapy.utils.project import get_project_settings
from scrapy.crawler import CrawlerProcess
from scrapy import signals
if __name__ == '__main__':
settings = get_project_settings()
settings.set('LOG_FILE', 'all_spiders.log')
settings.set('LOG_LEVEL', 'ERROR')
settings.set('TELNETCONSOLE_ENABLED', False)
settings.set('FEED_URI', 'output.ndgeojson')
settings.set('FEED_FORMAT', 'ndgeojson')
settings.get('ITEM_PIPELINES')['locations.pipelines.ApplySpiderNamePipeline'] = 100
def spider_opened(spider):
print("Spider %s opened" % spider.name)
def spider_closed(spider):
print("Spider %s closed (%s) after %0.1f sec, %d items" % (
spider.name,
spider.crawler.stats.get_value('finish_reason'),
(spider.crawler.stats.get_value('finish_time') -
spider.crawler.stats.get_value('start_time')).total_seconds(),
spider.crawler.stats.get_value('item_scraped_count'),
))
process = CrawlerProcess(settings)
for spider_name in process.spider_loader.list():
crawler = process.create_crawler(spider_name)
crawler.signals.connect(spider_closed, signals.spider_closed)
crawler.signals.connect(spider_opened, signals.spider_opened)
process.crawl(crawler)
process.start()
|
<commit_before>from scrapy.utils.project import get_project_settings
from scrapy.crawler import CrawlerProcess
if __name__ == '__main__':
settings = get_project_settings()
settings.set('LOG_FILE', 'all_spiders.log')
settings.set('LOG_LEVEL', 'ERROR')
settings.set('TELNETCONSOLE_ENABLED', False)
settings.set('FEED_URI', 'output.ndgeojson')
settings.set('FEED_FORMAT', 'ndgeojson')
settings.get('ITEM_PIPELINES')['locations.pipelines.ApplySpiderNamePipeline'] = 100
process = CrawlerProcess(settings)
for spider_name in process.spider_loader.list():
process.crawl(spider_name)
process.start()
<commit_msg>Print some stats in the crawler<commit_after>
|
from scrapy.utils.project import get_project_settings
from scrapy.crawler import CrawlerProcess
from scrapy import signals
if __name__ == '__main__':
settings = get_project_settings()
settings.set('LOG_FILE', 'all_spiders.log')
settings.set('LOG_LEVEL', 'ERROR')
settings.set('TELNETCONSOLE_ENABLED', False)
settings.set('FEED_URI', 'output.ndgeojson')
settings.set('FEED_FORMAT', 'ndgeojson')
settings.get('ITEM_PIPELINES')['locations.pipelines.ApplySpiderNamePipeline'] = 100
def spider_opened(spider):
print("Spider %s opened" % spider.name)
def spider_closed(spider):
print("Spider %s closed (%s) after %0.1f sec, %d items" % (
spider.name,
spider.crawler.stats.get_value('finish_reason'),
(spider.crawler.stats.get_value('finish_time') -
spider.crawler.stats.get_value('start_time')).total_seconds(),
spider.crawler.stats.get_value('item_scraped_count'),
))
process = CrawlerProcess(settings)
for spider_name in process.spider_loader.list():
crawler = process.create_crawler(spider_name)
crawler.signals.connect(spider_closed, signals.spider_closed)
crawler.signals.connect(spider_opened, signals.spider_opened)
process.crawl(crawler)
process.start()
|
from scrapy.utils.project import get_project_settings
from scrapy.crawler import CrawlerProcess
if __name__ == '__main__':
settings = get_project_settings()
settings.set('LOG_FILE', 'all_spiders.log')
settings.set('LOG_LEVEL', 'ERROR')
settings.set('TELNETCONSOLE_ENABLED', False)
settings.set('FEED_URI', 'output.ndgeojson')
settings.set('FEED_FORMAT', 'ndgeojson')
settings.get('ITEM_PIPELINES')['locations.pipelines.ApplySpiderNamePipeline'] = 100
process = CrawlerProcess(settings)
for spider_name in process.spider_loader.list():
process.crawl(spider_name)
process.start()
Print some stats in the crawler
from scrapy.utils.project import get_project_settings
from scrapy.crawler import CrawlerProcess
from scrapy import signals
if __name__ == '__main__':
settings = get_project_settings()
settings.set('LOG_FILE', 'all_spiders.log')
settings.set('LOG_LEVEL', 'ERROR')
settings.set('TELNETCONSOLE_ENABLED', False)
settings.set('FEED_URI', 'output.ndgeojson')
settings.set('FEED_FORMAT', 'ndgeojson')
settings.get('ITEM_PIPELINES')['locations.pipelines.ApplySpiderNamePipeline'] = 100
def spider_opened(spider):
print("Spider %s opened" % spider.name)
def spider_closed(spider):
print("Spider %s closed (%s) after %0.1f sec, %d items" % (
spider.name,
spider.crawler.stats.get_value('finish_reason'),
(spider.crawler.stats.get_value('finish_time') -
spider.crawler.stats.get_value('start_time')).total_seconds(),
spider.crawler.stats.get_value('item_scraped_count'),
))
process = CrawlerProcess(settings)
for spider_name in process.spider_loader.list():
crawler = process.create_crawler(spider_name)
crawler.signals.connect(spider_closed, signals.spider_closed)
crawler.signals.connect(spider_opened, signals.spider_opened)
process.crawl(crawler)
process.start()
|
<commit_before>from scrapy.utils.project import get_project_settings
from scrapy.crawler import CrawlerProcess
if __name__ == '__main__':
settings = get_project_settings()
settings.set('LOG_FILE', 'all_spiders.log')
settings.set('LOG_LEVEL', 'ERROR')
settings.set('TELNETCONSOLE_ENABLED', False)
settings.set('FEED_URI', 'output.ndgeojson')
settings.set('FEED_FORMAT', 'ndgeojson')
settings.get('ITEM_PIPELINES')['locations.pipelines.ApplySpiderNamePipeline'] = 100
process = CrawlerProcess(settings)
for spider_name in process.spider_loader.list():
process.crawl(spider_name)
process.start()
<commit_msg>Print some stats in the crawler<commit_after>from scrapy.utils.project import get_project_settings
from scrapy.crawler import CrawlerProcess
from scrapy import signals
if __name__ == '__main__':
settings = get_project_settings()
settings.set('LOG_FILE', 'all_spiders.log')
settings.set('LOG_LEVEL', 'ERROR')
settings.set('TELNETCONSOLE_ENABLED', False)
settings.set('FEED_URI', 'output.ndgeojson')
settings.set('FEED_FORMAT', 'ndgeojson')
settings.get('ITEM_PIPELINES')['locations.pipelines.ApplySpiderNamePipeline'] = 100
def spider_opened(spider):
print("Spider %s opened" % spider.name)
def spider_closed(spider):
print("Spider %s closed (%s) after %0.1f sec, %d items" % (
spider.name,
spider.crawler.stats.get_value('finish_reason'),
(spider.crawler.stats.get_value('finish_time') -
spider.crawler.stats.get_value('start_time')).total_seconds(),
spider.crawler.stats.get_value('item_scraped_count'),
))
process = CrawlerProcess(settings)
for spider_name in process.spider_loader.list():
crawler = process.create_crawler(spider_name)
crawler.signals.connect(spider_closed, signals.spider_closed)
crawler.signals.connect(spider_opened, signals.spider_opened)
process.crawl(crawler)
process.start()
|
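For context on the signal wiring in the record above, the sketch below connects an equivalent spider_closed handler to a single inline spider, so the stats callback can be tried without the project's settings module. The spider class, its name and the crawled URL are placeholders for illustration, not part of the original project.

# Hedged sketch: same create_crawler + signals.connect pattern, reduced to a
# self-contained script with an inline spider.
import scrapy
from scrapy import signals
from scrapy.crawler import CrawlerProcess


class QuotesSpider(scrapy.Spider):
    name = "quotes"  # hypothetical spider used only for this example
    start_urls = ["http://quotes.toscrape.com/"]

    def parse(self, response):
        for text in response.css("span.text::text").getall():
            yield {"quote": text}


def spider_closed(spider):
    stats = spider.crawler.stats
    print("Spider %s closed (%s), %s items" % (
        spider.name,
        stats.get_value("finish_reason"),
        stats.get_value("item_scraped_count"),
    ))


process = CrawlerProcess(settings={"LOG_LEVEL": "ERROR"})
crawler = process.create_crawler(QuotesSpider)
crawler.signals.connect(spider_closed, signals.spider_closed)
process.crawl(crawler)
process.start()

As in the original script, the finish reason and the scraped-item count are read from the crawler's stats collector inside the spider_closed handler.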
df9caa5a5735e8e74639f640272705fec886206e
|
test/factories.py
|
test/factories.py
|
# coding: utf-8
import string
import factory
from django.contrib.auth.models import User
from message.models import Message
from test.utils import generate_string, lorem_ipsum
class UserFactory(factory.Factory):
FACTORY_FOR = User
first_name = "Boy"
last_name = "Factory"
email = factory.LazyAttribute(
lambda a:
"{0}_{1}@example.com".format(a.first_name, a.last_name).lower())
username = factory.Sequence(lambda n: "username_%s" % n)
class MessageFactory(factory.Factory):
'''
Factory for messages
'''
FACTORY_FOR = Message
message = lorem_ipsum()
contact_first_name = factory.Sequence(lambda n: "user_%s" % n)
contact_last_name = factory.Sequence(lambda n: "name_%s" % n)
contact_mail = factory.LazyAttribute(
lambda a:
"{0}_{1}@example.com".format(
a.contact_first_name,
a.contact_last_name
).lower())
contact_phone = generate_string(str_len=10, src=string.digits)
user = factory.SubFactory(UserFactory)
|
# coding: utf-8
import string
import factory
import random
from django.contrib.auth.models import User
from message.models import Message, MessageType
from test.utils import generate_string, lorem_ipsum
class UserFactory(factory.Factory):
FACTORY_FOR = User
first_name = "Boy"
last_name = "Factory"
email = factory.LazyAttribute(
lambda a:
"{0}_{1}@example.com".format(a.first_name, a.last_name).lower())
username = factory.Sequence(lambda n: "username_%s" % n)
class MessageFactory(factory.Factory):
'''
Factory for messages
'''
FACTORY_FOR = Message
message = lorem_ipsum()
contact_first_name = factory.Sequence(lambda n: "user_%s" % n)
contact_last_name = factory.Sequence(lambda n: "name_%s" % n)
contact_mail = factory.LazyAttribute(
lambda a:
"{0}_{1}@example.com".format(
a.contact_first_name,
a.contact_last_name
).lower())
contact_phone = generate_string(str_len=10, src=string.digits)
user = factory.SubFactory(UserFactory)
messageType = random.randint(
MessageType.TYPE_REQUEST, MessageType.TYPE_INFO)
|
Add messageType to message factory
|
Add messageType to message factory
|
Python
|
mit
|
sarutobi/Rynda,sarutobi/flowofkindness,sarutobi/flowofkindness,sarutobi/Rynda,sarutobi/ritmserdtsa,sarutobi/ritmserdtsa,sarutobi/ritmserdtsa,sarutobi/Rynda,sarutobi/flowofkindness,sarutobi/ritmserdtsa,sarutobi/Rynda,sarutobi/flowofkindness
|
# coding: utf-8
import string
import factory
from django.contrib.auth.models import User
from message.models import Message
from test.utils import generate_string, lorem_ipsum
class UserFactory(factory.Factory):
FACTORY_FOR = User
first_name = "Boy"
last_name = "Factory"
email = factory.LazyAttribute(
lambda a:
"{0}_{1}@example.com".format(a.first_name, a.last_name).lower())
username = factory.Sequence(lambda n: "username_%s" % n)
class MessageFactory(factory.Factory):
'''
Factory for messages
'''
FACTORY_FOR = Message
message = lorem_ipsum()
contact_first_name = factory.Sequence(lambda n: "user_%s" % n)
contact_last_name = factory.Sequence(lambda n: "name_%s" % n)
contact_mail = factory.LazyAttribute(
lambda a:
"{0}_{1}@example.com".format(
a.contact_first_name,
a.contact_last_name
).lower())
contact_phone = generate_string(str_len=10, src=string.digits)
user = factory.SubFactory(UserFactory)
Add messageType to message factory
|
# coding: utf-8
import string
import factory
import random
from django.contrib.auth.models import User
from message.models import Message, MessageType
from test.utils import generate_string, lorem_ipsum
class UserFactory(factory.Factory):
FACTORY_FOR = User
first_name = "Boy"
last_name = "Factory"
email = factory.LazyAttribute(
lambda a:
"{0}_{1}@example.com".format(a.first_name, a.last_name).lower())
username = factory.Sequence(lambda n: "username_%s" % n)
class MessageFactory(factory.Factory):
'''
Factory for messages
'''
FACTORY_FOR = Message
message = lorem_ipsum()
contact_first_name = factory.Sequence(lambda n: "user_%s" % n)
contact_last_name = factory.Sequence(lambda n: "name_%s" % n)
contact_mail = factory.LazyAttribute(
lambda a:
"{0}_{1}@example.com".format(
a.contact_first_name,
a.contact_last_name
).lower())
contact_phone = generate_string(str_len=10, src=string.digits)
user = factory.SubFactory(UserFactory)
messageType = random.randint(
MessageType.TYPE_REQUEST, MessageType.TYPE_INFO)
|
<commit_before># coding: utf-8
import string
import factory
from django.contrib.auth.models import User
from message.models import Message
from test.utils import generate_string, lorem_ipsum
class UserFactory(factory.Factory):
FACTORY_FOR = User
first_name = "Boy"
last_name = "Factory"
email = factory.LazyAttribute(
lambda a:
"{0}_{1}@example.com".format(a.first_name, a.last_name).lower())
username = factory.Sequence(lambda n: "username_%s" % n)
class MessageFactory(factory.Factory):
'''
Factory for messages
'''
FACTORY_FOR = Message
message = lorem_ipsum()
contact_first_name = factory.Sequence(lambda n: "user_%s" % n)
contact_last_name = factory.Sequence(lambda n: "name_%s" % n)
contact_mail = factory.LazyAttribute(
lambda a:
"{0}_{1}@example.com".format(
a.contact_first_name,
a.contact_last_name
).lower())
contact_phone = generate_string(str_len=10, src=string.digits)
user = factory.SubFactory(UserFactory)
<commit_msg>Add messageType to message factory<commit_after>
|
# coding: utf-8
import string
import factory
import random
from django.contrib.auth.models import User
from message.models import Message, MessageType
from test.utils import generate_string, lorem_ipsum
class UserFactory(factory.Factory):
FACTORY_FOR = User
first_name = "Boy"
last_name = "Factory"
email = factory.LazyAttribute(
lambda a:
"{0}_{1}@example.com".format(a.first_name, a.last_name).lower())
username = factory.Sequence(lambda n: "username_%s" % n)
class MessageFactory(factory.Factory):
'''
Factory for messages
'''
FACTORY_FOR = Message
message = lorem_ipsum()
contact_first_name = factory.Sequence(lambda n: "user_%s" % n)
contact_last_name = factory.Sequence(lambda n: "name_%s" % n)
contact_mail = factory.LazyAttribute(
lambda a:
"{0}_{1}@example.com".format(
a.contact_first_name,
a.contact_last_name
).lower())
contact_phone = generate_string(str_len=10, src=string.digits)
user = factory.SubFactory(UserFactory)
messageType = random.randint(
MessageType.TYPE_REQUEST, MessageType.TYPE_INFO)
|
# coding: utf-8
import string
import factory
from django.contrib.auth.models import User
from message.models import Message
from test.utils import generate_string, lorem_ipsum
class UserFactory(factory.Factory):
FACTORY_FOR = User
first_name = "Boy"
last_name = "Factory"
email = factory.LazyAttribute(
lambda a:
"{0}_{1}@example.com".format(a.first_name, a.last_name).lower())
username = factory.Sequence(lambda n: "username_%s" % n)
class MessageFactory(factory.Factory):
'''
Factory for messages
'''
FACTORY_FOR = Message
message = lorem_ipsum()
contact_first_name = factory.Sequence(lambda n: "user_%s" % n)
contact_last_name = factory.Sequence(lambda n: "name_%s" % n)
contact_mail = factory.LazyAttribute(
lambda a:
"{0}_{1}@example.com".format(
a.contact_first_name,
a.contact_last_name
).lower())
contact_phone = generate_string(str_len=10, src=string.digits)
user = factory.SubFactory(UserFactory)
Add messageType to message factory
# coding: utf-8
import string
import factory
import random
from django.contrib.auth.models import User
from message.models import Message, MessageType
from test.utils import generate_string, lorem_ipsum
class UserFactory(factory.Factory):
FACTORY_FOR = User
first_name = "Boy"
last_name = "Factory"
email = factory.LazyAttribute(
lambda a:
"{0}_{1}@example.com".format(a.first_name, a.last_name).lower())
username = factory.Sequence(lambda n: "username_%s" % n)
class MessageFactory(factory.Factory):
'''
Factory for messages
'''
FACTORY_FOR = Message
message = lorem_ipsum()
contact_first_name = factory.Sequence(lambda n: "user_%s" % n)
contact_last_name = factory.Sequence(lambda n: "name_%s" % n)
contact_mail = factory.LazyAttribute(
lambda a:
"{0}_{1}@example.com".format(
a.contact_first_name,
a.contact_last_name
).lower())
contact_phone = generate_string(str_len=10, src=string.digits)
user = factory.SubFactory(UserFactory)
messageType = random.randint(
MessageType.TYPE_REQUEST, MessageType.TYPE_INFO)
|
<commit_before># coding: utf-8
import string
import factory
from django.contrib.auth.models import User
from message.models import Message
from test.utils import generate_string, lorem_ipsum
class UserFactory(factory.Factory):
FACTORY_FOR = User
first_name = "Boy"
last_name = "Factory"
email = factory.LazyAttribute(
lambda a:
"{0}_{1}@example.com".format(a.first_name, a.last_name).lower())
username = factory.Sequence(lambda n: "username_%s" % n)
class MessageFactory(factory.Factory):
'''
Factory for messages
'''
FACTORY_FOR = Message
message = lorem_ipsum()
contact_first_name = factory.Sequence(lambda n: "user_%s" % n)
contact_last_name = factory.Sequence(lambda n: "name_%s" % n)
contact_mail = factory.LazyAttribute(
lambda a:
"{0}_{1}@example.com".format(
a.contact_first_name,
a.contact_last_name
).lower())
contact_phone = generate_string(str_len=10, src=string.digits)
user = factory.SubFactory(UserFactory)
<commit_msg>Add messageType to message factory<commit_after># coding: utf-8
import string
import factory
import random
from django.contrib.auth.models import User
from message.models import Message, MessageType
from test.utils import generate_string, lorem_ipsum
class UserFactory(factory.Factory):
FACTORY_FOR = User
first_name = "Boy"
last_name = "Factory"
email = factory.LazyAttribute(
lambda a:
"{0}_{1}@example.com".format(a.first_name, a.last_name).lower())
username = factory.Sequence(lambda n: "username_%s" % n)
class MessageFactory(factory.Factory):
'''
Factory for messages
'''
FACTORY_FOR = Message
message = lorem_ipsum()
contact_first_name = factory.Sequence(lambda n: "user_%s" % n)
contact_last_name = factory.Sequence(lambda n: "name_%s" % n)
contact_mail = factory.LazyAttribute(
lambda a:
"{0}_{1}@example.com".format(
a.contact_first_name,
a.contact_last_name
).lower())
contact_phone = generate_string(str_len=10, src=string.digits)
user = factory.SubFactory(UserFactory)
messageType = random.randint(
MessageType.TYPE_REQUEST, MessageType.TYPE_INFO)
|
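One behavioural detail of the record above is worth spelling out: random.randint(...) assigned in the class body runs exactly once, when the module is imported, so every Message built by that factory shares a single messageType. The snippet below demonstrates the difference in isolation; for a fresh value per object, factory_boy's lazy declarations (for example factory.LazyFunction or factory.fuzzy) are the usual tool.

# Self-contained illustration: class-level random.randint is evaluated once,
# while per-instance evaluation produces varying values.
import random


class ClassLevelRandom:
    value = random.randint(1, 100)  # evaluated once, at class-definition time


class PerInstanceRandom:
    def __init__(self):
        self.value = random.randint(1, 100)  # evaluated on every construction


print({ClassLevelRandom.value for _ in range(5)})           # always one element
print(len({PerInstanceRandom().value for _ in range(5)}))   # usually more than 1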
567821c91b83e9251339a5e1caa81ea1839d8db1
|
day-04-1.py
|
day-04-1.py
|
import hashlib
puzzle_input = 'iwrupvqb'
|
import hashlib
puzzle_input = b'iwrupvqb'
number = 100000
while True:
key = puzzle_input + str(number).encode()
if hashlib.md5(key).hexdigest()[:5] == '00000':
break
number += 1
print(number)
# Runs way faster than I expected, lol
# My answer: 346386
|
Complete day 4 part 1
|
Complete day 4 part 1
|
Python
|
mit
|
foxscotch/advent-of-code,foxscotch/advent-of-code
|
import hashlib
puzzle_input = 'iwrupvqb'
Complete day 4 part 1
|
import hashlib
puzzle_input = b'iwrupvqb'
number = 100000
while True:
key = puzzle_input + str(number).encode()
if hashlib.md5(key).hexdigest()[:5] == '00000':
break
number += 1
print(number)
# Runs way faster than I expected, lol
# My answer: 346386
|
<commit_before>import hashlib
puzzle_input = 'iwrupvqb'
<commit_msg>Complete day 4 part 1<commit_after>
|
import hashlib
puzzle_input = b'iwrupvqb'
number = 100000
while True:
key = puzzle_input + str(number).encode()
if hashlib.md5(key).hexdigest()[:5] == '00000':
break
number += 1
print(number)
# Runs way faster than I expected, lol
# My answer: 346386
|
import hashlib
puzzle_input = 'iwrupvqb'
Complete day 4 part 1
import hashlib
puzzle_input = b'iwrupvqb'
number = 100000
while True:
key = puzzle_input + str(number).encode()
if hashlib.md5(key).hexdigest()[:5] == '00000':
break
number += 1
print(number)
# Runs way faster than I expected, lol
# My answer: 346386
|
<commit_before>import hashlib
puzzle_input = 'iwrupvqb'
<commit_msg>Complete day 4 part 1<commit_after>import hashlib
puzzle_input = b'iwrupvqb'
number = 100000
while True:
key = puzzle_input + str(number).encode()
if hashlib.md5(key).hexdigest()[:5] == '00000':
break
number += 1
print(number)
# Runs way faster than I expected, lol
# My answer: 346386
|
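The brute-force loop in the record above generalises directly; the helper below parameterises the number of leading zeroes (part two of that puzzle asks for six) and is checked against the worked example given in the puzzle text.

# Generalised version of the search: smallest suffix whose MD5 digest starts
# with the requested run of zero characters.
import hashlib
from itertools import count


def mine(secret: bytes, zeros: int) -> int:
    prefix = "0" * zeros
    for n in count(1):
        digest = hashlib.md5(secret + str(n).encode()).hexdigest()
        if digest.startswith(prefix):
            return n


print(mine(b"abcdef", 5))  # 609043, the example from the 2015 day 4 statement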
677c4eb7672d2d5510ae7d8346563200e1c480d6
|
skeleton/__init__.py
|
skeleton/__init__.py
|
"""
Basic Template system for project skeleton.
skeleton is similar to the template part of PasteScript but
without any dependencies; it should also be compatible with Python 3.
However in this early phase of development, it only target python 2.5+,
and tests require Mock.
"""
from skeleton.core import Skeleton, Var
|
"""
Basic Template system for project skeleton.
skeleton is similar to the template part of PasteScript but
without any dependencies.
"""
from skeleton.core import Skeleton, Var
from skeleton.utils import insert_into_file
|
Add insert_into_file to skeleton module
|
Add insert_into_file to skeleton module
|
Python
|
bsd-2-clause
|
dinoboff/skeleton
|
"""
Basic Template system for project skeleton.
skeleton is similar to the template part of PasteScript but
without any dependencies; it should also be compatible with Python 3.
However in this early phase of development, it only target python 2.5+,
and tests require Mock.
"""
from skeleton.core import Skeleton, Var
Add insert_into_file to skeleton module
|
"""
Basic Template system for project skeleton.
skeleton is similar to the template part of PasteScript but
without any dependencies.
"""
from skeleton.core import Skeleton, Var
from skeleton.utils import insert_into_file
|
<commit_before>"""
Basic Template system for project skeleton.
skeleton is similar to the template part of PasteScript but
without any dependencies; it should also be compatible with Python 3.
However in this early phase of development, it only target python 2.5+,
and tests require Mock.
"""
from skeleton.core import Skeleton, Var
<commit_msg>Add insert_into_file to skeleton module<commit_after>
|
"""
Basic Template system for project skeleton.
skeleton is similar to the template part of PasteScript but
without any dependencies.
"""
from skeleton.core import Skeleton, Var
from skeleton.utils import insert_into_file
|
"""
Basic Template system for project skeleton.
skeleton is similar to the template part of PasteScript but
without any dependencies; it should also be compatible with Python 3.
However in this early phase of development, it only target python 2.5+,
and tests require Mock.
"""
from skeleton.core import Skeleton, Var
Add insert_into_file to skeleton module
"""
Basic Template system for project skeleton.
skeleton is similar to the template part of PasteScript but
without any dependencies.
"""
from skeleton.core import Skeleton, Var
from skeleton.utils import insert_into_file
|
<commit_before>"""
Basic Template system for project skeleton.
skeleton is similar to the template part of PasteScript but
without any dependencies; it should also be compatible with Python 3.
However in this early phase of development, it only target python 2.5+,
and tests require Mock.
"""
from skeleton.core import Skeleton, Var
<commit_msg>Add insert_into_file to skeleton module<commit_after>"""
Basic Template system for project skeleton.
skeleton is similar to the template part of PasteScript but
without any dependencies.
"""
from skeleton.core import Skeleton, Var
from skeleton.utils import insert_into_file
|
eb79cce84fbb9d801d6f5087b9216e66d56bfa51
|
scripts/generate_global_kwargs_doc.py
|
scripts/generate_global_kwargs_doc.py
|
#!/usr/bin/env python
from os import path
from pyinfra.api.operation_kwargs import OPERATION_KWARGS
def build_global_kwargs_doc():
this_dir = path.dirname(path.realpath(__file__))
docs_dir = path.abspath(path.join(this_dir, '..', 'docs'))
lines = []
for category, kwarg_configs in OPERATION_KWARGS.items():
if category is None:
continue
lines.append('{0}:'.format(category))
for key, config in kwarg_configs.items():
description = config
if isinstance(config, dict):
description = config.get('description')
lines.append(' + ``{0}``: {1}'.format(key, description))
module_filename = path.join(docs_dir, '_deploy_globals.rst')
print('--> Writing {0}'.format(module_filename))
out = '\n'.join(lines)
with open(module_filename, 'w') as outfile:
outfile.write(out)
if __name__ == '__main__':
print('### Building global kwargs doc')
build_global_kwargs_doc()
|
#!/usr/bin/env python
from os import path
from pyinfra.api import Config
from pyinfra.api.operation_kwargs import OPERATION_KWARGS
def build_global_kwargs_doc():
pyinfra_config = Config()
this_dir = path.dirname(path.realpath(__file__))
docs_dir = path.abspath(path.join(this_dir, '..', 'docs'))
lines = []
for category, kwarg_configs in OPERATION_KWARGS.items():
if category is None:
continue
lines.append('{0}:'.format(category))
for key, config in kwarg_configs.items():
description = config
if isinstance(config, dict):
description = config.get('description')
default = config.get('default')
if callable(default):
default = default(pyinfra_config)
if default:
key = '{0}={1}'.format(key, default)
lines.append(' + ``{0}``: {1}'.format(key, description))
module_filename = path.join(docs_dir, '_deploy_globals.rst')
print('--> Writing {0}'.format(module_filename))
out = '\n'.join(lines)
with open(module_filename, 'w') as outfile:
outfile.write(out)
if __name__ == '__main__':
print('### Building global kwargs doc')
build_global_kwargs_doc()
|
Include defaults in generated global args list.
|
Include defaults in generated global args list.
|
Python
|
mit
|
Fizzadar/pyinfra,Fizzadar/pyinfra
|
#!/usr/bin/env python
from os import path
from pyinfra.api.operation_kwargs import OPERATION_KWARGS
def build_global_kwargs_doc():
this_dir = path.dirname(path.realpath(__file__))
docs_dir = path.abspath(path.join(this_dir, '..', 'docs'))
lines = []
for category, kwarg_configs in OPERATION_KWARGS.items():
if category is None:
continue
lines.append('{0}:'.format(category))
for key, config in kwarg_configs.items():
description = config
if isinstance(config, dict):
description = config.get('description')
lines.append(' + ``{0}``: {1}'.format(key, description))
module_filename = path.join(docs_dir, '_deploy_globals.rst')
print('--> Writing {0}'.format(module_filename))
out = '\n'.join(lines)
with open(module_filename, 'w') as outfile:
outfile.write(out)
if __name__ == '__main__':
print('### Building global kwargs doc')
build_global_kwargs_doc()
Include defaults in generated global args list.
|
#!/usr/bin/env python
from os import path
from pyinfra.api import Config
from pyinfra.api.operation_kwargs import OPERATION_KWARGS
def build_global_kwargs_doc():
pyinfra_config = Config()
this_dir = path.dirname(path.realpath(__file__))
docs_dir = path.abspath(path.join(this_dir, '..', 'docs'))
lines = []
for category, kwarg_configs in OPERATION_KWARGS.items():
if category is None:
continue
lines.append('{0}:'.format(category))
for key, config in kwarg_configs.items():
description = config
if isinstance(config, dict):
description = config.get('description')
default = config.get('default')
if callable(default):
default = default(pyinfra_config)
if default:
key = '{0}={1}'.format(key, default)
lines.append(' + ``{0}``: {1}'.format(key, description))
module_filename = path.join(docs_dir, '_deploy_globals.rst')
print('--> Writing {0}'.format(module_filename))
out = '\n'.join(lines)
with open(module_filename, 'w') as outfile:
outfile.write(out)
if __name__ == '__main__':
print('### Building global kwargs doc')
build_global_kwargs_doc()
|
<commit_before>#!/usr/bin/env python
from os import path
from pyinfra.api.operation_kwargs import OPERATION_KWARGS
def build_global_kwargs_doc():
this_dir = path.dirname(path.realpath(__file__))
docs_dir = path.abspath(path.join(this_dir, '..', 'docs'))
lines = []
for category, kwarg_configs in OPERATION_KWARGS.items():
if category is None:
continue
lines.append('{0}:'.format(category))
for key, config in kwarg_configs.items():
description = config
if isinstance(config, dict):
description = config.get('description')
lines.append(' + ``{0}``: {1}'.format(key, description))
module_filename = path.join(docs_dir, '_deploy_globals.rst')
print('--> Writing {0}'.format(module_filename))
out = '\n'.join(lines)
with open(module_filename, 'w') as outfile:
outfile.write(out)
if __name__ == '__main__':
print('### Building global kwargs doc')
build_global_kwargs_doc()
<commit_msg>Include defaults in generated global args list.<commit_after>
|
#!/usr/bin/env python
from os import path
from pyinfra.api import Config
from pyinfra.api.operation_kwargs import OPERATION_KWARGS
def build_global_kwargs_doc():
pyinfra_config = Config()
this_dir = path.dirname(path.realpath(__file__))
docs_dir = path.abspath(path.join(this_dir, '..', 'docs'))
lines = []
for category, kwarg_configs in OPERATION_KWARGS.items():
if category is None:
continue
lines.append('{0}:'.format(category))
for key, config in kwarg_configs.items():
description = config
if isinstance(config, dict):
description = config.get('description')
default = config.get('default')
if callable(default):
default = default(pyinfra_config)
if default:
key = '{0}={1}'.format(key, default)
lines.append(' + ``{0}``: {1}'.format(key, description))
module_filename = path.join(docs_dir, '_deploy_globals.rst')
print('--> Writing {0}'.format(module_filename))
out = '\n'.join(lines)
with open(module_filename, 'w') as outfile:
outfile.write(out)
if __name__ == '__main__':
print('### Building global kwargs doc')
build_global_kwargs_doc()
|
#!/usr/bin/env python
from os import path
from pyinfra.api.operation_kwargs import OPERATION_KWARGS
def build_global_kwargs_doc():
this_dir = path.dirname(path.realpath(__file__))
docs_dir = path.abspath(path.join(this_dir, '..', 'docs'))
lines = []
for category, kwarg_configs in OPERATION_KWARGS.items():
if category is None:
continue
lines.append('{0}:'.format(category))
for key, config in kwarg_configs.items():
description = config
if isinstance(config, dict):
description = config.get('description')
lines.append(' + ``{0}``: {1}'.format(key, description))
module_filename = path.join(docs_dir, '_deploy_globals.rst')
print('--> Writing {0}'.format(module_filename))
out = '\n'.join(lines)
with open(module_filename, 'w') as outfile:
outfile.write(out)
if __name__ == '__main__':
print('### Building global kwargs doc')
build_global_kwargs_doc()
Include defaults in generated global args list.
#!/usr/bin/env python
from os import path
from pyinfra.api import Config
from pyinfra.api.operation_kwargs import OPERATION_KWARGS
def build_global_kwargs_doc():
pyinfra_config = Config()
this_dir = path.dirname(path.realpath(__file__))
docs_dir = path.abspath(path.join(this_dir, '..', 'docs'))
lines = []
for category, kwarg_configs in OPERATION_KWARGS.items():
if category is None:
continue
lines.append('{0}:'.format(category))
for key, config in kwarg_configs.items():
description = config
if isinstance(config, dict):
description = config.get('description')
default = config.get('default')
if callable(default):
default = default(pyinfra_config)
if default:
key = '{0}={1}'.format(key, default)
lines.append(' + ``{0}``: {1}'.format(key, description))
module_filename = path.join(docs_dir, '_deploy_globals.rst')
print('--> Writing {0}'.format(module_filename))
out = '\n'.join(lines)
with open(module_filename, 'w') as outfile:
outfile.write(out)
if __name__ == '__main__':
print('### Building global kwargs doc')
build_global_kwargs_doc()
|
<commit_before>#!/usr/bin/env python
from os import path
from pyinfra.api.operation_kwargs import OPERATION_KWARGS
def build_global_kwargs_doc():
this_dir = path.dirname(path.realpath(__file__))
docs_dir = path.abspath(path.join(this_dir, '..', 'docs'))
lines = []
for category, kwarg_configs in OPERATION_KWARGS.items():
if category is None:
continue
lines.append('{0}:'.format(category))
for key, config in kwarg_configs.items():
description = config
if isinstance(config, dict):
description = config.get('description')
lines.append(' + ``{0}``: {1}'.format(key, description))
module_filename = path.join(docs_dir, '_deploy_globals.rst')
print('--> Writing {0}'.format(module_filename))
out = '\n'.join(lines)
with open(module_filename, 'w') as outfile:
outfile.write(out)
if __name__ == '__main__':
print('### Building global kwargs doc')
build_global_kwargs_doc()
<commit_msg>Include defaults in generated global args list.<commit_after>#!/usr/bin/env python
from os import path
from pyinfra.api import Config
from pyinfra.api.operation_kwargs import OPERATION_KWARGS
def build_global_kwargs_doc():
pyinfra_config = Config()
this_dir = path.dirname(path.realpath(__file__))
docs_dir = path.abspath(path.join(this_dir, '..', 'docs'))
lines = []
for category, kwarg_configs in OPERATION_KWARGS.items():
if category is None:
continue
lines.append('{0}:'.format(category))
for key, config in kwarg_configs.items():
description = config
if isinstance(config, dict):
description = config.get('description')
default = config.get('default')
if callable(default):
default = default(pyinfra_config)
if default:
key = '{0}={1}'.format(key, default)
lines.append(' + ``{0}``: {1}'.format(key, description))
module_filename = path.join(docs_dir, '_deploy_globals.rst')
print('--> Writing {0}'.format(module_filename))
out = '\n'.join(lines)
with open(module_filename, 'w') as outfile:
outfile.write(out)
if __name__ == '__main__':
print('### Building global kwargs doc')
build_global_kwargs_doc()
|
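To make the shape of that change concrete, here is a stand-alone sketch of the pattern it implements: entries in the declarative kwargs table may carry either a literal default or a callable taking a config object, and the doc generator resolves callables before printing. The Config attributes and kwarg entries below are illustrative stand-ins, not pyinfra's real definitions; note also that the if default: test skips falsy defaults such as False, which are printed without a value.

# Hedged sketch of resolving literal-or-callable defaults against a config.
# Names and values here are made up for illustration only.
class Config:
    SUDO_USER = "root"


OPERATION_KWARGS = {
    "sudo": {"description": "execute via sudo", "default": False},
    "sudo_user": {
        "description": "user to sudo to",
        "default": lambda config: config.SUDO_USER,
    },
}

config = Config()
for key, spec in OPERATION_KWARGS.items():
    default = spec.get("default")
    if callable(default):
        default = default(config)
    label = "{0}={1}".format(key, default) if default else key
    print(" + ``{0}``: {1}".format(label, spec["description"]))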
ff96f3fd6835f11f3725ab398b2a6b7ba4275e93
|
thinglang/compiler/references.py
|
thinglang/compiler/references.py
|
class Reference(object):
def __init__(self, type):
super().__init__()
self._type = type
@property
def type(self):
return self._type
class ElementReference(Reference):
def __init__(self, thing, element):
super(ElementReference, self).__init__(element.type)
self.thing, self.element = thing, element
@property
def thing_index(self):
return self.thing.index
@property
def element_index(self):
return self.element.index
@property
def convention(self):
return self.element.convention
@property
def static(self):
return self.element.static
class LocalReference(Reference):
def __init__(self, local):
super(LocalReference, self).__init__(local.type)
self.local = local
@property
def local_index(self):
return self.local.index
class StaticReference(Reference):
def __init__(self, value):
super(StaticReference, self).__init__(value.type)
self.value = value
|
class Reference(object):
def __init__(self, type):
super().__init__()
self._type = type
@property
def type(self):
return self._type
class ElementReference(Reference):
def __init__(self, thing, element, local=None):
super(ElementReference, self).__init__(element.type)
self.thing, self.element, self.local = thing, element, local
@property
def thing_index(self):
return self.thing.index
@property
def element_index(self):
return self.element.index
@property
def local_index(self):
return self.local.index
@property
def convention(self):
return self.element.convention
@property
def static(self):
return self.element.static
class LocalReference(Reference):
def __init__(self, local):
super(LocalReference, self).__init__(local.type)
self.local = local
@property
def local_index(self):
return self.local.index
class StaticReference(Reference):
def __init__(self, value):
super(StaticReference, self).__init__(value.type)
self.value = value
|
Add locally referenced subtype to element referenced opcodes
|
Add locally referenced subtype to element referenced opcodes
|
Python
|
mit
|
ytanay/thinglang,ytanay/thinglang,ytanay/thinglang,ytanay/thinglang
|
class Reference(object):
def __init__(self, type):
super().__init__()
self._type = type
@property
def type(self):
return self._type
class ElementReference(Reference):
def __init__(self, thing, element):
super(ElementReference, self).__init__(element.type)
self.thing, self.element = thing, element
@property
def thing_index(self):
return self.thing.index
@property
def element_index(self):
return self.element.index
@property
def convention(self):
return self.element.convention
@property
def static(self):
return self.element.static
class LocalReference(Reference):
def __init__(self, local):
super(LocalReference, self).__init__(local.type)
self.local = local
@property
def local_index(self):
return self.local.index
class StaticReference(Reference):
def __init__(self, value):
super(StaticReference, self).__init__(value.type)
self.value = value
Add locally referenced subtype to element referenced opcodes
|
class Reference(object):
def __init__(self, type):
super().__init__()
self._type = type
@property
def type(self):
return self._type
class ElementReference(Reference):
def __init__(self, thing, element, local=None):
super(ElementReference, self).__init__(element.type)
self.thing, self.element, self.local = thing, element, local
@property
def thing_index(self):
return self.thing.index
@property
def element_index(self):
return self.element.index
@property
def local_index(self):
return self.local.index
@property
def convention(self):
return self.element.convention
@property
def static(self):
return self.element.static
class LocalReference(Reference):
def __init__(self, local):
super(LocalReference, self).__init__(local.type)
self.local = local
@property
def local_index(self):
return self.local.index
class StaticReference(Reference):
def __init__(self, value):
super(StaticReference, self).__init__(value.type)
self.value = value
|
<commit_before>class Reference(object):
def __init__(self, type):
super().__init__()
self._type = type
@property
def type(self):
return self._type
class ElementReference(Reference):
def __init__(self, thing, element):
super(ElementReference, self).__init__(element.type)
self.thing, self.element = thing, element
@property
def thing_index(self):
return self.thing.index
@property
def element_index(self):
return self.element.index
@property
def convention(self):
return self.element.convention
@property
def static(self):
return self.element.static
class LocalReference(Reference):
def __init__(self, local):
super(LocalReference, self).__init__(local.type)
self.local = local
@property
def local_index(self):
return self.local.index
class StaticReference(Reference):
def __init__(self, value):
super(StaticReference, self).__init__(value.type)
self.value = value
<commit_msg>Add locally referenced subtype to element referenced opcodes<commit_after>
|
class Reference(object):
def __init__(self, type):
super().__init__()
self._type = type
@property
def type(self):
return self._type
class ElementReference(Reference):
def __init__(self, thing, element, local=None):
super(ElementReference, self).__init__(element.type)
self.thing, self.element, self.local = thing, element, local
@property
def thing_index(self):
return self.thing.index
@property
def element_index(self):
return self.element.index
@property
def local_index(self):
return self.local.index
@property
def convention(self):
return self.element.convention
@property
def static(self):
return self.element.static
class LocalReference(Reference):
def __init__(self, local):
super(LocalReference, self).__init__(local.type)
self.local = local
@property
def local_index(self):
return self.local.index
class StaticReference(Reference):
def __init__(self, value):
super(StaticReference, self).__init__(value.type)
self.value = value
|
class Reference(object):
def __init__(self, type):
super().__init__()
self._type = type
@property
def type(self):
return self._type
class ElementReference(Reference):
def __init__(self, thing, element):
super(ElementReference, self).__init__(element.type)
self.thing, self.element = thing, element
@property
def thing_index(self):
return self.thing.index
@property
def element_index(self):
return self.element.index
@property
def convention(self):
return self.element.convention
@property
def static(self):
return self.element.static
class LocalReference(Reference):
def __init__(self, local):
super(LocalReference, self).__init__(local.type)
self.local = local
@property
def local_index(self):
return self.local.index
class StaticReference(Reference):
def __init__(self, value):
super(StaticReference, self).__init__(value.type)
self.value = value
Add locally referenced subtype to element referenced opcodes
class Reference(object):
def __init__(self, type):
super().__init__()
self._type = type
@property
def type(self):
return self._type
class ElementReference(Reference):
def __init__(self, thing, element, local=None):
super(ElementReference, self).__init__(element.type)
self.thing, self.element, self.local = thing, element, local
@property
def thing_index(self):
return self.thing.index
@property
def element_index(self):
return self.element.index
@property
def local_index(self):
return self.local.index
@property
def convention(self):
return self.element.convention
@property
def static(self):
return self.element.static
class LocalReference(Reference):
def __init__(self, local):
super(LocalReference, self).__init__(local.type)
self.local = local
@property
def local_index(self):
return self.local.index
class StaticReference(Reference):
def __init__(self, value):
super(StaticReference, self).__init__(value.type)
self.value = value
|
<commit_before>class Reference(object):
def __init__(self, type):
super().__init__()
self._type = type
@property
def type(self):
return self._type
class ElementReference(Reference):
def __init__(self, thing, element):
super(ElementReference, self).__init__(element.type)
self.thing, self.element = thing, element
@property
def thing_index(self):
return self.thing.index
@property
def element_index(self):
return self.element.index
@property
def convention(self):
return self.element.convention
@property
def static(self):
return self.element.static
class LocalReference(Reference):
def __init__(self, local):
super(LocalReference, self).__init__(local.type)
self.local = local
@property
def local_index(self):
return self.local.index
class StaticReference(Reference):
def __init__(self, value):
super(StaticReference, self).__init__(value.type)
self.value = value
<commit_msg>Add locally referenced subtype to element referenced opcodes<commit_after>class Reference(object):
def __init__(self, type):
super().__init__()
self._type = type
@property
def type(self):
return self._type
class ElementReference(Reference):
def __init__(self, thing, element, local=None):
super(ElementReference, self).__init__(element.type)
self.thing, self.element, self.local = thing, element, local
@property
def thing_index(self):
return self.thing.index
@property
def element_index(self):
return self.element.index
@property
def local_index(self):
return self.local.index
@property
def convention(self):
return self.element.convention
@property
def static(self):
return self.element.static
class LocalReference(Reference):
def __init__(self, local):
super(LocalReference, self).__init__(local.type)
self.local = local
@property
def local_index(self):
return self.local.index
class StaticReference(Reference):
def __init__(self, value):
super(StaticReference, self).__init__(value.type)
self.value = value
|
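A small observation on the record above: local defaults to None, so reading local_index on an ElementReference created without a local raises AttributeError. The reduced sketch below shows the optional-local idea with an explicit guard; it is illustrative only and not necessarily how thinglang handles the case.

# Reduced illustration of an optional "local" slot with a defensive check.
class Local:
    def __init__(self, index):
        self.index = index


class ElementReference:
    def __init__(self, local=None):
        self.local = local

    @property
    def local_index(self):
        # The original code assumes callers only read this when a local exists;
        # raising a clearer error makes that assumption explicit.
        if self.local is None:
            raise ValueError("element reference carries no local slot")
        return self.local.index


print(ElementReference(Local(3)).local_index)  # 3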
b9b8d77898c81afa5d918cc93c9011ace6f23965
|
content_editor/renderer.py
|
content_editor/renderer.py
|
from __future__ import absolute_import, unicode_literals
from collections import OrderedDict
from django.db.models import Model
from django.utils.html import conditional_escape, mark_safe
class PluginRenderer(object):
def __init__(self):
self._renderers = OrderedDict(((
Model,
lambda plugin: mark_safe('<!-- %s: %s -->' % (
plugin._meta.label,
plugin,
)),
),))
def register(self, plugin, renderer):
self._renderers[plugin] = renderer
def render(self, contents):
return mark_safe(''.join(
conditional_escape(self.render_content(c)) for c in contents
))
def render_content(self, content):
if content.__class__ not in self._renderers:
for plugin, renderer in reversed( # pragma: no branch
list(self._renderers.items())):
if isinstance(content, plugin):
self.register(content.__class__, renderer)
break
return self._renderers[content.__class__](content)
|
from __future__ import absolute_import, unicode_literals
from collections import OrderedDict
from django.db.models import Model
from django.utils.html import conditional_escape, mark_safe
__all__ = ('PluginRenderer',)
class RenderedContents(object):
def __init__(self, contents):
self.contents = contents
def __unicode__(self):
return mark_safe(''.join(self.contents))
def __iter__(self):
return iter(self.contents)
class PluginRenderer(object):
def __init__(self):
self._renderers = OrderedDict(((
Model,
lambda plugin: mark_safe('<!-- %s: %s -->' % (
plugin._meta.label,
plugin,
)),
),))
def register(self, plugin, renderer):
self._renderers[plugin] = renderer
def render(self, contents):
return RenderedContents(
conditional_escape(self.render_content(c)) for c in contents
)
def render_content(self, content):
if content.__class__ not in self._renderers:
for plugin, renderer in reversed( # pragma: no branch
list(self._renderers.items())):
if isinstance(content, plugin):
self.register(content.__class__, renderer)
break
return self._renderers[content.__class__](content)
|
Allow iterating over rendered contents
|
Allow iterating over rendered contents
|
Python
|
bsd-3-clause
|
matthiask/feincms2-content,matthiask/feincms2-content,matthiask/django-content-editor,matthiask/feincms2-content,matthiask/django-content-editor,matthiask/django-content-editor,matthiask/django-content-editor
|
from __future__ import absolute_import, unicode_literals
from collections import OrderedDict
from django.db.models import Model
from django.utils.html import conditional_escape, mark_safe
class PluginRenderer(object):
def __init__(self):
self._renderers = OrderedDict(((
Model,
lambda plugin: mark_safe('<!-- %s: %s -->' % (
plugin._meta.label,
plugin,
)),
),))
def register(self, plugin, renderer):
self._renderers[plugin] = renderer
def render(self, contents):
return mark_safe(''.join(
conditional_escape(self.render_content(c)) for c in contents
))
def render_content(self, content):
if content.__class__ not in self._renderers:
for plugin, renderer in reversed( # pragma: no branch
list(self._renderers.items())):
if isinstance(content, plugin):
self.register(content.__class__, renderer)
break
return self._renderers[content.__class__](content)
Allow iterating over rendered contents
|
from __future__ import absolute_import, unicode_literals
from collections import OrderedDict
from django.db.models import Model
from django.utils.html import conditional_escape, mark_safe
__all__ = ('PluginRenderer',)
class RenderedContents(object):
def __init__(self, contents):
self.contents = contents
def __unicode__(self):
return mark_safe(''.join(self.contents))
def __iter__(self):
return iter(self.contents)
class PluginRenderer(object):
def __init__(self):
self._renderers = OrderedDict(((
Model,
lambda plugin: mark_safe('<!-- %s: %s -->' % (
plugin._meta.label,
plugin,
)),
),))
def register(self, plugin, renderer):
self._renderers[plugin] = renderer
def render(self, contents):
return RenderedContents(
conditional_escape(self.render_content(c)) for c in contents
)
def render_content(self, content):
if content.__class__ not in self._renderers:
for plugin, renderer in reversed( # pragma: no branch
list(self._renderers.items())):
if isinstance(content, plugin):
self.register(content.__class__, renderer)
break
return self._renderers[content.__class__](content)
|
<commit_before>from __future__ import absolute_import, unicode_literals
from collections import OrderedDict
from django.db.models import Model
from django.utils.html import conditional_escape, mark_safe
class PluginRenderer(object):
def __init__(self):
self._renderers = OrderedDict(((
Model,
lambda plugin: mark_safe('<!-- %s: %s -->' % (
plugin._meta.label,
plugin,
)),
),))
def register(self, plugin, renderer):
self._renderers[plugin] = renderer
def render(self, contents):
return mark_safe(''.join(
conditional_escape(self.render_content(c)) for c in contents
))
def render_content(self, content):
if content.__class__ not in self._renderers:
for plugin, renderer in reversed( # pragma: no branch
list(self._renderers.items())):
if isinstance(content, plugin):
self.register(content.__class__, renderer)
break
return self._renderers[content.__class__](content)
<commit_msg>Allow iterating over rendered contents<commit_after>
|
from __future__ import absolute_import, unicode_literals
from collections import OrderedDict
from django.db.models import Model
from django.utils.html import conditional_escape, mark_safe
__all__ = ('PluginRenderer',)
class RenderedContents(object):
def __init__(self, contents):
self.contents = contents
def __unicode__(self):
return mark_safe(''.join(self.contents))
def __iter__(self):
return iter(self.contents)
class PluginRenderer(object):
def __init__(self):
self._renderers = OrderedDict(((
Model,
lambda plugin: mark_safe('<!-- %s: %s -->' % (
plugin._meta.label,
plugin,
)),
),))
def register(self, plugin, renderer):
self._renderers[plugin] = renderer
def render(self, contents):
return RenderedContents(
conditional_escape(self.render_content(c)) for c in contents
)
def render_content(self, content):
if content.__class__ not in self._renderers:
for plugin, renderer in reversed( # pragma: no branch
list(self._renderers.items())):
if isinstance(content, plugin):
self.register(content.__class__, renderer)
break
return self._renderers[content.__class__](content)
|
from __future__ import absolute_import, unicode_literals
from collections import OrderedDict
from django.db.models import Model
from django.utils.html import conditional_escape, mark_safe
class PluginRenderer(object):
def __init__(self):
self._renderers = OrderedDict(((
Model,
lambda plugin: mark_safe('<!-- %s: %s -->' % (
plugin._meta.label,
plugin,
)),
),))
def register(self, plugin, renderer):
self._renderers[plugin] = renderer
def render(self, contents):
return mark_safe(''.join(
conditional_escape(self.render_content(c)) for c in contents
))
def render_content(self, content):
if content.__class__ not in self._renderers:
for plugin, renderer in reversed( # pragma: no branch
list(self._renderers.items())):
if isinstance(content, plugin):
self.register(content.__class__, renderer)
break
return self._renderers[content.__class__](content)
Allow iterating over rendered contents
from __future__ import absolute_import, unicode_literals
from collections import OrderedDict
from django.db.models import Model
from django.utils.html import conditional_escape, mark_safe
__all__ = ('PluginRenderer',)
class RenderedContents(object):
def __init__(self, contents):
self.contents = contents
def __unicode__(self):
return mark_safe(''.join(self.contents))
def __iter__(self):
return iter(self.contents)
class PluginRenderer(object):
def __init__(self):
self._renderers = OrderedDict(((
Model,
lambda plugin: mark_safe('<!-- %s: %s -->' % (
plugin._meta.label,
plugin,
)),
),))
def register(self, plugin, renderer):
self._renderers[plugin] = renderer
def render(self, contents):
return RenderedContents(
conditional_escape(self.render_content(c)) for c in contents
)
def render_content(self, content):
if content.__class__ not in self._renderers:
for plugin, renderer in reversed( # pragma: no branch
list(self._renderers.items())):
if isinstance(content, plugin):
self.register(content.__class__, renderer)
break
return self._renderers[content.__class__](content)
|
<commit_before>from __future__ import absolute_import, unicode_literals
from collections import OrderedDict
from django.db.models import Model
from django.utils.html import conditional_escape, mark_safe
class PluginRenderer(object):
def __init__(self):
self._renderers = OrderedDict(((
Model,
lambda plugin: mark_safe('<!-- %s: %s -->' % (
plugin._meta.label,
plugin,
)),
),))
def register(self, plugin, renderer):
self._renderers[plugin] = renderer
def render(self, contents):
return mark_safe(''.join(
conditional_escape(self.render_content(c)) for c in contents
))
def render_content(self, content):
if content.__class__ not in self._renderers:
for plugin, renderer in reversed( # pragma: no branch
list(self._renderers.items())):
if isinstance(content, plugin):
self.register(content.__class__, renderer)
break
return self._renderers[content.__class__](content)
<commit_msg>Allow iterating over rendered contents<commit_after>from __future__ import absolute_import, unicode_literals
from collections import OrderedDict
from django.db.models import Model
from django.utils.html import conditional_escape, mark_safe
__all__ = ('PluginRenderer',)
class RenderedContents(object):
def __init__(self, contents):
self.contents = contents
def __unicode__(self):
return mark_safe(''.join(self.contents))
def __iter__(self):
return iter(self.contents)
class PluginRenderer(object):
def __init__(self):
self._renderers = OrderedDict(((
Model,
lambda plugin: mark_safe('<!-- %s: %s -->' % (
plugin._meta.label,
plugin,
)),
),))
def register(self, plugin, renderer):
self._renderers[plugin] = renderer
def render(self, contents):
return RenderedContents(
conditional_escape(self.render_content(c)) for c in contents
)
def render_content(self, content):
if content.__class__ not in self._renderers:
for plugin, renderer in reversed( # pragma: no branch
list(self._renderers.items())):
if isinstance(content, plugin):
self.register(content.__class__, renderer)
break
return self._renderers[content.__class__](content)
|
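Worth flagging for the record above: __unicode__ is only consulted by Python 2, so on Python 3 calling str() on the returned RenderedContents falls back to the default object repr instead of joining the fragments. The minimal stand-in below uses __str__ to show the two intended ways of consuming the result; it is a sketch, not the library's actual class.

# Sketch of an iterable rendered-contents wrapper that also joins cleanly
# under Python 3 via __str__.
class RenderedContents:
    def __init__(self, contents):
        self.contents = list(contents)

    def __iter__(self):
        return iter(self.contents)

    def __str__(self):
        return "".join(self.contents)


parts = RenderedContents(["<h1>Title</h1>", "<p>Body</p>"])

for fragment in parts:  # fragment by fragment, e.g. in a template loop
    print(fragment)

print(str(parts))       # or collapse the region into a single string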
9c83b5b064a50b6813bb3819927c9d268f89aaa1
|
ninja/files.py
|
ninja/files.py
|
from typing import Any, Callable, Iterable, Optional, Type
from django.core.files.uploadedfile import UploadedFile as DjangoUploadedFile
from pydantic.fields import ModelField
__all__ = ["UploadedFile"]
class UploadedFile(DjangoUploadedFile):
@classmethod
def __get_validators__(cls: Type["UploadedFile"]) -> Iterable[Callable[..., Any]]:
yield cls._validate
@classmethod
def _validate(cls: Type["UploadedFile"], v: Any) -> Any:
if not isinstance(v, DjangoUploadedFile):
raise ValueError(f"Expected UploadFile, received: {type(v)}")
return v
@classmethod
def __modify_schema__(cls, field_schema, field: Optional[ModelField]):
field_schema.update(type="string", format="binary")
|
from typing import Any, Callable, Dict, Iterable, Optional, Type
from django.core.files.uploadedfile import UploadedFile as DjangoUploadedFile
from pydantic.fields import ModelField
__all__ = ["UploadedFile"]
class UploadedFile(DjangoUploadedFile):
@classmethod
def __get_validators__(cls: Type["UploadedFile"]) -> Iterable[Callable[..., Any]]:
yield cls._validate
@classmethod
def _validate(cls: Type["UploadedFile"], v: Any) -> Any:
if not isinstance(v, DjangoUploadedFile):
raise ValueError(f"Expected UploadFile, received: {type(v)}")
return v
@classmethod
def __modify_schema__(cls, field_schema: Dict[str, Any], field: Optional[ModelField]):
field_schema.update(type="string", format="binary")
|
Add missing type hint for field_schema parameter
|
Add missing type hint for field_schema parameter
|
Python
|
mit
|
vitalik/django-ninja,vitalik/django-ninja,vitalik/django-ninja
|
from typing import Any, Callable, Iterable, Optional, Type
from django.core.files.uploadedfile import UploadedFile as DjangoUploadedFile
from pydantic.fields import ModelField
__all__ = ["UploadedFile"]
class UploadedFile(DjangoUploadedFile):
@classmethod
def __get_validators__(cls: Type["UploadedFile"]) -> Iterable[Callable[..., Any]]:
yield cls._validate
@classmethod
def _validate(cls: Type["UploadedFile"], v: Any) -> Any:
if not isinstance(v, DjangoUploadedFile):
raise ValueError(f"Expected UploadFile, received: {type(v)}")
return v
@classmethod
def __modify_schema__(cls, field_schema, field: Optional[ModelField]):
field_schema.update(type="string", format="binary")
Add missing type hint for field_schema parameter
|
from typing import Any, Callable, Dict, Iterable, Optional, Type
from django.core.files.uploadedfile import UploadedFile as DjangoUploadedFile
from pydantic.fields import ModelField
__all__ = ["UploadedFile"]
class UploadedFile(DjangoUploadedFile):
@classmethod
def __get_validators__(cls: Type["UploadedFile"]) -> Iterable[Callable[..., Any]]:
yield cls._validate
@classmethod
def _validate(cls: Type["UploadedFile"], v: Any) -> Any:
if not isinstance(v, DjangoUploadedFile):
raise ValueError(f"Expected UploadFile, received: {type(v)}")
return v
@classmethod
def __modify_schema__(cls, field_schema: Dict[str, Any], field: Optional[ModelField]):
field_schema.update(type="string", format="binary")
|
<commit_before>from typing import Any, Callable, Iterable, Optional, Type
from django.core.files.uploadedfile import UploadedFile as DjangoUploadedFile
from pydantic.fields import ModelField
__all__ = ["UploadedFile"]
class UploadedFile(DjangoUploadedFile):
@classmethod
def __get_validators__(cls: Type["UploadedFile"]) -> Iterable[Callable[..., Any]]:
yield cls._validate
@classmethod
def _validate(cls: Type["UploadedFile"], v: Any) -> Any:
if not isinstance(v, DjangoUploadedFile):
raise ValueError(f"Expected UploadFile, received: {type(v)}")
return v
@classmethod
def __modify_schema__(cls, field_schema, field: Optional[ModelField]):
field_schema.update(type="string", format="binary")
<commit_msg>Add missing type hint for field_schema parameter<commit_after>
|
from typing import Any, Callable, Dict, Iterable, Optional, Type
from django.core.files.uploadedfile import UploadedFile as DjangoUploadedFile
from pydantic.fields import ModelField
__all__ = ["UploadedFile"]
class UploadedFile(DjangoUploadedFile):
@classmethod
def __get_validators__(cls: Type["UploadedFile"]) -> Iterable[Callable[..., Any]]:
yield cls._validate
@classmethod
def _validate(cls: Type["UploadedFile"], v: Any) -> Any:
if not isinstance(v, DjangoUploadedFile):
raise ValueError(f"Expected UploadFile, received: {type(v)}")
return v
@classmethod
def __modify_schema__(cls, field_schema: Dict[str, Any], field: Optional[ModelField]):
field_schema.update(type="string", format="binary")
|
from typing import Any, Callable, Iterable, Optional, Type
from django.core.files.uploadedfile import UploadedFile as DjangoUploadedFile
from pydantic.fields import ModelField
__all__ = ["UploadedFile"]
class UploadedFile(DjangoUploadedFile):
@classmethod
def __get_validators__(cls: Type["UploadedFile"]) -> Iterable[Callable[..., Any]]:
yield cls._validate
@classmethod
def _validate(cls: Type["UploadedFile"], v: Any) -> Any:
if not isinstance(v, DjangoUploadedFile):
raise ValueError(f"Expected UploadFile, received: {type(v)}")
return v
@classmethod
def __modify_schema__(cls, field_schema, field: Optional[ModelField]):
field_schema.update(type="string", format="binary")
Add missing type hint for field_schema parameter
from typing import Any, Callable, Dict, Iterable, Optional, Type
from django.core.files.uploadedfile import UploadedFile as DjangoUploadedFile
from pydantic.fields import ModelField
__all__ = ["UploadedFile"]
class UploadedFile(DjangoUploadedFile):
@classmethod
def __get_validators__(cls: Type["UploadedFile"]) -> Iterable[Callable[..., Any]]:
yield cls._validate
@classmethod
def _validate(cls: Type["UploadedFile"], v: Any) -> Any:
if not isinstance(v, DjangoUploadedFile):
raise ValueError(f"Expected UploadFile, received: {type(v)}")
return v
@classmethod
def __modify_schema__(cls, field_schema: Dict[str, Any], field: Optional[ModelField]):
field_schema.update(type="string", format="binary")
|
<commit_before>from typing import Any, Callable, Iterable, Optional, Type
from django.core.files.uploadedfile import UploadedFile as DjangoUploadedFile
from pydantic.fields import ModelField
__all__ = ["UploadedFile"]
class UploadedFile(DjangoUploadedFile):
@classmethod
def __get_validators__(cls: Type["UploadedFile"]) -> Iterable[Callable[..., Any]]:
yield cls._validate
@classmethod
def _validate(cls: Type["UploadedFile"], v: Any) -> Any:
if not isinstance(v, DjangoUploadedFile):
raise ValueError(f"Expected UploadFile, received: {type(v)}")
return v
@classmethod
def __modify_schema__(cls, field_schema, field: Optional[ModelField]):
field_schema.update(type="string", format="binary")
<commit_msg>Add missing type hint for field_schema parameter<commit_after>from typing import Any, Callable, Dict, Iterable, Optional, Type
from django.core.files.uploadedfile import UploadedFile as DjangoUploadedFile
from pydantic.fields import ModelField
__all__ = ["UploadedFile"]
class UploadedFile(DjangoUploadedFile):
@classmethod
def __get_validators__(cls: Type["UploadedFile"]) -> Iterable[Callable[..., Any]]:
yield cls._validate
@classmethod
def _validate(cls: Type["UploadedFile"], v: Any) -> Any:
if not isinstance(v, DjangoUploadedFile):
raise ValueError(f"Expected UploadFile, received: {type(v)}")
return v
@classmethod
def __modify_schema__(cls, field_schema: Dict[str, Any], field: Optional[ModelField]):
field_schema.update(type="string", format="binary")
|
6cb9b6af77768466d7b6fd8e5d0964611da55282
|
tests/__init__.py
|
tests/__init__.py
|
from django.conf import settings
from django.core.management import call_command
settings.configure(
DATABASES = {
'default': {
'ENGINE': 'sqlite3',
'NAME': ':memory:'
}
},
INSTALLED_APPS = [
'fandjango',
'south',
'tests.app'
],
ROOT_URLCONF = 'tests.app.urls',
MIDDLEWARE_CLASSES = [
'fandjango.middleware.FacebookMiddleware'
],
FACEBOOK_APPLICATION_ID = 181259711925270,
FACEBOOK_APPLICATION_SECRET_KEY = '214e4cb484c28c35f18a70a3d735999b',
FACEBOOK_APPLICATION_URL = 'http://apps.facebook.com/fandjango-test'
)
call_command('syncdb')
|
from django.conf import settings
from django.core.management import call_command
settings.configure(
DATABASES = {
'default': {
'ENGINE': 'sqlite3',
'NAME': ':memory:'
}
},
INSTALLED_APPS = [
'fandjango',
'south',
'tests.app'
],
ROOT_URLCONF = 'tests.app.urls',
MIDDLEWARE_CLASSES = [
'fandjango.middleware.FacebookMiddleware'
],
FACEBOOK_APPLICATION_ID = 181259711925270,
FACEBOOK_APPLICATION_SECRET_KEY = '214e4cb484c28c35f18a70a3d735999b',
FACEBOOK_APPLICATION_URL = 'http://apps.facebook.com/fandjango-test'
)
call_command('syncdb')
call_command('migrate')
|
Fix a bug that caused tests to raise a DatabaseError
|
Fix a bug that caused tests to raise a DatabaseError
|
Python
|
mit
|
jgorset/fandjango,jgorset/fandjango
|
from django.conf import settings
from django.core.management import call_command
settings.configure(
DATABASES = {
'default': {
'ENGINE': 'sqlite3',
'NAME': ':memory:'
}
},
INSTALLED_APPS = [
'fandjango',
'south',
'tests.app'
],
ROOT_URLCONF = 'tests.app.urls',
MIDDLEWARE_CLASSES = [
'fandjango.middleware.FacebookMiddleware'
],
FACEBOOK_APPLICATION_ID = 181259711925270,
FACEBOOK_APPLICATION_SECRET_KEY = '214e4cb484c28c35f18a70a3d735999b',
FACEBOOK_APPLICATION_URL = 'http://apps.facebook.com/fandjango-test'
)
call_command('syncdb')
Fix a bug that caused tests to raise a DatabaseError
|
from django.conf import settings
from django.core.management import call_command
settings.configure(
DATABASES = {
'default': {
'ENGINE': 'sqlite3',
'NAME': ':memory:'
}
},
INSTALLED_APPS = [
'fandjango',
'south',
'tests.app'
],
ROOT_URLCONF = 'tests.app.urls',
MIDDLEWARE_CLASSES = [
'fandjango.middleware.FacebookMiddleware'
],
FACEBOOK_APPLICATION_ID = 181259711925270,
FACEBOOK_APPLICATION_SECRET_KEY = '214e4cb484c28c35f18a70a3d735999b',
FACEBOOK_APPLICATION_URL = 'http://apps.facebook.com/fandjango-test'
)
call_command('syncdb')
call_command('migrate')
|
<commit_before>from django.conf import settings
from django.core.management import call_command
settings.configure(
DATABASES = {
'default': {
'ENGINE': 'sqlite3',
'NAME': ':memory:'
}
},
INSTALLED_APPS = [
'fandjango',
'south',
'tests.app'
],
ROOT_URLCONF = 'tests.app.urls',
MIDDLEWARE_CLASSES = [
'fandjango.middleware.FacebookMiddleware'
],
FACEBOOK_APPLICATION_ID = 181259711925270,
FACEBOOK_APPLICATION_SECRET_KEY = '214e4cb484c28c35f18a70a3d735999b',
FACEBOOK_APPLICATION_URL = 'http://apps.facebook.com/fandjango-test'
)
call_command('syncdb')
<commit_msg>Fix a bug that caused tests to raise a DatabaseError<commit_after>
|
from django.conf import settings
from django.core.management import call_command
settings.configure(
DATABASES = {
'default': {
'ENGINE': 'sqlite3',
'NAME': ':memory:'
}
},
INSTALLED_APPS = [
'fandjango',
'south',
'tests.app'
],
ROOT_URLCONF = 'tests.app.urls',
MIDDLEWARE_CLASSES = [
'fandjango.middleware.FacebookMiddleware'
],
FACEBOOK_APPLICATION_ID = 181259711925270,
FACEBOOK_APPLICATION_SECRET_KEY = '214e4cb484c28c35f18a70a3d735999b',
FACEBOOK_APPLICATION_URL = 'http://apps.facebook.com/fandjango-test'
)
call_command('syncdb')
call_command('migrate')
|
from django.conf import settings
from django.core.management import call_command
settings.configure(
DATABASES = {
'default': {
'ENGINE': 'sqlite3',
'NAME': ':memory:'
}
},
INSTALLED_APPS = [
'fandjango',
'south',
'tests.app'
],
ROOT_URLCONF = 'tests.app.urls',
MIDDLEWARE_CLASSES = [
'fandjango.middleware.FacebookMiddleware'
],
FACEBOOK_APPLICATION_ID = 181259711925270,
FACEBOOK_APPLICATION_SECRET_KEY = '214e4cb484c28c35f18a70a3d735999b',
FACEBOOK_APPLICATION_URL = 'http://apps.facebook.com/fandjango-test'
)
call_command('syncdb')
Fix a bug that caused tests to raise a DatabaseError
from django.conf import settings
from django.core.management import call_command
settings.configure(
DATABASES = {
'default': {
'ENGINE': 'sqlite3',
'NAME': ':memory:'
}
},
INSTALLED_APPS = [
'fandjango',
'south',
'tests.app'
],
ROOT_URLCONF = 'tests.app.urls',
MIDDLEWARE_CLASSES = [
'fandjango.middleware.FacebookMiddleware'
],
FACEBOOK_APPLICATION_ID = 181259711925270,
FACEBOOK_APPLICATION_SECRET_KEY = '214e4cb484c28c35f18a70a3d735999b',
FACEBOOK_APPLICATION_URL = 'http://apps.facebook.com/fandjango-test'
)
call_command('syncdb')
call_command('migrate')
|
<commit_before>from django.conf import settings
from django.core.management import call_command
settings.configure(
DATABASES = {
'default': {
'ENGINE': 'sqlite3',
'NAME': ':memory:'
}
},
INSTALLED_APPS = [
'fandjango',
'south',
'tests.app'
],
ROOT_URLCONF = 'tests.app.urls',
MIDDLEWARE_CLASSES = [
'fandjango.middleware.FacebookMiddleware'
],
FACEBOOK_APPLICATION_ID = 181259711925270,
FACEBOOK_APPLICATION_SECRET_KEY = '214e4cb484c28c35f18a70a3d735999b',
FACEBOOK_APPLICATION_URL = 'http://apps.facebook.com/fandjango-test'
)
call_command('syncdb')
<commit_msg>Fix a bug that caused tests to raise a DatabaseError<commit_after>from django.conf import settings
from django.core.management import call_command
settings.configure(
DATABASES = {
'default': {
'ENGINE': 'sqlite3',
'NAME': ':memory:'
}
},
INSTALLED_APPS = [
'fandjango',
'south',
'tests.app'
],
ROOT_URLCONF = 'tests.app.urls',
MIDDLEWARE_CLASSES = [
'fandjango.middleware.FacebookMiddleware'
],
FACEBOOK_APPLICATION_ID = 181259711925270,
FACEBOOK_APPLICATION_SECRET_KEY = '214e4cb484c28c35f18a70a3d735999b',
FACEBOOK_APPLICATION_URL = 'http://apps.facebook.com/fandjango-test'
)
call_command('syncdb')
call_command('migrate')
|
aaf9d8169a685f4ab808765e132b2937d252e70c
|
core/context_processors.py
|
core/context_processors.py
|
from collections import OrderedDict
from django.db.models import Count
from mpages.models import Page, PageRead
def menus(request):
main_menu = Page.objects.get(slug="main-menu")
recent_updates = Page.objects.all().order_by("-updated")[:10]
updates = OrderedDict()
for page in recent_updates:
update_date = page.updated.strftime("%Y-%m-%d")
if update_date not in updates:
updates[update_date] = {"date": update_date, "pages": []}
updates[update_date]["pages"].append(page)
page_reads = PageRead.objects.all()[:50]
recent = []
for page_read in page_reads:
if page_read.page not in recent:
recent.append(page_read.page)
if len(recent) == 10:
break
popular = PageRead.objects.all().values("page__slug", "page__title").annotate(total=Count("page__slug")).order_by("-total", "page__slug")[:10]
return {
"mainmenu": main_menu.content_as_html,
"sidebar": {
"updates": list(updates.values()),
"recent": recent,
"popular": popular,
},
"request": request,
}
|
from collections import OrderedDict
from django.db.models import Count
from mpages.models import Page, PageRead
def menus(request):
main_menu = Page.objects.get(slug="main-menu")
recent_updates = Page.objects.all().order_by("-updated")[:10]
updates = OrderedDict()
for page in recent_updates:
update_date = page.updated.strftime("%Y-%m-%d")
if update_date not in updates:
updates[update_date] = {"date": update_date, "pages": []}
updates[update_date]["pages"].append(page)
page_reads = PageRead.objects.all()[:50]
recent = []
for page_read in page_reads:
if page_read.page not in recent:
recent.append(page_read.page)
if len(recent) == 10:
break
popular = PageRead.objects.all().values("page__slug", "page__title").annotate(total=Count("page__slug")).order_by("-total", "page__slug")[:11]
return {
"mainmenu": main_menu.content_as_html,
"sidebar": {
"updates": list(updates.values()),
"recent": recent,
"popular": popular[1:11],
},
"request": request,
}
|
Remove Home from list of popular pages
|
Remove Home from list of popular pages
|
Python
|
bsd-3-clause
|
ahernp/DMCM,ahernp/DMCM,ahernp/DMCM
|
from collections import OrderedDict
from django.db.models import Count
from mpages.models import Page, PageRead
def menus(request):
main_menu = Page.objects.get(slug="main-menu")
recent_updates = Page.objects.all().order_by("-updated")[:10]
updates = OrderedDict()
for page in recent_updates:
update_date = page.updated.strftime("%Y-%m-%d")
if update_date not in updates:
updates[update_date] = {"date": update_date, "pages": []}
updates[update_date]["pages"].append(page)
page_reads = PageRead.objects.all()[:50]
recent = []
for page_read in page_reads:
if page_read.page not in recent:
recent.append(page_read.page)
if len(recent) == 10:
break
popular = PageRead.objects.all().values("page__slug", "page__title").annotate(total=Count("page__slug")).order_by("-total", "page__slug")[:10]
return {
"mainmenu": main_menu.content_as_html,
"sidebar": {
"updates": list(updates.values()),
"recent": recent,
"popular": popular,
},
"request": request,
}
Remove Home from list of popular pages
|
from collections import OrderedDict
from django.db.models import Count
from mpages.models import Page, PageRead
def menus(request):
main_menu = Page.objects.get(slug="main-menu")
recent_updates = Page.objects.all().order_by("-updated")[:10]
updates = OrderedDict()
for page in recent_updates:
update_date = page.updated.strftime("%Y-%m-%d")
if update_date not in updates:
updates[update_date] = {"date": update_date, "pages": []}
updates[update_date]["pages"].append(page)
page_reads = PageRead.objects.all()[:50]
recent = []
for page_read in page_reads:
if page_read.page not in recent:
recent.append(page_read.page)
if len(recent) == 10:
break
popular = PageRead.objects.all().values("page__slug", "page__title").annotate(total=Count("page__slug")).order_by("-total", "page__slug")[:11]
return {
"mainmenu": main_menu.content_as_html,
"sidebar": {
"updates": list(updates.values()),
"recent": recent,
"popular": popular[1:11],
},
"request": request,
}
|
<commit_before>from collections import OrderedDict
from django.db.models import Count
from mpages.models import Page, PageRead
def menus(request):
main_menu = Page.objects.get(slug="main-menu")
recent_updates = Page.objects.all().order_by("-updated")[:10]
updates = OrderedDict()
for page in recent_updates:
update_date = page.updated.strftime("%Y-%m-%d")
if update_date not in updates:
updates[update_date] = {"date": update_date, "pages": []}
updates[update_date]["pages"].append(page)
page_reads = PageRead.objects.all()[:50]
recent = []
for page_read in page_reads:
if page_read.page not in recent:
recent.append(page_read.page)
if len(recent) == 10:
break
popular = PageRead.objects.all().values("page__slug", "page__title").annotate(total=Count("page__slug")).order_by("-total", "page__slug")[:10]
return {
"mainmenu": main_menu.content_as_html,
"sidebar": {
"updates": list(updates.values()),
"recent": recent,
"popular": popular,
},
"request": request,
}
<commit_msg>Remove Home from list of popular pages<commit_after>
|
from collections import OrderedDict
from django.db.models import Count
from mpages.models import Page, PageRead
def menus(request):
main_menu = Page.objects.get(slug="main-menu")
recent_updates = Page.objects.all().order_by("-updated")[:10]
updates = OrderedDict()
for page in recent_updates:
update_date = page.updated.strftime("%Y-%m-%d")
if update_date not in updates:
updates[update_date] = {"date": update_date, "pages": []}
updates[update_date]["pages"].append(page)
page_reads = PageRead.objects.all()[:50]
recent = []
for page_read in page_reads:
if page_read.page not in recent:
recent.append(page_read.page)
if len(recent) == 10:
break
popular = PageRead.objects.all().values("page__slug", "page__title").annotate(total=Count("page__slug")).order_by("-total", "page__slug")[:11]
return {
"mainmenu": main_menu.content_as_html,
"sidebar": {
"updates": list(updates.values()),
"recent": recent,
"popular": popular[1:11],
},
"request": request,
}
|
from collections import OrderedDict
from django.db.models import Count
from mpages.models import Page, PageRead
def menus(request):
main_menu = Page.objects.get(slug="main-menu")
recent_updates = Page.objects.all().order_by("-updated")[:10]
updates = OrderedDict()
for page in recent_updates:
update_date = page.updated.strftime("%Y-%m-%d")
if update_date not in updates:
updates[update_date] = {"date": update_date, "pages": []}
updates[update_date]["pages"].append(page)
page_reads = PageRead.objects.all()[:50]
recent = []
for page_read in page_reads:
if page_read.page not in recent:
recent.append(page_read.page)
if len(recent) == 10:
break
popular = PageRead.objects.all().values("page__slug", "page__title").annotate(total=Count("page__slug")).order_by("-total", "page__slug")[:10]
return {
"mainmenu": main_menu.content_as_html,
"sidebar": {
"updates": list(updates.values()),
"recent": recent,
"popular": popular,
},
"request": request,
}
Remove Home from list of popular pages
from collections import OrderedDict
from django.db.models import Count
from mpages.models import Page, PageRead
def menus(request):
main_menu = Page.objects.get(slug="main-menu")
recent_updates = Page.objects.all().order_by("-updated")[:10]
updates = OrderedDict()
for page in recent_updates:
update_date = page.updated.strftime("%Y-%m-%d")
if update_date not in updates:
updates[update_date] = {"date": update_date, "pages": []}
updates[update_date]["pages"].append(page)
page_reads = PageRead.objects.all()[:50]
recent = []
for page_read in page_reads:
if page_read.page not in recent:
recent.append(page_read.page)
if len(recent) == 10:
break
popular = PageRead.objects.all().values("page__slug", "page__title").annotate(total=Count("page__slug")).order_by("-total", "page__slug")[:11]
return {
"mainmenu": main_menu.content_as_html,
"sidebar": {
"updates": list(updates.values()),
"recent": recent,
"popular": popular[1:11],
},
"request": request,
}
|
<commit_before>from collections import OrderedDict
from django.db.models import Count
from mpages.models import Page, PageRead
def menus(request):
main_menu = Page.objects.get(slug="main-menu")
recent_updates = Page.objects.all().order_by("-updated")[:10]
updates = OrderedDict()
for page in recent_updates:
update_date = page.updated.strftime("%Y-%m-%d")
if update_date not in updates:
updates[update_date] = {"date": update_date, "pages": []}
updates[update_date]["pages"].append(page)
page_reads = PageRead.objects.all()[:50]
recent = []
for page_read in page_reads:
if page_read.page not in recent:
recent.append(page_read.page)
if len(recent) == 10:
break
popular = PageRead.objects.all().values("page__slug", "page__title").annotate(total=Count("page__slug")).order_by("-total", "page__slug")[:10]
return {
"mainmenu": main_menu.content_as_html,
"sidebar": {
"updates": list(updates.values()),
"recent": recent,
"popular": popular,
},
"request": request,
}
<commit_msg>Remove Home from list of popular pages<commit_after>from collections import OrderedDict
from django.db.models import Count
from mpages.models import Page, PageRead
def menus(request):
main_menu = Page.objects.get(slug="main-menu")
recent_updates = Page.objects.all().order_by("-updated")[:10]
updates = OrderedDict()
for page in recent_updates:
update_date = page.updated.strftime("%Y-%m-%d")
if update_date not in updates:
updates[update_date] = {"date": update_date, "pages": []}
updates[update_date]["pages"].append(page)
page_reads = PageRead.objects.all()[:50]
recent = []
for page_read in page_reads:
if page_read.page not in recent:
recent.append(page_read.page)
if len(recent) == 10:
break
popular = PageRead.objects.all().values("page__slug", "page__title").annotate(total=Count("page__slug")).order_by("-total", "page__slug")[:11]
return {
"mainmenu": main_menu.content_as_html,
"sidebar": {
"updates": list(updates.values()),
"recent": recent,
"popular": popular[1:11],
},
"request": request,
}
|
a19d0c2d77102c1f14823e0fbc255de3b0b2d4f4
|
tests/conftest.py
|
tests/conftest.py
|
# -*- coding: utf-8 -*-
"""
Acolyte Tests
"""
import pytest
from acolyte import create_app
from acolyte.database import db
from config import TestConfig, TestConfigCRSF
@pytest.yield_fixture(scope='function')
def app(request):
app = create_app(TestConfig)
app.app_context().push()
db.create_all()
yield app
@pytest.yield_fixture(scope='function')
def client(app):
result = app.test_client()
yield result
@pytest.yield_fixture(scope='function')
def client_with_crsf(app):
app = create_app(TestConfigCRSF)
app.app_context().push()
result = app.test_client()
yield result
|
# -*- coding: utf-8 -*-
"""Acolyte test fixtures
"""
import pytest
from acolyte import create_app
from acolyte.database import db
from config import TestConfig, TestConfigCRSF
@pytest.yield_fixture(scope='function')
def app():
"""Pytest fixture to yield a fully initialised Acolyte
Decorators:
pytest
Yields:
obj -- Fully initialised Acolyte application
"""
result = create_app(TestConfig)
result.app_context().push()
db.create_all()
yield result
@pytest.yield_fixture(scope='function')
def client(app):
"""Pytest fixture to yield Flask test client for
initialised Acolyte application
Decorators:
pytest
Arguments:
app {obj} -- Initialised Acolyte application
Yields:
obj -- Flask test client
"""
result = app.test_client()
yield result
@pytest.yield_fixture(scope='function')
def client_with_crsf():
"""Pytest fixture that yields Flask test client for
Acolyte application initialised with CRSF testing
Decorators:
pytest
Yields:
obj -- Flask test client
"""
ap = create_app(TestConfigCRSF)
ap.app_context().push()
result = ap.test_client()
yield result
|
Add docstrings to test fixtures
|
Add docstrings to test fixtures
|
Python
|
mit
|
rabramley/frostgrave_acolyte,rabramley/frostgrave_acolyte
|
# -*- coding: utf-8 -*-
"""
Acolyte Tests
"""
import pytest
from acolyte import create_app
from acolyte.database import db
from config import TestConfig, TestConfigCRSF
@pytest.yield_fixture(scope='function')
def app(request):
app = create_app(TestConfig)
app.app_context().push()
db.create_all()
yield app
@pytest.yield_fixture(scope='function')
def client(app):
result = app.test_client()
yield result
@pytest.yield_fixture(scope='function')
def client_with_crsf(app):
app = create_app(TestConfigCRSF)
app.app_context().push()
result = app.test_client()
yield result
Add docstrings to test fixtures
|
# -*- coding: utf-8 -*-
"""Acolyte test fixtures
"""
import pytest
from acolyte import create_app
from acolyte.database import db
from config import TestConfig, TestConfigCRSF
@pytest.yield_fixture(scope='function')
def app():
"""Pytest fixture to yield a fully initialised Acolyte
Decorators:
pytest
Yields:
obj -- Fully initialised Acolyte application
"""
result = create_app(TestConfig)
result.app_context().push()
db.create_all()
yield result
@pytest.yield_fixture(scope='function')
def client(app):
"""Pytest fixture to yield Flask test client for
initialised Acolyte application
Decorators:
pytest
Arguments:
app {obj} -- Initialised Acolyte application
Yields:
obj -- Flask test client
"""
result = app.test_client()
yield result
@pytest.yield_fixture(scope='function')
def client_with_crsf():
"""Pytest fixture that yields Flask test client for
Acolyte application initialised with CRSF testing
Decorators:
pytest
Yields:
obj -- Flask test client
"""
ap = create_app(TestConfigCRSF)
ap.app_context().push()
result = ap.test_client()
yield result
|
<commit_before># -*- coding: utf-8 -*-
"""
Acolyte Tests
"""
import pytest
from acolyte import create_app
from acolyte.database import db
from config import TestConfig, TestConfigCRSF
@pytest.yield_fixture(scope='function')
def app(request):
app = create_app(TestConfig)
app.app_context().push()
db.create_all()
yield app
@pytest.yield_fixture(scope='function')
def client(app):
result = app.test_client()
yield result
@pytest.yield_fixture(scope='function')
def client_with_crsf(app):
app = create_app(TestConfigCRSF)
app.app_context().push()
result = app.test_client()
yield result
<commit_msg>Add docstrings to test fixtures<commit_after>
|
# -*- coding: utf-8 -*-
"""Acolyte test fixtures
"""
import pytest
from acolyte import create_app
from acolyte.database import db
from config import TestConfig, TestConfigCRSF
@pytest.yield_fixture(scope='function')
def app():
"""Pytest fixture to yield a fully initialised Acolyte
Decorators:
pytest
Yields:
obj -- Fully initialised Acolyte application
"""
result = create_app(TestConfig)
result.app_context().push()
db.create_all()
yield result
@pytest.yield_fixture(scope='function')
def client(app):
"""Pytest fixture to yield Flask test client for
initialised Acolyte application
Decorators:
pytest
Arguments:
app {obj} -- Initialised Acolyte application
Yields:
obj -- Flask test client
"""
result = app.test_client()
yield result
@pytest.yield_fixture(scope='function')
def client_with_crsf():
"""Pytest fixture that yields Flask test client for
Acolyte application initialised with CRSF testing
Decorators:
pytest
Yields:
obj -- Flask test client
"""
ap = create_app(TestConfigCRSF)
ap.app_context().push()
result = ap.test_client()
yield result
|
# -*- coding: utf-8 -*-
"""
Acolyte Tests
"""
import pytest
from acolyte import create_app
from acolyte.database import db
from config import TestConfig, TestConfigCRSF
@pytest.yield_fixture(scope='function')
def app(request):
app = create_app(TestConfig)
app.app_context().push()
db.create_all()
yield app
@pytest.yield_fixture(scope='function')
def client(app):
result = app.test_client()
yield result
@pytest.yield_fixture(scope='function')
def client_with_crsf(app):
app = create_app(TestConfigCRSF)
app.app_context().push()
result = app.test_client()
yield result
Add docstrings to test fixtures
# -*- coding: utf-8 -*-
"""Acolyte test fixtures
"""
import pytest
from acolyte import create_app
from acolyte.database import db
from config import TestConfig, TestConfigCRSF
@pytest.yield_fixture(scope='function')
def app():
"""Pytest fixture to yield a fully initialised Acolyte
Decorators:
pytest
Yields:
obj -- Fully initialised Acolyte application
"""
result = create_app(TestConfig)
result.app_context().push()
db.create_all()
yield result
@pytest.yield_fixture(scope='function')
def client(app):
"""Pytest fixture to yield Flask test client for
initialised Acolyte application
Decorators:
pytest
Arguments:
app {obj} -- Initialised Acolyte application
Yields:
obj -- Flask test client
"""
result = app.test_client()
yield result
@pytest.yield_fixture(scope='function')
def client_with_crsf():
"""Pytest fixture that yields Flask test client for
Acolyte application initialised with CRSF testing
Decorators:
pytest
Yields:
obj -- Flask test client
"""
ap = create_app(TestConfigCRSF)
ap.app_context().push()
result = ap.test_client()
yield result
|
<commit_before># -*- coding: utf-8 -*-
"""
Acolyte Tests
"""
import pytest
from acolyte import create_app
from acolyte.database import db
from config import TestConfig, TestConfigCRSF
@pytest.yield_fixture(scope='function')
def app(request):
app = create_app(TestConfig)
app.app_context().push()
db.create_all()
yield app
@pytest.yield_fixture(scope='function')
def client(app):
result = app.test_client()
yield result
@pytest.yield_fixture(scope='function')
def client_with_crsf(app):
app = create_app(TestConfigCRSF)
app.app_context().push()
result = app.test_client()
yield result
<commit_msg>Add docstrings to test fixtures<commit_after># -*- coding: utf-8 -*-
"""Acolyte test fixtures
"""
import pytest
from acolyte import create_app
from acolyte.database import db
from config import TestConfig, TestConfigCRSF
@pytest.yield_fixture(scope='function')
def app():
"""Pytest fixture to yield a fully initialised Acolyte
Decorators:
pytest
Yields:
obj -- Fully initialised Acolyte application
"""
result = create_app(TestConfig)
result.app_context().push()
db.create_all()
yield result
@pytest.yield_fixture(scope='function')
def client(app):
"""Pytest fixture to yield Flask test client for
initialised Acolyte application
Decorators:
pytest
Arguments:
app {obj} -- Initialised Acolyte application
Yields:
obj -- Flask test client
"""
result = app.test_client()
yield result
@pytest.yield_fixture(scope='function')
def client_with_crsf():
"""Pytest fixture that yields Flask test client for
Acolyte application initialised with CRSF testing
Decorators:
pytest
Yields:
obj -- Flask test client
"""
ap = create_app(TestConfigCRSF)
ap.app_context().push()
result = ap.test_client()
yield result
|
34cf1f2467a7fb09850f834d7c1dd165457e36c2
|
tests/conftest.py
|
tests/conftest.py
|
import sys
collect_ignore = []
if sys.version_info < (3, 5):
collect_ignore.append("test_async.py")
if sys.version_info < (3, 4):
collect_ignore.append("test_coroutines.py")
|
"""Configuration for test environment"""
import sys
from .fixtures import *
collect_ignore = []
if sys.version_info < (3, 5):
collect_ignore.append("test_async.py")
if sys.version_info < (3, 4):
collect_ignore.append("test_coroutines.py")
|
Add hug_api fixture, and all future fixtures to default test config
|
Add hug_api fixture, and all future fixtures to default test config
|
Python
|
mit
|
timothycrosley/hug,timothycrosley/hug,MuhammadAlkarouri/hug,MuhammadAlkarouri/hug,MuhammadAlkarouri/hug,timothycrosley/hug
|
import sys
collect_ignore = []
if sys.version_info < (3, 5):
collect_ignore.append("test_async.py")
if sys.version_info < (3, 4):
collect_ignore.append("test_coroutines.py")
Add hug_api fixture, and all future fixtures to default test config
|
"""Configuration for test environment"""
import sys
from .fixtures import *
collect_ignore = []
if sys.version_info < (3, 5):
collect_ignore.append("test_async.py")
if sys.version_info < (3, 4):
collect_ignore.append("test_coroutines.py")
|
<commit_before>
import sys
collect_ignore = []
if sys.version_info < (3, 5):
collect_ignore.append("test_async.py")
if sys.version_info < (3, 4):
collect_ignore.append("test_coroutines.py")
<commit_msg>Add hug_api fixture, and all future fixtures to default test config<commit_after>
|
"""Configuration for test environment"""
import sys
from .fixtures import *
collect_ignore = []
if sys.version_info < (3, 5):
collect_ignore.append("test_async.py")
if sys.version_info < (3, 4):
collect_ignore.append("test_coroutines.py")
|
import sys
collect_ignore = []
if sys.version_info < (3, 5):
collect_ignore.append("test_async.py")
if sys.version_info < (3, 4):
collect_ignore.append("test_coroutines.py")
Add hug_api fixture, and all future fixtures to default test config"""Configuration for test environment"""
import sys
from .fixtures import *
collect_ignore = []
if sys.version_info < (3, 5):
collect_ignore.append("test_async.py")
if sys.version_info < (3, 4):
collect_ignore.append("test_coroutines.py")
|
<commit_before>
import sys
collect_ignore = []
if sys.version_info < (3, 5):
collect_ignore.append("test_async.py")
if sys.version_info < (3, 4):
collect_ignore.append("test_coroutines.py")
<commit_msg>Add hug_api fixture, and all future fixtures to default test config<commit_after>"""Configuration for test environment"""
import sys
from .fixtures import *
collect_ignore = []
if sys.version_info < (3, 5):
collect_ignore.append("test_async.py")
if sys.version_info < (3, 4):
collect_ignore.append("test_coroutines.py")
|
98c1f2b21c55d0f4926602fa6d3534faa623b9ab
|
orges/orges.py
|
orges/orges.py
|
def optimize(f, param_spec=None, return_spec=None):
"""Assume f has to be minimized for now."""
if __name__ == '__main__':
pass
|
from test.algorithms.saes import f as saes
from paramspec import ParamSpec
from args import ArgsCreator, call
def optimize(f, param_spec=None, return_spec=None):
"""Assume f has to be minimized for now."""
if param_spec is None:
param_spec = ParamSpec(f)
args_creator = ArgsCreator(param_spec)
for args in args_creator.product():
print call(f, args), args
if __name__ == '__main__':
def f(args):
args["d"] = 2
args["epsilon"] = 0.0001
return saes(args)
param_spec = ParamSpec()
param_spec.int("mu").interval((10, 20))
param_spec.int("lambd").interval((10, 50))
param_spec.float("tau0").interval((0, 1)).step(0.1)
param_spec.float("tau1").interval((0, 1)).step(0.1)
optimize(f, param_spec)
|
Add prototype implementation for optimize()
|
Add prototype implementation for optimize()
It is currently doing a "Grid search" without actually searching for
anything ;)
|
Python
|
bsd-3-clause
|
cigroup-ol/metaopt,cigroup-ol/metaopt,cigroup-ol/metaopt
|
def optimize(f, param_spec=None, return_spec=None):
"""Assume f has to be minimized for now."""
if __name__ == '__main__':
pass
Add prototype implementation for optimize()
It is currently doing a "Grid search" without actually searching for
anything ;)
|
from test.algorithms.saes import f as saes
from paramspec import ParamSpec
from args import ArgsCreator, call
def optimize(f, param_spec=None, return_spec=None):
"""Assume f has to be minimized for now."""
if param_spec is None:
param_spec = ParamSpec(f)
args_creator = ArgsCreator(param_spec)
for args in args_creator.product():
print call(f, args), args
if __name__ == '__main__':
def f(args):
args["d"] = 2
args["epsilon"] = 0.0001
return saes(args)
param_spec = ParamSpec()
param_spec.int("mu").interval((10, 20))
param_spec.int("lambd").interval((10, 50))
param_spec.float("tau0").interval((0, 1)).step(0.1)
param_spec.float("tau1").interval((0, 1)).step(0.1)
optimize(f, param_spec)
|
<commit_before>def optimize(f, param_spec=None, return_spec=None):
"""Assume f has to be minimized for now."""
if __name__ == '__main__':
pass<commit_msg>Add prototype implementation for optimize()
It is currently doing a "Grid search" without actually searching for
anything ;)<commit_after>
|
from test.algorithms.saes import f as saes
from paramspec import ParamSpec
from args import ArgsCreator, call
def optimize(f, param_spec=None, return_spec=None):
"""Assume f has to be minimized for now."""
if param_spec is None:
param_spec = ParamSpec(f)
args_creator = ArgsCreator(param_spec)
for args in args_creator.product():
print call(f, args), args
if __name__ == '__main__':
def f(args):
args["d"] = 2
args["epsilon"] = 0.0001
return saes(args)
param_spec = ParamSpec()
param_spec.int("mu").interval((10, 20))
param_spec.int("lambd").interval((10, 50))
param_spec.float("tau0").interval((0, 1)).step(0.1)
param_spec.float("tau1").interval((0, 1)).step(0.1)
optimize(f, param_spec)
|
def optimize(f, param_spec=None, return_spec=None):
"""Assume f has to be minimized for now."""
if __name__ == '__main__':
pass
Add prototype implementation for optimize()
It is currently doing a "Grid search" without actually searching for
anything ;)
from test.algorithms.saes import f as saes
from paramspec import ParamSpec
from args import ArgsCreator, call
def optimize(f, param_spec=None, return_spec=None):
"""Assume f has to be minimized for now."""
if param_spec is None:
param_spec = ParamSpec(f)
args_creator = ArgsCreator(param_spec)
for args in args_creator.product():
print call(f, args), args
if __name__ == '__main__':
def f(args):
args["d"] = 2
args["epsilon"] = 0.0001
return saes(args)
param_spec = ParamSpec()
param_spec.int("mu").interval((10, 20))
param_spec.int("lambd").interval((10, 50))
param_spec.float("tau0").interval((0, 1)).step(0.1)
param_spec.float("tau1").interval((0, 1)).step(0.1)
optimize(f, param_spec)
|
<commit_before>def optimize(f, param_spec=None, return_spec=None):
"""Assume f has to be minimized for now."""
if __name__ == '__main__':
pass<commit_msg>Add prototype implementation for optimize()
It is currently doing a "Grid search" without actually searching for
anything ;)<commit_after>from test.algorithms.saes import f as saes
from paramspec import ParamSpec
from args import ArgsCreator, call
def optimize(f, param_spec=None, return_spec=None):
"""Assume f has to be minimized for now."""
if param_spec is None:
param_spec = ParamSpec(f)
args_creator = ArgsCreator(param_spec)
for args in args_creator.product():
print call(f, args), args
if __name__ == '__main__':
def f(args):
args["d"] = 2
args["epsilon"] = 0.0001
return saes(args)
param_spec = ParamSpec()
param_spec.int("mu").interval((10, 20))
param_spec.int("lambd").interval((10, 50))
param_spec.float("tau0").interval((0, 1)).step(0.1)
param_spec.float("tau1").interval((0, 1)).step(0.1)
optimize(f, param_spec)
|
bc1409937a16698bdef21c0ec90d8b823db0bb97
|
rackattack/physical/logconfig.py
|
rackattack/physical/logconfig.py
|
import logging
from rackattack.ssh import connection
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
logging.getLogger('network').setLevel(logging.DEBUG)
logging.getLogger('network').propagate = False
logging.getLogger().setLevel(logging.DEBUG)
handler = logging.StreamHandler()
handler.setLevel(logging.DEBUG)
handler.setFormatter(formatter)
logging.getLogger().addHandler(handler)
handler = logging.FileHandler("/var/log/rackattack.physical.network.log")
handler.setLevel(logging.DEBUG)
handler.setFormatter(formatter)
logging.getLogger('network').addHandler(handler)
connection.discardParamikoLogs()
connection.discardSSHDebugMessages()
logging.getLogger("pika").setLevel(logging.INFO)
|
import logging
from rackattack.ssh import connection
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
logging.getLogger('network').setLevel(logging.ERROR)
logging.getLogger('network').propagate = False
logging.getLogger().setLevel(logging.DEBUG)
handler = logging.StreamHandler()
handler.setLevel(logging.DEBUG)
handler.setFormatter(formatter)
logging.getLogger().addHandler(handler)
handler = logging.FileHandler("/var/log/rackattack.physical.network.log")
handler.setLevel(logging.DEBUG)
handler.setFormatter(formatter)
logging.getLogger('network').addHandler(handler)
connection.discardParamikoLogs()
connection.discardSSHDebugMessages()
logging.getLogger("pika").setLevel(logging.INFO)
|
Set the log level of the network logger to ERROR, since it's less needed now
|
Set the log level of the network logger to ERROR, since it's less needed now
|
Python
|
apache-2.0
|
eliran-stratoscale/rackattack-physical,Stratoscale/rackattack-physical,eliran-stratoscale/rackattack-physical,Stratoscale/rackattack-physical
|
import logging
from rackattack.ssh import connection
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
logging.getLogger('network').setLevel(logging.DEBUG)
logging.getLogger('network').propagate = False
logging.getLogger().setLevel(logging.DEBUG)
handler = logging.StreamHandler()
handler.setLevel(logging.DEBUG)
handler.setFormatter(formatter)
logging.getLogger().addHandler(handler)
handler = logging.FileHandler("/var/log/rackattack.physical.network.log")
handler.setLevel(logging.DEBUG)
handler.setFormatter(formatter)
logging.getLogger('network').addHandler(handler)
connection.discardParamikoLogs()
connection.discardSSHDebugMessages()
logging.getLogger("pika").setLevel(logging.INFO)
Set the log level of the network logger to ERROR, since it's less needed now
|
import logging
from rackattack.ssh import connection
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
logging.getLogger('network').setLevel(logging.ERROR)
logging.getLogger('network').propagate = False
logging.getLogger().setLevel(logging.DEBUG)
handler = logging.StreamHandler()
handler.setLevel(logging.DEBUG)
handler.setFormatter(formatter)
logging.getLogger().addHandler(handler)
handler = logging.FileHandler("/var/log/rackattack.physical.network.log")
handler.setLevel(logging.DEBUG)
handler.setFormatter(formatter)
logging.getLogger('network').addHandler(handler)
connection.discardParamikoLogs()
connection.discardSSHDebugMessages()
logging.getLogger("pika").setLevel(logging.INFO)
|
<commit_before>import logging
from rackattack.ssh import connection
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
logging.getLogger('network').setLevel(logging.DEBUG)
logging.getLogger('network').propagate = False
logging.getLogger().setLevel(logging.DEBUG)
handler = logging.StreamHandler()
handler.setLevel(logging.DEBUG)
handler.setFormatter(formatter)
logging.getLogger().addHandler(handler)
handler = logging.FileHandler("/var/log/rackattack.physical.network.log")
handler.setLevel(logging.DEBUG)
handler.setFormatter(formatter)
logging.getLogger('network').addHandler(handler)
connection.discardParamikoLogs()
connection.discardSSHDebugMessages()
logging.getLogger("pika").setLevel(logging.INFO)
<commit_msg>Set the log level of the network logger to ERROR, since it's less needed now<commit_after>
|
import logging
from rackattack.ssh import connection
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
logging.getLogger('network').setLevel(logging.ERROR)
logging.getLogger('network').propagate = False
logging.getLogger().setLevel(logging.DEBUG)
handler = logging.StreamHandler()
handler.setLevel(logging.DEBUG)
handler.setFormatter(formatter)
logging.getLogger().addHandler(handler)
handler = logging.FileHandler("/var/log/rackattack.physical.network.log")
handler.setLevel(logging.DEBUG)
handler.setFormatter(formatter)
logging.getLogger('network').addHandler(handler)
connection.discardParamikoLogs()
connection.discardSSHDebugMessages()
logging.getLogger("pika").setLevel(logging.INFO)
|
import logging
from rackattack.ssh import connection
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
logging.getLogger('network').setLevel(logging.DEBUG)
logging.getLogger('network').propagate = False
logging.getLogger().setLevel(logging.DEBUG)
handler = logging.StreamHandler()
handler.setLevel(logging.DEBUG)
handler.setFormatter(formatter)
logging.getLogger().addHandler(handler)
handler = logging.FileHandler("/var/log/rackattack.physical.network.log")
handler.setLevel(logging.DEBUG)
handler.setFormatter(formatter)
logging.getLogger('network').addHandler(handler)
connection.discardParamikoLogs()
connection.discardSSHDebugMessages()
logging.getLogger("pika").setLevel(logging.INFO)
Set the log level of the network logger to ERROR, since it's less needed now
import logging
from rackattack.ssh import connection
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
logging.getLogger('network').setLevel(logging.ERROR)
logging.getLogger('network').propagate = False
logging.getLogger().setLevel(logging.DEBUG)
handler = logging.StreamHandler()
handler.setLevel(logging.DEBUG)
handler.setFormatter(formatter)
logging.getLogger().addHandler(handler)
handler = logging.FileHandler("/var/log/rackattack.physical.network.log")
handler.setLevel(logging.DEBUG)
handler.setFormatter(formatter)
logging.getLogger('network').addHandler(handler)
connection.discardParamikoLogs()
connection.discardSSHDebugMessages()
logging.getLogger("pika").setLevel(logging.INFO)
|
<commit_before>import logging
from rackattack.ssh import connection
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
logging.getLogger('network').setLevel(logging.DEBUG)
logging.getLogger('network').propagate = False
logging.getLogger().setLevel(logging.DEBUG)
handler = logging.StreamHandler()
handler.setLevel(logging.DEBUG)
handler.setFormatter(formatter)
logging.getLogger().addHandler(handler)
handler = logging.FileHandler("/var/log/rackattack.physical.network.log")
handler.setLevel(logging.DEBUG)
handler.setFormatter(formatter)
logging.getLogger('network').addHandler(handler)
connection.discardParamikoLogs()
connection.discardSSHDebugMessages()
logging.getLogger("pika").setLevel(logging.INFO)
<commit_msg>Set the log level of the network logger to ERROR, since it's less needed now<commit_after>import logging
from rackattack.ssh import connection
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
logging.getLogger('network').setLevel(logging.ERROR)
logging.getLogger('network').propagate = False
logging.getLogger().setLevel(logging.DEBUG)
handler = logging.StreamHandler()
handler.setLevel(logging.DEBUG)
handler.setFormatter(formatter)
logging.getLogger().addHandler(handler)
handler = logging.FileHandler("/var/log/rackattack.physical.network.log")
handler.setLevel(logging.DEBUG)
handler.setFormatter(formatter)
logging.getLogger('network').addHandler(handler)
connection.discardParamikoLogs()
connection.discardSSHDebugMessages()
logging.getLogger("pika").setLevel(logging.INFO)
|
92c01be43b80247ce2233851dd74b041bb9d44b0
|
csunplugged/resources/views/BarcodeChecksumPosterResourceGenerator.py
|
csunplugged/resources/views/BarcodeChecksumPosterResourceGenerator.py
|
"""Class for Barcode Checksum Poster resource generator."""
from PIL import Image
from utils.BaseResourceGenerator import BaseResourceGenerator
class BarcodeChecksumPosterResourceGenerator(BaseResourceGenerator):
"""Class for Grid resource generator."""
additional_valid_options = {
"barcode_length": ["12", "13"]
}
def data(self):
"""Create data for a copy of the Grid resource.
Returns:
A dictionary of the one page for the resource.
"""
image_path = "static/img/resources/barcode-checksum-poster/{}-digits.png"
image_path = image_path.format(self.requested_options["barcode_length"])
image = Image.open(image_path)
return {"type": "image", "data": image}
@property
def subtitle(self):
"""Return the subtitle string of the resource.
Used after the resource name in the filename, and
also on the resource image.
Returns:
text for subtitle (str).
"""
barcode_length = self.requested_options["barcode_length"]
return "{} digits - {}".format(barcode_length, super().subtitle)
|
"""Class for Barcode Checksum Poster resource generator."""
from PIL import Image, ImageDraw
from utils.BaseResourceGenerator import BaseResourceGenerator
from utils.TextBoxDrawer import TextBoxDrawer
from django.utils.translation import ugettext as _
class BarcodeChecksumPosterResourceGenerator(BaseResourceGenerator):
"""Class for Grid resource generator."""
additional_valid_options = {
"barcode_length": ["12", "13"]
}
def data(self):
"""Create data for a copy of the Grid resource.
Returns:
A dictionary of the one page for the resource.
"""
path = "static/img/resources/barcode-checksum-poster/{}-digits"
path = path.format(self.requested_options["barcode_length"])
image_path = "{}.png".format(path)
svg_path = "{}.svg".format(path)
image = Image.open(image_path)
draw = ImageDraw.Draw(image)
textbox_drawer = TextBoxDrawer(image, draw, svg_path)
textbox_drawer.write_text_box(
"title",
_("13 Digit Barcode"),
horiz_just="center",
vert_just="center",
)
headings = {
"heading1": _("Separate!"),
"heading2": _("Operate!"),
"heading3": _("Calculate!")
}
for heading_id, heading in headings.items():
textbox_drawer.write_text_box(
heading_id,
heading,
)
textbox_drawer.write_text_box(
"paragraph",
_("Remember that this algorithm uses modulo 10, so we are only "
"interested in the number in the one's column."),
)
return {"type": "image", "data": image}
@property
def subtitle(self):
"""Return the subtitle string of the resource.
Used after the resource name in the filename, and
also on the resource image.
Returns:
text for subtitle (str).
"""
barcode_length = self.requested_options["barcode_length"]
return "{} digits - {}".format(barcode_length, super().subtitle)
|
Modify Barcode Checksum Poster resource to dynamically overlay text
|
Modify Barcode Checksum Poster resource to dynamically overlay text
|
Python
|
mit
|
uccser/cs-unplugged,uccser/cs-unplugged,uccser/cs-unplugged,uccser/cs-unplugged
|
"""Class for Barcode Checksum Poster resource generator."""
from PIL import Image
from utils.BaseResourceGenerator import BaseResourceGenerator
class BarcodeChecksumPosterResourceGenerator(BaseResourceGenerator):
"""Class for Grid resource generator."""
additional_valid_options = {
"barcode_length": ["12", "13"]
}
def data(self):
"""Create data for a copy of the Grid resource.
Returns:
A dictionary of the one page for the resource.
"""
image_path = "static/img/resources/barcode-checksum-poster/{}-digits.png"
image_path = image_path.format(self.requested_options["barcode_length"])
image = Image.open(image_path)
return {"type": "image", "data": image}
@property
def subtitle(self):
"""Return the subtitle string of the resource.
Used after the resource name in the filename, and
also on the resource image.
Returns:
text for subtitle (str).
"""
barcode_length = self.requested_options["barcode_length"]
return "{} digits - {}".format(barcode_length, super().subtitle)
Modify Barcode Checksum Poster resource to dynamically overlay text
|
"""Class for Barcode Checksum Poster resource generator."""
from PIL import Image, ImageDraw
from utils.BaseResourceGenerator import BaseResourceGenerator
from utils.TextBoxDrawer import TextBoxDrawer
from django.utils.translation import ugettext as _
class BarcodeChecksumPosterResourceGenerator(BaseResourceGenerator):
"""Class for Grid resource generator."""
additional_valid_options = {
"barcode_length": ["12", "13"]
}
def data(self):
"""Create data for a copy of the Grid resource.
Returns:
A dictionary of the one page for the resource.
"""
path = "static/img/resources/barcode-checksum-poster/{}-digits"
path = path.format(self.requested_options["barcode_length"])
image_path = "{}.png".format(path)
svg_path = "{}.svg".format(path)
image = Image.open(image_path)
draw = ImageDraw.Draw(image)
textbox_drawer = TextBoxDrawer(image, draw, svg_path)
textbox_drawer.write_text_box(
"title",
_("13 Digit Barcode"),
horiz_just="center",
vert_just="center",
)
headings = {
"heading1": _("Separate!"),
"heading2": _("Operate!"),
"heading3": _("Calculate!")
}
for heading_id, heading in headings.items():
textbox_drawer.write_text_box(
heading_id,
heading,
)
textbox_drawer.write_text_box(
"paragraph",
_("Remember that this algorithm uses modulo 10, so we are only "
"interested in the number in the one's column."),
)
return {"type": "image", "data": image}
@property
def subtitle(self):
"""Return the subtitle string of the resource.
Used after the resource name in the filename, and
also on the resource image.
Returns:
text for subtitle (str).
"""
barcode_length = self.requested_options["barcode_length"]
return "{} digits - {}".format(barcode_length, super().subtitle)
|
<commit_before>"""Class for Barcode Checksum Poster resource generator."""
from PIL import Image
from utils.BaseResourceGenerator import BaseResourceGenerator
class BarcodeChecksumPosterResourceGenerator(BaseResourceGenerator):
"""Class for Grid resource generator."""
additional_valid_options = {
"barcode_length": ["12", "13"]
}
def data(self):
"""Create data for a copy of the Grid resource.
Returns:
A dictionary of the one page for the resource.
"""
image_path = "static/img/resources/barcode-checksum-poster/{}-digits.png"
image_path = image_path.format(self.requested_options["barcode_length"])
image = Image.open(image_path)
return {"type": "image", "data": image}
@property
def subtitle(self):
"""Return the subtitle string of the resource.
Used after the resource name in the filename, and
also on the resource image.
Returns:
text for subtitle (str).
"""
barcode_length = self.requested_options["barcode_length"]
return "{} digits - {}".format(barcode_length, super().subtitle)
<commit_msg>Modify Barcode Checksum Poster resource to dynamically overlay text<commit_after>
|
"""Class for Barcode Checksum Poster resource generator."""
from PIL import Image, ImageDraw
from utils.BaseResourceGenerator import BaseResourceGenerator
from utils.TextBoxDrawer import TextBoxDrawer
from django.utils.translation import ugettext as _
class BarcodeChecksumPosterResourceGenerator(BaseResourceGenerator):
"""Class for Grid resource generator."""
additional_valid_options = {
"barcode_length": ["12", "13"]
}
def data(self):
"""Create data for a copy of the Grid resource.
Returns:
A dictionary of the one page for the resource.
"""
path = "static/img/resources/barcode-checksum-poster/{}-digits"
path = path.format(self.requested_options["barcode_length"])
image_path = "{}.png".format(path)
svg_path = "{}.svg".format(path)
image = Image.open(image_path)
draw = ImageDraw.Draw(image)
textbox_drawer = TextBoxDrawer(image, draw, svg_path)
textbox_drawer.write_text_box(
"title",
_("13 Digit Barcode"),
horiz_just="center",
vert_just="center",
)
headings = {
"heading1": _("Separate!"),
"heading2": _("Operate!"),
"heading3": _("Calculate!")
}
for heading_id, heading in headings.items():
textbox_drawer.write_text_box(
heading_id,
heading,
)
textbox_drawer.write_text_box(
"paragraph",
_("Remember that this algorithm uses modulo 10, so we are only "
"interested in the number in the one's column."),
)
return {"type": "image", "data": image}
@property
def subtitle(self):
"""Return the subtitle string of the resource.
Used after the resource name in the filename, and
also on the resource image.
Returns:
text for subtitle (str).
"""
barcode_length = self.requested_options["barcode_length"]
return "{} digits - {}".format(barcode_length, super().subtitle)
|
"""Class for Barcode Checksum Poster resource generator."""
from PIL import Image
from utils.BaseResourceGenerator import BaseResourceGenerator
class BarcodeChecksumPosterResourceGenerator(BaseResourceGenerator):
"""Class for Grid resource generator."""
additional_valid_options = {
"barcode_length": ["12", "13"]
}
def data(self):
"""Create data for a copy of the Grid resource.
Returns:
A dictionary of the one page for the resource.
"""
image_path = "static/img/resources/barcode-checksum-poster/{}-digits.png"
image_path = image_path.format(self.requested_options["barcode_length"])
image = Image.open(image_path)
return {"type": "image", "data": image}
@property
def subtitle(self):
"""Return the subtitle string of the resource.
Used after the resource name in the filename, and
also on the resource image.
Returns:
text for subtitle (str).
"""
barcode_length = self.requested_options["barcode_length"]
return "{} digits - {}".format(barcode_length, super().subtitle)
Modify Barcode Checksum Poster resource to dynamically overlay text"""Class for Barcode Checksum Poster resource generator."""
from PIL import Image, ImageDraw
from utils.BaseResourceGenerator import BaseResourceGenerator
from utils.TextBoxDrawer import TextBoxDrawer
from django.utils.translation import ugettext as _
class BarcodeChecksumPosterResourceGenerator(BaseResourceGenerator):
"""Class for Grid resource generator."""
additional_valid_options = {
"barcode_length": ["12", "13"]
}
def data(self):
"""Create data for a copy of the Grid resource.
Returns:
A dictionary of the one page for the resource.
"""
path = "static/img/resources/barcode-checksum-poster/{}-digits"
path = path.format(self.requested_options["barcode_length"])
image_path = "{}.png".format(path)
svg_path = "{}.svg".format(path)
image = Image.open(image_path)
draw = ImageDraw.Draw(image)
textbox_drawer = TextBoxDrawer(image, draw, svg_path)
textbox_drawer.write_text_box(
"title",
_("13 Digit Barcode"),
horiz_just="center",
vert_just="center",
)
headings = {
"heading1": _("Separate!"),
"heading2": _("Operate!"),
"heading3": _("Calculate!")
}
for heading_id, heading in headings.items():
textbox_drawer.write_text_box(
heading_id,
heading,
)
textbox_drawer.write_text_box(
"paragraph",
_("Remember that this algorithm uses modulo 10, so we are only "
"interested in the number in the one's column."),
)
return {"type": "image", "data": image}
@property
def subtitle(self):
"""Return the subtitle string of the resource.
Used after the resource name in the filename, and
also on the resource image.
Returns:
text for subtitle (str).
"""
barcode_length = self.requested_options["barcode_length"]
return "{} digits - {}".format(barcode_length, super().subtitle)
|
<commit_before>"""Class for Barcode Checksum Poster resource generator."""
from PIL import Image
from utils.BaseResourceGenerator import BaseResourceGenerator
class BarcodeChecksumPosterResourceGenerator(BaseResourceGenerator):
"""Class for Grid resource generator."""
additional_valid_options = {
"barcode_length": ["12", "13"]
}
def data(self):
"""Create data for a copy of the Grid resource.
Returns:
A dictionary of the one page for the resource.
"""
image_path = "static/img/resources/barcode-checksum-poster/{}-digits.png"
image_path = image_path.format(self.requested_options["barcode_length"])
image = Image.open(image_path)
return {"type": "image", "data": image}
@property
def subtitle(self):
"""Return the subtitle string of the resource.
Used after the resource name in the filename, and
also on the resource image.
Returns:
text for subtitle (str).
"""
barcode_length = self.requested_options["barcode_length"]
return "{} digits - {}".format(barcode_length, super().subtitle)
<commit_msg>Modify Barcode Checksum Poster resource to dynamically overlay text<commit_after>"""Class for Barcode Checksum Poster resource generator."""
from PIL import Image, ImageDraw
from utils.BaseResourceGenerator import BaseResourceGenerator
from utils.TextBoxDrawer import TextBoxDrawer
from django.utils.translation import ugettext as _
class BarcodeChecksumPosterResourceGenerator(BaseResourceGenerator):
"""Class for Grid resource generator."""
additional_valid_options = {
"barcode_length": ["12", "13"]
}
def data(self):
"""Create data for a copy of the Grid resource.
Returns:
A dictionary of the one page for the resource.
"""
path = "static/img/resources/barcode-checksum-poster/{}-digits"
path = path.format(self.requested_options["barcode_length"])
image_path = "{}.png".format(path)
svg_path = "{}.svg".format(path)
image = Image.open(image_path)
draw = ImageDraw.Draw(image)
textbox_drawer = TextBoxDrawer(image, draw, svg_path)
textbox_drawer.write_text_box(
"title",
_("13 Digit Barcode"),
horiz_just="center",
vert_just="center",
)
headings = {
"heading1": _("Separate!"),
"heading2": _("Operate!"),
"heading3": _("Calculate!")
}
for heading_id, heading in headings.items():
textbox_drawer.write_text_box(
heading_id,
heading,
)
textbox_drawer.write_text_box(
"paragraph",
_("Remember that this algorithm uses modulo 10, so we are only "
"interested in the number in the one's column."),
)
return {"type": "image", "data": image}
@property
def subtitle(self):
"""Return the subtitle string of the resource.
Used after the resource name in the filename, and
also on the resource image.
Returns:
text for subtitle (str).
"""
barcode_length = self.requested_options["barcode_length"]
return "{} digits - {}".format(barcode_length, super().subtitle)
|
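An illustrative aside on the record above: the change swaps a fully pre-rendered poster image for text drawn onto the base image at generation time, so headings and the paragraph can be translated. The snippet below is a minimal, self-contained sketch of that overlay pattern using only Pillow; the image size, coordinates, and strings are assumptions for demonstration and are not the project's actual TextBoxDrawer API.

# Illustrative sketch only: overlay translatable text onto a base image with Pillow.
# Image size, positions and strings are assumptions, not the cs-field-guide API.
from PIL import Image, ImageDraw

def render_poster():
    # Stand-in for the pre-rendered background the real generator opens from disk.
    image = Image.new("RGB", (800, 600), "white")
    draw = ImageDraw.Draw(image)

    # Draw a title and headings at fixed positions; the real resource reads these
    # positions from an SVG template instead of hard-coding them here.
    draw.text((300, 40), "13 Digit Barcode", fill="black")
    for i, heading in enumerate(["Separate!", "Operate!", "Calculate!"]):
        draw.text((60, 150 + i * 120), heading, fill="black")

    draw.text((60, 520), "Remember that this algorithm uses modulo 10.", fill="black")
    return image

if __name__ == "__main__":
    render_poster().save("barcode-poster-sketch.png")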
b98ce71770d22d4b80e83f56b74dc710405cf535
|
mitmproxy/utils/sliding_window.py
|
mitmproxy/utils/sliding_window.py
|
import itertools
from typing import TypeVar, Iterator, Tuple, Optional
T = TypeVar('T')
def window(iterator: Iterator[T], behind: int = 0, ahead: int = 0) -> Iterator[Tuple[Optional[T]]]:
"""
Sliding window for an iterator.
Example:
>>> for prev, i, nxt in window(range(10), 1, 1):
>>> print(prev, i, nxt)
None 0 1
0 1 2
1 2 3
2 3 None
"""
# TODO: move into utils
iters = list(itertools.tee(iterator, behind + 1 + ahead))
for i in range(behind):
iters[i] = itertools.chain((behind - i) * [None], iters[i])
for i in range(ahead):
iters[-1 - i] = itertools.islice(
itertools.chain(iters[-1 - i], (ahead - i) * [None]),
(ahead - i),
None
)
return zip(*iters)
|
import itertools
from typing import TypeVar, Iterable, Iterator, Tuple, Optional
T = TypeVar('T')
def window(iterator: Iterable[T], behind: int = 0, ahead: int = 0) -> Iterator[Tuple[Optional[T], ...]]:
"""
Sliding window for an iterator.
Example:
>>> for prev, i, nxt in window(range(10), 1, 1):
>>> print(prev, i, nxt)
None 0 1
0 1 2
1 2 3
2 3 None
"""
# TODO: move into utils
iters = list(itertools.tee(iterator, behind + 1 + ahead))
for i in range(behind):
iters[i] = itertools.chain((behind - i) * [None], iters[i])
for i in range(ahead):
iters[-1 - i] = itertools.islice(
itertools.chain(iters[-1 - i], (ahead - i) * [None]),
(ahead - i),
None
)
return zip(*iters)
|
Fix sliding window type specs
|
Fix sliding window type specs
|
Python
|
mit
|
mhils/mitmproxy,ujjwal96/mitmproxy,vhaupert/mitmproxy,StevenVanAcker/mitmproxy,ujjwal96/mitmproxy,zlorb/mitmproxy,Kriechi/mitmproxy,vhaupert/mitmproxy,mitmproxy/mitmproxy,zlorb/mitmproxy,Kriechi/mitmproxy,ddworken/mitmproxy,mitmproxy/mitmproxy,vhaupert/mitmproxy,mitmproxy/mitmproxy,StevenVanAcker/mitmproxy,ujjwal96/mitmproxy,xaxa89/mitmproxy,cortesi/mitmproxy,mitmproxy/mitmproxy,StevenVanAcker/mitmproxy,mhils/mitmproxy,StevenVanAcker/mitmproxy,mhils/mitmproxy,mitmproxy/mitmproxy,xaxa89/mitmproxy,xaxa89/mitmproxy,vhaupert/mitmproxy,cortesi/mitmproxy,ddworken/mitmproxy,MatthewShao/mitmproxy,cortesi/mitmproxy,cortesi/mitmproxy,zlorb/mitmproxy,MatthewShao/mitmproxy,xaxa89/mitmproxy,ujjwal96/mitmproxy,ddworken/mitmproxy,mhils/mitmproxy,MatthewShao/mitmproxy,Kriechi/mitmproxy,MatthewShao/mitmproxy,zlorb/mitmproxy,Kriechi/mitmproxy,mhils/mitmproxy,ddworken/mitmproxy
|
import itertools
from typing import TypeVar, Iterator, Tuple, Optional
T = TypeVar('T')
def window(iterator: Iterator[T], behind: int = 0, ahead: int = 0) -> Iterator[Tuple[Optional[T]]]:
"""
Sliding window for an iterator.
Example:
>>> for prev, i, nxt in window(range(10), 1, 1):
>>> print(prev, i, nxt)
None 0 1
0 1 2
1 2 3
2 3 None
"""
# TODO: move into utils
iters = list(itertools.tee(iterator, behind + 1 + ahead))
for i in range(behind):
iters[i] = itertools.chain((behind - i) * [None], iters[i])
for i in range(ahead):
iters[-1 - i] = itertools.islice(
itertools.chain(iters[-1 - i], (ahead - i) * [None]),
(ahead - i),
None
)
return zip(*iters)
Fix sliding window type specs
|
import itertools
from typing import TypeVar, Iterable, Iterator, Tuple, Optional
T = TypeVar('T')
def window(iterator: Iterable[T], behind: int = 0, ahead: int = 0) -> Iterator[Tuple[Optional[T], ...]]:
"""
Sliding window for an iterator.
Example:
>>> for prev, i, nxt in window(range(10), 1, 1):
>>> print(prev, i, nxt)
None 0 1
0 1 2
1 2 3
2 3 None
"""
# TODO: move into utils
iters = list(itertools.tee(iterator, behind + 1 + ahead))
for i in range(behind):
iters[i] = itertools.chain((behind - i) * [None], iters[i])
for i in range(ahead):
iters[-1 - i] = itertools.islice(
itertools.chain(iters[-1 - i], (ahead - i) * [None]),
(ahead - i),
None
)
return zip(*iters)
|
<commit_before>import itertools
from typing import TypeVar, Iterator, Tuple, Optional
T = TypeVar('T')
def window(iterator: Iterator[T], behind: int = 0, ahead: int = 0) -> Iterator[Tuple[Optional[T]]]:
"""
Sliding window for an iterator.
Example:
>>> for prev, i, nxt in window(range(10), 1, 1):
>>> print(prev, i, nxt)
None 0 1
0 1 2
1 2 3
2 3 None
"""
# TODO: move into utils
iters = list(itertools.tee(iterator, behind + 1 + ahead))
for i in range(behind):
iters[i] = itertools.chain((behind - i) * [None], iters[i])
for i in range(ahead):
iters[-1 - i] = itertools.islice(
itertools.chain(iters[-1 - i], (ahead - i) * [None]),
(ahead - i),
None
)
return zip(*iters)
<commit_msg>Fix sliding window type specs<commit_after>
|
import itertools
from typing import TypeVar, Iterable, Iterator, Tuple, Optional
T = TypeVar('T')
def window(iterator: Iterable[T], behind: int = 0, ahead: int = 0) -> Iterator[Tuple[Optional[T], ...]]:
"""
Sliding window for an iterator.
Example:
>>> for prev, i, nxt in window(range(10), 1, 1):
>>> print(prev, i, nxt)
None 0 1
0 1 2
1 2 3
2 3 None
"""
# TODO: move into utils
iters = list(itertools.tee(iterator, behind + 1 + ahead))
for i in range(behind):
iters[i] = itertools.chain((behind - i) * [None], iters[i])
for i in range(ahead):
iters[-1 - i] = itertools.islice(
itertools.chain(iters[-1 - i], (ahead - i) * [None]),
(ahead - i),
None
)
return zip(*iters)
|
import itertools
from typing import TypeVar, Iterator, Tuple, Optional
T = TypeVar('T')
def window(iterator: Iterator[T], behind: int = 0, ahead: int = 0) -> Iterator[Tuple[Optional[T]]]:
"""
Sliding window for an iterator.
Example:
>>> for prev, i, nxt in window(range(10), 1, 1):
>>> print(prev, i, nxt)
None 0 1
0 1 2
1 2 3
2 3 None
"""
# TODO: move into utils
iters = list(itertools.tee(iterator, behind + 1 + ahead))
for i in range(behind):
iters[i] = itertools.chain((behind - i) * [None], iters[i])
for i in range(ahead):
iters[-1 - i] = itertools.islice(
itertools.chain(iters[-1 - i], (ahead - i) * [None]),
(ahead - i),
None
)
return zip(*iters)
Fix sliding window type specs
import itertools
from typing import TypeVar, Iterable, Iterator, Tuple, Optional
T = TypeVar('T')
def window(iterator: Iterable[T], behind: int = 0, ahead: int = 0) -> Iterator[Tuple[Optional[T], ...]]:
"""
Sliding window for an iterator.
Example:
>>> for prev, i, nxt in window(range(10), 1, 1):
>>> print(prev, i, nxt)
None 0 1
0 1 2
1 2 3
2 3 None
"""
# TODO: move into utils
iters = list(itertools.tee(iterator, behind + 1 + ahead))
for i in range(behind):
iters[i] = itertools.chain((behind - i) * [None], iters[i])
for i in range(ahead):
iters[-1 - i] = itertools.islice(
itertools.chain(iters[-1 - i], (ahead - i) * [None]),
(ahead - i),
None
)
return zip(*iters)
|
<commit_before>import itertools
from typing import TypeVar, Iterator, Tuple, Optional
T = TypeVar('T')
def window(iterator: Iterator[T], behind: int = 0, ahead: int = 0) -> Iterator[Tuple[Optional[T]]]:
"""
Sliding window for an iterator.
Example:
>>> for prev, i, nxt in window(range(10), 1, 1):
>>> print(prev, i, nxt)
None 0 1
0 1 2
1 2 3
2 3 None
"""
# TODO: move into utils
iters = list(itertools.tee(iterator, behind + 1 + ahead))
for i in range(behind):
iters[i] = itertools.chain((behind - i) * [None], iters[i])
for i in range(ahead):
iters[-1 - i] = itertools.islice(
itertools.chain(iters[-1 - i], (ahead - i) * [None]),
(ahead - i),
None
)
return zip(*iters)
<commit_msg>Fix sliding window type specs<commit_after>import itertools
from typing import TypeVar, Iterable, Iterator, Tuple, Optional
T = TypeVar('T')
def window(iterator: Iterable[T], behind: int = 0, ahead: int = 0) -> Iterator[Tuple[Optional[T], ...]]:
"""
Sliding window for an iterator.
Example:
>>> for prev, i, nxt in window(range(10), 1, 1):
>>> print(prev, i, nxt)
None 0 1
0 1 2
1 2 3
2 3 None
"""
# TODO: move into utils
iters = list(itertools.tee(iterator, behind + 1 + ahead))
for i in range(behind):
iters[i] = itertools.chain((behind - i) * [None], iters[i])
for i in range(ahead):
iters[-1 - i] = itertools.islice(
itertools.chain(iters[-1 - i], (ahead - i) * [None]),
(ahead - i),
None
)
return zip(*iters)
|
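An illustrative aside on the record above: the corrected annotations say window() accepts any iterable and yields variable-length tuples of optional items. The self-contained snippet below reproduces the fixed helper and shows the None padding it documents; the sample input is arbitrary.

# Usage sketch of the sliding-window helper from the record above.
import itertools
from typing import TypeVar, Iterable, Iterator, Tuple, Optional

T = TypeVar('T')

def window(iterator: Iterable[T], behind: int = 0, ahead: int = 0) -> Iterator[Tuple[Optional[T], ...]]:
    """Yield tuples of (behind + 1 + ahead) items, padded with None at the edges."""
    iters = list(itertools.tee(iterator, behind + 1 + ahead))
    for i in range(behind):
        iters[i] = itertools.chain((behind - i) * [None], iters[i])
    for i in range(ahead):
        iters[-1 - i] = itertools.islice(
            itertools.chain(iters[-1 - i], (ahead - i) * [None]),
            (ahead - i),
            None
        )
    return zip(*iters)

if __name__ == "__main__":
    # Prints: None 0 1 / 0 1 2 / 1 2 3 / 2 3 None
    for prev, cur, nxt in window(range(4), behind=1, ahead=1):
        print(prev, cur, nxt)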
f636420211821faeb3e26a501fbe5a9a7e3eef5e
|
normal_admin/user_admin.py
|
normal_admin/user_admin.py
|
__author__ = 'weijia'
from django.contrib.admin.sites import AdminSite
from django.contrib.admin.forms import AdminAuthenticationForm
from django import forms
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth import authenticate
ERROR_MESSAGE = _("Please enter the correct username and password "
"for a staff account. Note that both fields are case-sensitive.")
class UserAdminAuthenticationForm(AdminAuthenticationForm):
"""
Same as Django's AdminAuthenticationForm but allows to login
any user who is not staff.
"""
def clean(self):
username = self.cleaned_data.get('username')
password = self.cleaned_data.get('password')
message = ERROR_MESSAGE
if username and password:
try:
self.user_cache = authenticate(username=username, password=password)
except:
# The following is for userena as it uses different param
self.user_cache = authenticate(identification=username, password=password)
if self.user_cache is None:
raise forms.ValidationError(message)
elif not self.user_cache.is_active:
raise forms.ValidationError(message)
self.check_for_test_cookie()
return self.cleaned_data
class UserAdmin(AdminSite):
# Anything we wish to add or override
login_form = UserAdminAuthenticationForm
def has_permission(self, request):
return request.user.is_active
|
from django.contrib.admin.sites import AdminSite
from django.contrib.admin.forms import AdminAuthenticationForm
from django import forms
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth import authenticate
__author__ = 'weijia'
ERROR_MESSAGE = _("Please enter the correct username and password "
"for a staff account. Note that both fields are case-sensitive.")
class UserAdminAuthenticationForm(AdminAuthenticationForm):
"""
Same as Django's AdminAuthenticationForm but allows to login
any user who is not staff.
"""
def clean(self):
try:
return super(UserAdminAuthenticationForm, self).clean()
except:
username = self.cleaned_data.get('username')
password = self.cleaned_data.get('password')
message = ERROR_MESSAGE
if username and password:
try:
self.user_cache = authenticate(username=username, password=password)
except:
# The following is for userena as it uses different param
self.user_cache = authenticate(identification=username, password=password)
if self.user_cache is None:
raise forms.ValidationError(message)
elif not self.user_cache.is_active:
raise forms.ValidationError(message)
self.check_for_test_cookie()
return self.cleaned_data
# For Django 1.8
def confirm_login_allowed(self, user):
if not user.is_active:
raise forms.ValidationError(
self.error_messages['invalid_login'],
code='invalid_login',
params={'username': self.username_field.verbose_name}
)
class UserAdmin(AdminSite):
# Anything we wish to add or override
login_form = UserAdminAuthenticationForm
def has_permission(self, request):
return request.user.is_active
|
Fix login error in Django 1.8.
|
Fix login error in Django 1.8.
|
Python
|
bsd-3-clause
|
weijia/normal_admin,weijia/normal_admin
|
__author__ = 'weijia'
from django.contrib.admin.sites import AdminSite
from django.contrib.admin.forms import AdminAuthenticationForm
from django import forms
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth import authenticate
ERROR_MESSAGE = _("Please enter the correct username and password "
"for a staff account. Note that both fields are case-sensitive.")
class UserAdminAuthenticationForm(AdminAuthenticationForm):
"""
Same as Django's AdminAuthenticationForm but allows to login
any user who is not staff.
"""
def clean(self):
username = self.cleaned_data.get('username')
password = self.cleaned_data.get('password')
message = ERROR_MESSAGE
if username and password:
try:
self.user_cache = authenticate(username=username, password=password)
except:
# The following is for userena as it uses different param
self.user_cache = authenticate(identification=username, password=password)
if self.user_cache is None:
raise forms.ValidationError(message)
elif not self.user_cache.is_active:
raise forms.ValidationError(message)
self.check_for_test_cookie()
return self.cleaned_data
class UserAdmin(AdminSite):
# Anything we wish to add or override
login_form = UserAdminAuthenticationForm
def has_permission(self, request):
return request.user.is_active
Fix login error in Django 1.8.
|
from django.contrib.admin.sites import AdminSite
from django.contrib.admin.forms import AdminAuthenticationForm
from django import forms
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth import authenticate
__author__ = 'weijia'
ERROR_MESSAGE = _("Please enter the correct username and password "
"for a staff account. Note that both fields are case-sensitive.")
class UserAdminAuthenticationForm(AdminAuthenticationForm):
"""
Same as Django's AdminAuthenticationForm but allows to login
any user who is not staff.
"""
def clean(self):
try:
return super(UserAdminAuthenticationForm, self).clean()
except:
username = self.cleaned_data.get('username')
password = self.cleaned_data.get('password')
message = ERROR_MESSAGE
if username and password:
try:
self.user_cache = authenticate(username=username, password=password)
except:
# The following is for userena as it uses different param
self.user_cache = authenticate(identification=username, password=password)
if self.user_cache is None:
raise forms.ValidationError(message)
elif not self.user_cache.is_active:
raise forms.ValidationError(message)
self.check_for_test_cookie()
return self.cleaned_data
# For Django 1.8
def confirm_login_allowed(self, user):
if not user.is_active:
raise forms.ValidationError(
self.error_messages['invalid_login'],
code='invalid_login',
params={'username': self.username_field.verbose_name}
)
class UserAdmin(AdminSite):
# Anything we wish to add or override
login_form = UserAdminAuthenticationForm
def has_permission(self, request):
return request.user.is_active
|
<commit_before>__author__ = 'weijia'
from django.contrib.admin.sites import AdminSite
from django.contrib.admin.forms import AdminAuthenticationForm
from django import forms
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth import authenticate
ERROR_MESSAGE = _("Please enter the correct username and password "
"for a staff account. Note that both fields are case-sensitive.")
class UserAdminAuthenticationForm(AdminAuthenticationForm):
"""
Same as Django's AdminAuthenticationForm but allows to login
any user who is not staff.
"""
def clean(self):
username = self.cleaned_data.get('username')
password = self.cleaned_data.get('password')
message = ERROR_MESSAGE
if username and password:
try:
self.user_cache = authenticate(username=username, password=password)
except:
# The following is for userena as it uses different param
self.user_cache = authenticate(identification=username, password=password)
if self.user_cache is None:
raise forms.ValidationError(message)
elif not self.user_cache.is_active:
raise forms.ValidationError(message)
self.check_for_test_cookie()
return self.cleaned_data
class UserAdmin(AdminSite):
# Anything we wish to add or override
login_form = UserAdminAuthenticationForm
def has_permission(self, request):
return request.user.is_active
<commit_msg>Fix login error in Django 1.8.<commit_after>
|
from django.contrib.admin.sites import AdminSite
from django.contrib.admin.forms import AdminAuthenticationForm
from django import forms
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth import authenticate
__author__ = 'weijia'
ERROR_MESSAGE = _("Please enter the correct username and password "
"for a staff account. Note that both fields are case-sensitive.")
class UserAdminAuthenticationForm(AdminAuthenticationForm):
"""
Same as Django's AdminAuthenticationForm but allows to login
any user who is not staff.
"""
def clean(self):
try:
return super(UserAdminAuthenticationForm, self).clean()
except:
username = self.cleaned_data.get('username')
password = self.cleaned_data.get('password')
message = ERROR_MESSAGE
if username and password:
try:
self.user_cache = authenticate(username=username, password=password)
except:
# The following is for userena as it uses different param
self.user_cache = authenticate(identification=username, password=password)
if self.user_cache is None:
raise forms.ValidationError(message)
elif not self.user_cache.is_active:
raise forms.ValidationError(message)
self.check_for_test_cookie()
return self.cleaned_data
# For Django 1.8
def confirm_login_allowed(self, user):
if not user.is_active:
raise forms.ValidationError(
self.error_messages['invalid_login'],
code='invalid_login',
params={'username': self.username_field.verbose_name}
)
class UserAdmin(AdminSite):
# Anything we wish to add or override
login_form = UserAdminAuthenticationForm
def has_permission(self, request):
return request.user.is_active
|
__author__ = 'weijia'
from django.contrib.admin.sites import AdminSite
from django.contrib.admin.forms import AdminAuthenticationForm
from django import forms
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth import authenticate
ERROR_MESSAGE = _("Please enter the correct username and password "
"for a staff account. Note that both fields are case-sensitive.")
class UserAdminAuthenticationForm(AdminAuthenticationForm):
"""
Same as Django's AdminAuthenticationForm but allows to login
any user who is not staff.
"""
def clean(self):
username = self.cleaned_data.get('username')
password = self.cleaned_data.get('password')
message = ERROR_MESSAGE
if username and password:
try:
self.user_cache = authenticate(username=username, password=password)
except:
# The following is for userena as it uses different param
self.user_cache = authenticate(identification=username, password=password)
if self.user_cache is None:
raise forms.ValidationError(message)
elif not self.user_cache.is_active:
raise forms.ValidationError(message)
self.check_for_test_cookie()
return self.cleaned_data
class UserAdmin(AdminSite):
# Anything we wish to add or override
login_form = UserAdminAuthenticationForm
def has_permission(self, request):
return request.user.is_active
Fix login error in Django 1.8.
from django.contrib.admin.sites import AdminSite
from django.contrib.admin.forms import AdminAuthenticationForm
from django import forms
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth import authenticate
__author__ = 'weijia'
ERROR_MESSAGE = _("Please enter the correct username and password "
"for a staff account. Note that both fields are case-sensitive.")
class UserAdminAuthenticationForm(AdminAuthenticationForm):
"""
Same as Django's AdminAuthenticationForm but allows to login
any user who is not staff.
"""
def clean(self):
try:
return super(UserAdminAuthenticationForm, self).clean()
except:
username = self.cleaned_data.get('username')
password = self.cleaned_data.get('password')
message = ERROR_MESSAGE
if username and password:
try:
self.user_cache = authenticate(username=username, password=password)
except:
# The following is for userena as it uses different param
self.user_cache = authenticate(identification=username, password=password)
if self.user_cache is None:
raise forms.ValidationError(message)
elif not self.user_cache.is_active:
raise forms.ValidationError(message)
self.check_for_test_cookie()
return self.cleaned_data
# For Django 1.8
def confirm_login_allowed(self, user):
if not user.is_active:
raise forms.ValidationError(
self.error_messages['invalid_login'],
code='invalid_login',
params={'username': self.username_field.verbose_name}
)
class UserAdmin(AdminSite):
# Anything we wish to add or override
login_form = UserAdminAuthenticationForm
def has_permission(self, request):
return request.user.is_active
|
<commit_before>__author__ = 'weijia'
from django.contrib.admin.sites import AdminSite
from django.contrib.admin.forms import AdminAuthenticationForm
from django import forms
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth import authenticate
ERROR_MESSAGE = _("Please enter the correct username and password "
"for a staff account. Note that both fields are case-sensitive.")
class UserAdminAuthenticationForm(AdminAuthenticationForm):
"""
Same as Django's AdminAuthenticationForm but allows to login
any user who is not staff.
"""
def clean(self):
username = self.cleaned_data.get('username')
password = self.cleaned_data.get('password')
message = ERROR_MESSAGE
if username and password:
try:
self.user_cache = authenticate(username=username, password=password)
except:
# The following is for userena as it uses different param
self.user_cache = authenticate(identification=username, password=password)
if self.user_cache is None:
raise forms.ValidationError(message)
elif not self.user_cache.is_active:
raise forms.ValidationError(message)
self.check_for_test_cookie()
return self.cleaned_data
class UserAdmin(AdminSite):
# Anything we wish to add or override
login_form = UserAdminAuthenticationForm
def has_permission(self, request):
return request.user.is_active
<commit_msg>Fix login error in Django 1.8.<commit_after>from django.contrib.admin.sites import AdminSite
from django.contrib.admin.forms import AdminAuthenticationForm
from django import forms
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth import authenticate
__author__ = 'weijia'
ERROR_MESSAGE = _("Please enter the correct username and password "
"for a staff account. Note that both fields are case-sensitive.")
class UserAdminAuthenticationForm(AdminAuthenticationForm):
"""
Same as Django's AdminAuthenticationForm but allows to login
any user who is not staff.
"""
def clean(self):
try:
return super(UserAdminAuthenticationForm, self).clean()
except:
username = self.cleaned_data.get('username')
password = self.cleaned_data.get('password')
message = ERROR_MESSAGE
if username and password:
try:
self.user_cache = authenticate(username=username, password=password)
except:
# The following is for userena as it uses different param
self.user_cache = authenticate(identification=username, password=password)
if self.user_cache is None:
raise forms.ValidationError(message)
elif not self.user_cache.is_active:
raise forms.ValidationError(message)
self.check_for_test_cookie()
return self.cleaned_data
# For Django 1.8
def confirm_login_allowed(self, user):
if not user.is_active:
raise forms.ValidationError(
self.error_messages['invalid_login'],
code='invalid_login',
params={'username': self.username_field.verbose_name}
)
class UserAdmin(AdminSite):
# Anything we wish to add or override
login_form = UserAdminAuthenticationForm
def has_permission(self, request):
return request.user.is_active
|
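An illustrative aside on the record above: the custom AdminSite only takes effect once a project mounts it alongside the regular admin. Below is a minimal sketch of that wiring for a Django 1.8-era urls.py; the import path normal_admin.user_admin, the site name, and the URL prefixes are assumptions for illustration only.

# Hypothetical urls.py wiring for the UserAdmin site from the record above.
from django.conf.urls import include, url
from django.contrib import admin

from normal_admin.user_admin import UserAdmin  # assumed module path

user_admin_site = UserAdmin(name='user_admin')
admin.autodiscover()

urlpatterns = [
    url(r'^admin/', include(admin.site.urls)),             # staff-only admin
    url(r'^user-admin/', include(user_admin_site.urls)),   # any active user may log in
]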
ea2979c75f8f771a70617e607b8398809dba8dac
|
twython/compat.py
|
twython/compat.py
|
# -*- coding: utf-8 -*-
"""
twython.compat
~~~~~~~~~~~~~~
This module contains imports and declarations for seamless Python 2 and
Python 3 compatibility.
"""
import sys
import numpy as np
_ver = sys.version_info
#: Python 2.x?
is_py2 = (_ver[0] == 2)
#: Python 3.x?
is_py3 = (_ver[0] == 3)
try:
import simplejson as json
except ImportError:
import json
if is_py2:
from urllib import urlencode, quote_plus
from urlparse import parse_qsl, urlsplit
str = unicode
basestring = basestring
numeric_types = (int, long, float, np.int64, np.float64)
elif is_py3:
from urllib.parse import urlencode, quote_plus, parse_qsl, urlsplit
str = str
basestring = (str, bytes)
numeric_types = (int, float, np.int64, np.float64)
|
# -*- coding: utf-8 -*-
"""
twython.compat
~~~~~~~~~~~~~~
This module contains imports and declarations for seamless Python 2 and
Python 3 compatibility.
"""
import sys
_ver = sys.version_info
#: Python 2.x?
is_py2 = (_ver[0] == 2)
#: Python 3.x?
is_py3 = (_ver[0] == 3)
try:
import simplejson as json
except ImportError:
import json
if is_py2:
from urllib import urlencode, quote_plus
from urlparse import parse_qsl, urlsplit
str = unicode
basestring = basestring
numeric_types = (int, long, float)
elif is_py3:
from urllib.parse import urlencode, quote_plus, parse_qsl, urlsplit
str = str
basestring = (str, bytes)
numeric_types = (int, float)
|
Remove this merge as numpy shouldn't be a dependency
|
Remove this merge as numpy shouldn't be a dependency
|
Python
|
mit
|
ryanmcgrath/twython
|
# -*- coding: utf-8 -*-
"""
twython.compat
~~~~~~~~~~~~~~
This module contains imports and declarations for seamless Python 2 and
Python 3 compatibility.
"""
import sys
import numpy as np
_ver = sys.version_info
#: Python 2.x?
is_py2 = (_ver[0] == 2)
#: Python 3.x?
is_py3 = (_ver[0] == 3)
try:
import simplejson as json
except ImportError:
import json
if is_py2:
from urllib import urlencode, quote_plus
from urlparse import parse_qsl, urlsplit
str = unicode
basestring = basestring
numeric_types = (int, long, float, np.int64, np.float64)
elif is_py3:
from urllib.parse import urlencode, quote_plus, parse_qsl, urlsplit
str = str
basestring = (str, bytes)
numeric_types = (int, float, np.int64, np.float64)
Remove this merge as numpy shouldn't be a dependency
|
# -*- coding: utf-8 -*-
"""
twython.compat
~~~~~~~~~~~~~~
This module contains imports and declarations for seamless Python 2 and
Python 3 compatibility.
"""
import sys
_ver = sys.version_info
#: Python 2.x?
is_py2 = (_ver[0] == 2)
#: Python 3.x?
is_py3 = (_ver[0] == 3)
try:
import simplejson as json
except ImportError:
import json
if is_py2:
from urllib import urlencode, quote_plus
from urlparse import parse_qsl, urlsplit
str = unicode
basestring = basestring
numeric_types = (int, long, float)
elif is_py3:
from urllib.parse import urlencode, quote_plus, parse_qsl, urlsplit
str = str
basestring = (str, bytes)
numeric_types = (int, float)
|
<commit_before># -*- coding: utf-8 -*-
"""
twython.compat
~~~~~~~~~~~~~~
This module contains imports and declarations for seamless Python 2 and
Python 3 compatibility.
"""
import sys
import numpy as np
_ver = sys.version_info
#: Python 2.x?
is_py2 = (_ver[0] == 2)
#: Python 3.x?
is_py3 = (_ver[0] == 3)
try:
import simplejson as json
except ImportError:
import json
if is_py2:
from urllib import urlencode, quote_plus
from urlparse import parse_qsl, urlsplit
str = unicode
basestring = basestring
numeric_types = (int, long, float, np.int64, np.float64)
elif is_py3:
from urllib.parse import urlencode, quote_plus, parse_qsl, urlsplit
str = str
basestring = (str, bytes)
numeric_types = (int, float, np.int64, np.float64)
<commit_msg>Remove this merge as numpy shouldn't be a dependency<commit_after>
|
# -*- coding: utf-8 -*-
"""
twython.compat
~~~~~~~~~~~~~~
This module contains imports and declarations for seamless Python 2 and
Python 3 compatibility.
"""
import sys
_ver = sys.version_info
#: Python 2.x?
is_py2 = (_ver[0] == 2)
#: Python 3.x?
is_py3 = (_ver[0] == 3)
try:
import simplejson as json
except ImportError:
import json
if is_py2:
from urllib import urlencode, quote_plus
from urlparse import parse_qsl, urlsplit
str = unicode
basestring = basestring
numeric_types = (int, long, float)
elif is_py3:
from urllib.parse import urlencode, quote_plus, parse_qsl, urlsplit
str = str
basestring = (str, bytes)
numeric_types = (int, float)
|
# -*- coding: utf-8 -*-
"""
twython.compat
~~~~~~~~~~~~~~
This module contains imports and declarations for seamless Python 2 and
Python 3 compatibility.
"""
import sys
import numpy as np
_ver = sys.version_info
#: Python 2.x?
is_py2 = (_ver[0] == 2)
#: Python 3.x?
is_py3 = (_ver[0] == 3)
try:
import simplejson as json
except ImportError:
import json
if is_py2:
from urllib import urlencode, quote_plus
from urlparse import parse_qsl, urlsplit
str = unicode
basestring = basestring
numeric_types = (int, long, float, np.int64, np.float64)
elif is_py3:
from urllib.parse import urlencode, quote_plus, parse_qsl, urlsplit
str = str
basestring = (str, bytes)
numeric_types = (int, float, np.int64, np.float64)
Remove this merge as numpy shouldn't be a dependency
# -*- coding: utf-8 -*-
"""
twython.compat
~~~~~~~~~~~~~~
This module contains imports and declarations for seamless Python 2 and
Python 3 compatibility.
"""
import sys
_ver = sys.version_info
#: Python 2.x?
is_py2 = (_ver[0] == 2)
#: Python 3.x?
is_py3 = (_ver[0] == 3)
try:
import simplejson as json
except ImportError:
import json
if is_py2:
from urllib import urlencode, quote_plus
from urlparse import parse_qsl, urlsplit
str = unicode
basestring = basestring
numeric_types = (int, long, float)
elif is_py3:
from urllib.parse import urlencode, quote_plus, parse_qsl, urlsplit
str = str
basestring = (str, bytes)
numeric_types = (int, float)
|
<commit_before># -*- coding: utf-8 -*-
"""
twython.compat
~~~~~~~~~~~~~~
This module contains imports and declarations for seamless Python 2 and
Python 3 compatibility.
"""
import sys
import numpy as np
_ver = sys.version_info
#: Python 2.x?
is_py2 = (_ver[0] == 2)
#: Python 3.x?
is_py3 = (_ver[0] == 3)
try:
import simplejson as json
except ImportError:
import json
if is_py2:
from urllib import urlencode, quote_plus
from urlparse import parse_qsl, urlsplit
str = unicode
basestring = basestring
numeric_types = (int, long, float, np.int64, np.float64)
elif is_py3:
from urllib.parse import urlencode, quote_plus, parse_qsl, urlsplit
str = str
basestring = (str, bytes)
numeric_types = (int, float, np.int64, np.float64)
<commit_msg>Remove this merge as numpy shouldn't be a dependency<commit_after># -*- coding: utf-8 -*-
"""
twython.compat
~~~~~~~~~~~~~~
This module contains imports and declarations for seamless Python 2 and
Python 3 compatibility.
"""
import sys
_ver = sys.version_info
#: Python 2.x?
is_py2 = (_ver[0] == 2)
#: Python 3.x?
is_py3 = (_ver[0] == 3)
try:
import simplejson as json
except ImportError:
import json
if is_py2:
from urllib import urlencode, quote_plus
from urlparse import parse_qsl, urlsplit
str = unicode
basestring = basestring
numeric_types = (int, long, float)
elif is_py3:
from urllib.parse import urlencode, quote_plus, parse_qsl, urlsplit
str = str
basestring = (str, bytes)
numeric_types = (int, float)
|
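An illustrative aside on the record above: once np.int64 and np.float64 are dropped from numeric_types, numpy scalars no longer pass the library's isinstance checks, so a caller holding numpy values would coerce them first. A minimal sketch of that coercion, assuming the caller (not twython) has numpy installed:

# Coerce numpy scalars to built-in numbers before passing them to code that
# checks isinstance(value, (int, float)). Assumes numpy on the caller's side.
import numpy as np

numeric_types = (int, float)  # the Python 3 branch from the record above

def to_builtin(value):
    """Return a plain int/float for numpy scalars, leaving other values untouched."""
    if isinstance(value, np.integer):
        return int(value)
    if isinstance(value, np.floating):
        return float(value)
    return value

tweet_id = np.int64(1234567890)
assert not isinstance(tweet_id, numeric_types)           # fails the library's check
assert isinstance(to_builtin(tweet_id), numeric_types)   # passes after coercion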
b8f03556991cabab858bb31e5c8cb2f043ad14ce
|
packages/pcl-reference-assemblies.py
|
packages/pcl-reference-assemblies.py
|
import glob
import os
import shutil
class PCLReferenceAssembliesPackage(Package):
def __init__(self):
Package.__init__(self,
name='mono-pcl-profiles',
version='2013-10-23',
sources=['http://storage.bos.xamarin.com/mono-pcl/58/5825e0404974d87799504a0df75ea4dca91f9bfe/mono-pcl-profiles.tar.gz'])
self.source_dir_name = "mono-pcl-profiles"
def prep(self):
self.extract_archive(self.sources[0],
validate_only=False,
overwrite=True)
def build(self):
pass
# A bunch of shell script written inside python literals ;(
def install(self):
dest = os.path.join(self.prefix, "lib", "mono", "xbuild-frameworks", ".NETPortable")
if not os.path.exists(dest):
os.makedirs(dest)
shutil.rmtree(dest, ignore_errors=True)
pcldir = os.path.join(self.package_build_dir(), self.source_dir_name, ".NETPortable")
self.sh("rsync -abv -q %s/* %s" % (pcldir, dest))
PCLReferenceAssembliesPackage()
|
import glob
import os
import shutil
class PCLReferenceAssembliesPackage(Package):
def __init__(self):
Package.__init__(self,
name='mono-pcl-profiles-2013-10-25',
version='2013-10-25',
sources=['http://storage.bos.xamarin.com/bot-provisioning/mono-pcl-profiles-2013-10-25.tar.gz'])
self.source_dir_name = "mono-pcl-profiles"
def prep(self):
self.extract_archive(self.sources[0],
validate_only=False,
overwrite=True)
def build(self):
pass
# A bunch of shell script written inside python literals ;(
def install(self):
dest = os.path.join(self.prefix, "lib", "mono", "xbuild-frameworks", ".NETPortable")
if not os.path.exists(dest):
os.makedirs(dest)
shutil.rmtree(dest, ignore_errors=True)
pcldir = os.path.join(self.package_build_dir(), self.source_dir_name, ".NETPortable")
self.sh("rsync -abv -q %s/* %s" % (pcldir, dest))
PCLReferenceAssembliesPackage()
|
Use a versioned filename for the PCL profiles.
|
Use a versioned filename for the PCL profiles.
|
Python
|
mit
|
mono/bockbuild,mono/bockbuild
|
import glob
import os
import shutil
class PCLReferenceAssembliesPackage(Package):
def __init__(self):
Package.__init__(self,
name='mono-pcl-profiles',
version='2013-10-23',
sources=['http://storage.bos.xamarin.com/mono-pcl/58/5825e0404974d87799504a0df75ea4dca91f9bfe/mono-pcl-profiles.tar.gz'])
self.source_dir_name = "mono-pcl-profiles"
def prep(self):
self.extract_archive(self.sources[0],
validate_only=False,
overwrite=True)
def build(self):
pass
# A bunch of shell script written inside python literals ;(
def install(self):
dest = os.path.join(self.prefix, "lib", "mono", "xbuild-frameworks", ".NETPortable")
if not os.path.exists(dest):
os.makedirs(dest)
shutil.rmtree(dest, ignore_errors=True)
pcldir = os.path.join(self.package_build_dir(), self.source_dir_name, ".NETPortable")
self.sh("rsync -abv -q %s/* %s" % (pcldir, dest))
PCLReferenceAssembliesPackage()
Use a versioned filename for the PCL profiles.
|
import glob
import os
import shutil
class PCLReferenceAssembliesPackage(Package):
def __init__(self):
Package.__init__(self,
name='mono-pcl-profiles-2013-10-25',
version='2013-10-25',
sources=['http://storage.bos.xamarin.com/bot-provisioning/mono-pcl-profiles-2013-10-25.tar.gz'])
self.source_dir_name = "mono-pcl-profiles"
def prep(self):
self.extract_archive(self.sources[0],
validate_only=False,
overwrite=True)
def build(self):
pass
# A bunch of shell script written inside python literals ;(
def install(self):
dest = os.path.join(self.prefix, "lib", "mono", "xbuild-frameworks", ".NETPortable")
if not os.path.exists(dest):
os.makedirs(dest)
shutil.rmtree(dest, ignore_errors=True)
pcldir = os.path.join(self.package_build_dir(), self.source_dir_name, ".NETPortable")
self.sh("rsync -abv -q %s/* %s" % (pcldir, dest))
PCLReferenceAssembliesPackage()
|
<commit_before>import glob
import os
import shutil
class PCLReferenceAssembliesPackage(Package):
def __init__(self):
Package.__init__(self,
name='mono-pcl-profiles',
version='2013-10-23',
sources=['http://storage.bos.xamarin.com/mono-pcl/58/5825e0404974d87799504a0df75ea4dca91f9bfe/mono-pcl-profiles.tar.gz'])
self.source_dir_name = "mono-pcl-profiles"
def prep(self):
self.extract_archive(self.sources[0],
validate_only=False,
overwrite=True)
def build(self):
pass
# A bunch of shell script written inside python literals ;(
def install(self):
dest = os.path.join(self.prefix, "lib", "mono", "xbuild-frameworks", ".NETPortable")
if not os.path.exists(dest):
os.makedirs(dest)
shutil.rmtree(dest, ignore_errors=True)
pcldir = os.path.join(self.package_build_dir(), self.source_dir_name, ".NETPortable")
self.sh("rsync -abv -q %s/* %s" % (pcldir, dest))
PCLReferenceAssembliesPackage()
<commit_msg>Use a versioned filename for the PCL profiles.<commit_after>
|
import glob
import os
import shutil
class PCLReferenceAssembliesPackage(Package):
def __init__(self):
Package.__init__(self,
name='mono-pcl-profiles-2013-10-25',
version='2013-10-25',
sources=['http://storage.bos.xamarin.com/bot-provisioning/mono-pcl-profiles-2013-10-25.tar.gz'])
self.source_dir_name = "mono-pcl-profiles"
def prep(self):
self.extract_archive(self.sources[0],
validate_only=False,
overwrite=True)
def build(self):
pass
# A bunch of shell script written inside python literals ;(
def install(self):
dest = os.path.join(self.prefix, "lib", "mono", "xbuild-frameworks", ".NETPortable")
if not os.path.exists(dest):
os.makedirs(dest)
shutil.rmtree(dest, ignore_errors=True)
pcldir = os.path.join(self.package_build_dir(), self.source_dir_name, ".NETPortable")
self.sh("rsync -abv -q %s/* %s" % (pcldir, dest))
PCLReferenceAssembliesPackage()
|
import glob
import os
import shutil
class PCLReferenceAssembliesPackage(Package):
def __init__(self):
Package.__init__(self,
name='mono-pcl-profiles',
version='2013-10-23',
sources=['http://storage.bos.xamarin.com/mono-pcl/58/5825e0404974d87799504a0df75ea4dca91f9bfe/mono-pcl-profiles.tar.gz'])
self.source_dir_name = "mono-pcl-profiles"
def prep(self):
self.extract_archive(self.sources[0],
validate_only=False,
overwrite=True)
def build(self):
pass
# A bunch of shell script written inside python literals ;(
def install(self):
dest = os.path.join(self.prefix, "lib", "mono", "xbuild-frameworks", ".NETPortable")
if not os.path.exists(dest):
os.makedirs(dest)
shutil.rmtree(dest, ignore_errors=True)
pcldir = os.path.join(self.package_build_dir(), self.source_dir_name, ".NETPortable")
self.sh("rsync -abv -q %s/* %s" % (pcldir, dest))
PCLReferenceAssembliesPackage()
Use a versioned filename for the PCL profiles.
import glob
import os
import shutil
class PCLReferenceAssembliesPackage(Package):
def __init__(self):
Package.__init__(self,
name='mono-pcl-profiles-2013-10-25',
version='2013-10-25',
sources=['http://storage.bos.xamarin.com/bot-provisioning/mono-pcl-profiles-2013-10-25.tar.gz'])
self.source_dir_name = "mono-pcl-profiles"
def prep(self):
self.extract_archive(self.sources[0],
validate_only=False,
overwrite=True)
def build(self):
pass
# A bunch of shell script written inside python literals ;(
def install(self):
dest = os.path.join(self.prefix, "lib", "mono", "xbuild-frameworks", ".NETPortable")
if not os.path.exists(dest):
os.makedirs(dest)
shutil.rmtree(dest, ignore_errors=True)
pcldir = os.path.join(self.package_build_dir(), self.source_dir_name, ".NETPortable")
self.sh("rsync -abv -q %s/* %s" % (pcldir, dest))
PCLReferenceAssembliesPackage()
|
<commit_before>import glob
import os
import shutil
class PCLReferenceAssembliesPackage(Package):
def __init__(self):
Package.__init__(self,
name='mono-pcl-profiles',
version='2013-10-23',
sources=['http://storage.bos.xamarin.com/mono-pcl/58/5825e0404974d87799504a0df75ea4dca91f9bfe/mono-pcl-profiles.tar.gz'])
self.source_dir_name = "mono-pcl-profiles"
def prep(self):
self.extract_archive(self.sources[0],
validate_only=False,
overwrite=True)
def build(self):
pass
# A bunch of shell script written inside python literals ;(
def install(self):
dest = os.path.join(self.prefix, "lib", "mono", "xbuild-frameworks", ".NETPortable")
if not os.path.exists(dest):
os.makedirs(dest)
shutil.rmtree(dest, ignore_errors=True)
pcldir = os.path.join(self.package_build_dir(), self.source_dir_name, ".NETPortable")
self.sh("rsync -abv -q %s/* %s" % (pcldir, dest))
PCLReferenceAssembliesPackage()
<commit_msg>Use a versioned filename for the PCL profiles.<commit_after>import glob
import os
import shutil
class PCLReferenceAssembliesPackage(Package):
def __init__(self):
Package.__init__(self,
name='mono-pcl-profiles-2013-10-25',
version='2013-10-25',
sources=['http://storage.bos.xamarin.com/bot-provisioning/mono-pcl-profiles-2013-10-25.tar.gz'])
self.source_dir_name = "mono-pcl-profiles"
def prep(self):
self.extract_archive(self.sources[0],
validate_only=False,
overwrite=True)
def build(self):
pass
# A bunch of shell script written inside python literals ;(
def install(self):
dest = os.path.join(self.prefix, "lib", "mono", "xbuild-frameworks", ".NETPortable")
if not os.path.exists(dest):
os.makedirs(dest)
shutil.rmtree(dest, ignore_errors=True)
pcldir = os.path.join(self.package_build_dir(), self.source_dir_name, ".NETPortable")
self.sh("rsync -abv -q %s/* %s" % (pcldir, dest))
PCLReferenceAssembliesPackage()
|
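An illustrative aside on the record above: the point of the versioned filename is that the package name and download URL are both derived from one version string, so a new profile drop cannot be confused with a cached copy of the old tarball. A small sketch of that naming pattern, using the URL base and version that appear in the record:

# Versioned-filename pattern from the record above; values come from the record.
VERSION = "2013-10-25"
BASE_URL = "http://storage.bos.xamarin.com/bot-provisioning"

name = "mono-pcl-profiles-{}".format(VERSION)
source = "{}/{}.tar.gz".format(BASE_URL, name)

assert source.endswith("mono-pcl-profiles-2013-10-25.tar.gz")
print(name, source)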
4f9b0fc97b873a1e43a4312ae3a4b12d8b7bec35
|
ui.py
|
ui.py
|
def render(object, **kw):
if object == 'gallows':
render_gallows(**kw)
if object == 'bank':
render_bank(**kw)
if object == 'game_state':
render_game_state(**kw)
def render_gallows(parts=0, **kw):
print("""
______
| |
O |
| |
| |
/ |
|
---------
""")
def render_bank(letters=[], **kw):
print("""
_________________________
| |
| |
| |
-------------------------
""")
def render_game_state(word="", found=[], **kw):
for letter in word:
if letter in found:
print(letter, end='')
else:
print(' _ ', end='')
|
from terminaltables import SingleTable
def render(object, **kw):
if object == 'gallows':
render_gallows(**kw)
if object == 'bank':
render_bank(**kw)
if object == 'game_state':
render_game_state(**kw)
def render_gallows(parts=0, **kw):
print("""
______
| |
O |
| |
| |
/ |
|
---------
""")
def render_bank(letters=[], **kw):
sz = 6 # Size of table
if not any(letters):
let = [' ']
else:
let = sorted(list(letters))
table = SingleTable([let[i:i + sz] for i in range(0, len(let), sz)],
'Incorrect Guesses')
table.inner_heading_row_border = False
table.inner_row_border = True
table.justify_columns = {idx: val for idx, val in
enumerate(['center'] * sz)}
print()
print(table.table)
def render_game_state(word="", found=[], **kw):
for letter in word:
if letter in found:
print(letter, end='')
else:
print(' _ ', end='')
|
Use terminaltables.SingleTable for displaying letter bank
|
Use terminaltables.SingleTable for displaying letter bank
|
Python
|
mit
|
tml/python-hangman-2017-summer
|
def render(object, **kw):
if object == 'gallows':
render_gallows(**kw)
if object == 'bank':
render_bank(**kw)
if object == 'game_state':
render_game_state(**kw)
def render_gallows(parts=0, **kw):
print("""
______
| |
O |
| |
| |
/ |
|
---------
""")
def render_bank(letters=[], **kw):
print("""
_________________________
| |
| |
| |
-------------------------
""")
def render_game_state(word="", found=[], **kw):
for letter in word:
if letter in found:
print(letter, end='')
else:
print(' _ ', end='')
Use terminaltables.SingleTable for displaying letter bank
|
from terminaltables import SingleTable
def render(object, **kw):
if object == 'gallows':
render_gallows(**kw)
if object == 'bank':
render_bank(**kw)
if object == 'game_state':
render_game_state(**kw)
def render_gallows(parts=0, **kw):
print("""
______
| |
O |
| |
| |
/ |
|
---------
""")
def render_bank(letters=[], **kw):
sz = 6 # Size of table
if not any(letters):
let = [' ']
else:
let = sorted(list(letters))
table = SingleTable([let[i:i + sz] for i in range(0, len(let), sz)],
'Incorrect Guesses')
table.inner_heading_row_border = False
table.inner_row_border = True
table.justify_columns = {idx: val for idx, val in
enumerate(['center'] * sz)}
print()
print(table.table)
def render_game_state(word="", found=[], **kw):
for letter in word:
if letter in found:
print(letter, end='')
else:
print(' _ ', end='')
|
<commit_before>def render(object, **kw):
if object == 'gallows':
render_gallows(**kw)
if object == 'bank':
render_bank(**kw)
if object == 'game_state':
render_game_state(**kw)
def render_gallows(parts=0, **kw):
print("""
______
| |
O |
| |
| |
/ |
|
---------
""")
def render_bank(letters=[], **kw):
print("""
_________________________
| |
| |
| |
-------------------------
""")
def render_game_state(word="", found=[], **kw):
for letter in word:
if letter in found:
print(letter, end='')
else:
print(' _ ', end='')<commit_msg>Use terminaltables.SingleTable for displaying letter bank<commit_after>
|
from terminaltables import SingleTable
def render(object, **kw):
if object == 'gallows':
render_gallows(**kw)
if object == 'bank':
render_bank(**kw)
if object == 'game_state':
render_game_state(**kw)
def render_gallows(parts=0, **kw):
print("""
______
| |
O |
| |
| |
/ |
|
---------
""")
def render_bank(letters=[], **kw):
sz = 6 # Size of table
if not any(letters):
let = [' ']
else:
let = sorted(list(letters))
table = SingleTable([let[i:i + sz] for i in range(0, len(let), sz)],
'Incorrect Guesses')
table.inner_heading_row_border = False
table.inner_row_border = True
table.justify_columns = {idx: val for idx, val in
enumerate(['center'] * sz)}
print()
print(table.table)
def render_game_state(word="", found=[], **kw):
for letter in word:
if letter in found:
print(letter, end='')
else:
print(' _ ', end='')
|
def render(object, **kw):
if object == 'gallows':
render_gallows(**kw)
if object == 'bank':
render_bank(**kw)
if object == 'game_state':
render_game_state(**kw)
def render_gallows(parts=0, **kw):
print("""
______
| |
O |
| |
| |
/ |
|
---------
""")
def render_bank(letters=[], **kw):
print("""
_________________________
| |
| |
| |
-------------------------
""")
def render_game_state(word="", found=[], **kw):
for letter in word:
if letter in found:
print(letter, end='')
else:
print(' _ ', end='')
Use terminaltables.SingleTable for displaying letter bank
from terminaltables import SingleTable
def render(object, **kw):
if object == 'gallows':
render_gallows(**kw)
if object == 'bank':
render_bank(**kw)
if object == 'game_state':
render_game_state(**kw)
def render_gallows(parts=0, **kw):
print("""
______
| |
O |
| |
| |
/ |
|
---------
""")
def render_bank(letters=[], **kw):
sz = 6 # Size of table
if not any(letters):
let = [' ']
else:
let = sorted(list(letters))
table = SingleTable([let[i:i + sz] for i in range(0, len(let), sz)],
'Incorrect Guesses')
table.inner_heading_row_border = False
table.inner_row_border = True
table.justify_columns = {idx: val for idx, val in
enumerate(['center'] * sz)}
print()
print(table.table)
def render_game_state(word="", found=[], **kw):
for letter in word:
if letter in found:
print(letter, end='')
else:
print(' _ ', end='')
|
<commit_before>def render(object, **kw):
if object == 'gallows':
render_gallows(**kw)
if object == 'bank':
render_bank(**kw)
if object == 'game_state':
render_game_state(**kw)
def render_gallows(parts=0, **kw):
print("""
______
| |
O |
| |
| |
/ |
|
---------
""")
def render_bank(letters=[], **kw):
print("""
_________________________
| |
| |
| |
-------------------------
""")
def render_game_state(word="", found=[], **kw):
for letter in word:
if letter in found:
print(letter, end='')
else:
print(' _ ', end='')<commit_msg>Use terminaltables.SingleTable for displaying letter bank<commit_after>from terminaltables import SingleTable
def render(object, **kw):
if object == 'gallows':
render_gallows(**kw)
if object == 'bank':
render_bank(**kw)
if object == 'game_state':
render_game_state(**kw)
def render_gallows(parts=0, **kw):
print("""
______
| |
O |
| |
| |
/ |
|
---------
""")
def render_bank(letters=[], **kw):
sz = 6 # Size of table
if not any(letters):
let = [' ']
else:
let = sorted(list(letters))
table = SingleTable([let[i:i + sz] for i in range(0, len(let), sz)],
'Incorrect Guesses')
table.inner_heading_row_border = False
table.inner_row_border = True
table.justify_columns = {idx: val for idx, val in
enumerate(['center'] * sz)}
print()
print(table.table)
def render_game_state(word="", found=[], **kw):
for letter in word:
if letter in found:
print(letter, end='')
else:
print(' _ ', end='')
|
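An illustrative aside on the record above: the new render_bank builds a SingleTable with the same options shown in the diff. The standalone snippet below demonstrates that exact terminaltables usage outside the game loop; it requires `pip install terminaltables`, and the guessed letters are sample data.

# Standalone demonstration of the terminaltables usage from the record above.
from terminaltables import SingleTable

letters = sorted({'q', 'z', 'x', 'k', 'w', 'v'})  # sample incorrect guesses
sz = 6  # letters per row, matching the record

rows = [letters[i:i + sz] for i in range(0, len(letters), sz)]
table = SingleTable(rows, 'Incorrect Guesses')
table.inner_heading_row_border = False
table.inner_row_border = True
table.justify_columns = {idx: 'center' for idx in range(sz)}

print(table.table)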
8b45f5d4ced4954f9c86b52295772ae17776a552
|
ws.py
|
ws.py
|
import logging
import json
from tornado import web, ioloop
from sockjs.tornado import SockJSRouter, SockJSConnection
from blimp.utils.websockets import WebSocketsRequest
class EchoConnection(SockJSConnection):
def on_open(self, info):
self.send_json({'connected': True})
def on_message(self, data):
response = WebSocketsRequest(data).get_response()
self.send_json(response)
def send_json(self, obj):
self.send(json.dumps(obj))
if __name__ == '__main__':
logging.getLogger().setLevel(logging.DEBUG)
EchoRouter = SockJSRouter(EchoConnection, '/echo')
app = web.Application(EchoRouter.urls)
app.listen(8080)
logging.info(" [*] Listening on 0.0.0.0:8080")
ioloop.IOLoop.instance().start()
|
import json
from tornado import web, ioloop
from sockjs.tornado import SockJSRouter, SockJSConnection
from blimp.utils.websockets import WebSocketsRequest
class EchoConnection(SockJSConnection):
def on_open(self, info):
self.send_json({'connected': True})
def on_message(self, data):
response = WebSocketsRequest(data).get_response()
self.send_json(response)
def send_json(self, obj):
self.send(json.dumps(obj))
if __name__ == '__main__':
import logging
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('--port',
help='Optional port number. Defaults to 8080',
default=8080,
)
parser.add_argument('--debug',
help='Verbosity level set to DEBUG. Defaults to WARNING.',
action='store_const',
dest='loglevel',
const=logging.DEBUG,
default=logging.WARNING
)
parser.add_argument('--verbose',
help='Verbosity level set to INFO.',
action='store_const',
dest='loglevel',
const=logging.INFO
)
args = parser.parse_args()
port = args.port
logging.getLogger().setLevel(args.loglevel)
EchoRouter = SockJSRouter(EchoConnection, '/echo')
app = web.Application(EchoRouter.urls)
app.listen(port)
logging.info(" [*] Listening on 0.0.0.0:{}".format(port))
ioloop.IOLoop.instance().start()
|
Add argparse arguments to websockets server
|
Add argparse arguments to websockets server
|
Python
|
agpl-3.0
|
jessamynsmith/boards-backend,GetBlimp/boards-backend,jessamynsmith/boards-backend
|
import logging
import json
from tornado import web, ioloop
from sockjs.tornado import SockJSRouter, SockJSConnection
from blimp.utils.websockets import WebSocketsRequest
class EchoConnection(SockJSConnection):
def on_open(self, info):
self.send_json({'connected': True})
def on_message(self, data):
response = WebSocketsRequest(data).get_response()
self.send_json(response)
def send_json(self, obj):
self.send(json.dumps(obj))
if __name__ == '__main__':
logging.getLogger().setLevel(logging.DEBUG)
EchoRouter = SockJSRouter(EchoConnection, '/echo')
app = web.Application(EchoRouter.urls)
app.listen(8080)
logging.info(" [*] Listening on 0.0.0.0:8080")
ioloop.IOLoop.instance().start()
Add argparse arguments to websockets server
|
import json
from tornado import web, ioloop
from sockjs.tornado import SockJSRouter, SockJSConnection
from blimp.utils.websockets import WebSocketsRequest
class EchoConnection(SockJSConnection):
def on_open(self, info):
self.send_json({'connected': True})
def on_message(self, data):
response = WebSocketsRequest(data).get_response()
self.send_json(response)
def send_json(self, obj):
self.send(json.dumps(obj))
if __name__ == '__main__':
import logging
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('--port',
help='Optional port number. Defaults to 8080',
default=8080,
)
parser.add_argument('--debug',
help='Verbosity level set to DEBUG. Defaults to WARNING.',
action='store_const',
dest='loglevel',
const=logging.DEBUG,
default=logging.WARNING
)
parser.add_argument('--verbose',
help='Verbosity level set to INFO.',
action='store_const',
dest='loglevel',
const=logging.INFO
)
args = parser.parse_args()
port = args.port
logging.getLogger().setLevel(args.loglevel)
EchoRouter = SockJSRouter(EchoConnection, '/echo')
app = web.Application(EchoRouter.urls)
app.listen(port)
logging.info(" [*] Listening on 0.0.0.0:{}".format(port))
ioloop.IOLoop.instance().start()
|
<commit_before>import logging
import json
from tornado import web, ioloop
from sockjs.tornado import SockJSRouter, SockJSConnection
from blimp.utils.websockets import WebSocketsRequest
class EchoConnection(SockJSConnection):
def on_open(self, info):
self.send_json({'connected': True})
def on_message(self, data):
response = WebSocketsRequest(data).get_response()
self.send_json(response)
def send_json(self, obj):
self.send(json.dumps(obj))
if __name__ == '__main__':
logging.getLogger().setLevel(logging.DEBUG)
EchoRouter = SockJSRouter(EchoConnection, '/echo')
app = web.Application(EchoRouter.urls)
app.listen(8080)
logging.info(" [*] Listening on 0.0.0.0:8080")
ioloop.IOLoop.instance().start()
<commit_msg>Add argparse arguments to websockets server<commit_after>
|
import json
from tornado import web, ioloop
from sockjs.tornado import SockJSRouter, SockJSConnection
from blimp.utils.websockets import WebSocketsRequest
class EchoConnection(SockJSConnection):
def on_open(self, info):
self.send_json({'connected': True})
def on_message(self, data):
response = WebSocketsRequest(data).get_response()
self.send_json(response)
def send_json(self, obj):
self.send(json.dumps(obj))
if __name__ == '__main__':
import logging
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('--port',
help='Optional port number. Defaults to 8080',
default=8080,
)
parser.add_argument('--debug',
help='Verbosity level set to DEBUG. Defaults to WARNING.',
action='store_const',
dest='loglevel',
const=logging.DEBUG,
default=logging.WARNING
)
parser.add_argument('--verbose',
help='Verbosity level set to INFO.',
action='store_const',
dest='loglevel',
const=logging.INFO
)
args = parser.parse_args()
port = args.port
logging.getLogger().setLevel(args.loglevel)
EchoRouter = SockJSRouter(EchoConnection, '/echo')
app = web.Application(EchoRouter.urls)
app.listen(port)
logging.info(" [*] Listening on 0.0.0.0:{}".format(port))
ioloop.IOLoop.instance().start()
|
import logging
import json
from tornado import web, ioloop
from sockjs.tornado import SockJSRouter, SockJSConnection
from blimp.utils.websockets import WebSocketsRequest
class EchoConnection(SockJSConnection):
def on_open(self, info):
self.send_json({'connected': True})
def on_message(self, data):
response = WebSocketsRequest(data).get_response()
self.send_json(response)
def send_json(self, obj):
self.send(json.dumps(obj))
if __name__ == '__main__':
logging.getLogger().setLevel(logging.DEBUG)
EchoRouter = SockJSRouter(EchoConnection, '/echo')
app = web.Application(EchoRouter.urls)
app.listen(8080)
logging.info(" [*] Listening on 0.0.0.0:8080")
ioloop.IOLoop.instance().start()
Add argparse arguments to websockets server
import json
from tornado import web, ioloop
from sockjs.tornado import SockJSRouter, SockJSConnection
from blimp.utils.websockets import WebSocketsRequest
class EchoConnection(SockJSConnection):
def on_open(self, info):
self.send_json({'connected': True})
def on_message(self, data):
response = WebSocketsRequest(data).get_response()
self.send_json(response)
def send_json(self, obj):
self.send(json.dumps(obj))
if __name__ == '__main__':
import logging
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('--port',
help='Optional port number. Defaults to 8080',
default=8080,
)
parser.add_argument('--debug',
help='Verbosity level set to DEBUG. Defaults to WARNING.',
action='store_const',
dest='loglevel',
const=logging.DEBUG,
default=logging.WARNING
)
parser.add_argument('--verbose',
help='Verbosity level set to INFO.',
action='store_const',
dest='loglevel',
const=logging.INFO
)
args = parser.parse_args()
port = args.port
logging.getLogger().setLevel(args.loglevel)
EchoRouter = SockJSRouter(EchoConnection, '/echo')
app = web.Application(EchoRouter.urls)
app.listen(port)
logging.info(" [*] Listening on 0.0.0.0:{}".format(port))
ioloop.IOLoop.instance().start()
|
<commit_before>import logging
import json
from tornado import web, ioloop
from sockjs.tornado import SockJSRouter, SockJSConnection
from blimp.utils.websockets import WebSocketsRequest
class EchoConnection(SockJSConnection):
def on_open(self, info):
self.send_json({'connected': True})
def on_message(self, data):
response = WebSocketsRequest(data).get_response()
self.send_json(response)
def send_json(self, obj):
self.send(json.dumps(obj))
if __name__ == '__main__':
logging.getLogger().setLevel(logging.DEBUG)
EchoRouter = SockJSRouter(EchoConnection, '/echo')
app = web.Application(EchoRouter.urls)
app.listen(8080)
logging.info(" [*] Listening on 0.0.0.0:8080")
ioloop.IOLoop.instance().start()
<commit_msg>Add argparse arguments to websockets server<commit_after>import json
from tornado import web, ioloop
from sockjs.tornado import SockJSRouter, SockJSConnection
from blimp.utils.websockets import WebSocketsRequest
class EchoConnection(SockJSConnection):
def on_open(self, info):
self.send_json({'connected': True})
def on_message(self, data):
response = WebSocketsRequest(data).get_response()
self.send_json(response)
def send_json(self, obj):
self.send(json.dumps(obj))
if __name__ == '__main__':
import logging
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('--port',
help='Optional port number. Defaults to 8080',
default=8080,
)
parser.add_argument('--debug',
help='Verbosity level set to DEBUG. Defaults to WARNING.',
action='store_const',
dest='loglevel',
const=logging.DEBUG,
default=logging.WARNING
)
parser.add_argument('--verbose',
help='Verbosity level set to INFO.',
action='store_const',
dest='loglevel',
const=logging.INFO
)
args = parser.parse_args()
port = args.port
logging.getLogger().setLevel(args.loglevel)
EchoRouter = SockJSRouter(EchoConnection, '/echo')
app = web.Application(EchoRouter.urls)
app.listen(port)
logging.info(" [*] Listening on 0.0.0.0:{}".format(port))
ioloop.IOLoop.instance().start()
|
66dc05130cba9856bec4eb56b55ec76e287e2605
|
rest_framework_oauth/__init__.py
|
rest_framework_oauth/__init__.py
|
"""
OAuth Support for Django REST Framework
"""
__title__ = 'Django REST framework OAuth'
__version__ = '0.1'
__author__ = 'Jharrod LaFon'
__license__ = 'MIT'
__copyright__ = 'Copyright 2014 Jharrod LaFon'
# Version synonym
VERSION = __version__
|
"""
OAuth Support for Django REST Framework
"""
__title__ = 'Django REST framework OAuth'
__version__ = '0.0.1'
__author__ = 'Jharrod LaFon'
__license__ = 'MIT'
__copyright__ = 'Copyright 2014 Jharrod LaFon'
# Version synonym
VERSION = __version__
|
Use a 3 digit semver version
|
Use a 3 digit semver version
|
Python
|
mit
|
jlafon/django-rest-framework-oauth
|
"""
OAuth Support for Django REST Framework
"""
__title__ = 'Django REST framework OAuth'
__version__ = '0.1'
__author__ = 'Jharrod LaFon'
__license__ = 'MIT'
__copyright__ = 'Copyright 2014 Jharrod LaFon'
# Version synonym
VERSION = __version__
Use a 3 digit semver version
|
"""
OAuth Support for Django REST Framework
"""
__title__ = 'Django REST framework OAuth'
__version__ = '0.0.1'
__author__ = 'Jharrod LaFon'
__license__ = 'MIT'
__copyright__ = 'Copyright 2014 Jharrod LaFon'
# Version synonym
VERSION = __version__
|
<commit_before>"""
OAuth Support for Django REST Framework
"""
__title__ = 'Django REST framework OAuth'
__version__ = '0.1'
__author__ = 'Jharrod LaFon'
__license__ = 'MIT'
__copyright__ = 'Copyright 2014 Jharrod LaFon'
# Version synonym
VERSION = __version__
<commit_msg>Use a 3 digit semver version<commit_after>
|
"""
OAuth Support for Django REST Framework
"""
__title__ = 'Django REST framework OAuth'
__version__ = '0.0.1'
__author__ = 'Jharrod LaFon'
__license__ = 'MIT'
__copyright__ = 'Copyright 2014 Jharrod LaFon'
# Version synonym
VERSION = __version__
|
"""
OAuth Support for Django REST Framework
"""
__title__ = 'Django REST framework OAuth'
__version__ = '0.1'
__author__ = 'Jharrod LaFon'
__license__ = 'MIT'
__copyright__ = 'Copyright 2014 Jharrod LaFon'
# Version synonym
VERSION = __version__
Use a 3 digit semver version
"""
OAuth Support for Django REST Framework
"""
__title__ = 'Django REST framework OAuth'
__version__ = '0.0.1'
__author__ = 'Jharrod LaFon'
__license__ = 'MIT'
__copyright__ = 'Copyright 2014 Jharrod LaFon'
# Version synonym
VERSION = __version__
|
<commit_before>"""
OAuth Support for Django REST Framework
"""
__title__ = 'Django REST framework OAuth'
__version__ = '0.1'
__author__ = 'Jharrod LaFon'
__license__ = 'MIT'
__copyright__ = 'Copyright 2014 Jharrod LaFon'
# Version synonym
VERSION = __version__
<commit_msg>Use a 3 digit semver version<commit_after>"""
OAuth Support for Django REST Framework
"""
__title__ = 'Django REST framework OAuth'
__version__ = '0.0.1'
__author__ = 'Jharrod LaFon'
__license__ = 'MIT'
__copyright__ = 'Copyright 2014 Jharrod LaFon'
# Version synonym
VERSION = __version__
|
5d622e350784ede5af2490495ce3119a2589b1e9
|
hb_res/resources/build_assets.py
|
hb_res/resources/build_assets.py
|
from .Resource import names_registered, resource_by_name
def build():
for name in names_registered():
resource = resource_by_name(name)()
for explanation in resource:
r = explanation
for functor in resource.modifiers:
r = functor(r)
# write res in file 'name'
print(r)
|
from .Resource import names_registered, resource_by_name
def build():
for name in names_registered():
resource = resource_by_name(name)()
for explanation in resource:
r = explanation
for functor in resource.modifiers:
if r is None:
break
r = functor(r)
if r is None:
continue
# write res in file 'name'
print(r)
|
Add None check while applying modifiers
|
Add None check while applying modifiers
|
Python
|
mit
|
hatbot-team/hatbot_resources
|
from .Resource import names_registered, resource_by_name
def build():
for name in names_registered():
resource = resource_by_name(name)()
for explanation in resource:
r = explanation
for functor in resource.modifiers:
r = functor(r)
# write res in file 'name'
print(r)
Add None check while applying modifiers
|
from .Resource import names_registered, resource_by_name
def build():
for name in names_registered():
resource = resource_by_name(name)()
for explanation in resource:
r = explanation
for functor in resource.modifiers:
if r is None:
break
r = functor(r)
if r is None:
continue
# write res in file 'name'
print(r)
|
<commit_before>from .Resource import names_registered, resource_by_name
def build():
for name in names_registered():
resource = resource_by_name(name)()
for explanation in resource:
r = explanation
for functor in resource.modifiers:
r = functor(r)
# write res in file 'name'
print(r)
<commit_msg>Add None check while applying modifiers<commit_after>
|
from .Resource import names_registered, resource_by_name
def build():
for name in names_registered():
resource = resource_by_name(name)()
for explanation in resource:
r = explanation
for functor in resource.modifiers:
if r is None:
break
r = functor(r)
if r is None:
continue
# write res in file 'name'
print(r)
|
from .Resource import names_registered, resource_by_name
def build():
for name in names_registered():
resource = resource_by_name(name)()
for explanation in resource:
r = explanation
for functor in resource.modifiers:
r = functor(r)
# write res in file 'name'
print(r)
Add None check while applying modifiers
from .Resource import names_registered, resource_by_name
def build():
for name in names_registered():
resource = resource_by_name(name)()
for explanation in resource:
r = explanation
for functor in resource.modifiers:
if r is None:
break
r = functor(r)
if r is None:
continue
# write res in file 'name'
print(r)
|
<commit_before>from .Resource import names_registered, resource_by_name
def build():
for name in names_registered():
resource = resource_by_name(name)()
for explanation in resource:
r = explanation
for functor in resource.modifiers:
r = functor(r)
# write res in file 'name'
print(r)
<commit_msg>Add None check while applying modifiers<commit_after>from .Resource import names_registered, resource_by_name
def build():
for name in names_registered():
resource = resource_by_name(name)()
for explanation in resource:
r = explanation
for functor in resource.modifiers:
if r is None:
break
r = functor(r)
if r is None:
continue
# write res in file 'name'
print(r)
|
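The None check introduced above gives the modifier chain a way to veto an explanation part-way through: once any modifier returns None, the remaining modifiers are skipped and the item is dropped instead of being written out. A minimal, self-contained sketch of the same control flow, using made-up modifier functions rather than anything from the hatbot_resources codebase:

def strip_whitespace(text):
    return text.strip()

def drop_short(text):
    # Veto the item by returning None when it is too short.
    return text if len(text) >= 4 else None

def apply_modifiers(items, modifiers):
    for item in items:
        r = item
        for functor in modifiers:
            if r is None:
                break  # an earlier modifier vetoed this item
            r = functor(r)
        if r is None:
            continue  # drop vetoed items entirely
        yield r

# Prints ['hello', 'world']; ' hi ' is vetoed by drop_short.
print(list(apply_modifiers([' hello ', ' hi ', 'world'],
                           [strip_whitespace, drop_short])))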
766b8564f524c9fcad2d82d08c8ec370532b7411
|
crm_department/models/crm_department.py
|
crm_department/models/crm_department.py
|
# -*- encoding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in root directory
##############################################################################
from openerp import models, fields
class CrmDepartment(models.Model):
_name = 'crm.department'
_order = "parent_left"
_parent_order = "name"
_parent_store = True
_description = "Department"
name = fields.Char(required=True)
parent_id = fields.Many2one(comodel_name='crm.department')
children = fields.One2many(comodel_name='crm.department',
inverse_name='parent_id')
parent_left = fields.Integer('Parent Left', select=True)
parent_right = fields.Integer('Parent Right', select=True)
|
# -*- encoding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in root directory
##############################################################################
from openerp import models, fields
class CrmDepartment(models.Model):
_name = 'crm.department'
_order = "parent_left"
_parent_order = "name"
_parent_store = True
_description = "Department"
name = fields.Char(required=True, translate=True)
parent_id = fields.Many2one(comodel_name='crm.department')
children = fields.One2many(comodel_name='crm.department',
inverse_name='parent_id')
parent_left = fields.Integer('Parent Left', select=True)
parent_right = fields.Integer('Parent Right', select=True)
|
Set some fields as translate
|
Set some fields as translate
|
Python
|
agpl-3.0
|
acsone/partner-contact,diagramsoftware/partner-contact,Therp/partner-contact,Endika/partner-contact,open-synergy/partner-contact
|
# -*- encoding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in root directory
##############################################################################
from openerp import models, fields
class CrmDepartment(models.Model):
_name = 'crm.department'
_order = "parent_left"
_parent_order = "name"
_parent_store = True
_description = "Department"
name = fields.Char(required=True)
parent_id = fields.Many2one(comodel_name='crm.department')
children = fields.One2many(comodel_name='crm.department',
inverse_name='parent_id')
parent_left = fields.Integer('Parent Left', select=True)
parent_right = fields.Integer('Parent Right', select=True)
Set some fields as translate
|
# -*- encoding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in root directory
##############################################################################
from openerp import models, fields
class CrmDepartment(models.Model):
_name = 'crm.department'
_order = "parent_left"
_parent_order = "name"
_parent_store = True
_description = "Department"
name = fields.Char(required=True, translate=True)
parent_id = fields.Many2one(comodel_name='crm.department')
children = fields.One2many(comodel_name='crm.department',
inverse_name='parent_id')
parent_left = fields.Integer('Parent Left', select=True)
parent_right = fields.Integer('Parent Right', select=True)
|
<commit_before># -*- encoding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in root directory
##############################################################################
from openerp import models, fields
class CrmDepartment(models.Model):
_name = 'crm.department'
_order = "parent_left"
_parent_order = "name"
_parent_store = True
_description = "Department"
name = fields.Char(required=True)
parent_id = fields.Many2one(comodel_name='crm.department')
children = fields.One2many(comodel_name='crm.department',
inverse_name='parent_id')
parent_left = fields.Integer('Parent Left', select=True)
parent_right = fields.Integer('Parent Right', select=True)
<commit_msg>Set some fields as translate<commit_after>
|
# -*- encoding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in root directory
##############################################################################
from openerp import models, fields
class CrmDepartment(models.Model):
_name = 'crm.department'
_order = "parent_left"
_parent_order = "name"
_parent_store = True
_description = "Department"
name = fields.Char(required=True, translate=True)
parent_id = fields.Many2one(comodel_name='crm.department')
children = fields.One2many(comodel_name='crm.department',
inverse_name='parent_id')
parent_left = fields.Integer('Parent Left', select=True)
parent_right = fields.Integer('Parent Right', select=True)
|
# -*- encoding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in root directory
##############################################################################
from openerp import models, fields
class CrmDepartment(models.Model):
_name = 'crm.department'
_order = "parent_left"
_parent_order = "name"
_parent_store = True
_description = "Department"
name = fields.Char(required=True)
parent_id = fields.Many2one(comodel_name='crm.department')
children = fields.One2many(comodel_name='crm.department',
inverse_name='parent_id')
parent_left = fields.Integer('Parent Left', select=True)
parent_right = fields.Integer('Parent Right', select=True)
Set some fields as translate
# -*- encoding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in root directory
##############################################################################
from openerp import models, fields
class CrmDepartment(models.Model):
_name = 'crm.department'
_order = "parent_left"
_parent_order = "name"
_parent_store = True
_description = "Department"
name = fields.Char(required=True, translate=True)
parent_id = fields.Many2one(comodel_name='crm.department')
children = fields.One2many(comodel_name='crm.department',
inverse_name='parent_id')
parent_left = fields.Integer('Parent Left', select=True)
parent_right = fields.Integer('Parent Right', select=True)
|
<commit_before># -*- encoding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in root directory
##############################################################################
from openerp import models, fields
class CrmDepartment(models.Model):
_name = 'crm.department'
_order = "parent_left"
_parent_order = "name"
_parent_store = True
_description = "Department"
name = fields.Char(required=True)
parent_id = fields.Many2one(comodel_name='crm.department')
children = fields.One2many(comodel_name='crm.department',
inverse_name='parent_id')
parent_left = fields.Integer('Parent Left', select=True)
parent_right = fields.Integer('Parent Right', select=True)
<commit_msg>Set some fields as translate<commit_after># -*- encoding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in root directory
##############################################################################
from openerp import models, fields
class CrmDepartment(models.Model):
_name = 'crm.department'
_order = "parent_left"
_parent_order = "name"
_parent_store = True
_description = "Department"
name = fields.Char(required=True, translate=True)
parent_id = fields.Many2one(comodel_name='crm.department')
children = fields.One2many(comodel_name='crm.department',
inverse_name='parent_id')
parent_left = fields.Integer('Parent Left', select=True)
parent_right = fields.Integer('Parent Right', select=True)
|
e7e8c9aee3b57187e8d239cb28a03125ab488886
|
fix_data.py
|
fix_data.py
|
from datetime import datetime, timedelta
from functools import wraps
import makerbase
from makerbase.models import *
def for_class(*classes):
def do_that(fn):
@wraps(fn)
def do_for_class():
for cls in classes:
keys = cls.get_bucket().get_keys()
for key in keys:
obj = cls.get(key)
fn(obj)
return do_for_class
return do_that
@for_class(Maker)
def fix_maker_history(maker):
for histitem in maker.history:
# Fix the actions.
if histitem.action == 'create':
histitem.action = 'addmaker'
elif histitem.action == 'edit':
histitem.action = 'editmaker'
# Make sure the maker is tagged.
if histitem.maker is None:
histitem.add_link(maker, tag='maker')
histitem.save()
@for_class(Project)
def fix_project_history(project):
for histitem in project.history:
# Fix the actions.
if histitem.action == 'create':
histitem.action = 'addproject'
elif histitem.action == 'edit':
histitem.action = 'editproject'
# Make sure the project is tagged.
if histitem.project is None:
histitem.add_link(project, tag='project')
histitem.save()
|
from datetime import datetime, timedelta
from functools import wraps
import makerbase
from makerbase.models import *
def for_class(*classes):
def do_that(fn):
@wraps(fn)
def do_for_class():
for cls in classes:
keys = cls.get_bucket().get_keys()
for key in keys:
obj = cls.get(key)
fn(obj)
return do_for_class
return do_that
@for_class(Maker)
def fix_maker_history(maker):
for histitem in maker.history:
# Fix the actions.
if histitem.action == 'create':
histitem.action = 'addmaker'
elif histitem.action == 'edit':
histitem.action = 'editmaker'
# Make sure the maker is tagged.
if histitem.maker is None:
histitem.add_link(maker, tag='maker')
histitem.save()
@for_class(Project)
def fix_project_history(project):
for histitem in project.history:
# Fix the actions.
if histitem.action == 'create':
histitem.action = 'addproject'
elif histitem.action == 'edit':
histitem.action = 'editproject'
# Make sure the project is tagged.
if histitem.project is None:
histitem.add_link(project, tag='project')
histitem.save()
@for_class(Project)
def save_all_projects(project):
project.save()
@for_class(Maker)
def save_all_makers(maker):
maker.save()
|
Add data fixers to reindex for search by re-saving everything
|
Add data fixers to reindex for search by re-saving everything
|
Python
|
mit
|
markpasc/makerbase,markpasc/makerbase
|
from datetime import datetime, timedelta
from functools import wraps
import makerbase
from makerbase.models import *
def for_class(*classes):
def do_that(fn):
@wraps(fn)
def do_for_class():
for cls in classes:
keys = cls.get_bucket().get_keys()
for key in keys:
obj = cls.get(key)
fn(obj)
return do_for_class
return do_that
@for_class(Maker)
def fix_maker_history(maker):
for histitem in maker.history:
# Fix the actions.
if histitem.action == 'create':
histitem.action = 'addmaker'
elif histitem.action == 'edit':
histitem.action = 'editmaker'
# Make sure the maker is tagged.
if histitem.maker is None:
histitem.add_link(maker, tag='maker')
histitem.save()
@for_class(Project)
def fix_project_history(project):
for histitem in project.history:
# Fix the actions.
if histitem.action == 'create':
histitem.action = 'addproject'
elif histitem.action == 'edit':
histitem.action = 'editproject'
# Make sure the project is tagged.
if histitem.project is None:
histitem.add_link(project, tag='project')
histitem.save()
Add data fixers to reindex for search by re-saving everything
|
from datetime import datetime, timedelta
from functools import wraps
import makerbase
from makerbase.models import *
def for_class(*classes):
def do_that(fn):
@wraps(fn)
def do_for_class():
for cls in classes:
keys = cls.get_bucket().get_keys()
for key in keys:
obj = cls.get(key)
fn(obj)
return do_for_class
return do_that
@for_class(Maker)
def fix_maker_history(maker):
for histitem in maker.history:
# Fix the actions.
if histitem.action == 'create':
histitem.action = 'addmaker'
elif histitem.action == 'edit':
histitem.action = 'editmaker'
# Make sure the maker is tagged.
if histitem.maker is None:
histitem.add_link(maker, tag='maker')
histitem.save()
@for_class(Project)
def fix_project_history(project):
for histitem in project.history:
# Fix the actions.
if histitem.action == 'create':
histitem.action = 'addproject'
elif histitem.action == 'edit':
histitem.action = 'editproject'
# Make sure the project is tagged.
if histitem.project is None:
histitem.add_link(project, tag='project')
histitem.save()
@for_class(Project)
def save_all_projects(project):
project.save()
@for_class(Maker)
def save_all_makers(maker):
maker.save()
|
<commit_before>from datetime import datetime, timedelta
from functools import wraps
import makerbase
from makerbase.models import *
def for_class(*classes):
def do_that(fn):
@wraps(fn)
def do_for_class():
for cls in classes:
keys = cls.get_bucket().get_keys()
for key in keys:
obj = cls.get(key)
fn(obj)
return do_for_class
return do_that
@for_class(Maker)
def fix_maker_history(maker):
for histitem in maker.history:
# Fix the actions.
if histitem.action == 'create':
histitem.action = 'addmaker'
elif histitem.action == 'edit':
histitem.action = 'editmaker'
# Make sure the maker is tagged.
if histitem.maker is None:
histitem.add_link(maker, tag='maker')
histitem.save()
@for_class(Project)
def fix_project_history(project):
for histitem in project.history:
# Fix the actions.
if histitem.action == 'create':
histitem.action = 'addproject'
elif histitem.action == 'edit':
histitem.action = 'editproject'
# Make sure the project is tagged.
if histitem.project is None:
histitem.add_link(project, tag='project')
histitem.save()
<commit_msg>Add data fixers to reindex for search by re-saving everything<commit_after>
|
from datetime import datetime, timedelta
from functools import wraps
import makerbase
from makerbase.models import *
def for_class(*classes):
def do_that(fn):
@wraps(fn)
def do_for_class():
for cls in classes:
keys = cls.get_bucket().get_keys()
for key in keys:
obj = cls.get(key)
fn(obj)
return do_for_class
return do_that
@for_class(Maker)
def fix_maker_history(maker):
for histitem in maker.history:
# Fix the actions.
if histitem.action == 'create':
histitem.action = 'addmaker'
elif histitem.action == 'edit':
histitem.action = 'editmaker'
# Make sure the maker is tagged.
if histitem.maker is None:
histitem.add_link(maker, tag='maker')
histitem.save()
@for_class(Project)
def fix_project_history(project):
for histitem in project.history:
# Fix the actions.
if histitem.action == 'create':
histitem.action = 'addproject'
elif histitem.action == 'edit':
histitem.action = 'editproject'
# Make sure the project is tagged.
if histitem.project is None:
histitem.add_link(project, tag='project')
histitem.save()
@for_class(Project)
def save_all_projects(project):
project.save()
@for_class(Maker)
def save_all_makers(maker):
maker.save()
|
from datetime import datetime, timedelta
from functools import wraps
import makerbase
from makerbase.models import *
def for_class(*classes):
def do_that(fn):
@wraps(fn)
def do_for_class():
for cls in classes:
keys = cls.get_bucket().get_keys()
for key in keys:
obj = cls.get(key)
fn(obj)
return do_for_class
return do_that
@for_class(Maker)
def fix_maker_history(maker):
for histitem in maker.history:
# Fix the actions.
if histitem.action == 'create':
histitem.action = 'addmaker'
elif histitem.action == 'edit':
histitem.action = 'editmaker'
# Make sure the maker is tagged.
if histitem.maker is None:
histitem.add_link(maker, tag='maker')
histitem.save()
@for_class(Project)
def fix_project_history(project):
for histitem in project.history:
# Fix the actions.
if histitem.action == 'create':
histitem.action = 'addproject'
elif histitem.action == 'edit':
histitem.action = 'editproject'
# Make sure the project is tagged.
if histitem.project is None:
histitem.add_link(project, tag='project')
histitem.save()
Add data fixers to reindex for search by re-saving everything
from datetime import datetime, timedelta
from functools import wraps
import makerbase
from makerbase.models import *
def for_class(*classes):
def do_that(fn):
@wraps(fn)
def do_for_class():
for cls in classes:
keys = cls.get_bucket().get_keys()
for key in keys:
obj = cls.get(key)
fn(obj)
return do_for_class
return do_that
@for_class(Maker)
def fix_maker_history(maker):
for histitem in maker.history:
# Fix the actions.
if histitem.action == 'create':
histitem.action = 'addmaker'
elif histitem.action == 'edit':
histitem.action = 'editmaker'
# Make sure the maker is tagged.
if histitem.maker is None:
histitem.add_link(maker, tag='maker')
histitem.save()
@for_class(Project)
def fix_project_history(project):
for histitem in project.history:
# Fix the actions.
if histitem.action == 'create':
histitem.action = 'addproject'
elif histitem.action == 'edit':
histitem.action = 'editproject'
# Make sure the project is tagged.
if histitem.project is None:
histitem.add_link(project, tag='project')
histitem.save()
@for_class(Project)
def save_all_projects(project):
project.save()
@for_class(Maker)
def save_all_makers(maker):
maker.save()
|
<commit_before>from datetime import datetime, timedelta
from functools import wraps
import makerbase
from makerbase.models import *
def for_class(*classes):
def do_that(fn):
@wraps(fn)
def do_for_class():
for cls in classes:
keys = cls.get_bucket().get_keys()
for key in keys:
obj = cls.get(key)
fn(obj)
return do_for_class
return do_that
@for_class(Maker)
def fix_maker_history(maker):
for histitem in maker.history:
# Fix the actions.
if histitem.action == 'create':
histitem.action = 'addmaker'
elif histitem.action == 'edit':
histitem.action = 'editmaker'
# Make sure the maker is tagged.
if histitem.maker is None:
histitem.add_link(maker, tag='maker')
histitem.save()
@for_class(Project)
def fix_project_history(project):
for histitem in project.history:
# Fix the actions.
if histitem.action == 'create':
histitem.action = 'addproject'
elif histitem.action == 'edit':
histitem.action = 'editproject'
# Make sure the project is tagged.
if histitem.project is None:
histitem.add_link(project, tag='project')
histitem.save()
<commit_msg>Add data fixers to reindex for search by re-saving everything<commit_after>from datetime import datetime, timedelta
from functools import wraps
import makerbase
from makerbase.models import *
def for_class(*classes):
def do_that(fn):
@wraps(fn)
def do_for_class():
for cls in classes:
keys = cls.get_bucket().get_keys()
for key in keys:
obj = cls.get(key)
fn(obj)
return do_for_class
return do_that
@for_class(Maker)
def fix_maker_history(maker):
for histitem in maker.history:
# Fix the actions.
if histitem.action == 'create':
histitem.action = 'addmaker'
elif histitem.action == 'edit':
histitem.action = 'editmaker'
# Make sure the maker is tagged.
if histitem.maker is None:
histitem.add_link(maker, tag='maker')
histitem.save()
@for_class(Project)
def fix_project_history(project):
for histitem in project.history:
# Fix the actions.
if histitem.action == 'create':
histitem.action = 'addproject'
elif histitem.action == 'edit':
histitem.action = 'editproject'
# Make sure the project is tagged.
if histitem.project is None:
histitem.add_link(project, tag='project')
histitem.save()
@for_class(Project)
def save_all_projects(project):
project.save()
@for_class(Maker)
def save_all_makers(maker):
maker.save()
|
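The for_class decorator above turns a per-object fixer into a zero-argument routine that walks every stored key of the listed model classes, which is how the re-save fixers force a reindex. A rough standalone sketch of the same decorator shape, with a plain in-memory registry standing in for the Riak buckets the real code reads from (the registry and its contents are invented for illustration):

from functools import wraps

REGISTRY = {
    'Maker': {'m1': {'name': 'alice'}, 'm2': {'name': 'bob'}},
    'Project': {'p1': {'name': 'makerbase'}},
}

def for_class(*class_names):
    def do_that(fn):
        @wraps(fn)
        def do_for_class():
            for name in class_names:
                for key in REGISTRY[name]:
                    fn(REGISTRY[name][key])
        return do_for_class
    return do_that

@for_class('Maker', 'Project')
def print_names(obj):
    print(obj['name'])

print_names()  # alice, bob, makerbase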
49c99399c5b0e741e356cf320e338d019e06567d
|
taca/utils/config.py
|
taca/utils/config.py
|
"""Load and parse configuration file."""
import yaml
from io import open
CONFIG = {}
def load_config(config_file):
"""Loads a configuration file."""
config = {}
if type(config_file) is file:
config.update(yaml.load(config_file, Loader=yaml.FullLoader) or {})
return config
else:
try:
with open(config_file, 'r') as f:
content = yaml.load(f, Loader=yaml.FullLoader)
config.update(content)
return content
except IOError as e:
e.message = 'Could not open configuration file "{}".'.format(config_file)
raise e
def load_yaml_config(config_file):
"""Load YAML config file
:param str config_file: The path to the configuration file.
:returns: A dict of the parsed config file.
:rtype: dict
:raises IOError: If the config file cannot be opened.
"""
if type(config_file) is file:
CONFIG.update(yaml.load(config_file, Loader=yaml.FullLoader) or {})
return CONFIG
else:
try:
with open(config_file, 'r') as f:
content = yaml.load(f, Loader=yaml.FullLoader)
CONFIG.update(content)
return content
except IOError as e:
e.message = 'Could not open configuration file "{}".'.format(config_file)
raise e
|
"""Load and parse configuration file."""
import yaml
from io import open
CONFIG = {}
def load_config(config_file):
"""Loads a configuration file."""
config = {}
try:
with open(config_file, 'r') as f:
content = yaml.load(f, Loader=yaml.FullLoader)
config.update(content)
return content
except IOError as e:
e.message = 'Could not open configuration file "{}".'.format(config_file)
raise e
def load_yaml_config(config_file):
"""Load YAML config file
:param str config_file: The path to the configuration file.
:returns: A dict of the parsed config file.
:rtype: dict
:raises IOError: If the config file cannot be opened.
"""
try:
with open(config_file, 'r') as f:
content = yaml.load(f, Loader=yaml.FullLoader)
CONFIG.update(content)
return content
except IOError as e:
e.message = 'Could not open configuration file "{}".'.format(config_file)
raise e
|
Remove unused file type check
|
Remove unused file type check
|
Python
|
mit
|
SciLifeLab/TACA,SciLifeLab/TACA,SciLifeLab/TACA
|
"""Load and parse configuration file."""
import yaml
from io import open
CONFIG = {}
def load_config(config_file):
"""Loads a configuration file."""
config = {}
if type(config_file) is file:
config.update(yaml.load(config_file, Loader=yaml.FullLoader) or {})
return config
else:
try:
with open(config_file, 'r') as f:
content = yaml.load(f, Loader=yaml.FullLoader)
config.update(content)
return content
except IOError as e:
e.message = 'Could not open configuration file "{}".'.format(config_file)
raise e
def load_yaml_config(config_file):
"""Load YAML config file
:param str config_file: The path to the configuration file.
:returns: A dict of the parsed config file.
:rtype: dict
:raises IOError: If the config file cannot be opened.
"""
if type(config_file) is file:
CONFIG.update(yaml.load(config_file, Loader=yaml.FullLoader) or {})
return CONFIG
else:
try:
with open(config_file, 'r') as f:
content = yaml.load(f, Loader=yaml.FullLoader)
CONFIG.update(content)
return content
except IOError as e:
e.message = 'Could not open configuration file "{}".'.format(config_file)
raise e
Remove unused file type check
|
"""Load and parse configuration file."""
import yaml
from io import open
CONFIG = {}
def load_config(config_file):
"""Loads a configuration file."""
config = {}
try:
with open(config_file, 'r') as f:
content = yaml.load(f, Loader=yaml.FullLoader)
config.update(content)
return content
except IOError as e:
e.message = 'Could not open configuration file "{}".'.format(config_file)
raise e
def load_yaml_config(config_file):
"""Load YAML config file
:param str config_file: The path to the configuration file.
:returns: A dict of the parsed config file.
:rtype: dict
:raises IOError: If the config file cannot be opened.
"""
try:
with open(config_file, 'r') as f:
content = yaml.load(f, Loader=yaml.FullLoader)
CONFIG.update(content)
return content
except IOError as e:
e.message = 'Could not open configuration file "{}".'.format(config_file)
raise e
|
<commit_before>"""Load and parse configuration file."""
import yaml
from io import open
CONFIG = {}
def load_config(config_file):
"""Loads a configuration file."""
config = {}
if type(config_file) is file:
config.update(yaml.load(config_file, Loader=yaml.FullLoader) or {})
return config
else:
try:
with open(config_file, 'r') as f:
content = yaml.load(f, Loader=yaml.FullLoader)
config.update(content)
return content
except IOError as e:
e.message = 'Could not open configuration file "{}".'.format(config_file)
raise e
def load_yaml_config(config_file):
"""Load YAML config file
:param str config_file: The path to the configuration file.
:returns: A dict of the parsed config file.
:rtype: dict
:raises IOError: If the config file cannot be opened.
"""
if type(config_file) is file:
CONFIG.update(yaml.load(config_file, Loader=yaml.FullLoader) or {})
return CONFIG
else:
try:
with open(config_file, 'r') as f:
content = yaml.load(f, Loader=yaml.FullLoader)
CONFIG.update(content)
return content
except IOError as e:
e.message = 'Could not open configuration file "{}".'.format(config_file)
raise e
<commit_msg>Remove unused file type check<commit_after>
|
"""Load and parse configuration file."""
import yaml
from io import open
CONFIG = {}
def load_config(config_file):
"""Loads a configuration file."""
config = {}
try:
with open(config_file, 'r') as f:
content = yaml.load(f, Loader=yaml.FullLoader)
config.update(content)
return content
except IOError as e:
e.message = 'Could not open configuration file "{}".'.format(config_file)
raise e
def load_yaml_config(config_file):
"""Load YAML config file
:param str config_file: The path to the configuration file.
:returns: A dict of the parsed config file.
:rtype: dict
:raises IOError: If the config file cannot be opened.
"""
try:
with open(config_file, 'r') as f:
content = yaml.load(f, Loader=yaml.FullLoader)
CONFIG.update(content)
return content
except IOError as e:
e.message = 'Could not open configuration file "{}".'.format(config_file)
raise e
|
"""Load and parse configuration file."""
import yaml
from io import open
CONFIG = {}
def load_config(config_file):
"""Loads a configuration file."""
config = {}
if type(config_file) is file:
config.update(yaml.load(config_file, Loader=yaml.FullLoader) or {})
return config
else:
try:
with open(config_file, 'r') as f:
content = yaml.load(f, Loader=yaml.FullLoader)
config.update(content)
return content
except IOError as e:
e.message = 'Could not open configuration file "{}".'.format(config_file)
raise e
def load_yaml_config(config_file):
"""Load YAML config file
:param str config_file: The path to the configuration file.
:returns: A dict of the parsed config file.
:rtype: dict
:raises IOError: If the config file cannot be opened.
"""
if type(config_file) is file:
CONFIG.update(yaml.load(config_file, Loader=yaml.FullLoader) or {})
return CONFIG
else:
try:
with open(config_file, 'r') as f:
content = yaml.load(f, Loader=yaml.FullLoader)
CONFIG.update(content)
return content
except IOError as e:
e.message = 'Could not open configuration file "{}".'.format(config_file)
raise e
Remove unused file type check
"""Load and parse configuration file."""
import yaml
from io import open
CONFIG = {}
def load_config(config_file):
"""Loads a configuration file."""
config = {}
try:
with open(config_file, 'r') as f:
content = yaml.load(f, Loader=yaml.FullLoader)
config.update(content)
return content
except IOError as e:
e.message = 'Could not open configuration file "{}".'.format(config_file)
raise e
def load_yaml_config(config_file):
"""Load YAML config file
:param str config_file: The path to the configuration file.
:returns: A dict of the parsed config file.
:rtype: dict
:raises IOError: If the config file cannot be opened.
"""
try:
with open(config_file, 'r') as f:
content = yaml.load(f, Loader=yaml.FullLoader)
CONFIG.update(content)
return content
except IOError as e:
e.message = 'Could not open configuration file "{}".'.format(config_file)
raise e
|
<commit_before>"""Load and parse configuration file."""
import yaml
from io import open
CONFIG = {}
def load_config(config_file):
"""Loads a configuration file."""
config = {}
if type(config_file) is file:
config.update(yaml.load(config_file, Loader=yaml.FullLoader) or {})
return config
else:
try:
with open(config_file, 'r') as f:
content = yaml.load(f, Loader=yaml.FullLoader)
config.update(content)
return content
except IOError as e:
e.message = 'Could not open configuration file "{}".'.format(config_file)
raise e
def load_yaml_config(config_file):
"""Load YAML config file
:param str config_file: The path to the configuration file.
:returns: A dict of the parsed config file.
:rtype: dict
:raises IOError: If the config file cannot be opened.
"""
if type(config_file) is file:
CONFIG.update(yaml.load(config_file, Loader=yaml.FullLoader) or {})
return CONFIG
else:
try:
with open(config_file, 'r') as f:
content = yaml.load(f, Loader=yaml.FullLoader)
CONFIG.update(content)
return content
except IOError as e:
e.message = 'Could not open configuration file "{}".'.format(config_file)
raise e
<commit_msg>Remove unused file type check<commit_after>"""Load and parse configuration file."""
import yaml
from io import open
CONFIG = {}
def load_config(config_file):
"""Loads a configuration file."""
config = {}
try:
with open(config_file, 'r') as f:
content = yaml.load(f, Loader=yaml.FullLoader)
config.update(content)
return content
except IOError as e:
e.message = 'Could not open configuration file "{}".'.format(config_file)
raise e
def load_yaml_config(config_file):
"""Load YAML config file
:param str config_file: The path to the configuration file.
:returns: A dict of the parsed config file.
:rtype: dict
:raises IOError: If the config file cannot be opened.
"""
try:
with open(config_file, 'r') as f:
content = yaml.load(f, Loader=yaml.FullLoader)
CONFIG.update(content)
return content
except IOError as e:
e.message = 'Could not open configuration file "{}".'.format(config_file)
raise e
|
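With the file-object branch gone, both loaders above simply parse a YAML file and fold the result into a dict. A small hedged sketch of that parse-then-update shape, using an inline YAML string instead of a file on disk (the keys shown are invented, not TACA's real configuration schema):

import yaml

raw = """
log:
  file: taca.log
mail:
  recipients: ['[email protected]']
"""

CONFIG = {}
content = yaml.load(raw, Loader=yaml.FullLoader)
CONFIG.update(content)  # same "parse, then update a shared dict" shape as load_yaml_config
print(CONFIG['log']['file'])  # taca.log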
8df58655f5a7a46a781fc0e126b148943a8d5b50
|
tests/sentry/metrics/test_datadog.py
|
tests/sentry/metrics/test_datadog.py
|
from __future__ import absolute_import
import socket
from mock import patch
from sentry.metrics.datadog import DatadogMetricsBackend
from sentry.testutils import TestCase
class DatadogMetricsBackendTest(TestCase):
def setUp(self):
self.backend = DatadogMetricsBackend(prefix='sentrytest.')
@patch('datadog.threadstats.base.ThreadStats.increment')
def test_incr(self, mock_incr):
self.backend.incr('foo', instance='bar')
mock_incr.assert_called_once_with(
'sentrytest.foo', 1,
sample_rate=1,
tags=['instance:bar'],
host=socket.gethostname(),
)
@patch('datadog.threadstats.base.ThreadStats.timing')
def test_timing(self, mock_timing):
self.backend.timing('foo', 30, instance='bar')
mock_timing.assert_called_once_with(
'sentrytest.foo', 30,
sample_rate=1,
tags=['instance:bar'],
host=socket.gethostname(),
)
|
from __future__ import absolute_import
import socket
from mock import patch
from sentry.metrics.datadog import DatadogMetricsBackend
from sentry.testutils import TestCase
class DatadogMetricsBackendTest(TestCase):
def setUp(self):
self.backend = DatadogMetricsBackend(prefix='sentrytest.')
@patch('datadog.threadstats.base.ThreadStats.increment')
def test_incr(self, mock_incr):
self.backend.incr('foo', instance='bar')
mock_incr.assert_called_once_with(
'sentrytest.foo', 1,
tags=['instance:bar'],
host=socket.gethostname(),
)
@patch('datadog.threadstats.base.ThreadStats.timing')
def test_timing(self, mock_timing):
self.backend.timing('foo', 30, instance='bar')
mock_timing.assert_called_once_with(
'sentrytest.foo', 30,
sample_rate=1,
tags=['instance:bar'],
host=socket.gethostname(),
)
|
Remove no longer valid test
|
Remove no longer valid test
|
Python
|
bsd-3-clause
|
BuildingLink/sentry,mvaled/sentry,jean/sentry,kevinlondon/sentry,imankulov/sentry,mitsuhiko/sentry,nicholasserra/sentry,ifduyue/sentry,gencer/sentry,fotinakis/sentry,mvaled/sentry,alexm92/sentry,alexm92/sentry,kevinlondon/sentry,beeftornado/sentry,looker/sentry,korealerts1/sentry,jean/sentry,beeftornado/sentry,fotinakis/sentry,ngonzalvez/sentry,JackDanger/sentry,ngonzalvez/sentry,nicholasserra/sentry,JamesMura/sentry,jean/sentry,gencer/sentry,daevaorn/sentry,imankulov/sentry,JamesMura/sentry,JackDanger/sentry,zenefits/sentry,alexm92/sentry,gencer/sentry,BayanGroup/sentry,ifduyue/sentry,felixbuenemann/sentry,mvaled/sentry,ifduyue/sentry,looker/sentry,ifduyue/sentry,mitsuhiko/sentry,BuildingLink/sentry,korealerts1/sentry,daevaorn/sentry,JamesMura/sentry,Natim/sentry,gencer/sentry,mvaled/sentry,Natim/sentry,zenefits/sentry,ngonzalvez/sentry,Kryz/sentry,BayanGroup/sentry,looker/sentry,daevaorn/sentry,JackDanger/sentry,daevaorn/sentry,imankulov/sentry,BuildingLink/sentry,JamesMura/sentry,ifduyue/sentry,zenefits/sentry,nicholasserra/sentry,BuildingLink/sentry,gencer/sentry,fotinakis/sentry,mvaled/sentry,Kryz/sentry,kevinlondon/sentry,JamesMura/sentry,Kryz/sentry,felixbuenemann/sentry,jean/sentry,fotinakis/sentry,Natim/sentry,looker/sentry,jean/sentry,beeftornado/sentry,zenefits/sentry,korealerts1/sentry,felixbuenemann/sentry,zenefits/sentry,BayanGroup/sentry,mvaled/sentry,BuildingLink/sentry,looker/sentry
|
from __future__ import absolute_import
import socket
from mock import patch
from sentry.metrics.datadog import DatadogMetricsBackend
from sentry.testutils import TestCase
class DatadogMetricsBackendTest(TestCase):
def setUp(self):
self.backend = DatadogMetricsBackend(prefix='sentrytest.')
@patch('datadog.threadstats.base.ThreadStats.increment')
def test_incr(self, mock_incr):
self.backend.incr('foo', instance='bar')
mock_incr.assert_called_once_with(
'sentrytest.foo', 1,
sample_rate=1,
tags=['instance:bar'],
host=socket.gethostname(),
)
@patch('datadog.threadstats.base.ThreadStats.timing')
def test_timing(self, mock_timing):
self.backend.timing('foo', 30, instance='bar')
mock_timing.assert_called_once_with(
'sentrytest.foo', 30,
sample_rate=1,
tags=['instance:bar'],
host=socket.gethostname(),
)
Remove no longer valid test
|
from __future__ import absolute_import
import socket
from mock import patch
from sentry.metrics.datadog import DatadogMetricsBackend
from sentry.testutils import TestCase
class DatadogMetricsBackendTest(TestCase):
def setUp(self):
self.backend = DatadogMetricsBackend(prefix='sentrytest.')
@patch('datadog.threadstats.base.ThreadStats.increment')
def test_incr(self, mock_incr):
self.backend.incr('foo', instance='bar')
mock_incr.assert_called_once_with(
'sentrytest.foo', 1,
tags=['instance:bar'],
host=socket.gethostname(),
)
@patch('datadog.threadstats.base.ThreadStats.timing')
def test_timing(self, mock_timing):
self.backend.timing('foo', 30, instance='bar')
mock_timing.assert_called_once_with(
'sentrytest.foo', 30,
sample_rate=1,
tags=['instance:bar'],
host=socket.gethostname(),
)
|
<commit_before>from __future__ import absolute_import
import socket
from mock import patch
from sentry.metrics.datadog import DatadogMetricsBackend
from sentry.testutils import TestCase
class DatadogMetricsBackendTest(TestCase):
def setUp(self):
self.backend = DatadogMetricsBackend(prefix='sentrytest.')
@patch('datadog.threadstats.base.ThreadStats.increment')
def test_incr(self, mock_incr):
self.backend.incr('foo', instance='bar')
mock_incr.assert_called_once_with(
'sentrytest.foo', 1,
sample_rate=1,
tags=['instance:bar'],
host=socket.gethostname(),
)
@patch('datadog.threadstats.base.ThreadStats.timing')
def test_timing(self, mock_timing):
self.backend.timing('foo', 30, instance='bar')
mock_timing.assert_called_once_with(
'sentrytest.foo', 30,
sample_rate=1,
tags=['instance:bar'],
host=socket.gethostname(),
)
<commit_msg>Remove no longer valid test<commit_after>
|
from __future__ import absolute_import
import socket
from mock import patch
from sentry.metrics.datadog import DatadogMetricsBackend
from sentry.testutils import TestCase
class DatadogMetricsBackendTest(TestCase):
def setUp(self):
self.backend = DatadogMetricsBackend(prefix='sentrytest.')
@patch('datadog.threadstats.base.ThreadStats.increment')
def test_incr(self, mock_incr):
self.backend.incr('foo', instance='bar')
mock_incr.assert_called_once_with(
'sentrytest.foo', 1,
tags=['instance:bar'],
host=socket.gethostname(),
)
@patch('datadog.threadstats.base.ThreadStats.timing')
def test_timing(self, mock_timing):
self.backend.timing('foo', 30, instance='bar')
mock_timing.assert_called_once_with(
'sentrytest.foo', 30,
sample_rate=1,
tags=['instance:bar'],
host=socket.gethostname(),
)
|
from __future__ import absolute_import
import socket
from mock import patch
from sentry.metrics.datadog import DatadogMetricsBackend
from sentry.testutils import TestCase
class DatadogMetricsBackendTest(TestCase):
def setUp(self):
self.backend = DatadogMetricsBackend(prefix='sentrytest.')
@patch('datadog.threadstats.base.ThreadStats.increment')
def test_incr(self, mock_incr):
self.backend.incr('foo', instance='bar')
mock_incr.assert_called_once_with(
'sentrytest.foo', 1,
sample_rate=1,
tags=['instance:bar'],
host=socket.gethostname(),
)
@patch('datadog.threadstats.base.ThreadStats.timing')
def test_timing(self, mock_timing):
self.backend.timing('foo', 30, instance='bar')
mock_timing.assert_called_once_with(
'sentrytest.foo', 30,
sample_rate=1,
tags=['instance:bar'],
host=socket.gethostname(),
)
Remove no longer valid test
from __future__ import absolute_import
import socket
from mock import patch
from sentry.metrics.datadog import DatadogMetricsBackend
from sentry.testutils import TestCase
class DatadogMetricsBackendTest(TestCase):
def setUp(self):
self.backend = DatadogMetricsBackend(prefix='sentrytest.')
@patch('datadog.threadstats.base.ThreadStats.increment')
def test_incr(self, mock_incr):
self.backend.incr('foo', instance='bar')
mock_incr.assert_called_once_with(
'sentrytest.foo', 1,
tags=['instance:bar'],
host=socket.gethostname(),
)
@patch('datadog.threadstats.base.ThreadStats.timing')
def test_timing(self, mock_timing):
self.backend.timing('foo', 30, instance='bar')
mock_timing.assert_called_once_with(
'sentrytest.foo', 30,
sample_rate=1,
tags=['instance:bar'],
host=socket.gethostname(),
)
|
<commit_before>from __future__ import absolute_import
import socket
from mock import patch
from sentry.metrics.datadog import DatadogMetricsBackend
from sentry.testutils import TestCase
class DatadogMetricsBackendTest(TestCase):
def setUp(self):
self.backend = DatadogMetricsBackend(prefix='sentrytest.')
@patch('datadog.threadstats.base.ThreadStats.increment')
def test_incr(self, mock_incr):
self.backend.incr('foo', instance='bar')
mock_incr.assert_called_once_with(
'sentrytest.foo', 1,
sample_rate=1,
tags=['instance:bar'],
host=socket.gethostname(),
)
@patch('datadog.threadstats.base.ThreadStats.timing')
def test_timing(self, mock_timing):
self.backend.timing('foo', 30, instance='bar')
mock_timing.assert_called_once_with(
'sentrytest.foo', 30,
sample_rate=1,
tags=['instance:bar'],
host=socket.gethostname(),
)
<commit_msg>Remove no longer valid test<commit_after>from __future__ import absolute_import
import socket
from mock import patch
from sentry.metrics.datadog import DatadogMetricsBackend
from sentry.testutils import TestCase
class DatadogMetricsBackendTest(TestCase):
def setUp(self):
self.backend = DatadogMetricsBackend(prefix='sentrytest.')
@patch('datadog.threadstats.base.ThreadStats.increment')
def test_incr(self, mock_incr):
self.backend.incr('foo', instance='bar')
mock_incr.assert_called_once_with(
'sentrytest.foo', 1,
tags=['instance:bar'],
host=socket.gethostname(),
)
@patch('datadog.threadstats.base.ThreadStats.timing')
def test_timing(self, mock_timing):
self.backend.timing('foo', 30, instance='bar')
mock_timing.assert_called_once_with(
'sentrytest.foo', 30,
sample_rate=1,
tags=['instance:bar'],
host=socket.gethostname(),
)
|
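The updated test above leans on mock's assert_called_once_with to pin down the exact keyword arguments the backend forwards to ThreadStats. A tiny self-contained sketch of that assertion style, using a throwaway class rather than anything from sentry or datadog (the sketch imports the standard-library unittest.mock, which exposes the same API as the mock package used in the test):

from unittest import mock

class Greeter:
    def greet(self, name):
        print('hello', name)

def send_greeting(greeter):
    greeter.greet('world')

with mock.patch.object(Greeter, 'greet') as mock_greet:
    send_greeting(Greeter())
    # Fails with an AssertionError if greet was not called exactly once
    # with exactly these arguments.
    mock_greet.assert_called_once_with('world')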
7dc34b159f837d4fdc71666233f66d340cfd3419
|
src/info_retrieval/info_retrieval.py
|
src/info_retrieval/info_retrieval.py
|
# LING 573 Question Answering System
# Code last updated 4/17/14 by Clara Gordon
# This code implements an InfoRetriever for the question answering system.
from pymur import *
from general_classes import *
class InfoRetriever:
# builds a QueryEnvironment associated with the indexed document collection
def __init__(self, index_path):
# how to get this to link up to the doc collection?
self.path_to_idx = index_path
self.index = Index(self.path_to_idx)
self.query_env = QueryEnvironment()
self.query_env.addIndex(self.path_to_idx)
# creates a list of all the passages returned by all the queries generated by
# the query-processing module
def retrieve_passages(self, queries):
passages = []
for query in queries:
query = " ".join(query)
# second argument is the number of documents desired
docs = self.query_env.runQuery("#combine[passage50:25](" + query + ")", 20)
for doc in docs:
doc_num = doc.document
begin = doc.begin
end = doc.end
doc_id = self.query_env.documents([doc_num])[0].metadata['docno'] # need this for output
passage = Passage(self.index.document(doc_num, True)[begin:end], doc.score, doc_id)
passages.append(passage)
return passages
|
# LING 573 Question Answering System
# Code last updated 4/17/14 by Clara Gordon
# This code implements an InfoRetriever for the question answering system.
from pymur import *
from general_classes import *
import sys
class InfoRetriever:
# builds a QueryEnvironment associated with the indexed document collection
def __init__(self, index_path):
# how to get this to link up to the doc collection?
self.path_to_idx = index_path
self.index = Index(self.path_to_idx)
self.query_env = QueryEnvironment()
self.query_env.addIndex(self.path_to_idx)
# creates a list of all the passages returned by all the queries generated by
# the query-processing module
def retrieve_passages(self, queries):
passages = []
for query in queries:
query = " ".join(query)
sys.stderr.write(query)
# second argument is the number of documents desired
docs = self.query_env.runQuery("#combine[passage50:25](" + query + ")", 20)
for doc in docs:
doc_num = doc.document
begin = doc.begin
end = doc.end
doc_id = self.query_env.documents([doc_num])[0].metadata['docno'] # need this for output
passage = Passage(self.index.document(doc_num, True)[begin:end], doc.score, doc_id)
passages.append(passage)
return passages
|
Add debugging statement to retrieve_passages function
|
Add debugging statement to retrieve_passages function
|
Python
|
mit
|
amkahn/question-answering,amkahn/question-answering
|
# LING 573 Question Answering System
# Code last updated 4/17/14 by Clara Gordon
# This code implements an InfoRetriever for the question answering system.
from pymur import *
from general_classes import *
class InfoRetriever:
# builds a QueryEnvironment associated with the indexed document collection
def __init__(self, index_path):
# how to get this to link up to the doc collection?
self.path_to_idx = index_path
self.index = Index(self.path_to_idx)
self.query_env = QueryEnvironment()
self.query_env.addIndex(self.path_to_idx)
# creates a list of all the passages returned by all the queries generated by
# the query-processing module
def retrieve_passages(self, queries):
passages = []
for query in queries:
query = " ".join(query)
# second argument is the number of documents desired
docs = self.query_env.runQuery("#combine[passage50:25](" + query + ")", 20)
for doc in docs:
doc_num = doc.document
begin = doc.begin
end = doc.end
doc_id = self.query_env.documents([doc_num])[0].metadata['docno'] # need this for output
passage = Passage(self.index.document(doc_num, True)[begin:end], doc.score, doc_id)
passages.append(passage)
return passages
Add debugging statement to retrieve_passages function
|
# LING 573 Question Answering System
# Code last updated 4/17/14 by Clara Gordon
# This code implements an InfoRetriever for the question answering system.
from pymur import *
from general_classes import *
import sys
class InfoRetriever:
# builds a QueryEnvironment associated with the indexed document collection
def __init__(self, index_path):
# how to get this to link up to the doc collection?
self.path_to_idx = index_path
self.index = Index(self.path_to_idx)
self.query_env = QueryEnvironment()
self.query_env.addIndex(self.path_to_idx)
# creates a list of all the passages returned by all the queries generated by
# the query-processing module
def retrieve_passages(self, queries):
passages = []
for query in queries:
query = " ".join(query)
sys.stderr.write(query)
# second argument is the number of documents desired
docs = self.query_env.runQuery("#combine[passage50:25](" + query + ")", 20)
for doc in docs:
doc_num = doc.document
begin = doc.begin
end = doc.end
doc_id = self.query_env.documents([doc_num])[0].metadata['docno'] # need this for output
passage = Passage(self.index.document(doc_num, True)[begin:end], doc.score, doc_id)
passages.append(passage)
return passages
|
<commit_before># LING 573 Question Answering System
# Code last updated 4/17/14 by Clara Gordon
# This code implements an InfoRetriever for the question answering system.
from pymur import *
from general_classes import *
class InfoRetriever:
# builds a QueryEnvironment associated with the indexed document collection
def __init__(self, index_path):
# how to get this to link up to the doc collection?
self.path_to_idx = index_path
self.index = Index(self.path_to_idx)
self.query_env = QueryEnvironment()
self.query_env.addIndex(self.path_to_idx)
# creates a list of all the passages returned by all the queries generated by
# the query-processing module
def retrieve_passages(self, queries):
passages = []
for query in queries:
query = " ".join(query)
# second argument is the number of documents desired
docs = self.query_env.runQuery("#combine[passage50:25](" + query + ")", 20)
for doc in docs:
doc_num = doc.document
begin = doc.begin
end = doc.end
doc_id = self.query_env.documents([doc_num])[0].metadata['docno'] # need this for output
passage = Passage(self.index.document(doc_num, True)[begin:end], doc.score, doc_id)
passages.append(passage)
return passages
<commit_msg>Add debugging statement to retrieve_passages function<commit_after>
|
# LING 573 Question Answering System
# Code last updated 4/17/14 by Clara Gordon
# This code implements an InfoRetriever for the question answering system.
from pymur import *
from general_classes import *
import sys
class InfoRetriever:
# builds a QueryEnvironment associated with the indexed document collection
def __init__(self, index_path):
# how to get this to link up to the doc collection?
self.path_to_idx = index_path
self.index = Index(self.path_to_idx)
self.query_env = QueryEnvironment()
self.query_env.addIndex(self.path_to_idx)
# creates a list of all the passages returned by all the queries generated by
# the query-processing module
def retrieve_passages(self, queries):
passages = []
for query in queries:
query = " ".join(query)
sys.stderr.write(query)
# second argument is the number of documents desired
docs = self.query_env.runQuery("#combine[passage50:25](" + query + ")", 20)
for doc in docs:
doc_num = doc.document
begin = doc.begin
end = doc.end
doc_id = self.query_env.documents([doc_num])[0].metadata['docno'] # need this for output
passage = Passage(self.index.document(doc_num, True)[begin:end], doc.score, doc_id)
passages.append(passage)
return passages
|
# LING 573 Question Answering System
# Code last updated 4/17/14 by Clara Gordon
# This code implements an InfoRetriever for the question answering system.
from pymur import *
from general_classes import *
class InfoRetriever:
# builds a QueryEnvironment associated with the indexed document collection
def __init__(self, index_path):
# how to get this to link up to the doc collection?
self.path_to_idx = index_path
self.index = Index(self.path_to_idx)
self.query_env = QueryEnvironment()
self.query_env.addIndex(self.path_to_idx)
# creates a list of all the passages returned by all the queries generated by
# the query-processing module
def retrieve_passages(self, queries):
passages = []
for query in queries:
query = " ".join(query)
# second argument is the number of documents desired
docs = self.query_env.runQuery("#combine[passage50:25](" + query + ")", 20)
for doc in docs:
doc_num = doc.document
begin = doc.begin
end = doc.end
doc_id = self.query_env.documents([doc_num])[0].metadata['docno'] # need this for output
passage = Passage(self.index.document(doc_num, True)[begin:end], doc.score, doc_id)
passages.append(passage)
return passages
Add debugging statement to retrieve_passages function
# LING 573 Question Answering System
# Code last updated 4/17/14 by Clara Gordon
# This code implements an InfoRetriever for the question answering system.
from pymur import *
from general_classes import *
import sys
class InfoRetriever:
# builds a QueryEnvironment associated with the indexed document collection
def __init__(self, index_path):
# how to get this to link up to the doc collection?
self.path_to_idx = index_path
self.index = Index(self.path_to_idx)
self.query_env = QueryEnvironment()
self.query_env.addIndex(self.path_to_idx)
# creates a list of all the passages returned by all the queries generated by
# the query-processing module
def retrieve_passages(self, queries):
passages = []
for query in queries:
query = " ".join(query)
sys.stderr.write(query)
# second argument is the number of documents desired
docs = self.query_env.runQuery("#combine[passage50:25](" + query + ")", 20)
for doc in docs:
doc_num = doc.document
begin = doc.begin
end = doc.end
doc_id = self.query_env.documents([doc_num])[0].metadata['docno'] # need this for output
passage = Passage(self.index.document(doc_num, True)[begin:end], doc.score, doc_id)
passages.append(passage)
return passages
|
<commit_before># LING 573 Question Answering System
# Code last updated 4/17/14 by Clara Gordon
# This code implements an InfoRetriever for the question answering system.
from pymur import *
from general_classes import *
class InfoRetriever:
# builds a QueryEnvironment associated with the indexed document collection
def __init__(self, index_path):
# how to get this to link up to the doc collection?
self.path_to_idx = index_path
self.index = Index(self.path_to_idx)
self.query_env = QueryEnvironment()
self.query_env.addIndex(self.path_to_idx)
# creates a list of all the passages returned by all the queries generated by
# the query-processing module
def retrieve_passages(self, queries):
passages = []
for query in queries:
query = " ".join(query)
# second argument is the number of documents desired
docs = self.query_env.runQuery("#combine[passage50:25](" + query + ")", 20)
for doc in docs:
doc_num = doc.document
begin = doc.begin
end = doc.end
doc_id = self.query_env.documents([doc_num])[0].metadata['docno'] # need this for output
passage = Passage(self.index.document(doc_num, True)[begin:end], doc.score, doc_id)
passages.append(passage)
return passages
<commit_msg>Add debugging statement to retrieve_passages function<commit_after># LING 573 Question Answering System
# Code last updated 4/17/14 by Clara Gordon
# This code implements an InfoRetriever for the question answering system.
from pymur import *
from general_classes import *
import sys
class InfoRetriever:
# builds a QueryEnvironment associated with the indexed document collection
def __init__(self, index_path):
# how to get this to link up to the doc collection?
self.path_to_idx = index_path
self.index = Index(self.path_to_idx)
self.query_env = QueryEnvironment()
self.query_env.addIndex(self.path_to_idx)
# creates a list of all the passages returned by all the queries generated by
# the query-processing module
def retrieve_passages(self, queries):
passages = []
for query in queries:
query = " ".join(query)
sys.stderr.write(query)
# second argument is the number of documents desired
docs = self.query_env.runQuery("#combine[passage50:25](" + query + ")", 20)
for doc in docs:
doc_num = doc.document
begin = doc.begin
end = doc.end
doc_id = self.query_env.documents([doc_num])[0].metadata['docno'] # need this for output
passage = Passage(self.index.document(doc_num, True)[begin:end], doc.score, doc_id)
passages.append(passage)
return passages
|
dc04c35177815ff2aee46088cac7d6790e6831dd
|
swimlane/core/search/search_result.py
|
swimlane/core/search/search_result.py
|
"""This module provides a SearchResults class."""
from types import GeneratorType
from ..resources import Record, StatsResult
from ..swimlane_dict import SwimlaneDict
__metaclass__ = type
class SearchResult:
"""A class that wraps a Swimlane search result."""
def __init__(self, report, resp):
"""Init a SearchResult.
Args:
report (Report): The report that was used to initiate the search.
resp (SwimlaneDict): The JSON response from a search request.
"""
self.is_stats = isinstance(resp, GeneratorType)
if self.is_stats:
self.stats = (StatsResult(SwimlaneDict(r)) for r in resp)
else:
self.report = report
self.count = resp["count"]
self.offset = resp["offset"]
self.limit = resp["limit"]
results = (resp["results"][report.applicationIds[0]]
if resp["results"] else [])
self.records = (Record(SwimlaneDict(r)) for r in results)
|
"""This module provides a SearchResults class."""
from types import GeneratorType
from ..resources import Record, StatsResult
from ..swimlane_dict import SwimlaneDict
__metaclass__ = type
class SearchResult:
"""A class that wraps a Swimlane search result."""
def __init__(self, report, resp):
"""Init a SearchResult.
Args:
report (Report): The report that was used to initiate the search.
resp (SwimlaneDict): The JSON response from a search request.
"""
self.is_stats = isinstance(resp, GeneratorType)
if self.is_stats:
self.stats = (StatsResult(SwimlaneDict(r)) for r in resp)
else:
self.report = report
self.count = resp["count"]
self.offset = resp["offset"]
self.limit = resp["limit"]
results = []
if report.applicationIds[0] in resp['results']:
results = resp["results"][report.applicationIds[0]]
self.records = (Record(SwimlaneDict(r)) for r in results)
|
Fix a KeyError that is raised when there are no results
|
Fix a KeyError that is raised when there are no results
|
Python
|
mit
|
Swimlane/sw-python-client
|
"""This module provides a SearchResults class."""
from types import GeneratorType
from ..resources import Record, StatsResult
from ..swimlane_dict import SwimlaneDict
__metaclass__ = type
class SearchResult:
"""A class that wraps a Swimlane search result."""
def __init__(self, report, resp):
"""Init a SearchResult.
Args:
report (Report): The report that was used to initiate the search.
resp (SwimlaneDict): The JSON response from a search request.
"""
self.is_stats = isinstance(resp, GeneratorType)
if self.is_stats:
self.stats = (StatsResult(SwimlaneDict(r)) for r in resp)
else:
self.report = report
self.count = resp["count"]
self.offset = resp["offset"]
self.limit = resp["limit"]
results = (resp["results"][report.applicationIds[0]]
if resp["results"] else [])
self.records = (Record(SwimlaneDict(r)) for r in results)
Fix a KeyError that is raised when there are no results
|
"""This module provides a SearchResults class."""
from types import GeneratorType
from ..resources import Record, StatsResult
from ..swimlane_dict import SwimlaneDict
__metaclass__ = type
class SearchResult:
"""A class that wraps a Swimlane search result."""
def __init__(self, report, resp):
"""Init a SearchResult.
Args:
report (Report): The report that was used to initiate the search.
resp (SwimlaneDict): The JSON response from a search request.
"""
self.is_stats = isinstance(resp, GeneratorType)
if self.is_stats:
self.stats = (StatsResult(SwimlaneDict(r)) for r in resp)
else:
self.report = report
self.count = resp["count"]
self.offset = resp["offset"]
self.limit = resp["limit"]
results = []
if report.applicationIds[0] in resp['results']:
results = resp["results"][report.applicationIds[0]]
self.records = (Record(SwimlaneDict(r)) for r in results)
|
<commit_before>"""This module provides a SearchResults class."""
from types import GeneratorType
from ..resources import Record, StatsResult
from ..swimlane_dict import SwimlaneDict
__metaclass__ = type
class SearchResult:
"""A class that wraps a Swimlane search result."""
def __init__(self, report, resp):
"""Init a SearchResult.
Args:
report (Report): The report that was used to initiate the search.
resp (SwimlaneDict): The JSON response from a search request.
"""
self.is_stats = isinstance(resp, GeneratorType)
if self.is_stats:
self.stats = (StatsResult(SwimlaneDict(r)) for r in resp)
else:
self.report = report
self.count = resp["count"]
self.offset = resp["offset"]
self.limit = resp["limit"]
results = (resp["results"][report.applicationIds[0]]
if resp["results"] else [])
self.records = (Record(SwimlaneDict(r)) for r in results)
<commit_msg>Fix a KeyError that is raised when there are no results<commit_after>
|
"""This module provides a SearchResults class."""
from types import GeneratorType
from ..resources import Record, StatsResult
from ..swimlane_dict import SwimlaneDict
__metaclass__ = type
class SearchResult:
"""A class that wraps a Swimlane search result."""
def __init__(self, report, resp):
"""Init a SearchResult.
Args:
report (Report): The report that was used to initiate the search.
resp (SwimlaneDict): The JSON response from a search request.
"""
self.is_stats = isinstance(resp, GeneratorType)
if self.is_stats:
self.stats = (StatsResult(SwimlaneDict(r)) for r in resp)
else:
self.report = report
self.count = resp["count"]
self.offset = resp["offset"]
self.limit = resp["limit"]
results = []
if report.applicationIds[0] in resp['results']:
results = resp["results"][report.applicationIds[0]]
self.records = (Record(SwimlaneDict(r)) for r in results)
|
"""This module provides a SearchResults class."""
from types import GeneratorType
from ..resources import Record, StatsResult
from ..swimlane_dict import SwimlaneDict
__metaclass__ = type
class SearchResult:
"""A class that wraps a Swimlane search result."""
def __init__(self, report, resp):
"""Init a SearchResult.
Args:
report (Report): The report that was used to initiate the search.
resp (SwimlaneDict): The JSON response from a search request.
"""
self.is_stats = isinstance(resp, GeneratorType)
if self.is_stats:
self.stats = (StatsResult(SwimlaneDict(r)) for r in resp)
else:
self.report = report
self.count = resp["count"]
self.offset = resp["offset"]
self.limit = resp["limit"]
results = (resp["results"][report.applicationIds[0]]
if resp["results"] else [])
self.records = (Record(SwimlaneDict(r)) for r in results)
Fix a KeyError that is raised when there are no results"""This module provides a SearchResults class."""
from types import GeneratorType
from ..resources import Record, StatsResult
from ..swimlane_dict import SwimlaneDict
__metaclass__ = type
class SearchResult:
"""A class that wraps a Swimlane search result."""
def __init__(self, report, resp):
"""Init a SearchResult.
Args:
report (Report): The report that was used to initiate the search.
resp (SwimlaneDict): The JSON response from a search request.
"""
self.is_stats = isinstance(resp, GeneratorType)
if self.is_stats:
self.stats = (StatsResult(SwimlaneDict(r)) for r in resp)
else:
self.report = report
self.count = resp["count"]
self.offset = resp["offset"]
self.limit = resp["limit"]
results = []
if report.applicationIds[0] in resp['results']:
results = resp["results"][report.applicationIds[0]]
self.records = (Record(SwimlaneDict(r)) for r in results)
|
<commit_before>"""This module provides a SearchResults class."""
from types import GeneratorType
from ..resources import Record, StatsResult
from ..swimlane_dict import SwimlaneDict
__metaclass__ = type
class SearchResult:
"""A class that wraps a Swimlane search result."""
def __init__(self, report, resp):
"""Init a SearchResult.
Args:
report (Report): The report that was used to initiate the search.
resp (SwimlaneDict): The JSON response from a search request.
"""
self.is_stats = isinstance(resp, GeneratorType)
if self.is_stats:
self.stats = (StatsResult(SwimlaneDict(r)) for r in resp)
else:
self.report = report
self.count = resp["count"]
self.offset = resp["offset"]
self.limit = resp["limit"]
results = (resp["results"][report.applicationIds[0]]
if resp["results"] else [])
self.records = (Record(SwimlaneDict(r)) for r in results)
<commit_msg>Fix a KeyError that is raised when there are no results<commit_after>"""This module provides a SearchResults class."""
from types import GeneratorType
from ..resources import Record, StatsResult
from ..swimlane_dict import SwimlaneDict
__metaclass__ = type
class SearchResult:
"""A class that wraps a Swimlane search result."""
def __init__(self, report, resp):
"""Init a SearchResult.
Args:
report (Report): The report that was used to initiate the search.
resp (SwimlaneDict): The JSON response from a search request.
"""
self.is_stats = isinstance(resp, GeneratorType)
if self.is_stats:
self.stats = (StatsResult(SwimlaneDict(r)) for r in resp)
else:
self.report = report
self.count = resp["count"]
self.offset = resp["offset"]
self.limit = resp["limit"]
results = []
if report.applicationIds[0] in resp['results']:
results = resp["results"][report.applicationIds[0]]
self.records = (Record(SwimlaneDict(r)) for r in results)
|