Dataset viewer column summary (name, type, and reported min/max string lengths):

| column | type | min | max |
|---|---|---|---|
| commit | stringlengths | 40 | 40 |
| old_file | stringlengths | 4 | 118 |
| new_file | stringlengths | 4 | 118 |
| old_contents | stringlengths | 0 | 2.94k |
| new_contents | stringlengths | 1 | 4.43k |
| subject | stringlengths | 15 | 444 |
| message | stringlengths | 16 | 3.45k |
| lang | stringclasses | 1 value | |
| license | stringclasses | 13 values | |
| repos | stringlengths | 5 | 43.2k |
| prompt | stringlengths | 17 | 4.58k |
| response | stringlengths | 1 | 4.43k |
| prompt_tagged | stringlengths | 58 | 4.62k |
| response_tagged | stringlengths | 1 | 4.43k |
| text | stringlengths | 132 | 7.29k |
| text_tagged | stringlengths | 173 | 7.33k |

Each row below lists, in order: commit, old_file, new_file, new_contents (old_contents is empty for these newly added files), subject, message, lang, license, and repos, with fields separated by `|` lines.
986c0401da5247cd5021432ba6392df9034ee21c
|
pombola/south_africa/migrations/0002_add_parliamentary_sessions.py
|
pombola/south_africa/migrations/0002_add_parliamentary_sessions.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import migrations


def add_parliamentary_sessions(apps, schema_editor):
    ParliamentarySession = apps.get_model('core', 'ParliamentarySession')
    Organisation = apps.get_model('core', 'Organisation')
    PositionTitle = apps.get_model('core', 'PositionTitle')
    # National Assembly:
    if Organisation.objects.filter(slug='national-assembly').exists():
        # Current:
        ParliamentarySession.objects.create(
            start_date='2014-05-21',
            end_date='2019-05-21',
            house=Organisation.objects.get(slug='national-assembly'),
            position_title=PositionTitle.objects.get(slug='member'),
            mapit_generation=1,
            name='26th Parliament (National Assembly)',
            slug='na26',
        )
        # Previous:
        ParliamentarySession.objects.create(
            start_date='2009-05-06',
            end_date='2014-05-06',
            house=Organisation.objects.get(slug='national-assembly'),
            position_title=PositionTitle.objects.get(slug='member'),
            mapit_generation=1,
            name='25th Parliament (National Assembly)',
            slug='na25',
        )
    if Organisation.objects.filter(slug='ncop').exists():
        # NCOP:
        # Current:
        ParliamentarySession.objects.create(
            start_date='2014-05-21',
            end_date='2019-05-21',
            house=Organisation.objects.get(slug='ncop'),
            position_title=PositionTitle.objects.get(slug='delegate'),
            mapit_generation=1,
            name='26th Parliament (National Council of Provinces)',
            slug='ncop26',
        )
        # Previous:
        ParliamentarySession.objects.create(
            start_date='2009-05-06',
            end_date='2014-05-06',
            house=Organisation.objects.get(slug='ncop'),
            position_title=PositionTitle.objects.get(slug='delegate'),
            mapit_generation=1,
            name='25th Parliament (National Council of Provinces)',
            slug='ncop25',
        )


def remove_parliamentary_sessions(apps, schema_editor):
    ParliamentarySession = apps.get_model('core', 'ParliamentarySession')
    ParliamentarySession.objects.filter(
        slug__in=('na25', 'na26', 'ncop25', 'ncop26')
    ).delete()


class Migration(migrations.Migration):

    dependencies = [
        ('core', '0001_initial'),
        ('south_africa', '0001_initial'),
    ]

    operations = [
        migrations.RunPython(
            add_parliamentary_sessions,
            remove_parliamentary_sessions,
        ),
    ]
|
Add a data migration to add ParliamentarySession objects
|
ZA: Add a data migration to add ParliamentarySession objects
For Kenya, we've created instances of a `ParliamentarySession` model
to represent a term of a particular house of parliament. (The name is
a bit misleading - it should probably be `Term` instead.) This was
introduced primarily so that we could distinguish places from
different terms with different boundaries. Later we added links to
views of positions of a particular parliament: the position view, when
provided with a `session` query parameter, only includes positions
whose start and end date overlap with the start and end date of the
corresponding `ParliamentarySession`.

In Pombola, this mechanism provides the only easy way to see all the
people who were ever a representative in a particular house for a
given session, which means that since there are no such
`ParliamentarySession`s for South Africa, the EveryPolitician scraper
can only ever find the current representatives.

Essentially the only `/position/` URL that's used on People's Assembly
is the "MP Profiles" page, which has been overridden and doesn't
include the session switch link, so adding these ParliamentarySession
objects will only affect pages that aren't linked to by default, but
the following links will still be helpful to the EP scrapers:

* /position/delegate/parliament/ncop/?session=ncop25
* /position/delegate/parliament/ncop/?session=ncop26
* /position/member/parliament/national-assembly/?session=na25
* /position/member/parliament/national-assembly/?session=na26

Note that because these views just look for overlapping dates with a
session, just creating the `ParliamentarySession` objects is enough -
they don't need to be linked with positions by foreign keys. (This is
probably a misfeature.)

PMG have been careful about setting precise start and end dates, so
this should accurately identify who was really a representative during
each session.
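
To make the overlap rule concrete, here is a minimal sketch of the kind of query such a view could run. The names (`position_qs`, `start_date`, `end_date`) are illustrative assumptions, not Pombola's actual code:

```python
def positions_for_session(position_qs, session):
    # Hypothetical sketch: two date ranges overlap iff each one starts
    # before the other ends, so no foreign key to the session is needed.
    return position_qs.filter(
        start_date__lte=session.end_date,
        end_date__gte=session.start_date,
    )
```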
|
Python
|
agpl-3.0
|
mysociety/pombola,mysociety/pombola,mysociety/pombola,mysociety/pombola,mysociety/pombola,mysociety/pombola
|
|
c0f0164612b2cfe1e3728c81956fc93ec1a50101
|
python/misc/format-json.py
|
python/misc/format-json.py
|
#!/usr/bin/env python

import json
import sys


def main():
    if len(sys.argv) < 3:
        print('Usage: {} INFILE OUTFILE'.format(sys.argv[0]))
        sys.exit()

    with open(sys.argv[1], 'r') as infile, open(sys.argv[2], 'w') as outfile:
        outfile.write(
            json.dumps(
                json.loads(
                    infile.read()
                ),
                sort_keys=True,
                indent=4
            )
        )


if __name__ == '__main__':
    main()
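
As a quick illustration of what the script's `json.dumps(..., sort_keys=True, indent=4)` round-trip produces - a sketch on an inline string rather than the `sys.argv` files the script itself reads:

```python
import json

raw = '{"b": 1, "a": {"d": 2, "c": 3}}'
print(json.dumps(json.loads(raw), sort_keys=True, indent=4))
# Keys come out alphabetically sorted, indented four spaces:
# {
#     "a": {
#         "c": 3,
#         "d": 2
#     },
#     "b": 1
# }
```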
|
Add quick script for formatting a flat json file
|
Add quick script for formatting a flat json file
|
Python
|
mit
|
bmaupin/junkpile,bmaupin/junkpile,bmaupin/junkpile,bmaupin/junkpile,bmaupin/junkpile,bmaupin/junkpile,bmaupin/junkpile,bmaupin/junkpile,bmaupin/junkpile
|
|
85728e993f6573054f8a6fa9475ea387957aae5d
|
tests/rules_tests/isValid_tests/__init__.py
|
tests/rules_tests/isValid_tests/__init__.py
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
|
Add directory for Rule.is_valid tests
|
Add directory for Rule.is_valid tests
|
Python
|
mit
|
PatrikValkovic/grammpy
|
|
439b98cc221d29e25b3b5383e90e846a5c4968ac
|
users/migrations/0002_auto_20140922_0843.py
|
users/migrations/0002_auto_20140922_0843.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations


def add_echonest_forward(apps, schema_editor):
    """Create echonest user."""
    User = apps.get_model("users", "User")
    User.objects.update_or_create(email='echonest')


def add_echonest_backward(apps, schema_editor):
    """Delete echonest user."""
    User = apps.get_model("users", "User")
    User.objects.filter(email='echonest').delete()


class Migration(migrations.Migration):

    dependencies = [
        ('users', '0001_initial'),
    ]

    operations = [
        migrations.RunPython(add_echonest_forward, add_echonest_backward)
    ]
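
A note on the forward step: `update_or_create` returns an `(object, created)` tuple, so it is idempotent - rerunning the forward function won't raise on an existing row. A tiny sketch of the call's shape, assuming a `User` model looked up as above:

```python
user, created = User.objects.update_or_create(email='echonest')
# created is True the first time, False on reruns; either way `user`
# is the row with email='echonest'.
```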
|
Add migration to create echonest user
|
Add migration to create echonest user
|
Python
|
bsd-3-clause
|
FreeMusicNinja/api.freemusic.ninja
|
|
de1dbde3914ceb9e684df025063c44bc3a0e65da
|
ceph_deploy/tests/parser/test_purgedata.py
|
ceph_deploy/tests/parser/test_purgedata.py
|
import pytest

from ceph_deploy.cli import get_parser


class TestParserPurgeData(object):

    def setup(self):
        self.parser = get_parser()

    def test_purgedata_help(self, capsys):
        with pytest.raises(SystemExit):
            self.parser.parse_args('purgedata --help'.split())
        out, err = capsys.readouterr()
        assert 'usage: ceph-deploy purgedata' in out
        assert 'positional arguments:' in out
        assert 'optional arguments:' in out

    def test_purgedata_host_required(self, capsys):
        with pytest.raises(SystemExit):
            self.parser.parse_args('purgedata'.split())
        out, err = capsys.readouterr()
        assert "error: too few arguments" in err

    def test_purgedata_one_host(self):
        args = self.parser.parse_args('purgedata host1'.split())
        assert args.host == ['host1']

    def test_purgedata_multiple_hosts(self):
        hostnames = ['host1', 'host2', 'host3']
        args = self.parser.parse_args(['purgedata'] + hostnames)
        assert frozenset(args.host) == frozenset(hostnames)
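
These tests lean on the fact that argparse calls `sys.exit()` both for `--help` (exit code 0) and for parse errors (exit code 2), which is why `pytest.raises(SystemExit)` wraps the `parse_args` calls. A self-contained sketch of the same pattern, independent of ceph-deploy:

```python
import argparse

import pytest


def test_help_exits_zero(capsys):
    parser = argparse.ArgumentParser(prog='demo')
    parser.add_argument('host', nargs='+')
    with pytest.raises(SystemExit) as err:
        parser.parse_args(['--help'])  # argparse prints help, then exits 0
    assert err.value.code == 0
    assert 'usage: demo' in capsys.readouterr().out
```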
|
Add tests for argparse purgedata
|
[RM-11742] Add tests for argparse purgedata
Signed-off-by: Travis Rhoden <e5e44d6dbac12e32e01c3bb8b67940d8b42e225b@redhat.com>
|
Python
|
mit
|
SUSE/ceph-deploy-to-be-deleted,zhouyuan/ceph-deploy,ceph/ceph-deploy,isyippee/ceph-deploy,Vicente-Cheng/ceph-deploy,branto1/ceph-deploy,SUSE/ceph-deploy-to-be-deleted,SUSE/ceph-deploy,ghxandsky/ceph-deploy,branto1/ceph-deploy,trhoden/ceph-deploy,codenrhoden/ceph-deploy,osynge/ceph-deploy,Vicente-Cheng/ceph-deploy,isyippee/ceph-deploy,zhouyuan/ceph-deploy,osynge/ceph-deploy,ceph/ceph-deploy,imzhulei/ceph-deploy,shenhequnying/ceph-deploy,trhoden/ceph-deploy,shenhequnying/ceph-deploy,ghxandsky/ceph-deploy,imzhulei/ceph-deploy,codenrhoden/ceph-deploy,SUSE/ceph-deploy
|
|
6a08d2b6b5f7989b3a8aea5ac292e7baf1ed94e2
|
salt/grains/philips_hue.py
|
salt/grains/philips_hue.py
|
# -*- coding: utf-8 -*-
'''
Generate baseline proxy minion grains
'''

__proxyenabled__ = ['philips_hue']

__virtualname__ = 'hue'


def __virtual__():
    if 'proxy' not in __opts__:
        return False
    else:
        return __virtualname__


def kernel():
    return {'kernel': 'RTOS'}


def os():
    return {'os': 'FreeRTOS'}


def os_family():
    return {'os_family': 'RTOS'}


def vendor():
    return {'vendor': 'Philips'}


def product():
    return {'product': 'HUE'}
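
For context, a rough sketch of how a grains module's output ends up in the minion's grains - the loader behaviour here is a simplified assumption: Salt calls each public function and merges the returned dicts:

```python
# Simplified, assumed loader behaviour; not Salt's actual code.
def kernel():
    return {'kernel': 'RTOS'}

def vendor():
    return {'vendor': 'Philips'}

grains = {}
for fn in (kernel, vendor):
    grains.update(fn())
print(grains)  # {'kernel': 'RTOS', 'vendor': 'Philips'}
```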
|
Implement static grains for the Philips HUE
|
Implement static grains for the Philips HUE
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
|
3d0f20cbcebc5e29a00c65ce51b2204f562a0a21
|
tests/app/soc/views/models/test_sponsor.py
|
tests/app/soc/views/models/test_sponsor.py
|
#!/usr/bin/env python2.5
#
# Copyright 2010 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__authors__ = [
    '"Leo (Chong Liu)" <HiddenPython@gmail.com>',
]
import httplib
from django.http import HttpRequest
from django.core import urlresolvers
from django.utils import simplejson
from tests.test_utils import DjangoTestCase
from google.appengine.api import users
from soc.logic.models.user import logic as user_logic
from soc.logic.models.sponsor import logic as sponsor_logic
from soc.middleware.xsrf import XsrfMiddleware
from soc.logic.helper import xsrfutil
from django.test.client import Client
|
Add test for the sponsor views
|
Add test for the sponsor views
|
Python
|
apache-2.0
|
rhyolight/nupic.son,rhyolight/nupic.son,rhyolight/nupic.son
|
|
eb109a55bc1d4c3be961257d9713b23a5916f5ef
|
tests/monitoring/test_check_mesos_quorum.py
|
tests/monitoring/test_check_mesos_quorum.py
|
# Copyright 2015-2016 Yelp Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
import pytest

from paasta_tools.monitoring.check_mesos_quorum import check_mesos_quorum


def test_check_mesos_quorum_ok(capfd):
    with mock.patch(
        'paasta_tools.metrics.metastatus_lib.get_num_masters', autospec=True,
        return_value=3,
    ), mock.patch(
        'paasta_tools.metrics.metastatus_lib.get_mesos_quorum', autospec=True,
        return_value=2,
    ):
        with pytest.raises(SystemExit) as error:
            check_mesos_quorum()
        out, err = capfd.readouterr()
        assert "OK" in out
        assert error.value.code == 0


def test_check_mesos_quorum_critical(capfd):
    with mock.patch(
        'paasta_tools.metrics.metastatus_lib.get_num_masters', autospec=True,
        return_value=1,
    ), mock.patch(
        'paasta_tools.metrics.metastatus_lib.get_mesos_quorum', autospec=True,
        return_value=2,
    ):
        with pytest.raises(SystemExit) as error:
            check_mesos_quorum()
        out, err = capfd.readouterr()
        assert "CRITICAL" in out
        assert error.value.code == 2
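
The two tests pin down the check's contract: with 3 masters and a quorum size of 2 it prints "OK" and exits 0; with only 1 master it prints "CRITICAL" and exits 2. A hypothetical sketch of logic that would satisfy them - not PaaSTA's actual implementation:

```python
import sys


def check_quorum(num_masters, quorum_size):
    # Enough masters to form a quorum -> OK, otherwise CRITICAL.
    if num_masters >= quorum_size:
        print("OK: {0} masters, quorum size {1}".format(num_masters, quorum_size))
        sys.exit(0)
    print("CRITICAL: {0} masters, quorum size {1}".format(num_masters, quorum_size))
    sys.exit(2)
```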
|
Add last metastatus check test
|
Add last metastatus check test
|
Python
|
apache-2.0
|
somic/paasta,Yelp/paasta,Yelp/paasta,somic/paasta
|
|
c9137bdaf551d0e1203120a9c00af60541e3597f
|
scikits/talkbox/lpc/lpc.py
|
scikits/talkbox/lpc/lpc.py
|
#! /usr/bin/env python
# Last Change: Sun Sep 14 03:00 PM 2008 J

import numpy as np

from c_lpc import levinson as c_levinson


def levinson(r, order, axis=-1):
    """Levinson-Durbin recursion, to efficiently solve symmetric linear
    systems with Toeplitz structure.

    Arguments
    ---------
    r : array-like
        input array to invert (since the matrix is symmetric Toeplitz, the
        corresponding pxp matrix is defined by p items only). Generally the
        autocorrelation of the signal for linear prediction coefficients
        estimation. The first item must be a non zero real, and corresponds
        to the autocorrelation at lag 0 for linear prediction.
    order : int
        order of the recursion. For order p, you will get p+1 coefficients.
    axis : int, optional
        axis over which the algorithm is applied. -1 by default.

    Returns
    -------
    a : array-like
        the solution of the inversion (see notes).
    e : array-like
        the prediction error.
    k : array-like
        reflection coefficients.

    Notes
    -----
    Levinson is a well-known algorithm to solve the Hermitian Toeplitz
    equation:

                         _                _
        -R[1] = R[0]     R[1]   ... R[p-1]      a[1]
         :      :        :          :        *   :
         :      :        :          _        *   :
        -R[p] = R[p-1]   R[p-2] ... R[0]        a[p]

    with respect to a (the overbar denotes the complex conjugate). Using
    the special symmetry in the matrix, the inversion can be done in
    O(p^2) instead of O(p^3).
    """
    if axis != -1:
        r = np.swapaxes(r, axis, -1)
    a, e, k = c_levinson(r, order)
    if axis != -1:
        a = np.swapaxes(a, axis, -1)
        e = np.swapaxes(e, axis, -1)
        k = np.swapaxes(k, axis, -1)
    return a, e, k
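
Since the recursion itself lives in the compiled `c_lpc` extension, a pure-NumPy reference version may help make it concrete. This is an illustrative sketch of the textbook real-valued Levinson-Durbin recursion, not the package's C implementation:

```python
import numpy as np


def levinson_ref(r, order):
    """Textbook Levinson-Durbin for real 1-d r; returns (a, e, k) as above."""
    a = np.zeros(order + 1)
    a[0] = 1.0
    e = r[0]
    k = np.zeros(order)
    for i in range(1, order + 1):
        # Correlate the current predictor with the next lag of r.
        acc = r[i] + np.dot(a[1:i], r[i - 1:0:-1])
        k[i - 1] = -acc / e
        # Symmetric order update of the predictor coefficients.
        a[1:i] += k[i - 1] * a[i - 1:0:-1]
        a[i] = k[i - 1]
        e *= 1.0 - k[i - 1] ** 2
    return a, e, k

# e.g. a, e, k = levinson_ref(np.array([2.0, 1.0, 0.5]), 2)
```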
|
Add python interface around C implementation of levinson.
|
Add python interface around C implementation of levinson.
|
Python
|
mit
|
cournape/talkbox,cournape/talkbox
|
|
0cb717548ee0f9086226c842dc4a5f16862f3f8e
|
apps/offlineevents/migrations/0002_require_date.py
|
apps/offlineevents/migrations/0002_require_date.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import migrations, models
import datetime


class Migration(migrations.Migration):

    dependencies = [
        ('meinberlin_offlineevents', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='offlineevent',
            name='date',
            field=models.DateTimeField(verbose_name='Date', default=datetime.datetime(1970, 1, 1, 1, 0)),
            preserve_default=False,
        ),
    ]
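For context, a sketch of what the model field looks like once the date is
required; this is an illustration, not the actual meinberlin model:

from django.db import models

class OfflineEvent(models.Model):
    # No null=True/blank=True: every event must now carry a date.
    date = models.DateTimeField(verbose_name='Date')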
|
Add offlineevent migration to require dates
|
Add offlineevent migration to require dates
|
Python
|
agpl-3.0
|
liqd/a4-meinberlin,liqd/a4-meinberlin,liqd/a4-meinberlin,liqd/a4-meinberlin
|
|
b2791d86aadde7fd0edb1d9e61ceece0cd47ba14
|
scripts/backup-database.py
|
scripts/backup-database.py
|
#!/usr/bin/python
# This is a small helper script to back up a CATMAID
# database.
# For example, I'm calling this script from cron with the following
# crontab entry, which will cause a backup to happen every 8 hours at
# 20 past the hour:
#
# 20 0-23/8 * * * CATMAID_CONFIGURATION=$HOME/.catmaid-db.whatever $HOME/catmaid/scripts/backup-database.py /mnt/catmaid-backups/
# You will need to create a .pgpass file so that your password can be
# found.
# You may need to install psycopg2, e.g. with:
# sudo apt-get install python-psycopg2
# Requires the file .catmaid-db to be present in your
# home directory, with the following format:
#
# host: localhost
# database: catmaid
# username: catmaid_user
# password: password_of_your_catmaid_user
import sys
import os
from common import db_database, db_username, db_password
from subprocess import check_call
import getpass
from psycopg2 import IntegrityError
from datetime import datetime
if len(sys.argv) != 2:
    print >> sys.stderr, "Usage: %s <BACKUP-DIRECTORY>" % (sys.argv[0],)
    sys.exit(1)
destination_directory = sys.argv[1]
output_filename = os.path.join(destination_directory,
                               datetime.now().strftime('%Y-%m-%dT%H-%M-%S'))
# You must specify your password in ~/.pgpass, as described here:
# http://www.postgresql.org/docs/current/static/libpq-pgpass.html
dump_command = ['pg_dump',
                '-U',
                db_username,
                '--no-password',
                db_database]
with open(output_filename, "w") as fp:
    check_call(dump_command, stdout=fp)
check_call(['bzip2', output_filename])
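The .pgpass file referenced above uses one colon-separated line per server,
and restoring one of these dumps is the reverse pipeline. A sketch (host,
port and file name are illustrative):

# ~/.pgpass, mode 0600:
#   localhost:5432:catmaid:catmaid_user:password_of_your_catmaid_user
# Restore a backup produced by this script:
#   bzcat /mnt/catmaid-backups/2013-01-01T00-00-00.bz2 | psql -U catmaid_user catmaid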
|
Add a script for easy backups of CATMAID's database
|
Add a script for easy backups of CATMAID's database
|
Python
|
agpl-3.0
|
htem/CATMAID,fzadow/CATMAID,fzadow/CATMAID,htem/CATMAID,fzadow/CATMAID,htem/CATMAID,htem/CATMAID,fzadow/CATMAID
|
|
40212c8cea43e5adfc2ba30f18182480dea481b2
|
txircd/modules/core/channel_defaultmodes.py
|
txircd/modules/core/channel_defaultmodes.py
|
from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from txircd.utils import ModeType
from zope.interface import implements

class DefaultModes(ModuleData):
    implements(IPlugin, IModuleData)

    name = "DefaultModes"
    core = True

    def hookIRCd(self, ircd):
        self.ircd = ircd

    def actions(self):
        return [ ("channelcreate", 10, self.setDefaults) ]

    def setDefaults(self, channel, user):
        modes = self.ircd.config.getWithDefault("channel_default_modes", "ont")
        statusModes = set()
        params = modes.split(" ")
        modeList = list(params.pop(0))
        for mode in modeList:
            if mode not in self.ircd.channelModeTypes:
                continue
            if self.ircd.channelModeTypes[mode] == ModeType.Status:
                statusModes.add(mode)
        for mode in statusModes:
            modeList.remove(mode)
        # Status modes go last, each taking the creating user's nick as its
        # parameter.
        for mode in statusModes:
            modeList.append(mode)
            params.append(user.nick)
        channel.setModes(self.ircd.serverID, "".join(modeList), params)

defaultModes = DefaultModes()
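To illustrate the config format this parser expects: the first
space-separated token holds the mode letters, the remaining tokens are their
parameters, and status modes are re-ordered to the end so the joining user's
nick can be appended once per status mode. A hypothetical value:

# channel_default_modes: "ontl 50"
modes = "ontl 50"
params = modes.split(" ")       # ["ontl", "50"]
modeList = list(params.pop(0))  # ["o", "n", "t", "l"]; "l" consumes "50"
# "o" is a status mode, so it is moved to the end of modeList and the
# creating user's nick is appended to params before setModes() runs.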
|
Support setting default modes in channels
|
Support setting default modes in channels
|
Python
|
bsd-3-clause
|
ElementalAlchemist/txircd,Heufneutje/txircd
|
|
11717529ab66290464e34925b08a1b792ac00cd7
|
cms/migrations/0002_auto_20190507_1532.py
|
cms/migrations/0002_auto_20190507_1532.py
|
# Generated by Django 2.1.8 on 2019-05-07 13:32

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('cms', '0001_initial'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='contentpage',
            options={'verbose_name': 'Eine Seite mit einem Titel und Inhalt'},
        ),
        migrations.AlterModelOptions(
            name='glossarypage',
            options={'verbose_name': 'Ein Glossar'},
        ),
        migrations.AlterModelOptions(
            name='indexpage',
            options={'verbose_name': 'Die Startseite des CMS-Teils'},
        ),
    ]
|
Add outstanding migrations from wagtail
|
Add outstanding migrations from wagtail
|
Python
|
mit
|
meine-stadt-transparent/meine-stadt-transparent,meine-stadt-transparent/meine-stadt-transparent,meine-stadt-transparent/meine-stadt-transparent,meine-stadt-transparent/meine-stadt-transparent
|
|
81c307848af0bf1a32253d67a1bbc279990e917b
|
myuw/management/commands/flush_memcache.py
|
myuw/management/commands/flush_memcache.py
|
import logging

from django.core.management.base import BaseCommand

from rc_django.cache_implementation.memcache import MemcachedCache

logger = logging.getLogger(__name__)


class Command(BaseCommand):

    def handle(self, *args, **options):
        client = MemcachedCache().client
        logger.info("Stats before flush: {}".format(client.stats()))
        logger.info("Flush all successful: {}".format(client.flush_all()))
        logger.info("Stats after flush: {}".format(client.stats()))
|
Add a command to flush memcache
|
Add a command to flush memcache
|
Python
|
apache-2.0
|
uw-it-aca/myuw,uw-it-aca/myuw,uw-it-aca/myuw,uw-it-aca/myuw
|
|
64d74b5f059323fc81d5d4938d1ede8dc34c7c9a
|
sujmarkov/from_file.py
|
sujmarkov/from_file.py
|
"""Read lines from a file, feed them into the markov generator, and then
generate a few lines.
Usage:
python from_file.py <input_filename> <num_lines_to_generate>
"""
import sys
import sujmarkov
def extract_sentences(input_file):
# Each line is a sentence.
#
for raw_line in input_file:
line = raw_line.lower().strip()
# For now, ignore punctuation.
#
line = line.replace(",", "").replace("-", "").replace('"', '')
for sub_line in line.split("."):
raw_sentence = sub_line.split(" ")
sentence = [word for word in raw_sentence if word and word.strip() != ""]
if sentence:
yield sentence
if __name__ == "__main__":
m = sujmarkov.Markov(n=2)
with open(sys.argv[1], "r") as input_file:
for sentence in extract_sentences(input_file):
m.add(sentence)
num_required = int(sys.argv[2])
num_done = 0
while num_done < num_required:
generated_sentence = " ".join(m.generate())
print generated_sentence
num_done += 1
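A usage sketch; the corpus file name is illustrative, and the direct API use
simply mirrors the add()/generate() calls exercised above:

# Shell usage:
#   python from_file.py corpus.txt 5
# Or directly from Python:
import sujmarkov
m = sujmarkov.Markov(n=2)
m.add(["the", "cat", "sat"])
m.add(["the", "dog", "sat"])
print " ".join(m.generate())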
|
Add a util to generate sentences from a file
|
Add a util to generate sentences from a file
|
Python
|
bsd-2-clause
|
sujaymansingh/sujmarkov
|
|
8085c063ffcdfa60e24f17a8cd961d25f116a5d5
|
tests/test_lightgbm.py
|
tests/test_lightgbm.py
|
import unittest

import lightgbm as lgb
from sklearn.datasets import load_iris


class TestLightgbm(unittest.TestCase):
    # Based on the "simple_example" from their documentation:
    # https://github.com/Microsoft/LightGBM/blob/master/examples/python-guide/simple_example.py
    def test_simple(self):
        # Load a dataset already on disk
        iris = load_iris()
        lgb_train = lgb.Dataset(iris.data[:100], iris.target[:100])
        lgb_eval = lgb.Dataset(iris.data[100:], iris.target[100:], reference=lgb_train)
        params = {
            'task': 'train',
            'boosting_type': 'gbdt',
            'objective': 'regression',
            'metric': {'l2', 'auc'},
            'num_leaves': 31,
            'learning_rate': 0.05,
            'feature_fraction': 0.9,
            'bagging_fraction': 0.8,
            'bagging_freq': 5,
            'verbose': 0
        }
        # Run only one round for faster test
        gbm = lgb.train(params, lgb_train, num_boost_round=1, valid_sets=lgb_eval, early_stopping_rounds=1)
        self.assertEqual(1, gbm.best_iteration)
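As a follow-up sketch, the trained booster can score the held-out rows with
the standard predict call:

preds = gbm.predict(iris.data[100:], num_iteration=gbm.best_iteration)
assert len(preds) == len(iris.target[100:])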
|
Add test for the lightgbm package
|
Add test for the lightgbm package
|
Python
|
apache-2.0
|
Kaggle/docker-python,Kaggle/docker-python
|
|
270724a7d6a6de7de092cab9c0bb8ac98e66b898
|
unity_setup.py
|
unity_setup.py
|
#!/usr/bin/env python
import os
import os.path
import subprocess
from subprocess import Popen, PIPE


def _set_value(plist_path, key, value):
    subprocess.call(['plutil', '-replace', key, '-string', value, plist_path])


def _get_jdk_home():
    proc = Popen(['/usr/libexec/java_home'], stdout=PIPE)
    return proc.communicate()[0].splitlines()[0]


def _get_android_sdk_home():
    base_dir = os.path.expanduser('/usr/local/Cellar/android-sdk')
    options = os.listdir(base_dir)
    return os.path.join(base_dir, max(options))


if __name__ == '__main__':
    file_path = os.path.expanduser('~/Library/Preferences/com.unity3d.UnityEditor5.x.plist')
    _set_value(file_path, 'CacheServerIPAddress', 'cardscachebox')
    _set_value(file_path, 'JdkPath', _get_jdk_home())
    _set_value(file_path, 'AndroidSdkRoot', _get_android_sdk_home())
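A quick verification sketch using stock macOS tooling: read a key back with
the defaults CLI (or inspect the whole file with `plutil -p`).

from subprocess import check_output

print(check_output(['defaults', 'read', 'com.unity3d.UnityEditor5.x', 'JdkPath']))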
|
Add script for configuring unity settings
|
Add script for configuring unity settings
|
Python
|
mit
|
akhosravian/laptop,akhosravian/laptop
|
|
83dfa6581eec1b9d32d519592c4212e6195998a3
|
taskflow/persistence/backends/sqlalchemy/alembic/versions/40fc8c914bd2_fix_atomdetails_failure_size.py
|
taskflow/persistence/backends/sqlalchemy/alembic/versions/40fc8c914bd2_fix_atomdetails_failure_size.py
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""fix atomdetails failure size
Revision ID: 40fc8c914bd2
Revises: 6df9422fcb43
Create Date: 2022-01-27 18:10:06.176006
"""
# revision identifiers, used by Alembic.
revision = '40fc8c914bd2'
down_revision = '6df9422fcb43'
from alembic import op
from sqlalchemy.dialects import mysql
def upgrade():
    bind = op.get_bind()
    engine = bind.engine
    if engine.name == 'mysql':
        op.alter_column('atomdetails', 'failure', type_=mysql.LONGTEXT,
                        existing_nullable=True)
        op.alter_column('atomdetails', 'revert_failure', type_=mysql.LONGTEXT,
                        existing_nullable=True)
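This revision defines no downgrade; a plausible sketch, assuming the
pre-change columns were plain TEXT as the commit message below states:

import sqlalchemy as sa

def downgrade():
    bind = op.get_bind()
    if bind.engine.name == 'mysql':
        op.alter_column('atomdetails', 'failure', type_=sa.Text(),
                        existing_nullable=True)
        op.alter_column('atomdetails', 'revert_failure', type_=sa.Text(),
                        existing_nullable=True)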
|
Fix atomdetails failure column size
|
Fix atomdetails failure column size
The failure and revert_failure fields in atomdetails are defined as a JSON type,
but their underlying data type is 'text' in MySQL, which is limited to 64 kbytes.
The JSON data type should have the same size as a LONGTEXT.
Closes-Bug: #1959243
Change-Id: I65b6a6d896d3e8aad871dc19b0f8d0eddf48bdd6
|
Python
|
apache-2.0
|
openstack/taskflow,openstack/taskflow
|
|
fd747b412b7ba4e9b685ef6a9043272cdb3e41ab
|
oneflow/settings/snippets/djdt.py
|
oneflow/settings/snippets/djdt.py
|
# Debug-toolbar related
INSTALLED_APPS += ('debug_toolbar', )
MIDDLEWARE_CLASSES += ('debug_toolbar.middleware.DebugToolbarMiddleware', )
INTERNAL_IPS = ('127.0.0.1', )
DEBUG_TOOLBAR_PANELS = (
    'debug_toolbar.panels.request_vars.RequestVarsDebugPanel',
    'debug_toolbar.panels.headers.HeaderDebugPanel',
    'debug_toolbar.panels.template.TemplateDebugPanel',
    'debug_toolbar.panels.logger.LoggingPanel',
    'debug_toolbar.panels.sql.SQLDebugPanel',
    'debug_toolbar.panels.timer.TimerDebugPanel',
    'debug_toolbar.panels.signals.SignalDebugPanel',
    'debug_toolbar.panels.settings_vars.SettingsVarsDebugPanel',
    'debug_toolbar.panels.version.VersionDebugPanel',
)
DEBUG_TOOLBAR_CONFIG = {
    'INTERCEPT_REDIRECTS': False,
    'ENABLE_STACKTRACES' : True,
    #'SHOW_TOOLBAR_CALLBACK': custom_show_toolbar,
    #'EXTRA_SIGNALS': ['myproject.signals.MySignal'],
    #'HIDE_DJANGO_SQL': False,
    #'TAG': 'div',
}
|
# Debug-toolbar related
INSTALLED_APPS += ('debug_toolbar', )
MIDDLEWARE_CLASSES += ('debug_toolbar.middleware.DebugToolbarMiddleware', )
INTERNAL_IPS = (
    '127.0.0.1',
    # leto.licorn.org
    '82.236.133.193',
)
DEBUG_TOOLBAR_PANELS = (
    'debug_toolbar.panels.request_vars.RequestVarsDebugPanel',
    'debug_toolbar.panels.headers.HeaderDebugPanel',
    'debug_toolbar.panels.template.TemplateDebugPanel',
    'debug_toolbar.panels.logger.LoggingPanel',
    'debug_toolbar.panels.sql.SQLDebugPanel',
    'debug_toolbar.panels.timer.TimerDebugPanel',
    'debug_toolbar.panels.signals.SignalDebugPanel',
    'debug_toolbar.panels.settings_vars.SettingsVarsDebugPanel',
    'debug_toolbar.panels.version.VersionDebugPanel',
)
DEBUG_TOOLBAR_CONFIG = {
    'INTERCEPT_REDIRECTS': False,
    'ENABLE_STACKTRACES' : True,
    #'SHOW_TOOLBAR_CALLBACK': custom_show_toolbar,
    #'EXTRA_SIGNALS': ['myproject.signals.MySignal'],
    #'HIDE_DJANGO_SQL': False,
    #'TAG': 'div',
}
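A sketch of the commented-out SHOW_TOOLBAR_CALLBACK hook, the usual
alternative to listing public IPs in INTERNAL_IPS (hypothetical
implementation for this Django-era toolbar; it must be defined before the
config dict references it):

def custom_show_toolbar(request):
    # Show the toolbar to staff users regardless of source IP.
    return request.user.is_authenticated() and request.user.is_staff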
|
Add my own public IP address to INTERNAL_IPS for `obi` testing.
|
Add my own public IP address to INTERNAL_IPS for `obi` testing.
|
Python
|
agpl-3.0
|
1flow/1flow,WillianPaiva/1flow,WillianPaiva/1flow,1flow/1flow,WillianPaiva/1flow,1flow/1flow,1flow/1flow,WillianPaiva/1flow,WillianPaiva/1flow,1flow/1flow
|
574cd304e39b74ee4dbd7a7902d0e941d899351f
|
tests/lbrynet_test_bot.py
|
tests/lbrynet_test_bot.py
|
import xmlrpclib
import json
from datetime import datetime
from time import sleep

from slackclient import SlackClient


def get_conf():
    f = open('testbot.conf', 'r')
    token = f.readline().replace('\n', '')
    channel = f.readline().replace('\n', '')
    f.close()
    return token, channel


def test_lbrynet(lbry, slack, channel):
    logfile = open('lbrynet_test_log.txt', 'a')
    try:
        path = lbry.get('testlbrynet')['path']
    except:
        msg = '[' + str(datetime.now()) + '] ! Failed to obtain LBRYnet test file'
        slack.rtm_connect()
        slack.rtm_send_message(channel, msg)
        print msg
        logfile.write(msg + '\n')
        # Bail out early: path is undefined if the download request failed.
        logfile.close()
        return
    file_name = path.split('/')[len(path.split('/'))-1]
    for n in range(10):
        files = [f for f in lbry.get_lbry_files() if (json.loads(f)['file_name'] == file_name) and json.loads(f)['completed']]
        if files:
            break
        sleep(30)
    if files:
        msg = '[' + str(datetime.now()) + '] LBRYnet download test successful'
        slack.rtm_connect()
        # slack.rtm_send_message(channel, msg)
        print msg
        logfile.write(msg + '\n')
    else:
        msg = '[' + str(datetime.now()) + '] ! Failed to obtain LBRYnet test file'
        slack.rtm_connect()
        slack.rtm_send_message(channel, msg)
        print msg
        logfile.write(msg + '\n')
    lbry.delete_lbry_file('test.jpg')
    logfile.close()


token, channel = get_conf()
sc = SlackClient(token)
sc.rtm_connect()
print 'Connected to slack'
daemon = xmlrpclib.ServerProxy("http://localhost:7080")
while True:
    test_lbrynet(daemon, sc, channel)
    sleep(600)
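The testbot.conf layout implied by get_conf() above is two lines, token then
channel; the values here are placeholders:

# testbot.conf
#   xoxb-0000-placeholder-token
#   C0XXXXXXX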
|
Test ability to download from lbrynet
|
Test ability to download from lbrynet
This tries to download a small image with a five minute timeout. After this, the bot waits ten minutes and tries again. Any failures get posted to slack.
|
Python
|
mit
|
lbryio/lbry,DaveA50/lbry,zestyr/lbry,lbryio/lbry,DaveA50/lbry,lbryio/lbry,zestyr/lbry,zestyr/lbry
|
Test ability to download from lbrynet
This tries to download a small image with a five minute timeout. After this, the bot waits ten minutes and tries again. Any failures get posted to slack.
|
import xmlrpclib
import json
from datetime import datetime
from time import sleep
from slackclient import SlackClient
def get_conf():
f = open('testbot.conf', 'r')
token = f.readline().replace('\n', '')
channel = f.readline().replace('\n', '')
f.close()
return token, channel
def test_lbrynet(lbry, slack, channel):
logfile = open('lbrynet_test_log.txt', 'a')
try:
path = lbry.get('testlbrynet')['path']
except:
msg = '[' + str(datetime.now()) + '] ! Failed to obtain LBRYnet test file'
slack.rtm_connect()
slack.rtm_send_message(channel, msg)
print msg
        logfile.write(msg + '\n')
        logfile.close()
        return
file_name = path.split('/')[len(path.split('/'))-1]
for n in range(10):
files = [f for f in lbry.get_lbry_files() if (json.loads(f)['file_name'] == file_name) and json.loads(f)['completed']]
if files:
break
sleep(30)
if files:
msg = '[' + str(datetime.now()) + '] LBRYnet download test successful'
slack.rtm_connect()
# slack.rtm_send_message(channel, msg)
print msg
logfile.write(msg + '\n')
else:
msg = '[' + str(datetime.now()) + '] ! Failed to obtain LBRYnet test file'
slack.rtm_connect()
slack.rtm_send_message(channel, msg)
print msg
logfile.write(msg + '\n')
lbry.delete_lbry_file('test.jpg')
logfile.close()
token, channel = get_conf()
sc = SlackClient(token)
sc.rtm_connect()
print 'Connected to slack'
daemon = xmlrpclib.ServerProxy("http://localhost:7080")
while True:
test_lbrynet(daemon, sc, channel)
sleep(600)
|
<commit_before><commit_msg>Test ability to download from lbrynet
This tries to download a small image with a five minute timeout. After this, the bot waits ten minutes and tries again. Any failures get posted to slack.<commit_after>
|
import xmlrpclib
import json
from datetime import datetime
from time import sleep
from slackclient import SlackClient
def get_conf():
f = open('testbot.conf', 'r')
token = f.readline().replace('\n', '')
channel = f.readline().replace('\n', '')
f.close()
return token, channel
def test_lbrynet(lbry, slack, channel):
logfile = open('lbrynet_test_log.txt', 'a')
try:
path = lbry.get('testlbrynet')['path']
except:
msg = '[' + str(datetime.now()) + '] ! Failed to obtain LBRYnet test file'
slack.rtm_connect()
slack.rtm_send_message(channel, msg)
print msg
        logfile.write(msg + '\n')
        logfile.close()
        return
file_name = path.split('/')[len(path.split('/'))-1]
for n in range(10):
files = [f for f in lbry.get_lbry_files() if (json.loads(f)['file_name'] == file_name) and json.loads(f)['completed']]
if files:
break
sleep(30)
if files:
msg = '[' + str(datetime.now()) + '] LBRYnet download test successful'
slack.rtm_connect()
# slack.rtm_send_message(channel, msg)
print msg
logfile.write(msg + '\n')
else:
msg = '[' + str(datetime.now()) + '] ! Failed to obtain LBRYnet test file'
slack.rtm_connect()
slack.rtm_send_message(channel, msg)
print msg
logfile.write(msg + '\n')
lbry.delete_lbry_file('test.jpg')
logfile.close()
token, channel = get_conf()
sc = SlackClient(token)
sc.rtm_connect()
print 'Connected to slack'
daemon = xmlrpclib.ServerProxy("http://localhost:7080")
while True:
test_lbrynet(daemon, sc, channel)
sleep(600)
|
Test ability to download from lbrynet
This tries to download a small image with a five minute timeout. After this, the bot waits ten minutes and tries again. Any failures get posted to slack.import xmlrpclib
import json
from datetime import datetime
from time import sleep
from slackclient import SlackClient
def get_conf():
f = open('testbot.conf', 'r')
token = f.readline().replace('\n', '')
channel = f.readline().replace('\n', '')
f.close()
return token, channel
def test_lbrynet(lbry, slack, channel):
logfile = open('lbrynet_test_log.txt', 'a')
try:
path = lbry.get('testlbrynet')['path']
except:
msg = '[' + str(datetime.now()) + '] ! Failed to obtain LBRYnet test file'
slack.rtm_connect()
slack.rtm_send_message(channel, msg)
print msg
        logfile.write(msg + '\n')
        logfile.close()
        return
file_name = path.split('/')[len(path.split('/'))-1]
for n in range(10):
files = [f for f in lbry.get_lbry_files() if (json.loads(f)['file_name'] == file_name) and json.loads(f)['completed']]
if files:
break
sleep(30)
if files:
msg = '[' + str(datetime.now()) + '] LBRYnet download test successful'
slack.rtm_connect()
# slack.rtm_send_message(channel, msg)
print msg
logfile.write(msg + '\n')
else:
msg = '[' + str(datetime.now()) + '] ! Failed to obtain LBRYnet test file'
slack.rtm_connect()
slack.rtm_send_message(channel, msg)
print msg
logfile.write(msg + '\n')
lbry.delete_lbry_file('test.jpg')
logfile.close()
token, channel = get_conf()
sc = SlackClient(token)
sc.rtm_connect()
print 'Connected to slack'
daemon = xmlrpclib.ServerProxy("http://localhost:7080")
while True:
test_lbrynet(daemon, sc, channel)
sleep(600)
|
<commit_before><commit_msg>Test ability to download from lbrynet
This tries to download a small image with a five minute timeout. After this, the bot waits ten minutes and tries again. Any failures get posted to slack.<commit_after>import xmlrpclib
import json
from datetime import datetime
from time import sleep
from slackclient import SlackClient
def get_conf():
f = open('testbot.conf', 'r')
token = f.readline().replace('\n', '')
channel = f.readline().replace('\n', '')
f.close()
return token, channel
def test_lbrynet(lbry, slack, channel):
logfile = open('lbrynet_test_log.txt', 'a')
try:
path = lbry.get('testlbrynet')['path']
except:
msg = '[' + str(datetime.now()) + '] ! Failed to obtain LBRYnet test file'
slack.rtm_connect()
slack.rtm_send_message(channel, msg)
print msg
        logfile.write(msg + '\n')
        logfile.close()
        return
file_name = path.split('/')[len(path.split('/'))-1]
for n in range(10):
files = [f for f in lbry.get_lbry_files() if (json.loads(f)['file_name'] == file_name) and json.loads(f)['completed']]
if files:
break
sleep(30)
if files:
msg = '[' + str(datetime.now()) + '] LBRYnet download test successful'
slack.rtm_connect()
# slack.rtm_send_message(channel, msg)
print msg
logfile.write(msg + '\n')
else:
msg = '[' + str(datetime.now()) + '] ! Failed to obtain LBRYnet test file'
slack.rtm_connect()
slack.rtm_send_message(channel, msg)
print msg
logfile.write(msg + '\n')
lbry.delete_lbry_file('test.jpg')
logfile.close()
token, channel = get_conf()
sc = SlackClient(token)
sc.rtm_connect()
print 'Connected to slack'
daemon = xmlrpclib.ServerProxy("http://localhost:7080")
while True:
test_lbrynet(daemon, sc, channel)
sleep(600)
|
|
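The record above polls every 30 seconds, up to 10 times, which is where its five minute budget comes from. A minimal sketch of the same poll-until-complete pattern with that budget made explicit; is_complete is a hypothetical caller-supplied callable standing in for the daemon query, not part of the lbrynet API:

from time import sleep, time

def wait_for_download(is_complete, timeout=300, interval=30):
    # Poll the hypothetical is_complete() until it succeeds or the
    # timeout (seconds) elapses; 300s / 30s matches the 10 x 30s loop above.
    deadline = time() + timeout
    while time() < deadline:
        if is_complete():
            return True
        sleep(interval)
    return False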
9df652a5997404452e9598640a4151035fd409cf
|
tests/test_cme_agriculture_calendar.py
|
tests/test_cme_agriculture_calendar.py
|
import pandas as pd
import pytz
from pandas_market_calendars.exchange_calendar_cme_agriculture import CMEAgricultureExchangeCalendar
def test_time_zone():
assert CMEAgricultureExchangeCalendar().tz == pytz.timezone('America/Chicago')
assert CMEAgricultureExchangeCalendar().name == 'CME_Agriculture'
def test_2016_holidays():
# good friday: 2016-03-25
# christmas (observed): 2016-12-26
# new years (observed): 2016-01-02
cme = CMEAgricultureExchangeCalendar()
good_dates = cme.valid_days('2016-01-01', '2016-12-31')
for date in ["2016-03-25", "2016-12-26", "2016-01-02"]:
assert pd.Timestamp(date, tz='UTC') not in good_dates
def test_2016_early_closes():
# mlk day: 2016-01-18
# presidents: 2016-02-15
# mem day: 2016-05-30
# july 4: 2016-07-04
# labor day: 2016-09-05
# thanksgiving: 2016-11-24
cme = CMEAgricultureExchangeCalendar()
schedule = cme.schedule('2016-01-01', '2016-12-31')
early_closes = cme.early_closes(schedule).index
for date in ["2016-01-18", "2016-02-15", "2016-05-30", "2016-07-04",
"2016-09-05", "2016-11-24"]:
dt = pd.Timestamp(date, tz='UTC')
assert dt in early_closes
market_close = schedule.loc[dt].market_close
assert market_close.tz_convert(cme.tz).hour == 12
def test_dec_jan():
cme = CMEAgricultureExchangeCalendar()
schedule = cme.schedule('2016-12-30', '2017-01-10')
assert schedule['market_open'].iloc[0] == pd.Timestamp('2016-12-29 23:01:00', tz='UTC')
assert schedule['market_close'].iloc[6] == pd.Timestamp('2017-01-10 23:00:00', tz='UTC')
|
Add test stub for agriculture
|
Add test stub for agriculture
|
Python
|
mit
|
rsheftel/pandas_market_calendars,rsheftel/pandas_market_calendars
|
Add test stub for agriculture
|
import pandas as pd
import pytz
from pandas_market_calendars.exchange_calendar_cme_agriculture import CMEAgricultureExchangeCalendar
def test_time_zone():
assert CMEAgricultureExchangeCalendar().tz == pytz.timezone('America/Chicago')
assert CMEAgricultureExchangeCalendar().name == 'CME_Agriculture'
def test_2016_holidays():
# good friday: 2016-03-25
# christmas (observed): 2016-12-26
# new years (observed): 2016-01-02
cme = CMEAgricultureExchangeCalendar()
good_dates = cme.valid_days('2016-01-01', '2016-12-31')
for date in ["2016-03-25", "2016-12-26", "2016-01-02"]:
assert pd.Timestamp(date, tz='UTC') not in good_dates
def test_2016_early_closes():
# mlk day: 2016-01-18
# presidents: 2016-02-15
# mem day: 2016-05-30
# july 4: 2016-07-04
# labor day: 2016-09-05
# thanksgiving: 2016-11-24
cme = CMEAgricultureExchangeCalendar()
schedule = cme.schedule('2016-01-01', '2016-12-31')
early_closes = cme.early_closes(schedule).index
for date in ["2016-01-18", "2016-02-15", "2016-05-30", "2016-07-04",
"2016-09-05", "2016-11-24"]:
dt = pd.Timestamp(date, tz='UTC')
assert dt in early_closes
market_close = schedule.loc[dt].market_close
assert market_close.tz_convert(cme.tz).hour == 12
def test_dec_jan():
cme = CMEAgricultureExchangeCalendar()
schedule = cme.schedule('2016-12-30', '2017-01-10')
assert schedule['market_open'].iloc[0] == pd.Timestamp('2016-12-29 23:01:00', tz='UTC')
assert schedule['market_close'].iloc[6] == pd.Timestamp('2017-01-10 23:00:00', tz='UTC')
|
<commit_before><commit_msg>Add test stub for agriculture<commit_after>
|
import pandas as pd
import pytz
from pandas_market_calendars.exchange_calendar_cme_agriculture import CMEAgricultureExchangeCalendar
def test_time_zone():
assert CMEAgricultureExchangeCalendar().tz == pytz.timezone('America/Chicago')
assert CMEAgricultureExchangeCalendar().name == 'CME_Agriculture'
def test_2016_holidays():
# good friday: 2016-03-25
# christmas (observed): 2016-12-26
# new years (observed): 2016-01-02
cme = CMEAgricultureExchangeCalendar()
good_dates = cme.valid_days('2016-01-01', '2016-12-31')
for date in ["2016-03-25", "2016-12-26", "2016-01-02"]:
assert pd.Timestamp(date, tz='UTC') not in good_dates
def test_2016_early_closes():
# mlk day: 2016-01-18
# presidents: 2016-02-15
# mem day: 2016-05-30
# july 4: 2016-07-04
# labor day: 2016-09-05
# thanksgiving: 2016-11-24
cme = CMEAgricultureExchangeCalendar()
schedule = cme.schedule('2016-01-01', '2016-12-31')
early_closes = cme.early_closes(schedule).index
for date in ["2016-01-18", "2016-02-15", "2016-05-30", "2016-07-04",
"2016-09-05", "2016-11-24"]:
dt = pd.Timestamp(date, tz='UTC')
assert dt in early_closes
market_close = schedule.loc[dt].market_close
assert market_close.tz_convert(cme.tz).hour == 12
def test_dec_jan():
cme = CMEAgricultureExchangeCalendar()
schedule = cme.schedule('2016-12-30', '2017-01-10')
assert schedule['market_open'].iloc[0] == pd.Timestamp('2016-12-29 23:01:00', tz='UTC')
assert schedule['market_close'].iloc[6] == pd.Timestamp('2017-01-10 23:00:00', tz='UTC')
|
Add test stub for agricultureimport pandas as pd
import pytz
from pandas_market_calendars.exchange_calendar_cme_agriculture import CMEAgricultureExchangeCalendar
def test_time_zone():
assert CMEAgricultureExchangeCalendar().tz == pytz.timezone('America/Chicago')
assert CMEAgricultureExchangeCalendar().name == 'CME_Agriculture'
def test_2016_holidays():
# good friday: 2016-03-25
# christmas (observed): 2016-12-26
# new years (observed): 2016-01-02
cme = CMEAgricultureExchangeCalendar()
good_dates = cme.valid_days('2016-01-01', '2016-12-31')
for date in ["2016-03-25", "2016-12-26", "2016-01-02"]:
assert pd.Timestamp(date, tz='UTC') not in good_dates
def test_2016_early_closes():
# mlk day: 2016-01-18
# presidents: 2016-02-15
# mem day: 2016-05-30
# july 4: 2016-07-04
# labor day: 2016-09-05
# thanksgiving: 2016-11-24
cme = CMEAgricultureExchangeCalendar()
schedule = cme.schedule('2016-01-01', '2016-12-31')
early_closes = cme.early_closes(schedule).index
for date in ["2016-01-18", "2016-02-15", "2016-05-30", "2016-07-04",
"2016-09-05", "2016-11-24"]:
dt = pd.Timestamp(date, tz='UTC')
assert dt in early_closes
market_close = schedule.loc[dt].market_close
assert market_close.tz_convert(cme.tz).hour == 12
def test_dec_jan():
cme = CMEAgricultureExchangeCalendar()
schedule = cme.schedule('2016-12-30', '2017-01-10')
assert schedule['market_open'].iloc[0] == pd.Timestamp('2016-12-29 23:01:00', tz='UTC')
assert schedule['market_close'].iloc[6] == pd.Timestamp('2017-01-10 23:00:00', tz='UTC')
|
<commit_before><commit_msg>Add test stub for agriculture<commit_after>import pandas as pd
import pytz
from pandas_market_calendars.exchange_calendar_cme_agriculture import CMEAgricultureExchangeCalendar
def test_time_zone():
assert CMEAgricultureExchangeCalendar().tz == pytz.timezone('America/Chicago')
assert CMEAgricultureExchangeCalendar().name == 'CME_Agriculture'
def test_2016_holidays():
# good friday: 2016-03-25
# christmas (observed): 2016-12-26
# new years (observed): 2016-01-02
cme = CMEAgricultureExchangeCalendar()
good_dates = cme.valid_days('2016-01-01', '2016-12-31')
for date in ["2016-03-25", "2016-12-26", "2016-01-02"]:
assert pd.Timestamp(date, tz='UTC') not in good_dates
def test_2016_early_closes():
# mlk day: 2016-01-18
# presidents: 2016-02-15
# mem day: 2016-05-30
# july 4: 2016-07-04
# labor day: 2016-09-05
# thanksgiving: 2016-11-24
cme = CMEAgricultureExchangeCalendar()
schedule = cme.schedule('2016-01-01', '2016-12-31')
early_closes = cme.early_closes(schedule).index
for date in ["2016-01-18", "2016-02-15", "2016-05-30", "2016-07-04",
"2016-09-05", "2016-11-24"]:
dt = pd.Timestamp(date, tz='UTC')
assert dt in early_closes
market_close = schedule.loc[dt].market_close
assert market_close.tz_convert(cme.tz).hour == 12
def test_dec_jan():
cme = CMEAgricultureExchangeCalendar()
schedule = cme.schedule('2016-12-30', '2017-01-10')
assert schedule['market_open'].iloc[0] == pd.Timestamp('2016-12-29 23:01:00', tz='UTC')
assert schedule['market_close'].iloc[6] == pd.Timestamp('2017-01-10 23:00:00', tz='UTC')
|
|
e5ceeb4b17525acb730e7440bc5c56c79eb7c32d
|
thread_safe_print_test.py
|
thread_safe_print_test.py
|
import thread_safe_print
from six.moves import queue
import thread_pool
import io_expectation as expect
import sys
import unittest
class TestThreadSafePrint(unittest.TestCase):
def _thread1(self, thread1_turn, thread2_turn):
thread1_turn.get()
sys.stdout.write('Thread 1 starts, ')
thread2_turn.put(True)
thread1_turn.get()
sys.stdout.write('thread 1 finishes.')
thread2_turn.put(True)
thread1_turn.get()
sys.stdout.write('\n')
thread2_turn.put(True)
def _thread2(self, thread1_turn, thread2_turn):
thread2_turn.get()
sys.stdout.write('Thread 2 starts, ')
thread1_turn.put(True)
thread2_turn.get()
sys.stdout.write('thread 2 finishes.')
thread1_turn.put(True)
thread2_turn.get()
sys.stdout.write('\n')
def testWrite(self):
mock_io = expect.ExpectedInputOutput()
sys.stdout = mock_io
thread1_turn = queue.Queue()
thread2_turn = queue.Queue()
thread1_turn.put(True)
with thread_safe_print.ThreadSafePrint():
with thread_pool.ThreadPool(2) as pool:
pool.add(self._thread1, thread1_turn, thread2_turn)
pool.add(self._thread2, thread1_turn, thread2_turn)
mock_io.assert_output_was([
'Thread 1 starts, thread 1 finishes.\n',
'Thread 2 starts, thread 2 finishes.\n'
])
|
Add a unit-test for thread_safe_print.py.
|
Add a unit-test for thread_safe_print.py.
|
Python
|
mit
|
graveljp/smugcli
|
Add a unit-test for thread_safe_print.py.
|
import thread_safe_print
from six.moves import queue
import thread_pool
import io_expectation as expect
import sys
import unittest
class TestThreadSafePrint(unittest.TestCase):
def _thread1(self, thread1_turn, thread2_turn):
thread1_turn.get()
sys.stdout.write('Thread 1 starts, ')
thread2_turn.put(True)
thread1_turn.get()
sys.stdout.write('thread 1 finishes.')
thread2_turn.put(True)
thread1_turn.get()
sys.stdout.write('\n')
thread2_turn.put(True)
def _thread2(self, thread1_turn, thread2_turn):
thread2_turn.get()
sys.stdout.write('Thread 2 starts, ')
thread1_turn.put(True)
thread2_turn.get()
sys.stdout.write('thread 2 finishes.')
thread1_turn.put(True)
thread2_turn.get()
sys.stdout.write('\n')
def testWrite(self):
mock_io = expect.ExpectedInputOutput()
sys.stdout = mock_io
thread1_turn = queue.Queue()
thread2_turn = queue.Queue()
thread1_turn.put(True)
with thread_safe_print.ThreadSafePrint():
with thread_pool.ThreadPool(2) as pool:
pool.add(self._thread1, thread1_turn, thread2_turn)
pool.add(self._thread2, thread1_turn, thread2_turn)
mock_io.assert_output_was([
'Thread 1 starts, thread 1 finishes.\n',
'Thread 2 starts, thread 2 finishes.\n'
])
|
<commit_before><commit_msg>Add a unit-test for thread_safe_print.py.<commit_after>
|
import thread_safe_print
from six.moves import queue
import thread_pool
import io_expectation as expect
import sys
import unittest
class TestThreadSafePrint(unittest.TestCase):
def _thread1(self, thread1_turn, thread2_turn):
thread1_turn.get()
sys.stdout.write('Thread 1 starts, ')
thread2_turn.put(True)
thread1_turn.get()
sys.stdout.write('thread 1 finishes.')
thread2_turn.put(True)
thread1_turn.get()
sys.stdout.write('\n')
thread2_turn.put(True)
def _thread2(self, thread1_turn, thread2_turn):
thread2_turn.get()
sys.stdout.write('Thread 2 starts, ')
thread1_turn.put(True)
thread2_turn.get()
sys.stdout.write('thread 2 finishes.')
thread1_turn.put(True)
thread2_turn.get()
sys.stdout.write('\n')
def testWrite(self):
mock_io = expect.ExpectedInputOutput()
sys.stdout = mock_io
thread1_turn = queue.Queue()
thread2_turn = queue.Queue()
thread1_turn.put(True)
with thread_safe_print.ThreadSafePrint():
with thread_pool.ThreadPool(2) as pool:
pool.add(self._thread1, thread1_turn, thread2_turn)
pool.add(self._thread2, thread1_turn, thread2_turn)
mock_io.assert_output_was([
'Thread 1 starts, thread 1 finishes.\n',
'Thread 2 starts, thread 2 finishes.\n'
])
|
Add a unit-test for thread_safe_print.py.import thread_safe_print
from six.moves import queue
import thread_pool
import io_expectation as expect
import sys
import unittest
class TestThreadSafePrint(unittest.TestCase):
def _thread1(self, thread1_turn, thread2_turn):
thread1_turn.get()
sys.stdout.write('Thread 1 starts, ')
thread2_turn.put(True)
thread1_turn.get()
sys.stdout.write('thread 1 finishes.')
thread2_turn.put(True)
thread1_turn.get()
sys.stdout.write('\n')
thread2_turn.put(True)
def _thread2(self, thread1_turn, thread2_turn):
thread2_turn.get()
sys.stdout.write('Thread 2 starts, ')
thread1_turn.put(True)
thread2_turn.get()
sys.stdout.write('thread 2 finishes.')
thread1_turn.put(True)
thread2_turn.get()
sys.stdout.write('\n')
def testWrite(self):
mock_io = expect.ExpectedInputOutput()
sys.stdout = mock_io
thread1_turn = queue.Queue()
thread2_turn = queue.Queue()
thread1_turn.put(True)
with thread_safe_print.ThreadSafePrint():
with thread_pool.ThreadPool(2) as pool:
pool.add(self._thread1, thread1_turn, thread2_turn)
pool.add(self._thread2, thread1_turn, thread2_turn)
mock_io.assert_output_was([
'Thread 1 starts, thread 1 finishes.\n',
'Thread 2 starts, thread 2 finishes.\n'
])
|
<commit_before><commit_msg>Add a unit-test for thread_safe_print.py.<commit_after>import thread_safe_print
from six.moves import queue
import thread_pool
import io_expectation as expect
import sys
import unittest
class TestThreadSafePrint(unittest.TestCase):
def _thread1(self, thread1_turn, thread2_turn):
thread1_turn.get()
sys.stdout.write('Thread 1 starts, ')
thread2_turn.put(True)
thread1_turn.get()
sys.stdout.write('thread 1 finishes.')
thread2_turn.put(True)
thread1_turn.get()
sys.stdout.write('\n')
thread2_turn.put(True)
def _thread2(self, thread1_turn, thread2_turn):
thread2_turn.get()
sys.stdout.write('Thread 2 starts, ')
thread1_turn.put(True)
thread2_turn.get()
sys.stdout.write('thread 2 finishes.')
thread1_turn.put(True)
thread2_turn.get()
sys.stdout.write('\n')
def testWrite(self):
mock_io = expect.ExpectedInputOutput()
sys.stdout = mock_io
thread1_turn = queue.Queue()
thread2_turn = queue.Queue()
thread1_turn.put(True)
with thread_safe_print.ThreadSafePrint():
with thread_pool.ThreadPool(2) as pool:
pool.add(self._thread1, thread1_turn, thread2_turn)
pool.add(self._thread2, thread1_turn, thread2_turn)
mock_io.assert_output_was([
'Thread 1 starts, thread 1 finishes.\n',
'Thread 2 starts, thread 2 finishes.\n'
])
|
|
abac33bc2c8713f5187529e13557ea6b58472079
|
Problems/shapeAreaCF.py
|
Problems/shapeAreaCF.py
|
def shapeArea(n):
if n < 1 or n > 10**4:
raise ValueError
if n == 1:
return 1
else:
innerArea = shapeArea(n - 1)
return innerArea + (n - 1) * 4
def main():
tests = [-1, 10**5, 1, 2, 3, 4]
results = [False, False, 1, 5, 13, 25]
for i, t in enumerate(tests):
try:
r = shapeArea(t)
if r == results[i]:
print("PASSED: shapeArea({}) returned {}".format(t, r))
else:
print("FAILED: shapeArea({}) returned\
{}, vs {}".format(t, r, results[i]))
except ValueError:
print("PASSED ValueError test")
if __name__ == '__main__':
main()
|
Add code fight shape area solution
|
Add code fight shape area solution
|
Python
|
mit
|
HKuz/Test_Code
|
Add code fight shape area solution
|
def shapeArea(n):
if n < 1 or n > 10**4:
raise ValueError
if n == 1:
return 1
else:
innerArea = shapeArea(n - 1)
return innerArea + (n - 1) * 4
def main():
tests = [-1, 10**5, 1, 2, 3, 4]
results = [False, False, 1, 5, 13, 25]
for i, t in enumerate(tests):
try:
r = shapeArea(t)
if r == results[i]:
print("PASSED: shapeArea({}) returned {}".format(t, r))
else:
print("FAILED: shapeArea({}) returned\
{}, vs {}".format(t, r, results[i]))
except ValueError:
print("PASSED ValueError test")
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add code fight shape area solution<commit_after>
|
def shapeArea(n):
if n < 1 or n > 10**4:
raise ValueError
if n == 1:
return 1
else:
innerArea = shapeArea(n - 1)
return innerArea + (n - 1) * 4
def main():
tests = [-1, 10**5, 1, 2, 3, 4]
results = [False, False, 1, 5, 13, 25]
for i, t in enumerate(tests):
try:
r = shapeArea(t)
if r == results[i]:
print("PASSED: shapeArea({}) returned {}".format(t, r))
else:
print("FAILED: shapeArea({}) returned\
{}, vs {}".format(t, r, results[i]))
except ValueError:
print("PASSED ValueError test")
if __name__ == '__main__':
main()
|
Add code fight shape area solutiondef shapeArea(n):
if n < 1 or n > 10**4:
raise ValueError
if n == 1:
return 1
else:
innerArea = shapeArea(n - 1)
return innerArea + (n - 1) * 4
def main():
tests = [-1, 10**5, 1, 2, 3, 4]
results = [False, False, 1, 5, 13, 25]
for i, t in enumerate(tests):
try:
r = shapeArea(t)
if r == results[i]:
print("PASSED: shapeArea({}) returned {}".format(t, r))
else:
print("FAILED: shapeArea({}) returned\
{}, vs {}".format(t, r, results[i]))
except ValueError:
print("PASSED ValueError test")
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add code fight shape area solution<commit_after>def shapeArea(n):
if n < 1 or n > 10**4:
raise ValueError
if n == 1:
return 1
else:
innerArea = shapeArea(n - 1)
return innerArea + (n - 1) * 4
def main():
tests = [-1, 10**5, 1, 2, 3, 4]
results = [False, False, 1, 5, 13, 25]
for i, t in enumerate(tests):
try:
r = shapeArea(t)
if r == results[i]:
print("PASSED: shapeArea({}) returned {}".format(t, r))
else:
print("FAILED: shapeArea({}) returned\
{}, vs {}".format(t, r, results[i]))
except ValueError:
print("PASSED ValueError test")
if __name__ == '__main__':
main()
|
|
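The recursion in the record above telescopes: shapeArea(n) = 1 + 4*(1 + 2 + ... + (n-1)) = 2n^2 - 2n + 1, which matches the tested values 1, 5, 13, 25. A constant-time sketch under the same input bounds:

def shape_area_closed_form(n):
    # Closed form of the recurrence above: 1 + 2*n*(n - 1)
    if n < 1 or n > 10**4:
        raise ValueError
    return 2 * n * n - 2 * n + 1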
1bb3a5276e46363cbf2ad88a5f97868f65a99b30
|
tutorials/dataframe/tdf003_profiles.py
|
tutorials/dataframe/tdf003_profiles.py
|
## \file
## \ingroup tutorial_tdataframe
## \notebook -nodraw
## This tutorial illustrates how to use TProfiles in combination with the
## TDataFrame. See the documentation of TProfile and TProfile2D to better
## understand the analogy of this code with the example one.
##
## \macro_code
##
## \date February 2017
## \author Danilo Piparo
import ROOT
TDataFrame = ROOT.ROOT.Experimental.TDataFrame
# A simple helper function to fill a test tree: this makes the example
# stand-alone.
def fill_tree(treeName, fileName):
d = TDataFrame(25000)
d.Define("px", "gRandom->Gaus()")\
.Define("py", "gRandom->Gaus()")\
.Define("pz", "sqrt(px * px + py * py)")\
.Snapshot(treeName, fileName)
# We prepare an input tree to run on
fileName = "tdf003_profiles.root"
treeName = "myTree"
fill_tree(treeName, fileName)
# We read the tree from the file and create a TDataFrame.
columns = ROOT.vector('string')()
columns.push_back("px")
columns.push_back("py")
columns.push_back("pz")
d = TDataFrame(treeName, fileName, columns)
# Create the profiles
hprof1d = d.Profile1D(("hprof1d", "Profile of pz versus px", 64, -4, 4))
hprof2d = d.Profile2D(("hprof2d", "Profile of pz versus px and py", 40, -4, 4, 40, -4, 4, 0, 20))
# And Draw
c1 = ROOT.TCanvas("c1", "Profile histogram example", 200, 10, 700, 500)
hprof1d.Draw()
c2 = ROOT.TCanvas("c2", "Profile2D histogram example", 200, 10, 700, 500)
c2.cd()
hprof2d.Draw()
|
Add a profiles tutorial in Python
|
[TDF] Add a profiles tutorial in Python
|
Python
|
lgpl-2.1
|
olifre/root,karies/root,olifre/root,karies/root,zzxuanyuan/root,olifre/root,karies/root,karies/root,olifre/root,zzxuanyuan/root,zzxuanyuan/root,zzxuanyuan/root,zzxuanyuan/root,karies/root,root-mirror/root,root-mirror/root,root-mirror/root,olifre/root,karies/root,zzxuanyuan/root,olifre/root,karies/root,olifre/root,karies/root,karies/root,root-mirror/root,zzxuanyuan/root,olifre/root,root-mirror/root,root-mirror/root,olifre/root,zzxuanyuan/root,olifre/root,root-mirror/root,olifre/root,zzxuanyuan/root,zzxuanyuan/root,zzxuanyuan/root,zzxuanyuan/root,karies/root,root-mirror/root,karies/root,root-mirror/root,root-mirror/root,root-mirror/root
|
[TDF] Add a profiles tutorial in Python
|
## \file
## \ingroup tutorial_tdataframe
## \notebook -nodraw
## This tutorial illustrates how to use TProfiles in combination with the
## TDataFrame. See the documentation of TProfile and TProfile2D to better
## understand the analogy of this code with the example one.
##
## \macro_code
##
## \date February 2017
## \author Danilo Piparo
import ROOT
TDataFrame = ROOT.ROOT.Experimental.TDataFrame
# A simple helper function to fill a test tree: this makes the example
# stand-alone.
def fill_tree(treeName, fileName):
d = TDataFrame(25000)
d.Define("px", "gRandom->Gaus()")\
.Define("py", "gRandom->Gaus()")\
.Define("pz", "sqrt(px * px + py * py)")\
.Snapshot(treeName, fileName)
# We prepare an input tree to run on
fileName = "tdf003_profiles.root"
treeName = "myTree"
fill_tree(treeName, fileName)
# We read the tree from the file and create a TDataFrame.
columns = ROOT.vector('string')()
columns.push_back("px")
columns.push_back("py")
columns.push_back("pz")
d = TDataFrame(treeName, fileName, columns)
# Create the profiles
hprof1d = d.Profile1D(("hprof1d", "Profile of pz versus px", 64, -4, 4))
hprof2d = d.Profile2D(("hprof2d", "Profile of pz versus px and py", 40, -4, 4, 40, -4, 4, 0, 20))
# And Draw
c1 = ROOT.TCanvas("c1", "Profile histogram example", 200, 10, 700, 500)
hprof1d.Draw()
c2 = ROOT.TCanvas("c2", "Profile2D histogram example", 200, 10, 700, 500)
c2.cd()
hprof2d.Draw()
|
<commit_before><commit_msg>[TDF] Add a profiles tutorial in Python<commit_after>
|
## \file
## \ingroup tutorial_tdataframe
## \notebook -nodraw
## This tutorial illustrates how to use TProfiles in combination with the
## TDataFrame. See the documentation of TProfile and TProfile2D to better
## understand the analogy of this code with the example one.
##
## \macro_code
##
## \date February 2017
## \author Danilo Piparo
import ROOT
TDataFrame = ROOT.ROOT.Experimental.TDataFrame
# A simple helper function to fill a test tree: this makes the example
# stand-alone.
def fill_tree(treeName, fileName):
d = TDataFrame(25000)
d.Define("px", "gRandom->Gaus()")\
.Define("py", "gRandom->Gaus()")\
.Define("pz", "sqrt(px * px + py * py)")\
.Snapshot(treeName, fileName)
# We prepare an input tree to run on
fileName = "tdf003_profiles.root"
treeName = "myTree"
fill_tree(treeName, fileName)
# We read the tree from the file and create a TDataFrame.
columns = ROOT.vector('string')()
columns.push_back("px")
columns.push_back("py")
columns.push_back("pz")
d = TDataFrame(treeName, fileName, columns)
# Create the profiles
hprof1d = d.Profile1D(("hprof1d", "Profile of pz versus px", 64, -4, 4))
hprof2d = d.Profile2D(("hprof2d", "Profile of pz versus px and py", 40, -4, 4, 40, -4, 4, 0, 20))
# And Draw
c1 = ROOT.TCanvas("c1", "Profile histogram example", 200, 10, 700, 500)
hprof1d.Draw()
c2 = ROOT.TCanvas("c2", "Profile2D histogram example", 200, 10, 700, 500)
c2.cd()
hprof2d.Draw()
|
[TDF] Add a profiles tutorial in Python## \file
## \ingroup tutorial_tdataframe
## \notebook -nodraw
## This tutorial illustrates how to use TProfiles in combination with the
## TDataFrame. See the documentation of TProfile and TProfile2D to better
## understand the analogy of this code with the example one.
##
## \macro_code
##
## \date February 2017
## \author Danilo Piparo
import ROOT
TDataFrame = ROOT.ROOT.Experimental.TDataFrame
# A simple helper function to fill a test tree: this makes the example
# stand-alone.
def fill_tree(treeName, fileName):
d = TDataFrame(25000)
d.Define("px", "gRandom->Gaus()")\
.Define("py", "gRandom->Gaus()")\
.Define("pz", "sqrt(px * px + py * py)")\
.Snapshot(treeName, fileName)
# We prepare an input tree to run on
fileName = "tdf003_profiles.root"
treeName = "myTree"
fill_tree(treeName, fileName)
# We read the tree from the file and create a TDataFrame.
columns = ROOT.vector('string')()
columns.push_back("px")
columns.push_back("py")
columns.push_back("pz")
d = TDataFrame(treeName, fileName, columns)
# Create the profiles
hprof1d = d.Profile1D(("hprof1d", "Profile of pz versus px", 64, -4, 4))
hprof2d = d.Profile2D(("hprof2d", "Profile of pz versus px and py", 40, -4, 4, 40, -4, 4, 0, 20))
# And Draw
c1 = ROOT.TCanvas("c1", "Profile histogram example", 200, 10, 700, 500)
hprof1d.Draw()
c2 = ROOT.TCanvas("c2", "Profile2D histogram example", 200, 10, 700, 500)
c2.cd()
hprof2d.Draw()
|
<commit_before><commit_msg>[TDF] Add a profiles tutorial in Python<commit_after>## \file
## \ingroup tutorial_tdataframe
## \notebook -nodraw
## This tutorial illustrates how to use TProfiles in combination with the
## TDataFrame. See the documentation of TProfile and TProfile2D to better
## understand the analogy of this code with the example one.
##
## \macro_code
##
## \date February 2017
## \author Danilo Piparo
import ROOT
TDataFrame = ROOT.ROOT.Experimental.TDataFrame
# A simple helper function to fill a test tree: this makes the example
# stand-alone.
def fill_tree(treeName, fileName):
d = TDataFrame(25000)
d.Define("px", "gRandom->Gaus()")\
.Define("py", "gRandom->Gaus()")\
.Define("pz", "sqrt(px * px + py * py)")\
.Snapshot(treeName, fileName)
# We prepare an input tree to run on
fileName = "tdf003_profiles.root"
treeName = "myTree"
fill_tree(treeName, fileName)
# We read the tree from the file and create a TDataFrame.
columns = ROOT.vector('string')()
columns.push_back("px")
columns.push_back("py")
columns.push_back("pz")
d = TDataFrame(treeName, fileName, columns)
# Create the profiles
hprof1d = d.Profile1D(("hprof1d", "Profile of pz versus px", 64, -4, 4))
hprof2d = d.Profile2D(("hprof2d", "Profile of pz versus px and py", 40, -4, 4, 40, -4, 4, 0, 20))
# And Draw
c1 = ROOT.TCanvas("c1", "Profile histogram example", 200, 10, 700, 500)
hprof1d.Draw()
c2 = ROOT.TCanvas("c2", "Profile2D histogram example", 200, 10, 700, 500)
c2.cd()
hprof2d.Draw()
|
|
adc17c25b6602818defec29c10c862a73a2a23bf
|
python/src/singleNumber/testSingleNumber.py
|
python/src/singleNumber/testSingleNumber.py
|
import unittest
from singleNumber import Solution
class TestSingleNumber(unittest.TestCase):
def setUp(self):
self.solution = Solution()
def testArrayOfSingleIntReturnsOnlyValue(self):
A = [1]
self.assertEqual(self.solution.singleNumber(A), 1)
def testArrayOfTwoTypesOfNumbersReturnsOnlySingleValueNumber(self):
A = [2, 2, 1]
self.assertEqual(self.solution.singleNumber(A), 1)
def testArrayOfThreeTypesOfNumbersReturnsOnlySingleValueNumber(self):
A = [2, 3, 1, 3, 2]
self.assertEqual(self.solution.singleNumber(A), 1)
if __name__ == '__main__':
unittest.main()
|
Add 3 test cases for singleNumber problem.
|
Add 3 test cases for singleNumber problem.
|
Python
|
mit
|
TheGhostHuCodes/leetCode
|
Add 3 test cases for singleNumber problem.
|
import unittest
from singleNumber import Solution
class TestSingleNumber(unittest.TestCase):
def setUp(self):
self.solution = Solution()
def testArrayOfSingleIntReturnsOnlyValue(self):
A = [1]
self.assertEqual(self.solution.singleNumber(A), 1)
def testArrayOfTwoTypesOfNumbersReturnsOnlySingleValueNumber(self):
A = [2, 2, 1]
self.assertEqual(self.solution.singleNumber(A), 1)
def testArrayOfThreeTypesOfNumbersReturnsOnlySingleValueNumber(self):
A = [2, 3, 1, 3, 2]
self.assertEqual(self.solution.singleNumber(A), 1)
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add 3 test cases for singleNumber problem.<commit_after>
|
import unittest
from singleNumber import Solution
class TestSingleNumber(unittest.TestCase):
def setUp(self):
self.solution = Solution()
def testArrayOfSingleIntReturnsOnlyValue(self):
A = [1]
self.assertEqual(self.solution.singleNumber(A), 1)
def testArrayOfTwoTypesOfNumbersReturnsOnlySingleValueNumber(self):
A = [2, 2, 1]
self.assertEqual(self.solution.singleNumber(A), 1)
def testArrayOfThreeTypesOfNumbersReturnsOnlySingleValueNumber(self):
A = [2, 3, 1, 3, 2]
self.assertEqual(self.solution.singleNumber(A), 1)
if __name__ == '__main__':
unittest.main()
|
Add 3 test cases for singleNumber problem.import unittest
from singleNumber import Solution
class TestSingleNumber(unittest.TestCase):
def setUp(self):
self.solution = Solution()
def testArrayOfSingleIntReturnsOnlyValue(self):
A = [1]
self.assertEqual(self.solution.singleNumber(A), 1)
def testArrayOfTwoTypesOfNumbersReturnsOnlySingleValueNumber(self):
A = [2, 2, 1]
self.assertEqual(self.solution.singleNumber(A), 1)
def testArrayOfThreeTypesOfNumbersReturnsOnlySingleValueNumber(self):
A = [2, 3, 1, 3, 2]
self.assertEqual(self.solution.singleNumber(A), 1)
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add 3 test cases for singleNumber problem.<commit_after>import unittest
from singleNumber import Solution
class TestSingleNumber(unittest.TestCase):
def setUp(self):
self.solution = Solution()
def testArrayOfSingleIntReturnsOnlyValue(self):
A = [1]
self.assertEqual(self.solution.singleNumber(A), 1)
def testArrayOfTwoTypesOfNumbersReturnsOnlySingleValueNumber(self):
A = [2, 2, 1]
self.assertEqual(self.solution.singleNumber(A), 1)
def testArrayOfThreeTypesOfNumbersReturnsOnlySingleValueNumber(self):
A = [2, 3, 1, 3, 2]
self.assertEqual(self.solution.singleNumber(A), 1)
if __name__ == '__main__':
unittest.main()
|
|
f9537e9d38d50d3fbe95132629577d07f961b1c7
|
scikits/image/io/_plugins/test_freeimage.py
|
scikits/image/io/_plugins/test_freeimage.py
|
import os
import scikits.image as si
import scikits.image.io as sio
sio.use_plugin('matplotlib', 'imshow')
sio.use_plugin('freeimage', 'imread')
img = sio.imread(os.path.join(si.data_dir, 'color.png'))
sio.imshow(img)
sio.show()
|
Add a free standing test for freeimage.
|
ENH: Add a free standing test for freeimage.
|
Python
|
bsd-3-clause
|
SamHames/scikit-image,Britefury/scikit-image,warmspringwinds/scikit-image,newville/scikit-image,ofgulban/scikit-image,emmanuelle/scikits.image,pratapvardhan/scikit-image,rjeli/scikit-image,robintw/scikit-image,robintw/scikit-image,Midafi/scikit-image,ClinicalGraphics/scikit-image,vighneshbirodkar/scikit-image,michaelaye/scikit-image,vighneshbirodkar/scikit-image,rjeli/scikit-image,michaelpacer/scikit-image,chintak/scikit-image,ajaybhat/scikit-image,bennlich/scikit-image,michaelaye/scikit-image,jwiggins/scikit-image,blink1073/scikit-image,Hiyorimi/scikit-image,emon10005/scikit-image,bennlich/scikit-image,pratapvardhan/scikit-image,SamHames/scikit-image,chintak/scikit-image,emmanuelle/scikits.image,chintak/scikit-image,ajaybhat/scikit-image,chriscrosscutler/scikit-image,keflavich/scikit-image,bsipocz/scikit-image,juliusbierk/scikit-image,emon10005/scikit-image,SamHames/scikit-image,jwiggins/scikit-image,oew1v07/scikit-image,ClinicalGraphics/scikit-image,emmanuelle/scikits.image,keflavich/scikit-image,GaZ3ll3/scikit-image,dpshelio/scikit-image,almarklein/scikit-image,GaelVaroquaux/scikits.image,almarklein/scikit-image,chriscrosscutler/scikit-image,almarklein/scikit-image,GaelVaroquaux/scikits.image,ofgulban/scikit-image,paalge/scikit-image,Hiyorimi/scikit-image,paalge/scikit-image,warmspringwinds/scikit-image,GaZ3ll3/scikit-image,youprofit/scikit-image,Midafi/scikit-image,almarklein/scikit-image,bsipocz/scikit-image,ofgulban/scikit-image,WarrenWeckesser/scikits-image,vighneshbirodkar/scikit-image,SamHames/scikit-image,blink1073/scikit-image,chintak/scikit-image,dpshelio/scikit-image,rjeli/scikit-image,emmanuelle/scikits.image,Britefury/scikit-image,WarrenWeckesser/scikits-image,michaelpacer/scikit-image,youprofit/scikit-image,paalge/scikit-image,newville/scikit-image,juliusbierk/scikit-image,oew1v07/scikit-image
|
ENH: Add a free standing test for freeimage.
|
import os
import scikits.image as si
import scikits.image.io as sio
sio.use_plugin('matplotlib', 'imshow')
sio.use_plugin('freeimage', 'imread')
img = sio.imread(os.path.join(si.data_dir, 'color.png'))
sio.imshow(img)
sio.show()
|
<commit_before><commit_msg>ENH: Add a free standing test for freeimage.<commit_after>
|
import os
import scikits.image as si
import scikits.image.io as sio
sio.use_plugin('matplotlib', 'imshow')
sio.use_plugin('freeimage', 'imread')
img = sio.imread(os.path.join(si.data_dir, 'color.png'))
sio.imshow(img)
sio.show()
|
ENH: Add a free standing test for freeimage.import os
import scikits.image as si
import scikits.image.io as sio
sio.use_plugin('matplotlib', 'imshow')
sio.use_plugin('freeimage', 'imread')
img = sio.imread(os.path.join(si.data_dir, 'color.png'))
sio.imshow(img)
sio.show()
|
<commit_before><commit_msg>ENH: Add a free standing test for freeimage.<commit_after>import os
import scikits.image as si
import scikits.image.io as sio
sio.use_plugin('matplotlib', 'imshow')
sio.use_plugin('freeimage', 'imread')
img = sio.imread(os.path.join(si.data_dir, 'color.png'))
sio.imshow(img)
sio.show()
|
|
feb2dd41d4a7368f2a056375b9e9a867a3d5b42e
|
vsub/settings/gunicorn.py
|
vsub/settings/gunicorn.py
|
"""gunicorn WSGI server configuration."""
# Based on https://github.com/rdegges/django-skel/blob/master/gunicorn.py.ini.
import os
from multiprocessing import cpu_count
def max_workers():
return cpu_count()
bind = '0.0.0.0:' + os.environ.get('PORT', '8000')
max_requests = 1000
worker_class = 'gevent'
workers = max_workers()
|
Add a configuration file for Gunicorn.
|
Add a configuration file for Gunicorn.
|
Python
|
mit
|
PrecisionMojo/pm-www,PrecisionMojo/pm-www
|
Add a configuration file for Gunicorn.
|
"""gunicorn WSGI server configuration."""
# Based on https://github.com/rdegges/django-skel/blob/master/gunicorn.py.ini.
import os
from multiprocessing import cpu_count
def max_workers():
return cpu_count()
bind = '0.0.0.0:' + os.environ.get('PORT', '8000')
max_requests = 1000
worker_class = 'gevent'
workers = max_workers()
|
<commit_before><commit_msg>Add a configuration file for Gunicorn.<commit_after>
|
"""gunicorn WSGI server configuration."""
# Based on https://github.com/rdegges/django-skel/blob/master/gunicorn.py.ini.
import os
from multiprocessing import cpu_count
def max_workers():
return cpu_count()
bind = '0.0.0.0:' + os.environ.get('PORT', '8000')
max_requests = 1000
worker_class = 'gevent'
workers = max_workers()
|
Add a configuration file for Gunicorn."""gunicorn WSGI server configuration."""
# Based on https://github.com/rdegges/django-skel/blob/master/gunicorn.py.ini.
import os
from multiprocessing import cpu_count
def max_workers():
return cpu_count()
bind = '0.0.0.0:' + os.environ.get('PORT', '8000')
max_requests = 1000
worker_class = 'gevent'
workers = max_workers()
|
<commit_before><commit_msg>Add a configuration file for Gunicorn.<commit_after>"""gunicorn WSGI server configuration."""
# Based on https://github.com/rdegges/django-skel/blob/master/gunicorn.py.ini.
import os
from multiprocessing import cpu_count
def max_workers():
return cpu_count()
bind = '0.0.0.0:' + os.environ.get('PORT', '8000')
max_requests = 1000
worker_class = 'gevent'
workers = max_workers()
|
|
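Gunicorn's documentation suggests (2 x cores) + 1 workers as a starting point rather than one per core; a hedged variant of the record's max_workers under that rule of thumb:

def max_workers():
    # Gunicorn's commonly cited starting point: (2 x cores) + 1
    return cpu_count() * 2 + 1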
26b9157f2c8c7eb490b5fa712908c70dcf51f6d0
|
cli/commands/cmd_routes.py
|
cli/commands/cmd_routes.py
|
import logging
import click
from catwatch.app import create_app
app = create_app()
@click.command()
def cli():
""" List all of the available routes. """
output = {}
for rule in app.url_map.iter_rules():
route = {
'path': rule.rule,
'methods': '({0})'.format(', '.join(rule.methods))
}
output[rule.endpoint] = route
endpoint_padding = max(len(endpoint) for endpoint in output.keys()) + 2
for key in sorted(output):
logging.info('{0: >{1}}: {2}'.format(key, endpoint_padding,
output[key]))
|
Add CLI command 'run routes' to list all of the routes
|
Add CLI command 'run routes' to list all of the routes
|
Python
|
mit
|
nickjj/build-a-saas-app-with-flask,nickjj/build-a-saas-app-with-flask,z123/build-a-saas-app-with-flask,nickjj/build-a-saas-app-with-flask,z123/build-a-saas-app-with-flask,z123/build-a-saas-app-with-flask,nickjj/build-a-saas-app-with-flask
|
Add CLI command 'run routes' to list all of the routes
|
import logging
import click
from catwatch.app import create_app
app = create_app()
@click.command()
def cli():
""" List all of the available routes. """
output = {}
for rule in app.url_map.iter_rules():
route = {
'path': rule.rule,
'methods': '({0})'.format(', '.join(rule.methods))
}
output[rule.endpoint] = route
endpoint_padding = max(len(endpoint) for endpoint in output.keys()) + 2
for key in sorted(output):
logging.info('{0: >{1}}: {2}'.format(key, endpoint_padding,
output[key]))
|
<commit_before><commit_msg>Add CLI command 'run routes' to list all of the routes<commit_after>
|
import logging
import click
from catwatch.app import create_app
app = create_app()
@click.command()
def cli():
""" List all of the available routes. """
output = {}
for rule in app.url_map.iter_rules():
route = {
'path': rule.rule,
'methods': '({0})'.format(', '.join(rule.methods))
}
output[rule.endpoint] = route
endpoint_padding = max(len(endpoint) for endpoint in output.keys()) + 2
for key in sorted(output):
logging.info('{0: >{1}}: {2}'.format(key, endpoint_padding,
output[key]))
|
Add CLI command 'run routes' to list all of the routesimport logging
import click
from catwatch.app import create_app
app = create_app()
@click.command()
def cli():
""" List all of the available routes. """
output = {}
for rule in app.url_map.iter_rules():
route = {
'path': rule.rule,
'methods': '({0})'.format(', '.join(rule.methods))
}
output[rule.endpoint] = route
endpoint_padding = max(len(endpoint) for endpoint in output.keys()) + 2
for key in sorted(output):
logging.info('{0: >{1}}: {2}'.format(key, endpoint_padding,
output[key]))
|
<commit_before><commit_msg>Add CLI command 'run routes' to list all of the routes<commit_after>import logging
import click
from catwatch.app import create_app
app = create_app()
@click.command()
def cli():
""" List all of the available routes. """
output = {}
for rule in app.url_map.iter_rules():
route = {
'path': rule.rule,
'methods': '({0})'.format(', '.join(rule.methods))
}
output[rule.endpoint] = route
endpoint_padding = max(len(endpoint) for endpoint in output.keys()) + 2
for key in sorted(output):
logging.info('{0: >{1}}: {2}'.format(key, endpoint_padding,
output[key]))
|
|
94d99ba174ba38f00d4198b8d45e9471bf9a32e5
|
apps/profile/management/commands/startup.py
|
apps/profile/management/commands/startup.py
|
#add homepage user from settings
#add popular user
from settings import HOMEPAGE_USERNAME
from apps.profile.models import create_profile
from django.contrib.auth.models import User
from django.core.management.base import BaseCommand
class Command(BaseCommand):
def handle(self, *args, **options):
instance = User.objects.create(username=HOMEPAGE_USERNAME)
instance.save()
create_profile(None, instance, None)
print("User {0} created".format(HOMEPAGE_USERNAME))
|
Add management command to create homepage user
|
Add management command to create homepage user
|
Python
|
mit
|
samuelclay/NewsBlur,samuelclay/NewsBlur,samuelclay/NewsBlur,samuelclay/NewsBlur,samuelclay/NewsBlur,samuelclay/NewsBlur,samuelclay/NewsBlur,samuelclay/NewsBlur,samuelclay/NewsBlur,samuelclay/NewsBlur,samuelclay/NewsBlur
|
Add management command to create homepage user
|
#add homepage user from settings
#add popular user
from settings import HOMEPAGE_USERNAME
from apps.profile.models import create_profile
from django.contrib.auth.models import User
from django.core.management.base import BaseCommand
class Command(BaseCommand):
def handle(self, *args, **options):
instance = User.objects.create(username=HOMEPAGE_USERNAME)
instance.save()
create_profile(None, instance, None)
print("User {0} created".format(HOMEPAGE_USERNAME))
|
<commit_before><commit_msg>Add management command to create homepage user<commit_after>
|
#add homepage user from settings
#add popular user
from settings import HOMEPAGE_USERNAME
from apps.profile.models import create_profile
from django.contrib.auth.models import User
from django.core.management.base import BaseCommand
class Command(BaseCommand):
def handle(self, *args, **options):
instance = User.objects.create(username=HOMEPAGE_USERNAME)
instance.save()
create_profile(None, instance, None)
print("User {0} created".format(HOMEPAGE_USERNAME))
|
Add management command to create homepage user#add homepage user from settings
#add popular user
from settings import HOMEPAGE_USERNAME
from apps.profile.models import create_profile
from django.contrib.auth.models import User
from django.core.management.base import BaseCommand
class Command(BaseCommand):
def handle(self, *args, **options):
instance = User.objects.create(username=HOMEPAGE_USERNAME)
instance.save()
create_profile(None, instance, None)
print("User {0} created".format(HOMEPAGE_USERNAME))
|
<commit_before><commit_msg>Add management command to create homepage user<commit_after>#add homepage user from settings
#add popular user
from settings import HOMEPAGE_USERNAME
from apps.profile.models import create_profile
from django.contrib.auth.models import User
from django.core.management.base import BaseCommand
class Command(BaseCommand):
def handle(self, *args, **options):
instance = User.objects.create(username=HOMEPAGE_USERNAME)
instance.save()
create_profile(None, instance, None)
print("User {0} created".format(HOMEPAGE_USERNAME))
|
|
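The command in the record above raises an IntegrityError if the homepage user already exists. A sketch of an idempotent handle() using Django's get_or_create, assuming the same models and create_profile signature as the record:

    def handle(self, *args, **options):
        # get_or_create makes the command safe to rerun on every startup
        instance, created = User.objects.get_or_create(username=HOMEPAGE_USERNAME)
        if created:
            create_profile(None, instance, None)
            print("User {0} created".format(HOMEPAGE_USERNAME))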
9b2ef4cf70e9895b96ca2d0213dc0d08c0703831
|
comics/comics/hjalmarbt.py
|
comics/comics/hjalmarbt.py
|
from comics.aggregator.crawler import CrawlerBase, CrawlerImage
from comics.core.comic_data import ComicDataBase
class ComicData(ComicDataBase):
name = "Hjalmar (bt.no)"
language = "no"
url = "https://www.bt.no/kultur/tegneserier/"
rights = "Nils Axle Kanten"
class Crawler(CrawlerBase):
history_capable_date = "2013-01-15"
schedule = "Mo,Tu,We,Th,Fr,Sa,Su"
def crawl(self, pub_date):
url = "https://cartoon-prod.schibsted.tech/rocky/%s.gif" % (
pub_date.strftime("%d%m%y"),
)
return CrawlerImage(url)
|
Add crawler for "Hjalmar" from bt.no
|
Add crawler for "Hjalmar" from bt.no
|
Python
|
agpl-3.0
|
jodal/comics,datagutten/comics,jodal/comics,datagutten/comics,jodal/comics,jodal/comics,datagutten/comics,datagutten/comics
|
Add crawler for "Hjalmar" from bt.no
|
from comics.aggregator.crawler import CrawlerBase, CrawlerImage
from comics.core.comic_data import ComicDataBase
class ComicData(ComicDataBase):
name = "Hjalmar (bt.no)"
language = "no"
url = "https://www.bt.no/kultur/tegneserier/"
rights = "Nils Axle Kanten"
class Crawler(CrawlerBase):
history_capable_date = "2013-01-15"
schedule = "Mo,Tu,We,Th,Fr,Sa,Su"
def crawl(self, pub_date):
url = "https://cartoon-prod.schibsted.tech/rocky/%s.gif" % (
pub_date.strftime("%d%m%y"),
)
return CrawlerImage(url)
|
<commit_before><commit_msg>Add crawler for "Hjalmar" from bt.no<commit_after>
|
from comics.aggregator.crawler import CrawlerBase, CrawlerImage
from comics.core.comic_data import ComicDataBase
class ComicData(ComicDataBase):
name = "Hjalmar (bt.no)"
language = "no"
url = "https://www.bt.no/kultur/tegneserier/"
rights = "Nils Axle Kanten"
class Crawler(CrawlerBase):
history_capable_date = "2013-01-15"
schedule = "Mo,Tu,We,Th,Fr,Sa,Su"
def crawl(self, pub_date):
url = "https://cartoon-prod.schibsted.tech/rocky/%s.gif" % (
pub_date.strftime("%d%m%y"),
)
return CrawlerImage(url)
|
Add crawler for "Hjalmar" from bt.nofrom comics.aggregator.crawler import CrawlerBase, CrawlerImage
from comics.core.comic_data import ComicDataBase
class ComicData(ComicDataBase):
name = "Hjalmar (bt.no)"
language = "no"
url = "https://www.bt.no/kultur/tegneserier/"
rights = "Nils Axle Kanten"
class Crawler(CrawlerBase):
history_capable_date = "2013-01-15"
schedule = "Mo,Tu,We,Th,Fr,Sa,Su"
def crawl(self, pub_date):
url = "https://cartoon-prod.schibsted.tech/rocky/%s.gif" % (
pub_date.strftime("%d%m%y"),
)
return CrawlerImage(url)
|
<commit_before><commit_msg>Add crawler for "Hjalmar" from bt.no<commit_after>from comics.aggregator.crawler import CrawlerBase, CrawlerImage
from comics.core.comic_data import ComicDataBase
class ComicData(ComicDataBase):
name = "Hjalmar (bt.no)"
language = "no"
url = "https://www.bt.no/kultur/tegneserier/"
rights = "Nils Axle Kanten"
class Crawler(CrawlerBase):
history_capable_date = "2013-01-15"
schedule = "Mo,Tu,We,Th,Fr,Sa,Su"
def crawl(self, pub_date):
url = "https://cartoon-prod.schibsted.tech/rocky/%s.gif" % (
pub_date.strftime("%d%m%y"),
)
return CrawlerImage(url)
|
|
57f2fde877f00a726d204734e01b86805ae3ed81
|
pombola/core/management/commands/core_render_template.py
|
pombola/core/management/commands/core_render_template.py
|
from django.core.management.base import LabelCommand
from django.template.loader import render_to_string
from django.template import RequestContext
from django.http import HttpRequest
class Command(LabelCommand):
help = 'Render a template to STDOUT'
args = '<template path>'
def handle_label(self, template_path, **options):
# create a minimal fake request and request context
request = HttpRequest()
request.META = {
"SERVER_NAME": 'fake',
"SERVER_PORT": 80,
}
request_context = RequestContext(request)
print render_to_string(template_path, {}, request_context)
|
Create a management command to output a given template
|
Create a management command to output a given template
|
Python
|
agpl-3.0
|
mysociety/pombola,ken-muturi/pombola,patricmutwiri/pombola,patricmutwiri/pombola,geoffkilpin/pombola,hzj123/56th,mysociety/pombola,hzj123/56th,patricmutwiri/pombola,geoffkilpin/pombola,patricmutwiri/pombola,geoffkilpin/pombola,ken-muturi/pombola,ken-muturi/pombola,ken-muturi/pombola,patricmutwiri/pombola,geoffkilpin/pombola,hzj123/56th,mysociety/pombola,patricmutwiri/pombola,mysociety/pombola,geoffkilpin/pombola,hzj123/56th,geoffkilpin/pombola,mysociety/pombola,ken-muturi/pombola,ken-muturi/pombola,hzj123/56th,mysociety/pombola,hzj123/56th
|
Create a management command to output a given template
|
from django.core.management.base import LabelCommand
from django.template.loader import render_to_string
from django.template import RequestContext
from django.http import HttpRequest
class Command(LabelCommand):
help = 'Render a template to STDOUT'
args = '<template path>'
def handle_label(self, template_path, **options):
# create a minimal fake request and request context
request = HttpRequest()
request.META = {
"SERVER_NAME": 'fake',
"SERVER_PORT": 80,
}
request_context = RequestContext(request)
print render_to_string(template_path, {}, request_context)
|
<commit_before><commit_msg>Create a management command to output a given template<commit_after>
|
from django.core.management.base import LabelCommand
from django.template.loader import render_to_string
from django.template import RequestContext
from django.http import HttpRequest
class Command(LabelCommand):
help = 'Render a template to STDOUT'
args = '<template path>'
def handle_label(self, template_path, **options):
# create a minimal fake request and request context
request = HttpRequest()
request.META = {
"SERVER_NAME": 'fake',
"SERVER_PORT": 80,
}
request_context = RequestContext(request)
print render_to_string(template_path, {}, request_context)
|
Create a management command to output a given templatefrom django.core.management.base import LabelCommand
from django.template.loader import render_to_string
from django.template import RequestContext
from django.http import HttpRequest
class Command(LabelCommand):
help = 'Render a template to STDOUT'
args = '<template path>'
def handle_label(self, template_path, **options):
# create a minimal fake request and request context
request = HttpRequest()
request.META = {
"SERVER_NAME": 'fake',
"SERVER_PORT": 80,
}
request_context = RequestContext(request)
print render_to_string(template_path, {}, request_context)
|
<commit_before><commit_msg>Create a management command to output a given template<commit_after>from django.core.management.base import LabelCommand
from django.template.loader import render_to_string
from django.template import RequestContext
from django.http import HttpRequest
class Command(LabelCommand):
help = 'Render a template to STDOUT'
args = '<template path>'
def handle_label(self, template_path, **options):
# create a minimal fake request and request context
request = HttpRequest()
request.META = {
"SERVER_NAME": 'fake',
"SERVER_PORT": 80,
}
request_context = RequestContext(request)
print render_to_string(template_path, {}, request_context)
|
|
6c56f9bc68950c4fa6ab2fe4bbdc5700654eb604
|
misc/utils/LSL_Tests/RecieveGaitStream.py
|
misc/utils/LSL_Tests/RecieveGaitStream.py
|
"""Example program to show how to read a marker time series from LSL."""
from pylsl import StreamInlet, resolve_stream
import sys
import os
from collections import deque
os.system('cls' if os.name == 'nt' else 'clear')
MAX_ELEMENTS_IN_QUEUE = 5
# first resolve a Gait stream on the lab network
print("looking for a Gait stream...")
streams = resolve_stream('type', 'Force')
streamsFound = len(streams)
if (streamsFound > 0):
print 'found ' + str(streamsFound)
else:
print 'found none'
# create a new inlet to read from the stream
inlet = StreamInlet(streams[0])
markerStack = deque([''])
while True:
sample, timestamp = inlet.pull_sample()
markerStack.append(str(timestamp) + '\t' + sample[0])
if len(markerStack) > MAX_ELEMENTS_IN_QUEUE:
markerStack.popleft()
os.system('cls' if os.name == 'nt' else 'clear')
for marker in reversed(markerStack):
print marker
|
Add a test script for receiving a custom Gait stream
|
Add a test script for receiving a custom Gait stream
|
Python
|
mit
|
xfleckx/BeMoBI,xfleckx/BeMoBI
|
Add a test script for receiving a custom Gait stream
|
"""Example program to show how to read a marker time series from LSL."""
from pylsl import StreamInlet, resolve_stream
import sys
import os
from collections import deque
os.system('cls' if os.name == 'nt' else 'clear')
MAX_ELEMENTS_IN_QUEUE = 5
# first resolve a Gait stream on the lab network
print("looking for a Gait stream...")
streams = resolve_stream('type', 'Force')
streamsFound = len(streams)
if (streamsFound > 0):
print 'found ' + str(streamsFound)
else:
print 'found none'
# create a new inlet to read from the stream
inlet = StreamInlet(streams[0])
markerStack = deque([''])
while True:
sample, timestamp = inlet.pull_sample()
markerStack.append(str(timestamp) + '\t' + sample[0])
if len(markerStack) > MAX_ELEMENTS_IN_QUEUE:
markerStack.popleft()
os.system('cls' if os.name == 'nt' else 'clear')
for marker in reversed(markerStack):
print marker
|
<commit_before><commit_msg>Add a test script for receiving a custom Gait stream<commit_after>
|
"""Example program to show how to read a marker time series from LSL."""
from pylsl import StreamInlet, resolve_stream
import sys
import os
from collections import deque
os.system('cls' if os.name == 'nt' else 'clear')
MAX_ELEMENTS_IN_QUEUE = 5
# first resolve a Gait stream on the lab network
print("looking for a Gait stream...")
streams = resolve_stream('type', 'Force')
streamsFound = len(streams)
if (streamsFound > 0):
print 'found ' + str(streamsFound)
else:
print 'found none'
# create a new inlet to read from the stream
inlet = StreamInlet(streams[0])
markerStack = deque([''])
while True:
sample, timestamp = inlet.pull_sample()
markerStack.append(str(timestamp) + '\t' + sample[0])
if len(markerStack) > MAX_ELEMENTS_IN_QUEUE:
markerStack.popleft()
os.system('cls' if os.name == 'nt' else 'clear')
for marker in reversed(markerStack):
print marker
|
Add a test script for receiving a custom Gait stream"""Example program to show how to read a marker time series from LSL."""
from pylsl import StreamInlet, resolve_stream
import sys
import os
from collections import deque
os.system('cls' if os.name == 'nt' else 'clear')
MAX_ELEMENTS_IN_QUEUE = 5
# first resolve a Gait stream on the lab network
print("looking for a Gait stream...")
streams = resolve_stream('type', 'Force')
streamsFound = len(streams)
if (streamsFound > 0):
print 'found ' + str(streamsFound)
else:
print 'found none'
# create a new inlet to read from the stream
inlet = StreamInlet(streams[0])
markerStack = deque([''])
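# keep only the most recent markers; oldest entries are dropped once the cap is reached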
while True:
sample, timestamp = inlet.pull_sample()
markerStack.append(str(timestamp) + '\t' + sample[0])
if len(markerStack) > MAX_ELEMENTS_IN_QUEUE:
markerStack.popleft()
os.system('cls' if os.name == 'nt' else 'clear')
for marker in reversed(markerStack):
print marker
|
<commit_before><commit_msg>Add a test script for receiving a custom Gait stream<commit_after>"""Example program to show how to read a marker time series from LSL."""
from pylsl import StreamInlet, resolve_stream
import sys
import os
from collections import deque
os.system('cls' if os.name == 'nt' else 'clear')
MAX_ELEMENTS_IN_QUEUE = 5
# first resolve a Gait stream on the lab network
print("looking for a Gait stream...")
streams = resolve_stream('type', 'Force')
streamsFound = len(streams)
if (streamsFound > 0):
print 'found ' + str(streamsFound)
else:
print 'found none'
# create a new inlet to read from the stream
inlet = StreamInlet(streams[0])
markerStack = deque([''])
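# keep only the most recent markers; oldest entries are dropped once the cap is reached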
while True:
sample, timestamp = inlet.pull_sample()
markerStack.append(str(timestamp) + '\t' + sample[0])
if len(markerStack) > MAX_ELEMENTS_IN_QUEUE:
markerStack.popleft()
os.system('cls' if os.name == 'nt' else 'clear')
for marker in reversed(markerStack):
print marker
|
|
c44db3982db1a0ea163bdb29589c8ac546c7038c
|
test/read_output_test.py
|
test/read_output_test.py
|
'''Unit tests for Aronnax'''
from contextlib import contextmanager
import os.path as p
import re
import numpy as np
from scipy.io import FortranFile
import aronnax as aro
from aronnax.utils import working_directory
import pytest
import glob
self_path = p.dirname(p.abspath(__file__))
def test_open_mfdataarray():
'''Open a number of files and assert that the length of the time
dimension is the same as the number of files.'''
xlen = 1e6
ylen = 2e6
nx = 10; ny = 20
layers = 1
grid = aro.Grid(nx, ny, layers, xlen / nx, ylen / ny)
with working_directory(p.join(self_path, "beta_plane_gyre_red_grav")):
output_files = glob.glob('output/snap.h*')
ds = aro.open_mfdataarray(output_files, grid)
assert len(output_files) == ds.time.shape[0]
def test_open_mfdataarray_multiple_variables():
'''This test tries to open multiple different variables in the same call,
and should fail.'''
xlen = 1e6
ylen = 2e6
nx = 10; ny = 20
layers = 1
grid = aro.Grid(nx, ny, layers, xlen / nx, ylen / ny)
with working_directory(p.join(self_path, "beta_plane_gyre_red_grav")):
with pytest.raises(Exception):
output_files = glob.glob('output/snap.*')
ds = aro.open_mfdataarray(output_files, grid)
|
Test for new function for reading output
|
Test for new function for reading output
|
Python
|
mit
|
edoddridge/MIM,edoddridge/aronnax
|
Test for new function for reading output
|
'''Unit tests for Aronnax'''
from contextlib import contextmanager
import os.path as p
import re
import numpy as np
from scipy.io import FortranFile
import aronnax as aro
from aronnax.utils import working_directory
import pytest
import glob
self_path = p.dirname(p.abspath(__file__))
def test_open_mfdataarray():
'''Open a number of files and assert that the length of the time
dimension is the same as the number of files.'''
xlen = 1e6
ylen = 2e6
nx = 10; ny = 20
layers = 1
grid = aro.Grid(nx, ny, layers, xlen / nx, ylen / ny)
with working_directory(p.join(self_path, "beta_plane_gyre_red_grav")):
output_files = glob.glob('output/snap.h*')
ds = aro.open_mfdataarray(output_files, grid)
assert len(output_files) == ds.time.shape[0]
def test_open_mfdataarray_multiple_variables():
'''This test tries to open multiple different variables in the same call,
and should fail.'''
xlen = 1e6
ylen = 2e6
nx = 10; ny = 20
layers = 1
grid = aro.Grid(nx, ny, layers, xlen / nx, ylen / ny)
with working_directory(p.join(self_path, "beta_plane_gyre_red_grav")):
with pytest.raises(Exception):
output_files = glob.glob('output/snap.*')
ds = aro.open_mfdataarray(output_files, grid)
|
<commit_before><commit_msg>Test for new function for reading output<commit_after>
|
'''Unit tests for Aronnax'''
from contextlib import contextmanager
import os.path as p
import re
import numpy as np
from scipy.io import FortranFile
import aronnax as aro
from aronnax.utils import working_directory
import pytest
import glob
self_path = p.dirname(p.abspath(__file__))
def test_open_mfdataarray():
'''Open a number of files and assert that the length of the time
dimension is the same as the number of files.'''
xlen = 1e6
ylen = 2e6
nx = 10; ny = 20
layers = 1
grid = aro.Grid(nx, ny, layers, xlen / nx, ylen / ny)
with working_directory(p.join(self_path, "beta_plane_gyre_red_grav")):
output_files = glob.glob('output/snap.h*')
ds = aro.open_mfdataarray(output_files, grid)
assert len(output_files) == ds.time.shape[0]
def test_open_mfdataarray_multiple_variables():
'''This test tries to open multiple different variables in the same call,
and should fail.'''
xlen = 1e6
ylen = 2e6
nx = 10; ny = 20
layers = 1
grid = aro.Grid(nx, ny, layers, xlen / nx, ylen / ny)
with working_directory(p.join(self_path, "beta_plane_gyre_red_grav")):
with pytest.raises(Exception):
output_files = glob.glob('output/snap.*')
ds = aro.open_mfdataarray(output_files, grid)
|
Test for new function for reading output'''Unit tests for Aronnax'''
from contextlib import contextmanager
import os.path as p
import re
import numpy as np
from scipy.io import FortranFile
import aronnax as aro
from aronnax.utils import working_directory
import pytest
import glob
self_path = p.dirname(p.abspath(__file__))
def test_open_mfdataarray():
'''Open a number of files and assert that the length of the time
dimension is the same as the number of files.'''
xlen = 1e6
ylen = 2e6
nx = 10; ny = 20
layers = 1
grid = aro.Grid(nx, ny, layers, xlen / nx, ylen / ny)
with working_directory(p.join(self_path, "beta_plane_gyre_red_grav")):
output_files = glob.glob('output/snap.h*')
ds = aro.open_mfdataarray(output_files, grid)
assert len(output_files) == ds.time.shape[0]
def test_open_mfdataarray_multiple_variables():
'''This test tries to open multiple different variables in the same call,
and should fail.'''
xlen = 1e6
ylen = 2e6
nx = 10; ny = 20
layers = 1
grid = aro.Grid(nx, ny, layers, xlen / nx, ylen / ny)
with working_directory(p.join(self_path, "beta_plane_gyre_red_grav")):
with pytest.raises(Exception):
output_files = glob.glob('output/snap.*')
ds = aro.open_mfdataarray(output_files, grid)
|
<commit_before><commit_msg>Test for new function for reading output<commit_after>'''Unit tests for Aronnax'''
from contextlib import contextmanager
import os.path as p
import re
import numpy as np
from scipy.io import FortranFile
import aronnax as aro
from aronnax.utils import working_directory
import pytest
import glob
self_path = p.dirname(p.abspath(__file__))
def test_open_mfdataarray():
'''Open a number of files and assert that the length of the time
dimension is the same as the number of files.'''
xlen = 1e6
ylen = 2e6
nx = 10; ny = 20
layers = 1
grid = aro.Grid(nx, ny, layers, xlen / nx, ylen / ny)
with working_directory(p.join(self_path, "beta_plane_gyre_red_grav")):
output_files = glob.glob('output/snap.h*')
ds = aro.open_mfdataarray(output_files, grid)
assert len(output_files) == ds.time.shape[0]
def test_open_mfdataarray_multiple_variables():
'''This test tries to open multiple different variables in the same call,
and should fail.'''
xlen = 1e6
ylen = 2e6
nx = 10; ny = 20
layers = 1
grid = aro.Grid(nx, ny, layers, xlen / nx, ylen / ny)
with working_directory(p.join(self_path, "beta_plane_gyre_red_grav")):
with pytest.raises(Exception):
output_files = glob.glob('output/snap.*')
ds = aro.open_mfdataarray(output_files, grid)
|
|
06e1357ac6b14a26dda72a6080f6228226e8ed91
|
rubix.py
|
rubix.py
|
#!/usr/bin/python
import re
import argparse
import time
# Let's time the execution for display in -v
start_time = time.time()
# String to hold regex
regex = ''
# Create a storage area for sorting words by size
sortedDict = {}
# For comparison, let's keep record of the sorted string of characters that was input
sortedLetters = ''
# Setup argument parsing
parser = argparse.ArgumentParser(description='Generate list of valid Words With Friends words from given letters.')
parser.add_argument("letters", help="List of letters to lookup words by.")
parser.add_argument("-v", "--verbose", help="increase output verbosity", action="store_true")
args = parser.parse_args()
sortedLetters = ''.join(sorted(args.letters))
regex = '^[' + args.letters.lower() + ']+$'
m = re.compile(regex)
# Read in word list
words = [line.strip() for line in open('enable1.txt')]
# Find words that match the letters given
for word in words:
if m.match(word) is not None:
wordLen = len(word)
# Make sure the key of dict exists
if wordLen not in sortedDict:
sortedDict[wordLen] = []
# Add item to list
sortedDict[wordLen].append(word)
# If verbose output is enabled
if args.verbose:
print 'Your letters: ' + args.letters
print 'Regular expression used: ' + regex
print '# of words searched: ' + str(len(words))
print ''
# Print words by size
for wordLength, words in sortedDict.iteritems():
print wordLength
print words
if args.verbose:
print ''
print 'Execution time: ' + str(time.time() - start_time) + ' seconds'
|
Change file name. Add timing and verbose flag. Still using letters more times than possible.
|
Change file name. Add timing and verbose flag. Still using letters more times than possible.
|
Python
|
mit
|
chaseconey/rubix
|
Change file name. Add timing and verbose flag. Still using letters more times than possible.
|
#!/usr/bin/python
import re
import argparse
import time
# Let's time the execution for display in -v
start_time = time.time()
# String to hold regex
regex = ''
# Create a storage area for sorting words by size
sortedDict = {}
# For comparison, let's keep record of the sorted string of characters that was input
sortedLetters = ''
# Setup argument parsing
parser = argparse.ArgumentParser(description='Generate list of valid Words With Friends words from given letters.')
parser.add_argument("letters", help="List of letters to lookup words by.")
parser.add_argument("-v", "--verbose", help="increase output verbosity", action="store_true")
args = parser.parse_args()
sortedLetters = ''.join(sorted(args.letters))
regex = '^[' + args.letters.lower() + ']+$'
m = re.compile(regex)
# Read in word list
words = [line.strip() for line in open('enable1.txt')]
# Find words that match the letters given
for word in words:
if m.match(word) is not None:
wordLen = len(word)
# Make sure the key of dict exists
if wordLen not in sortedDict:
sortedDict[wordLen] = []
# Add item to list
sortedDict[wordLen].append(word)
# If verbose output is enabled
if args.verbose:
print 'Your letters: ' + args.letters
print 'Regular expression used: ' + regex
print '# of words searched: ' + str(len(words))
print ''
# Print words by size
for wordLength, words in sortedDict.iteritems():
print wordLength
print words
if args.verbose:
print ''
print 'Execution time: ' + str(time.time() - start_time) + ' seconds'
|
<commit_before><commit_msg>Change file name. Add timing and verbose flag. Still using letters more times than possible.<commit_after>
|
#!/usr/bin/python
import re
import argparse
import time
# Let's time the execution for display in -v
start_time = time.time()
# String to hold regex
regex = ''
# Create a storage area for sorting words by size
sortedDict = {}
# For comparison, let's keep record of the sorted string of characters that was input
sortedLetters = ''
# Setup argument parsing
parser = argparse.ArgumentParser(description='Generate list of valid Words With Friends words from given letters.')
parser.add_argument("letters", help="List of letters to lookup words by.")
parser.add_argument("-v", "--verbose", help="increase output verbosity", action="store_true")
args = parser.parse_args()
sortedLetters = ''.join(sorted(args.letters))
regex = '^[' + args.letters.lower() + ']+$'
m = re.compile(regex)
# Read in word list
words = [line.strip() for line in open('enable1.txt')]
# Find words that match the letters given
for word in words:
if m.match(word) is not None:
wordLen = len(word)
# Make sure the key of dict exists
if wordLen not in sortedDict:
sortedDict[wordLen] = []
# Add item to list
sortedDict[wordLen].append(word)
# If verbose output is enabled
if args.verbose:
print 'Your letters: ' + args.letters
print 'Regular expression used: ' + regex
print '# of words searched: ' + str(len(words))
print ''
# Print words by size
for wordLength, words in sortedDict.iteritems():
print wordLength
print words
if args.verbose:
print ''
print 'Execution time: ' + str(time.time() - start_time) + ' seconds'
|
Change file name. Add timing and verbose flag. Still using letters more times than possible.#!/usr/bin/python
import re
import argparse
import time
# Let's time the execution for display in -v
start_time = time.time()
# String to hold regex
regex = ''
# Create a storage area for sorting words by size
sortedDict = {}
# For comparison, let's keep record of the sorted string of characters that was input
sortedLetters = ''
# Setup argument parsing
parser = argparse.ArgumentParser(description='Generate list of valid Words With Friends words from given letters.')
parser.add_argument("letters", help="List of letters to lookup words by.")
parser.add_argument("-v", "--verbose", help="increase output verbosity", action="store_true")
args = parser.parse_args()
sortedLetters = ''.join(sorted(args.letters))
regex = '^[' + args.letters.lower() + ']+$'
m = re.compile(regex)
# Read in word list
words = [line.strip() for line in open('enable1.txt')]
# Find words that match the letters given
for word in words:
if m.match(word) is not None:
wordLen = len(word)
# Make sure the key of dict exists
if wordLen not in sortedDict:
sortedDict[wordLen] = []
# Add item to list
sortedDict[wordLen].append(word)
# If verbose output is enabled
if args.verbose:
print 'Your letters: ' + args.letters
print 'Regular expression used: ' + regex
print '# of words searched: ' + str(len(words))
print ''
# Print words by size
for wordLength, words in sortedDict.iteritems():
print wordLength
print words
if args.verbose:
print ''
print 'Execution time: ' + str(time.time() - start_time) + ' seconds'
|
<commit_before><commit_msg>Change file name. Add timing and verbose flag. Still using letters more times than possible.<commit_after>#!/usr/bin/python
import re
import argparse
import time
# Let's time the execution for display in -v
start_time = time.time()
# String to hold regex
regex = ''
# Create a storage area for sorting words by size
sortedDict = {}
# For comparison, let's keep record of the sorted string of characters that was input
sortedLetters = ''
# Setup argument parsing
parser = argparse.ArgumentParser(description='Generate list of valid Words With Friends words from given letters.')
parser.add_argument("letters", help="List of letters to lookup words by.")
parser.add_argument("-v", "--verbose", help="increase output verbosity", action="store_true")
args = parser.parse_args()
sortedLetters = ''.join(sorted(args.letters))
regex = '^[' + args.letters.lower() + ']+$'
m = re.compile(regex)
# Read in word list
words = [line.strip() for line in open('enable1.txt')]
# Find words that match the letters given
for word in words:
if m.match(word) is not None:
wordLen = len(word)
# Make sure the key of dict exists
if wordLen not in sortedDict:
sortedDict[wordLen] = []
# Add item to list
sortedDict[wordLen].append(word)
# If verbose output is enabled
if args.verbose:
print 'Your letters: ' + args.letters
print 'Regular expression used: ' + regex
print '# of words searched: ' + str(len(words))
print ''
# Print words by size
for wordLength, words in sortedDict.iteritems():
print wordLength
print words
if args.verbose:
print ''
print 'Execution time: ' + str(time.time() - start_time) + ' seconds'
|
|
45bea73cecaee8f18fc7519066dc8ec51b53f13b
|
tests/functional/test_api_v1.py
|
tests/functional/test_api_v1.py
|
import pytest
from decimal import Decimal
from django.core.urlresolvers import reverse
from rest_framework.test import APIClient
from wightinvoices.invoice import factories
pytestmark = pytest.mark.django_db
def test_get_invoice():
test_client = APIClient()
owner = factories.User.create(password="clear$abc$toto")
assert test_client.login(username=owner.username, password="toto")
invoice = factories.Invoice.create()
invoice_item = factories.InvoiceItem.create(invoice=invoice)
url = reverse('api-invoice-detail', kwargs={'pk': invoice.id})
response = test_client.get(url)
assert response.data == {
'id': invoice.id,
'client': 'client1',
'comments': None,
'items': [{
'id': invoice_item.id,
'description': invoice_item.description,
'quantity': invoice_item.quantity,
'vat': Decimal(invoice_item.vat),
'amount': Decimal(invoice_item.amount),
}],
'owner': owner.email,
'status': 'draft',
}
|
Test to get the invoice.
|
Test to get the invoice.
|
Python
|
mit
|
linovia/wight-invoices
|
Test to get the invoice.
|
import pytest
from decimal import Decimal
from django.core.urlresolvers import reverse
from rest_framework.test import APIClient
from wightinvoices.invoice import factories
pytestmark = pytest.mark.django_db
def test_get_invoice():
test_client = APIClient()
owner = factories.User.create(password="clear$abc$toto")
assert test_client.login(username=owner.username, password="toto")
invoice = factories.Invoice.create()
invoice_item = factories.InvoiceItem.create(invoice=invoice)
url = reverse('api-invoice-detail', kwargs={'pk': invoice.id})
response = test_client.get(url)
assert response.data == {
'id': invoice.id,
'client': 'client1',
'comments': None,
'items': [{
'id': invoice_item.id,
'description': invoice_item.description,
'quantity': invoice_item.quantity,
'vat': Decimal(invoice_item.vat),
'amount': Decimal(invoice_item.amount),
}],
'owner': owner.email,
'status': 'draft',
}
|
<commit_before><commit_msg>Test to get the invoice.<commit_after>
|
import pytest
from decimal import Decimal
from django.core.urlresolvers import reverse
from rest_framework.test import APIClient
from wightinvoices.invoice import factories
pytestmark = pytest.mark.django_db
def test_get_invoice():
test_client = APIClient()
owner = factories.User.create(password="clear$abc$toto")
assert test_client.login(username=owner.username, password="toto")
invoice = factories.Invoice.create()
invoice_item = factories.InvoiceItem.create(invoice=invoice)
url = reverse('api-invoice-detail', kwargs={'pk': invoice.id})
response = test_client.get(url)
assert response.data == {
'id': invoice.id,
'client': 'client1',
'comments': None,
'items': [{
'id': invoice_item.id,
'description': invoice_item.description,
'quantity': invoice_item.quantity,
'vat': Decimal(invoice_item.vat),
'amount': Decimal(invoice_item.amount),
}],
'owner': owner.email,
'status': 'draft',
}
|
Test to get the invoice.import pytest
from decimal import Decimal
from django.core.urlresolvers import reverse
from rest_framework.test import APIClient
from wightinvoices.invoice import factories
pytestmark = pytest.mark.django_db
def test_get_invoice():
test_client = APIClient()
owner = factories.User.create(password="clear$abc$toto")
assert test_client.login(username=owner.username, password="toto")
invoice = factories.Invoice.create()
invoice_item = factories.InvoiceItem.create(invoice=invoice)
url = reverse('api-invoice-detail', kwargs={'pk': invoice.id})
response = test_client.get(url)
assert response.data == {
'id': invoice.id,
'client': 'client1',
'comments': None,
'items': [{
'id': invoice_item.id,
'description': invoice_item.description,
'quantity': invoice_item.quantity,
'vat': Decimal(invoice_item.vat),
'amount': Decimal(invoice_item.amount),
}],
'owner': owner.email,
'status': 'draft',
}
|
<commit_before><commit_msg>Test to get the invoice.<commit_after>import pytest
from decimal import Decimal
from django.core.urlresolvers import reverse
from rest_framework.test import APIClient
from wightinvoices.invoice import factories
pytestmark = pytest.mark.django_db
def test_get_invoice():
test_client = APIClient()
owner = factories.User.create(password="clear$abc$toto")
assert test_client.login(username=owner.username, password="toto")
invoice = factories.Invoice.create()
invoice_item = factories.InvoiceItem.create(invoice=invoice)
url = reverse('api-invoice-detail', kwargs={'pk': invoice.id})
response = test_client.get(url)
assert response.data == {
'id': invoice.id,
'client': 'client1',
'comments': None,
'items': [{
'id': invoice_item.id,
'description': invoice_item.description,
'quantity': invoice_item.quantity,
'vat': Decimal(invoice_item.vat),
'amount': Decimal(invoice_item.amount),
}],
'owner': owner.email,
'status': 'draft',
}
|
|
5adaa6ec7b1c379d0fcbf7b488cc48e183739f0e
|
timed/tests/test_serializers.py
|
timed/tests/test_serializers.py
|
from datetime import timedelta
import pytest
from rest_framework_json_api.serializers import DurationField, IntegerField
from timed.serializers import PkDictSerializer
class MyPkDictSerializer(PkDictSerializer):
test_duration = DurationField()
test_nr = IntegerField()
class Meta:
pk_key = 'test_nr'
resource_name = 'my-resource'
@pytest.fixture
def data():
return {
'test_nr': 123,
'test_duration': timedelta(hours=1),
'invalid_field': '1234'
}
def test_pk_dict_serializer_single(data):
serializer = MyPkDictSerializer(data)
expected_data = {
'test_duration': '01:00:00',
'test_nr': 123,
}
assert expected_data == serializer.data
def test_pk_dict_serializer_many(data):
list_data = [
data,
data
]
serializer = MyPkDictSerializer(list_data, many=True)
expected_data = [
{
'test_duration': '01:00:00',
'test_nr': 123,
},
{
'test_duration': '01:00:00',
'test_nr': 123,
},
]
assert expected_data == serializer.data
|
Add unit test for pk dict serializer
|
Add unit test for pk dict serializer
|
Python
|
agpl-3.0
|
adfinis-sygroup/timed-backend,adfinis-sygroup/timed-backend,adfinis-sygroup/timed-backend
|
Add unit test for pk dict serializer
|
from datetime import timedelta
import pytest
from rest_framework_json_api.serializers import DurationField, IntegerField
from timed.serializers import PkDictSerializer
class MyPkDictSerializer(PkDictSerializer):
test_duration = DurationField()
test_nr = IntegerField()
class Meta:
pk_key = 'test_nr'
resource_name = 'my-resource'
@pytest.fixture
def data():
return {
'test_nr': 123,
'test_duration': timedelta(hours=1),
'invalid_field': '1234'
}
def test_pk_dict_serializer_single(data):
serializer = MyPkDictSerializer(data)
expected_data = {
'test_duration': '01:00:00',
'test_nr': 123,
}
assert expected_data == serializer.data
def test_pk_dict_serializer_many(data):
list_data = [
data,
data
]
serializer = MyPkDictSerializer(list_data, many=True)
expected_data = [
{
'test_duration': '01:00:00',
'test_nr': 123,
},
{
'test_duration': '01:00:00',
'test_nr': 123,
},
]
assert expected_data == serializer.data
|
<commit_before><commit_msg>Add unit test for pk dict serializer<commit_after>
|
from datetime import timedelta
import pytest
from rest_framework_json_api.serializers import DurationField, IntegerField
from timed.serializers import PkDictSerializer
class MyPkDictSerializer(PkDictSerializer):
test_duration = DurationField()
test_nr = IntegerField()
class Meta:
pk_key = 'test_nr'
resource_name = 'my-resource'
@pytest.fixture
def data():
return {
'test_nr': 123,
'test_duration': timedelta(hours=1),
'invalid_field': '1234'
}
def test_pk_dict_serializer_single(data):
serializer = MyPkDictSerializer(data)
expected_data = {
'test_duration': '01:00:00',
'test_nr': 123,
}
assert expected_data == serializer.data
def test_pk_dict_serializer_many(data):
list_data = [
data,
data
]
serializer = MyPkDictSerializer(list_data, many=True)
expected_data = [
{
'test_duration': '01:00:00',
'test_nr': 123,
},
{
'test_duration': '01:00:00',
'test_nr': 123,
},
]
assert expected_data == serializer.data
|
Add unit test for pk dict serializerfrom datetime import timedelta
import pytest
from rest_framework_json_api.serializers import DurationField, IntegerField
from timed.serializers import PkDictSerializer
class MyPkDictSerializer(PkDictSerializer):
test_duration = DurationField()
test_nr = IntegerField()
class Meta:
pk_key = 'test_nr'
resource_name = 'my-resource'
@pytest.fixture
def data():
return {
'test_nr': 123,
'test_duration': timedelta(hours=1),
'invalid_field': '1234'
}
def test_pk_dict_serializer_single(data):
serializer = MyPkDictSerializer(data)
expected_data = {
'test_duration': '01:00:00',
'test_nr': 123,
}
assert expected_data == serializer.data
def test_pk_dict_serializer_many(data):
list_data = [
data,
data
]
serializer = MyPkDictSerializer(list_data, many=True)
expected_data = [
{
'test_duration': '01:00:00',
'test_nr': 123,
},
{
'test_duration': '01:00:00',
'test_nr': 123,
},
]
assert expected_data == serializer.data
|
<commit_before><commit_msg>Add unit test for pk dict serializer<commit_after>from datetime import timedelta
import pytest
from rest_framework_json_api.serializers import DurationField, IntegerField
from timed.serializers import PkDictSerializer
class MyPkDictSerializer(PkDictSerializer):
test_duration = DurationField()
test_nr = IntegerField()
class Meta:
pk_key = 'test_nr'
resource_name = 'my-resource'
@pytest.fixture
def data():
return {
'test_nr': 123,
'test_duration': timedelta(hours=1),
'invalid_field': '1234'
}
def test_pk_dict_serializer_single(data):
serializer = MyPkDictSerializer(data)
expected_data = {
'test_duration': '01:00:00',
'test_nr': 123,
}
assert expected_data == serializer.data
def test_pk_dict_serializer_many(data):
list_data = [
data,
data
]
serializer = MyPkDictSerializer(list_data, many=True)
expected_data = [
{
'test_duration': '01:00:00',
'test_nr': 123,
},
{
'test_duration': '01:00:00',
'test_nr': 123,
},
]
assert expected_data == serializer.data
|
|
717b3ec17b76d347894591f813d45e5e56bba0b2
|
staging_settings.py
|
staging_settings.py
|
# To activate these settings as the current local_settings, create a symlink
# called local_settings.py pointing to this file.
from secret_settings import STAGING_DB_PASSWORD
import os
DEBUG = False
PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
MEDIA_ROOT = os.path.join(os.path.dirname(PROJECT_ROOT),
"site_media")
STATIC_ROOT = os.path.join(MEDIA_ROOT, "static")
DATABASES = {
"sqlite": {
"ENGINE": "django.db.backends.sqlite3", # Add "postgresql_psycopg2", "postgresql", "mysql", "sqlite3" or "oracle".
"NAME": os.path.join(PROJECT_ROOT, "db", "dev.db"), # Or path to database file if using sqlite3.
"USER": "", # Not used with sqlite3.
"PASSWORD": "", # Not used with sqlite3.
"HOST": "", # Set to empty string for localhost. Not used with sqlite3.
"PORT": "", # Set to empty string for default. Not used with sqlite3.
},
"default": {
"ENGINE": "django.db.backends.postgresql_psycopg2",
"NAME": "smeuhsocial_staging",
"USER": "smeuhsocial_staging",
"PASSWORD": STAGING_DB_PASSWORD,
"HOST": "localhost", # Set to empty string for localhost. Not used with sqlite3.
"PORT": "", # Set to empty string for default. Not used with sqlite3.
}
}
DEFAULT_HTTP_PROTOCOL = "https"
DEFAULT_FROM_EMAIL = 'al@smeuh.org'
|
Add settings override for staging environment
|
Add settings override for staging environment
|
Python
|
mit
|
amarandon/smeuhsocial,fgirault/smeuhsocial,amarandon/smeuhsocial,amarandon/smeuhsocial,fgirault/smeuhsocial,fgirault/smeuhsocial
|
Add settings override for staging environment
|
# To activate these settings as the current local_settings, create a symlink
# called local_settings.py pointing to this file.
from secret_settings import STAGING_DB_PASSWORD
import os
DEBUG = False
PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
MEDIA_ROOT = os.path.join(os.path.dirname(PROJECT_ROOT),
"site_media")
STATIC_ROOT = os.path.join(MEDIA_ROOT, "static")
DATABASES = {
"sqlite": {
"ENGINE": "django.db.backends.sqlite3", # Add "postgresql_psycopg2", "postgresql", "mysql", "sqlite3" or "oracle".
"NAME": os.path.join(PROJECT_ROOT, "db", "dev.db"), # Or path to database file if using sqlite3.
"USER": "", # Not used with sqlite3.
"PASSWORD": "", # Not used with sqlite3.
"HOST": "", # Set to empty string for localhost. Not used with sqlite3.
"PORT": "", # Set to empty string for default. Not used with sqlite3.
},
"default": {
"ENGINE": "django.db.backends.postgresql_psycopg2",
"NAME": "smeuhsocial_staging",
"USER": "smeuhsocial_staging",
"PASSWORD": STAGING_DB_PASSWORD,
"HOST": "localhost", # Set to empty string for localhost. Not used with sqlite3.
"PORT": "", # Set to empty string for default. Not used with sqlite3.
}
}
DEFAULT_HTTP_PROTOCOL = "https"
DEFAULT_FROM_EMAIL = 'al@smeuh.org'
|
<commit_before><commit_msg>Add settings override for staging environment<commit_after>
|
# To activate these settings as the current local_settings, create a symlink
# called local_settings.py pointing to this file.
from secret_settings import STAGING_DB_PASSWORD
import os
DEBUG = False
PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
MEDIA_ROOT = os.path.join(os.path.dirname(PROJECT_ROOT),
"site_media")
STATIC_ROOT = os.path.join(MEDIA_ROOT, "static")
DATABASES = {
"sqlite": {
"ENGINE": "django.db.backends.sqlite3", # Add "postgresql_psycopg2", "postgresql", "mysql", "sqlite3" or "oracle".
"NAME": os.path.join(PROJECT_ROOT, "db", "dev.db"), # Or path to database file if using sqlite3.
"USER": "", # Not used with sqlite3.
"PASSWORD": "", # Not used with sqlite3.
"HOST": "", # Set to empty string for localhost. Not used with sqlite3.
"PORT": "", # Set to empty string for default. Not used with sqlite3.
},
"default": {
"ENGINE": "django.db.backends.postgresql_psycopg2",
"NAME": "smeuhsocial_staging",
"USER": "smeuhsocial_staging",
"PASSWORD": STAGING_DB_PASSWORD,
"HOST": "localhost", # Set to empty string for localhost. Not used with sqlite3.
"PORT": "", # Set to empty string for default. Not used with sqlite3.
}
}
DEFAULT_HTTP_PROTOCOL = "https"
DEFAULT_FROM_EMAIL = 'al@smeuh.org'
|
Add settings override for staging environment# To activate these settings as the current local_settings, create a symlink
# called local_settings.py pointing to this file.
from secret_settings import STAGING_DB_PASSWORD
import os
DEBUG = False
PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
MEDIA_ROOT = os.path.join(os.path.dirname(PROJECT_ROOT),
"site_media")
STATIC_ROOT = os.path.join(MEDIA_ROOT, "static")
DATABASES = {
"sqlite": {
"ENGINE": "django.db.backends.sqlite3", # Add "postgresql_psycopg2", "postgresql", "mysql", "sqlite3" or "oracle".
"NAME": os.path.join(PROJECT_ROOT, "db", "dev.db"), # Or path to database file if using sqlite3.
"USER": "", # Not used with sqlite3.
"PASSWORD": "", # Not used with sqlite3.
"HOST": "", # Set to empty string for localhost. Not used with sqlite3.
"PORT": "", # Set to empty string for default. Not used with sqlite3.
},
"default": {
"ENGINE": "django.db.backends.postgresql_psycopg2",
"NAME": "smeuhsocial_staging",
"USER": "smeuhsocial_staging",
"PASSWORD": STAGING_DB_PASSWORD,
"HOST": "localhost", # Set to empty string for localhost. Not used with sqlite3.
"PORT": "", # Set to empty string for default. Not used with sqlite3.
}
}
DEFAULT_HTTP_PROTOCOL = "https"
DEFAULT_FROM_EMAIL = 'al@smeuh.org'
|
<commit_before><commit_msg>Add settings override for staging environment<commit_after># To activate these settings as the current local_settings, create a symlink
# called local_settings.py pointing to this file.
from secret_settings import STAGING_DB_PASSWORD
import os
DEBUG = False
PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
MEDIA_ROOT = os.path.join(os.path.dirname(PROJECT_ROOT),
"site_media")
STATIC_ROOT = os.path.join(MEDIA_ROOT, "static")
DATABASES = {
"sqlite": {
"ENGINE": "django.db.backends.sqlite3", # Add "postgresql_psycopg2", "postgresql", "mysql", "sqlite3" or "oracle".
"NAME": os.path.join(PROJECT_ROOT, "db", "dev.db"), # Or path to database file if using sqlite3.
"USER": "", # Not used with sqlite3.
"PASSWORD": "", # Not used with sqlite3.
"HOST": "", # Set to empty string for localhost. Not used with sqlite3.
"PORT": "", # Set to empty string for default. Not used with sqlite3.
},
"default": {
"ENGINE": "django.db.backends.postgresql_psycopg2",
"NAME": "smeuhsocial_staging",
"USER": "smeuhsocial_staging",
"PASSWORD": STAGING_DB_PASSWORD,
"HOST": "localhost", # Set to empty string for localhost. Not used with sqlite3.
"PORT": "", # Set to empty string for default. Not used with sqlite3.
}
}
DEFAULT_HTTP_PROTOCOL = "https"
DEFAULT_FROM_EMAIL = 'al@smeuh.org'
|
|
be4a40ce2e1977aa349e02cf639cd663ec32b8dd
|
autopush/tests/test_protocol.py
|
autopush/tests/test_protocol.py
|
from mock import Mock
from nose.tools import eq_
from twisted.trial import unittest
from twisted.web.client import Response
from autopush.protocol import IgnoreBody
class ProtocolTestCase(unittest.TestCase):
def test_ignore(self):
mock_reason = Mock()
mock_reason.check.return_value = True
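        # deliverBody side effect feeds some data to the protocol, then closes the connection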
def deliverBody(proto):
proto.dataReceived("some data to ignore")
proto.connectionLost(mock_reason)
mock_response = Mock(spec=Response)
mock_response.deliverBody.side_effect = deliverBody
d = IgnoreBody.ignore(mock_response)
def verifyResponse(result):
eq_(result, mock_response)
eq_(len(mock_reason.mock_calls), 1)
d.addCallback(verifyResponse)
return d
def test_ignore_check_false(self):
mock_reason = Mock()
mock_reason.check.return_value = False
def deliverBody(proto):
proto.dataReceived("some data to ignore")
proto.connectionLost(mock_reason)
mock_response = Mock(spec=Response)
mock_response.deliverBody.side_effect = deliverBody
d = IgnoreBody.ignore(mock_response)
def verifyResponse(result):
eq_(result.value, mock_reason)
eq_(len(mock_reason.mock_calls), 1)
d.addErrback(verifyResponse)
return d
|
Add 100% unit test for IgnoreBody protocol.
|
Add 100% unit test for IgnoreBody protocol.
|
Python
|
mpl-2.0
|
mozilla-services/autopush,mozilla-services/autopush
|
Add 100% unit test for IgnoreBody protocol.
|
from mock import Mock
from nose.tools import eq_
from twisted.trial import unittest
from twisted.web.client import Response
from autopush.protocol import IgnoreBody
class ProtocolTestCase(unittest.TestCase):
def test_ignore(self):
mock_reason = Mock()
mock_reason.check.return_value = True
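        # deliverBody side effect feeds some data to the protocol, then closes the connection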
def deliverBody(proto):
proto.dataReceived("some data to ignore")
proto.connectionLost(mock_reason)
mock_response = Mock(spec=Response)
mock_response.deliverBody.side_effect = deliverBody
d = IgnoreBody.ignore(mock_response)
def verifyResponse(result):
eq_(result, mock_response)
eq_(len(mock_reason.mock_calls), 1)
d.addCallback(verifyResponse)
return d
def test_ignore_check_false(self):
mock_reason = Mock()
mock_reason.check.return_value = False
def deliverBody(proto):
proto.dataReceived("some data to ignore")
proto.connectionLost(mock_reason)
mock_response = Mock(spec=Response)
mock_response.deliverBody.side_effect = deliverBody
d = IgnoreBody.ignore(mock_response)
def verifyResponse(result):
eq_(result.value, mock_reason)
eq_(len(mock_reason.mock_calls), 1)
d.addErrback(verifyResponse)
return d
|
<commit_before><commit_msg>Add 100% unit test for IgnoreBody protocol.<commit_after>
|
from mock import Mock
from nose.tools import eq_
from twisted.trial import unittest
from twisted.web.client import Response
from autopush.protocol import IgnoreBody
class ProtocolTestCase(unittest.TestCase):
def test_ignore(self):
mock_reason = Mock()
mock_reason.check.return_value = True
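        # deliverBody side effect feeds some data to the protocol, then closes the connection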
def deliverBody(proto):
proto.dataReceived("some data to ignore")
proto.connectionLost(mock_reason)
mock_response = Mock(spec=Response)
mock_response.deliverBody.side_effect = deliverBody
d = IgnoreBody.ignore(mock_response)
def verifyResponse(result):
eq_(result, mock_response)
eq_(len(mock_reason.mock_calls), 1)
d.addCallback(verifyResponse)
return d
def test_ignore_check_false(self):
mock_reason = Mock()
mock_reason.check.return_value = False
def deliverBody(proto):
proto.dataReceived("some data to ignore")
proto.connectionLost(mock_reason)
mock_response = Mock(spec=Response)
mock_response.deliverBody.side_effect = deliverBody
d = IgnoreBody.ignore(mock_response)
def verifyResponse(result):
eq_(result.value, mock_reason)
eq_(len(mock_reason.mock_calls), 1)
d.addErrback(verifyResponse)
return d
|
Add 100% unit test for IgnoreBody protocol.from mock import Mock
from nose.tools import eq_
from twisted.trial import unittest
from twisted.web.client import Response
from autopush.protocol import IgnoreBody
class ProtocolTestCase(unittest.TestCase):
def test_ignore(self):
mock_reason = Mock()
mock_reason.check.return_value = True
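        # deliverBody side effect feeds some data to the protocol, then closes the connection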
def deliverBody(proto):
proto.dataReceived("some data to ignore")
proto.connectionLost(mock_reason)
mock_response = Mock(spec=Response)
mock_response.deliverBody.side_effect = deliverBody
d = IgnoreBody.ignore(mock_response)
def verifyResponse(result):
eq_(result, mock_response)
eq_(len(mock_reason.mock_calls), 1)
d.addCallback(verifyResponse)
return d
def test_ignore_check_false(self):
mock_reason = Mock()
mock_reason.check.return_value = False
def deliverBody(proto):
proto.dataReceived("some data to ignore")
proto.connectionLost(mock_reason)
mock_response = Mock(spec=Response)
mock_response.deliverBody.side_effect = deliverBody
d = IgnoreBody.ignore(mock_response)
def verifyResponse(result):
eq_(result.value, mock_reason)
eq_(len(mock_reason.mock_calls), 1)
d.addErrback(verifyResponse)
return d
|
<commit_before><commit_msg>Add 100% unit test for IgnoreBody protocol.<commit_after>from mock import Mock
from nose.tools import eq_
from twisted.trial import unittest
from twisted.web.client import Response
from autopush.protocol import IgnoreBody
class ProtocolTestCase(unittest.TestCase):
def test_ignore(self):
mock_reason = Mock()
mock_reason.check.return_value = True
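        # deliverBody side effect feeds some data to the protocol, then closes the connection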
def deliverBody(proto):
proto.dataReceived("some data to ignore")
proto.connectionLost(mock_reason)
mock_response = Mock(spec=Response)
mock_response.deliverBody.side_effect = deliverBody
d = IgnoreBody.ignore(mock_response)
def verifyResponse(result):
eq_(result, mock_response)
eq_(len(mock_reason.mock_calls), 1)
d.addCallback(verifyResponse)
return d
def test_ignore_check_false(self):
mock_reason = Mock()
mock_reason.check.return_value = False
def deliverBody(proto):
proto.dataReceived("some data to ignore")
proto.connectionLost(mock_reason)
mock_response = Mock(spec=Response)
mock_response.deliverBody.side_effect = deliverBody
d = IgnoreBody.ignore(mock_response)
def verifyResponse(result):
eq_(result.value, mock_reason)
eq_(len(mock_reason.mock_calls), 1)
d.addErrback(verifyResponse)
return d
|
|
be0639d7f7904c9e8c54883f51a36ad834f1a635
|
thinc/tests/unit/test_linear.py
|
thinc/tests/unit/test_linear.py
|
# coding: utf8
from __future__ import unicode_literals
import numpy
from ...linear.linear import LinearModel
def test_init():
model = LinearModel(3)
keys = numpy.ones((5,), dtype="uint64")
values = numpy.ones((5,), dtype="f")
lengths = numpy.zeros((2,), dtype="long")
lengths[0] = 3
lengths[1] = 2
scores, backprop = model.begin_update((keys, values, lengths))
assert scores.shape == (2, 3)
d_feats = backprop(scores)
assert d_feats is None
|
Add basic test for LinearModel
|
Add basic test for LinearModel
|
Python
|
mit
|
spacy-io/thinc,spacy-io/thinc,explosion/thinc,explosion/thinc,explosion/thinc,explosion/thinc,spacy-io/thinc
|
Add basic test for LinearModel
|
# coding: utf8
from __future__ import unicode_literals
import numpy
from ...linear.linear import LinearModel
def test_init():
model = LinearModel(3)
keys = numpy.ones((5,), dtype="uint64")
values = numpy.ones((5,), dtype="f")
lengths = numpy.zeros((2,), dtype="long")
lengths[0] = 3
lengths[1] = 2
scores, backprop = model.begin_update((keys, values, lengths))
assert scores.shape == (2, 3)
d_feats = backprop(scores)
assert d_feats is None
|
<commit_before><commit_msg>Add basic test for LinearModel<commit_after>
|
# coding: utf8
from __future__ import unicode_literals
import numpy
from ...linear.linear import LinearModel
def test_init():
model = LinearModel(3)
keys = numpy.ones((5,), dtype="uint64")
values = numpy.ones((5,), dtype="f")
lengths = numpy.zeros((2,), dtype="long")
lengths[0] = 3
lengths[1] = 2
scores, backprop = model.begin_update((keys, values, lengths))
assert scores.shape == (2, 3)
d_feats = backprop(scores)
assert d_feats is None
|
Add basic test for LinearModel# coding: utf8
from __future__ import unicode_literals
import numpy
from ...linear.linear import LinearModel
def test_init():
model = LinearModel(3)
keys = numpy.ones((5,), dtype="uint64")
values = numpy.ones((5,), dtype="f")
lengths = numpy.zeros((2,), dtype="long")
lengths[0] = 3
lengths[1] = 2
scores, backprop = model.begin_update((keys, values, lengths))
assert scores.shape == (2, 3)
d_feats = backprop(scores)
assert d_feats is None
|
<commit_before><commit_msg>Add basic test for LinearModel<commit_after># coding: utf8
from __future__ import unicode_literals
import numpy
from ...linear.linear import LinearModel
def test_init():
model = LinearModel(3)
keys = numpy.ones((5,), dtype="uint64")
values = numpy.ones((5,), dtype="f")
lengths = numpy.zeros((2,), dtype="long")
lengths[0] = 3
lengths[1] = 2
scores, backprop = model.begin_update((keys, values, lengths))
assert scores.shape == (2, 3)
d_feats = backprop(scores)
assert d_feats is None
|
|
82529522f6f281c5c647cbb8a79fcf5830bf6911
|
setup.py
|
setup.py
|
import multiprocessing # noqa # stop tests breaking tox
from setuptools import setup
import tvrenamr
setup(
name=tvrenamr.__title__,
version=tvrenamr.__version__,
description='Rename tv show files using online databases',
long_description=open('README.rst').read() + '\n\n' +
open('CHANGELOG.rst').read(),
author=tvrenamr.__author__,
author_email='george@ghickman.co.uk',
url='http://tvrenamr.info',
license='MIT',
packages=['tvrenamr'],
entry_points={'console_scripts': ['tvr=tvrenamr.frontend:run']},
classifiers=[
'Development Status :: 6 - Mature',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: MacOS :: MacOS X',
'Operating System :: POSIX',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Topic :: Utilities',
],
)
|
import multiprocessing # noqa # stop tests breaking tox
from setuptools import find_packages, setup
import tvrenamr
setup(
name=tvrenamr.__title__,
version=tvrenamr.__version__,
description='Rename tv show files using online databases',
long_description=open('README.rst').read() + '\n\n' +
open('CHANGELOG.rst').read(),
author=tvrenamr.__author__,
author_email='george@ghickman.co.uk',
url='http://tvrenamr.info',
license='MIT',
packages=find_packages(exclude=['docs', 'tests']),
entry_points={'console_scripts': ['tvr=tvrenamr.frontend:run']},
classifiers=[
'Development Status :: 6 - Mature',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: MacOS :: MacOS X',
'Operating System :: POSIX',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Topic :: Utilities',
],
)
|
Use find_packages to deal with vendoring
|
Use find_packages to deal with vendoring
|
Python
|
mit
|
ghickman/tvrenamr,wintersandroid/tvrenamr
|
import multiprocessing # noqa # stop tests breaking tox
from setuptools import setup
import tvrenamr
setup(
name=tvrenamr.__title__,
version=tvrenamr.__version__,
description='Rename tv show files using online databases',
long_description=open('README.rst').read() + '\n\n' +
open('CHANGELOG.rst').read(),
author=tvrenamr.__author__,
author_email='george@ghickman.co.uk',
url='http://tvrenamr.info',
license='MIT',
packages=['tvrenamr'],
entry_points={'console_scripts': ['tvr=tvrenamr.frontend:run']},
classifiers=[
'Development Status :: 6 - Mature',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: MacOS :: MacOS X',
'Operating System :: POSIX',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Topic :: Utilities',
],
)
Use find_packages to deal with vendoring
|
import multiprocessing # noqa # stop tests breaking tox
from setuptools import find_packages, setup
import tvrenamr
setup(
name=tvrenamr.__title__,
version=tvrenamr.__version__,
description='Rename tv show files using online databases',
long_description=open('README.rst').read() + '\n\n' +
open('CHANGELOG.rst').read(),
author=tvrenamr.__author__,
author_email='george@ghickman.co.uk',
url='http://tvrenamr.info',
license='MIT',
packages=find_packages(exclude=['docs', 'tests']),
entry_points={'console_scripts': ['tvr=tvrenamr.frontend:run']},
classifiers=[
'Development Status :: 6 - Mature',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: MacOS :: MacOS X',
'Operating System :: POSIX',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Topic :: Utilities',
],
)
|
<commit_before>import multiprocessing # noqa # stop tests breaking tox
from setuptools import setup
import tvrenamr
setup(
name=tvrenamr.__title__,
version=tvrenamr.__version__,
description='Rename tv show files using online databases',
long_description=open('README.rst').read() + '\n\n' +
open('CHANGELOG.rst').read(),
author=tvrenamr.__author__,
author_email='george@ghickman.co.uk',
url='http://tvrenamr.info',
license='MIT',
packages=['tvrenamr'],
entry_points={'console_scripts': ['tvr=tvrenamr.frontend:run']},
classifiers=[
'Development Status :: 6 - Mature',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: MacOS :: MacOS X',
'Operating System :: POSIX',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Topic :: Utilities',
],
)
<commit_msg>Use find_packages to deal with vendoring<commit_after>
|
import multiprocessing # noqa # stop tests breaking tox
from setuptools import find_packages, setup
import tvrenamr
setup(
name=tvrenamr.__title__,
version=tvrenamr.__version__,
description='Rename tv show files using online databases',
long_description=open('README.rst').read() + '\n\n' +
open('CHANGELOG.rst').read(),
author=tvrenamr.__author__,
author_email='george@ghickman.co.uk',
url='http://tvrenamr.info',
license='MIT',
packages=find_packages(exclude=['docs', 'tests']),
entry_points={'console_scripts': ['tvr=tvrenamr.frontend:run']},
classifiers=[
'Development Status :: 6 - Mature',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: MacOS :: MacOS X',
'Operating System :: POSIX',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Topic :: Utilities',
],
)
|
import multiprocessing # noqa # stop tests breaking tox
from setuptools import setup
import tvrenamr
setup(
name=tvrenamr.__title__,
version=tvrenamr.__version__,
description='Rename tv show files using online databases',
long_description=open('README.rst').read() + '\n\n' +
open('CHANGELOG.rst').read(),
author=tvrenamr.__author__,
author_email='george@ghickman.co.uk',
url='http://tvrenamr.info',
license='MIT',
packages=['tvrenamr'],
entry_points={'console_scripts': ['tvr=tvrenamr.frontend:run']},
classifiers=[
'Development Status :: 6 - Mature',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: MacOS :: MacOS X',
'Operating System :: POSIX',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Topic :: Utilities',
],
)
Use find_packages to deal with vendoringimport multiprocessing # noqa # stop tests breaking tox
from setuptools import find_packages, setup
import tvrenamr
setup(
name=tvrenamr.__title__,
version=tvrenamr.__version__,
description='Rename tv show files using online databases',
long_description=open('README.rst').read() + '\n\n' +
open('CHANGELOG.rst').read(),
author=tvrenamr.__author__,
author_email='george@ghickman.co.uk',
url='http://tvrenamr.info',
license='MIT',
packages=find_packages(exclude=['docs', 'tests']),
entry_points={'console_scripts': ['tvr=tvrenamr.frontend:run']},
classifiers=[
'Development Status :: 6 - Mature',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: MacOS :: MacOS X',
'Operating System :: POSIX',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Topic :: Utilities',
],
)
|
<commit_before>import multiprocessing # noqa # stop tests breaking tox
from setuptools import setup
import tvrenamr
setup(
name=tvrenamr.__title__,
version=tvrenamr.__version__,
description='Rename tv show files using online databases',
long_description=open('README.rst').read() + '\n\n' +
open('CHANGELOG.rst').read(),
author=tvrenamr.__author__,
author_email='george@ghickman.co.uk',
url='http://tvrenamr.info',
license='MIT',
packages=['tvrenamr'],
entry_points={'console_scripts': ['tvr=tvrenamr.frontend:run']},
classifiers=[
'Development Status :: 6 - Mature',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: MacOS :: MacOS X',
'Operating System :: POSIX',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Topic :: Utilities',
],
)
<commit_msg>Use find_packages to deal with vendoring<commit_after>import multiprocessing # noqa # stop tests breaking tox
from setuptools import find_packages, setup
import tvrenamr
setup(
name=tvrenamr.__title__,
version=tvrenamr.__version__,
description='Rename tv show files using online databases',
long_description=open('README.rst').read() + '\n\n' +
open('CHANGELOG.rst').read(),
author=tvrenamr.__author__,
author_email='george@ghickman.co.uk',
url='http://tvrenamr.info',
license='MIT',
packages=find_packages(exclude=['docs', 'tests']),
entry_points={'console_scripts': ['tvr=tvrenamr.frontend:run']},
classifiers=[
'Development Status :: 6 - Mature',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: MacOS :: MacOS X',
'Operating System :: POSIX',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Topic :: Utilities',
],
)
|
972b6ad21509e313d7cfd901b12020135e202c51
|
logos/migrations/0004_auto_20160518_2120.py
|
logos/migrations/0004_auto_20160518_2120.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('logos', '0003_auto_20160217_2158'),
]
operations = [
migrations.DeleteModel(
name='CapturedUrls',
),
migrations.RemoveField(
model_name='reportedtweets',
name='tweet',
),
migrations.DeleteModel(
name='TwitterFollows',
),
migrations.DeleteModel(
name='ReportedTweets',
),
migrations.DeleteModel(
name='TwitterStatuses',
),
]
|
Add migration deleting some old models in logos/
|
Add migration deleting some old models in logos/
|
Python
|
apache-2.0
|
kiwiheretic/logos-v2,kiwiheretic/logos-v2,kiwiheretic/logos-v2,kiwiheretic/logos-v2
|
Add migration deleting some old models in logos/
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('logos', '0003_auto_20160217_2158'),
]
operations = [
migrations.DeleteModel(
name='CapturedUrls',
),
migrations.RemoveField(
model_name='reportedtweets',
name='tweet',
),
migrations.DeleteModel(
name='TwitterFollows',
),
migrations.DeleteModel(
name='ReportedTweets',
),
migrations.DeleteModel(
name='TwitterStatuses',
),
]
|
<commit_before><commit_msg>Add migration deleting some old models in logos/<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('logos', '0003_auto_20160217_2158'),
]
operations = [
migrations.DeleteModel(
name='CapturedUrls',
),
migrations.RemoveField(
model_name='reportedtweets',
name='tweet',
),
migrations.DeleteModel(
name='TwitterFollows',
),
migrations.DeleteModel(
name='ReportedTweets',
),
migrations.DeleteModel(
name='TwitterStatuses',
),
]
|
Add migration deleting some old models in logos/# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('logos', '0003_auto_20160217_2158'),
]
operations = [
migrations.DeleteModel(
name='CapturedUrls',
),
migrations.RemoveField(
model_name='reportedtweets',
name='tweet',
),
migrations.DeleteModel(
name='TwitterFollows',
),
migrations.DeleteModel(
name='ReportedTweets',
),
migrations.DeleteModel(
name='TwitterStatuses',
),
]
|
<commit_before><commit_msg>Add migration deleting some old models in logos/<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('logos', '0003_auto_20160217_2158'),
]
operations = [
migrations.DeleteModel(
name='CapturedUrls',
),
migrations.RemoveField(
model_name='reportedtweets',
name='tweet',
),
migrations.DeleteModel(
name='TwitterFollows',
),
migrations.DeleteModel(
name='ReportedTweets',
),
migrations.DeleteModel(
name='TwitterStatuses',
),
]
|
|
efad39e0932b60000e89e486fc16239833ea4d98
|
copy/opt/core/bin/core-ns-download.py
|
copy/opt/core/bin/core-ns-download.py
|
#!/usr/bin/env python
# core-ns-download
# Check and download zip archive with zone information but only
# if required.
import requests
import os, zipfile
import argparse
from io import BytesIO
import json
import subprocess  # needed for the nsd-control reload call below
# Parse arguments
parser = argparse.ArgumentParser(description='Download multiple zip archives with zone information for NSD.')
parser.add_argument('url', metavar='url', nargs='+', help='URLs with the ZIP archive')
args = parser.parse_args()
# Cache file
cfile = '/tmp/core-ns-cache.json'
# Nameserver directory
nsdir = '/tmp'
# Read cache information if available
cache = {}
try:
with open(cfile) as cache_file:
cache = json.load(cache_file)
except:
pass
# Get all information
for url in args.url:
header = {}
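    # send the cached ETag so an unchanged archive comes back as 304 Not Modified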
if cache.get(url):
header = {
'If-None-Match': cache.get(url)
}
try:
req = requests.get(url, headers=header)
req.raise_for_status()
except Exception as e:
print('Problem: ' + str(e))
continue
cache[url] = req.headers.get('etag')
if req.ok and req.status_code == 200:
try:
z = zipfile.ZipFile(BytesIO(req.content))
z.extractall(nsdir)
subprocess.call(['nsd-control', 'reload'])
except Exception as e:
print(str(e))
# Write cache information
try:
with open(cfile, 'w+') as cache_file:
json.dump(cache, cache_file)
except Exception as e:
print(str(e))
|
Add our awesome core-ns download script
|
Add our awesome core-ns download script
|
Python
|
mit
|
skylime/mi-core-ns,skylime/mi-core-ns
|
Add our awesome core-ns download script
|
#!/usr/bin/env python
# core-ns-download
# Check and download a zip archive with zone information, but only
# if required.
import requests
import os, subprocess, zipfile
import argparse
from io import BytesIO
import json
# Parse arguments
parser = argparse.ArgumentParser(description='Download multiple zip archives with zone information for NSD.')
parser.add_argument('url', metavar='url', nargs='+', help='URLs with the ZIP archive')
args = parser.parse_args()
# Cache file
cfile = '/tmp/core-ns-cache.json'
# Nameserver directory
nsdir = '/tmp'
# Read cache information if available
cache = {}
try:
with open(cfile) as cache_file:
cache = json.load(cache_file)
except:
pass
# Get all information
for url in args.url:
header = {}
if cache.get(url):
header = {
'If-None-Match': cache.get(url)
}
try:
req = requests.get(url, headers=header)
req.raise_for_status()
except Exception as e:
print('Problem: ' + str(e))
continue
cache[url] = req.headers.get('etag')
if req.ok and req.status_code == 200:
try:
z = zipfile.ZipFile(BytesIO(req.content))
z.extractall(nsdir)
subprocess.call(['nsd-control', 'reload'])
except Exception as e:
print(str(e))
# Write cache information
try:
with open(cfile, 'w+') as cache_file:
json.dump(cache, cache_file)
except Exception as e:
print(str(e))
|
<commit_before><commit_msg>Add our awesome core-ns download script<commit_after>
|
#!/usr/bin/env python
# core-ns-download
# Check and download a zip archive with zone information, but only
# if required.
import requests
import os, subprocess, zipfile
import argparse
from io import BytesIO
import json
# Parse arguments
parser = argparse.ArgumentParser(description='Download multiple zip archives with zone information for NSD.')
parser.add_argument('url', metavar='url', nargs='+', help='URLs with the ZIP archive')
args = parser.parse_args()
# Cache file
cfile = '/tmp/core-ns-cache.json'
# Nameserver directory
nsdir = '/tmp'
# Read cache information if available
cache = {}
try:
with open(cfile) as cache_file:
cache = json.load(cache_file)
except:
pass
# Get all information
for url in args.url:
header = {}
if cache.get(url):
header = {
'If-None-Match': cache.get(url)
}
try:
req = requests.get(url, headers=header)
req.raise_for_status()
except Exception as e:
print('Problem: ' + str(e))
continue
cache[url] = req.headers.get('etag')
if req.ok and req.status_code == 200:
try:
z = zipfile.ZipFile(BytesIO(req.content))
z.extractall(nsdir)
subprocess.call(['nsd-control', 'reload'])
except Exception as e:
print(str(e))
# Write cache information
try:
with open(cfile, 'w+') as cache_file:
json.dump(cache, cache_file)
except Exception as e:
print(str(e))
|
Add our awesome core-ns download script#!/usr/bin/env python
# core-ns-download
# Check and download a zip archive with zone information, but only
# if required.
import requests
import os, subprocess, zipfile
import argparse
from io import BytesIO
import json
# Parse arguments
parser = argparse.ArgumentParser(description='Download multiple zip archives with zone information for NSD.')
parser.add_argument('url', metavar='url', nargs='+', help='URLs with the ZIP archive')
args = parser.parse_args()
# Cache file
cfile = '/tmp/core-ns-cache.json'
# Nameserver directory
nsdir = '/tmp'
# Read cache information if available
cache = {}
try:
with open(cfile) as cache_file:
cache = json.load(cache_file)
except:
pass
# Get all information
for url in args.url:
header = {}
if cache.get(url):
header = {
'If-None-Match': cache.get(url)
}
try:
req = requests.get(url, headers=header)
req.raise_for_status()
except Exception as e:
print('Problem: ' + str(e))
continue
cache[url] = req.headers.get('etag')
if req.ok and req.status_code == 200:
try:
z = zipfile.ZipFile(BytesIO(req.content))
z.extractall(nsdir)
subprocess.call(['nsd-control', 'reload'])
except Exception as e:
print(str(e))
# Write cache information
try:
with open(cfile, 'w+') as cache_file:
json.dump(cache, cache_file)
except Exception as e:
print(str(e))
|
<commit_before><commit_msg>Add our awesome core-ns download script<commit_after>#!/usr/bin/env python
# core-ns-download
# Check and download a zip archive with zone information, but only
# if required.
import requests
import os, subprocess, zipfile
import argparse
from io import BytesIO
import json
# Parse arguments
parser = argparse.ArgumentParser(description='Download multiple zip archives with zone information for NSD.')
parser.add_argument('url', metavar='url', nargs='+', help='URLs with the ZIP archive')
args = parser.parse_args()
# Cache file
cfile = '/tmp/core-ns-cache.json'
# Nameserver directory
nsdir = '/tmp'
# Read cache information if available
cache = {}
try:
with open(cfile) as cache_file:
cache = json.load(cache_file)
except:
pass
# Get all information
for url in args.url:
header = {}
if cache.get(url):
header = {
'If-None-Match': cache.get(url)
}
try:
req = requests.get(url, headers=header)
req.raise_for_status()
except Exception as e:
print('Problem: ' + str(e))
continue
cache[url] = req.headers.get('etag')
if req.ok and req.status_code == 200:
try:
z = zipfile.ZipFile(BytesIO(req.content))
z.extractall(nsdir)
subprocess.call(['nsd-control', 'reload'])
except Exception as e:
print(str(e))
# Write cache information
try:
with open(cfile, 'w+') as cache_file:
json.dump(cache, cache_file)
except Exception as e:
print(str(e))
|
|
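The script in this record relies on HTTP conditional requests: it caches each URL's ETag and replays it as If-None-Match, so an unchanged archive comes back as 304 Not Modified with no body and is skipped. A minimal standalone sketch of that pattern (the function name and return convention are illustrative, not from the script):

import requests

def fetch_if_changed(url, etag=None):
    """Return (content, new_etag); content is None when the server replies 304."""
    headers = {'If-None-Match': etag} if etag else {}
    resp = requests.get(url, headers=headers)
    resp.raise_for_status()  # 304 is not an error status, so this does not raise for it
    if resp.status_code == 304:
        return None, etag    # unchanged since the cached ETag
    return resp.content, resp.headers.get('etag')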
d3fd0d4f2220cee440f0af1a9ed3efd5cfd9444c
|
sale_exception_nostock_by_line/__init__.py
|
sale_exception_nostock_by_line/__init__.py
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Author: Guewen Baconnier
# Copyright 2013 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import model
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Author: Joel Grand-Guillaume
# Copyright 2013 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import model
|
Correct author (../trunk-generic/ rev 29.1.22)
|
[FIX] Correct author
(../trunk-generic/ rev 29.1.22)
|
Python
|
agpl-3.0
|
jabibi/sale-workflow,anas-taji/sale-workflow,brain-tec/sale-workflow,alexsandrohaag/sale-workflow,richard-willowit/sale-workflow,kittiu/sale-workflow,BT-fgarbely/sale-workflow,jjscarafia/sale-workflow,Endika/sale-workflow,BT-jmichaud/sale-workflow,luistorresm/sale-workflow,fevxie/sale-workflow,adhoc-dev/sale-workflow,Eficent/sale-workflow,Antiun/sale-workflow,Rona111/sale-workflow,VitalPet/sale-workflow,acsone/sale-workflow,guewen/sale-workflow,BT-cserra/sale-workflow,diagramsoftware/sale-workflow,numerigraphe/sale-workflow,grap/sale-workflow,ddico/sale-workflow,anybox/sale-workflow,akretion/sale-workflow,factorlibre/sale-workflow,BT-ojossen/sale-workflow,gurneyalex/sale-workflow,thomaspaulb/sale-workflow,damdam-s/sale-workflow,kittiu/sale-workflow,xpansa/sale-workflow,credativUK/sale-workflow,numerigraphe/sale-workflow,acsone/sale-workflow,open-synergy/sale-workflow,clubit/sale-workflow,akretion/sale-workflow,brain-tec/sale-workflow
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Author: Guewen Baconnier
# Copyright 2013 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import model
[FIX] Correct author
(../trunk-generic/ rev 29.1.22)
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Author: Joel Grand-Guillaume
# Copyright 2013 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import model
|
<commit_before># -*- coding: utf-8 -*-
##############################################################################
#
# Author: Guewen Baconnier
# Copyright 2013 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import model
<commit_msg>[FIX] Correct author
(../trunk-generic/ rev 29.1.22)<commit_after>
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Author: Joel Grand-Guillaume
# Copyright 2013 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import model
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Author: Guewen Baconnier
# Copyright 2013 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import model
[FIX] Correct author
(../trunk-generic/ rev 29.1.22)# -*- coding: utf-8 -*-
##############################################################################
#
# Author: Joel Grand-Guillaume
# Copyright 2013 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import model
|
<commit_before># -*- coding: utf-8 -*-
##############################################################################
#
# Author: Guewen Baconnier
# Copyright 2013 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import model
<commit_msg>[FIX] Correct author
(../trunk-generic/ rev 29.1.22)<commit_after># -*- coding: utf-8 -*-
##############################################################################
#
# Author: Joel Grand-Guillaume
# Copyright 2013 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import model
|
0e5a6019f88093691724532221fd1f4b24ad84d6
|
app.py
|
app.py
|
def app(env, start_response):
start_response('200 OK', [('Content-Type','text/html')])
return [b"Hello World"]
if __name__ == '__main__':
try:
from wsgiref.simple_server import make_server
httpd = make_server('', 8080, app)
print('Serving on port 8080...')
httpd.serve_forever()
except KeyboardInterrupt:
print('Goodbye.')
|
Return 'Hello World' by wsgiref
|
Return 'Hello World' by wsgiref
|
Python
|
mit
|
c-bata/kobin,kobinpy/kobin,kobinpy/kobin,c-bata/kobin
|
Return 'Hello World' by wsgiref
|
def app(env, start_response):
start_response('200 OK', [('Content-Type','text/html')])
return [b"Hello World"]
if __name__ == '__main__':
try:
from wsgiref.simple_server import make_server
httpd = make_server('', 8080, app)
print('Serving on port 8080...')
httpd.serve_forever()
except KeyboardInterrupt:
print('Goodbye.')
|
<commit_before><commit_msg>Return 'Hello World' by wsgiref<commit_after>
|
def app(env, start_response):
start_response('200 OK', [('Content-Type','text/html')])
return [b"Hello World"]
if __name__ == '__main__':
try:
from wsgiref.simple_server import make_server
httpd = make_server('', 8080, app)
print('Serving on port 8080...')
httpd.serve_forever()
except KeyboardInterrupt:
print('Goodbye.')
|
Return 'Hello World' by wsgirefdef app(env, start_response):
start_response('200 OK', [('Content-Type','text/html')])
return [b"Hello World"]
if __name__ == '__main__':
try:
from wsgiref.simple_server import make_server
httpd = make_server('', 8080, app)
print('Serving on port 8080...')
httpd.serve_forever()
except KeyboardInterrupt:
print('Goodbye.')
|
<commit_before><commit_msg>Return 'Hello World' by wsgiref<commit_after>def app(env, start_response):
start_response('200 OK', [('Content-Type','text/html')])
return [b"Hello World"]
if __name__ == '__main__':
try:
from wsgiref.simple_server import make_server
httpd = make_server('', 8080, app)
print('Serving on port 8080...')
httpd.serve_forever()
except KeyboardInterrupt:
print('Goodbye.')
|
|
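A handy property of the record above: a WSGI application is just a callable taking environ and start_response, so it can be exercised without binding a port at all. A sketch (assumes the app callable from app.py above is in scope):

# Drive the WSGI callable directly with a stub start_response.
def fake_start_response(status, headers):
    print(status, headers)

body = app({}, fake_start_response)  # an empty environ is enough for this app
print(b''.join(body))                # b'Hello World'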
6b940e8d0f4ae4deac7a71b133e0c9e863f324d7
|
stars/middleware.py
|
stars/middleware.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
class StarFilterMiddleware(object):
"""
Stores star filter data in user's session.
"""
def process_request(self, request):
assert hasattr(request, 'session'), "StarFilterMiddleware requires session middleware to be installed."
limiting_magnitude = request.GET.get('limiting_magnitude')
if limiting_magnitude:
if limiting_magnitude == 'None':
limiting_magnitude = None
request.session['limiting_magnitude'] = limiting_magnitude
|
Store limiting magnitude in session, if requested from filter.
|
Store limiting magnitude in session, if requested from filter.
|
Python
|
mit
|
zsiciarz/variablestars.net,zsiciarz/variablestars.net,zsiciarz/variablestars.net
|
Store limiting magnitude in session, if requested from filter.
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
class StarFilterMiddleware(object):
"""
Stores star filter data in user's session.
"""
def process_request(self, request):
assert hasattr(request, 'session'), "StarFilterMiddleware requires session middleware to be installed."
limiting_magnitude = request.GET.get('limiting_magnitude')
if limiting_magnitude:
if limiting_magnitude == 'None':
limiting_magnitude = None
request.session['limiting_magnitude'] = limiting_magnitude
|
<commit_before><commit_msg>Store limiting magnitude in session, if requested from filter.<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
class StarFilterMiddleware(object):
"""
Stores star filter data in user's session.
"""
def process_request(self, request):
assert hasattr(request, 'session'), "StarFilterMiddleware requires session middleware to be installed."
limiting_magnitude = request.GET.get('limiting_magnitude')
if limiting_magnitude:
if limiting_magnitude == 'None':
limiting_magnitude = None
request.session['limiting_magnitude'] = limiting_magnitude
|
Store limiting magnitude in session, if requested from filter.# -*- coding: utf-8 -*-
from __future__ import unicode_literals
class StarFilterMiddleware(object):
"""
Stores star filter data in user's session.
"""
def process_request(self, request):
assert hasattr(request, 'session'), "StarFilterMiddleware requires session middleware to be installed."
limiting_magnitude = request.GET.get('limiting_magnitude')
if limiting_magnitude:
if limiting_magnitude == 'None':
limiting_magnitude = None
request.session['limiting_magnitude'] = limiting_magnitude
|
<commit_before><commit_msg>Store limiting magnitude in session, if requested from filter.<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
class StarFilterMiddleware(object):
"""
Stores star filter data in user's session.
"""
def process_request(self, request):
assert hasattr(request, 'session'), "StarFilterMiddleware requires session middleware to be installed."
limiting_magnitude = request.GET.get('limiting_magnitude')
if limiting_magnitude:
if limiting_magnitude == 'None':
limiting_magnitude = None
request.session['limiting_magnitude'] = limiting_magnitude
|
|
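The class above uses the old-style MIDDLEWARE_CLASSES hook (a bare process_request method). On Django 1.10+ the same behaviour would be written as callable middleware; a sketch, not the project's actual code:

# New-style (Django 1.10+) equivalent of the middleware above.
class StarFilterMiddleware(object):
    def __init__(self, get_response):
        self.get_response = get_response

    def __call__(self, request):
        limiting_magnitude = request.GET.get('limiting_magnitude')
        if limiting_magnitude:
            if limiting_magnitude == 'None':
                limiting_magnitude = None
            request.session['limiting_magnitude'] = limiting_magnitude
        return self.get_response(request)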
010f1b9526c666986e2557f6b693efe5a194d30f
|
scrapi/harvesters/lshtm.py
|
scrapi/harvesters/lshtm.py
|
'''
Harvester for the LSHTM Research Online for the SHARE project
Example API call: http://researchonline.lshtm.ac.uk/cgi/oai2?verb=ListRecords&metadataPrefix=oai_dc
'''
from __future__ import unicode_literals
from scrapi.base import OAIHarvester
class LshtmHarvester(OAIHarvester):
short_name = 'lshtm'
long_name = 'London School of Hygiene and Tropical Medicine Research Online'
url = 'http://researchonline.lshtm.ac.uk'
base_url = 'http://researchonline.lshtm.ac.uk/cgi/oai2'
property_list = ['date', 'type', 'identifier', 'relation', 'setSpec']
timezone_granularity = True
|
Add harvester for the London School of Hygiene and Tropical Medicine
|
Add harvester for the London School of Hygiene and Tropical Medicine
Closes [#SHARE-123]
|
Python
|
apache-2.0
|
CenterForOpenScience/scrapi,erinspace/scrapi,erinspace/scrapi,fabianvf/scrapi,CenterForOpenScience/scrapi,fabianvf/scrapi
|
Add harvester for the London School of Hygiene and Tropical Medicine
Closes [#SHARE-123]
|
'''
Harvester for the LSHTM Research Online for the SHARE project
Example API call: http://researchonline.lshtm.ac.uk/cgi/oai2?verb=ListRecords&metadataPrefix=oai_dc
'''
from __future__ import unicode_literals
from scrapi.base import OAIHarvester
class LshtmHarvester(OAIHarvester):
short_name = 'lshtm'
long_name = 'London School of Hygiene and Tropical Medicine Research Online'
url = 'http://researchonline.lshtm.ac.uk'
base_url = 'http://researchonline.lshtm.ac.uk/cgi/oai2'
property_list = ['date', 'type', 'identifier', 'relation', 'setSpec']
timezone_granularity = True
|
<commit_before><commit_msg>Add harvester for the London School of Hygiene and Tropical Medicine
Closes [#SHARE-123]<commit_after>
|
'''
Harvester for the LSHTM Research Online for the SHARE project
Example API call: http://researchonline.lshtm.ac.uk/cgi/oai2?verb=ListRecords&metadataPrefix=oai_dc
'''
from __future__ import unicode_literals
from scrapi.base import OAIHarvester
class LshtmHarvester(OAIHarvester):
short_name = 'lshtm'
long_name = 'London School of Hygiene and Tropical Medicine Research Online'
url = 'http://researchonline.lshtm.ac.uk'
base_url = 'http://researchonline.lshtm.ac.uk/cgi/oai2'
property_list = ['date', 'type', 'identifier', 'relation', 'setSpec']
timezone_granularity = True
|
Add harvester for the London School of Hygiene and Tropical Medicine
Closes [#SHARE-123]'''
Harvester for the LSHTM Research Online for the SHARE project
Example API call: http://researchonline.lshtm.ac.uk/cgi/oai2?verb=ListRecords&metadataPrefix=oai_dc
'''
from __future__ import unicode_literals
from scrapi.base import OAIHarvester
class LshtmHarvester(OAIHarvester):
short_name = 'lshtm'
long_name = 'London School of Hygiene and Tropical Medicine Research Online'
url = 'http://researchonline.lshtm.ac.uk'
base_url = 'http://researchonline.lshtm.ac.uk/cgi/oai2'
property_list = ['date', 'type', 'identifier', 'relation', 'setSpec']
timezone_granularity = True
|
<commit_before><commit_msg>Add harvester for the London School of Hygiene and Tropical Medicine
Closes [#SHARE-123]<commit_after>'''
Harvester for the LSHTM Research Online for the SHARE project
Example API call: http://researchonline.lshtm.ac.uk/cgi/oai2?verb=ListRecords&metadataPrefix=oai_dc
'''
from __future__ import unicode_literals
from scrapi.base import OAIHarvester
class LshtmHarvester(OAIHarvester):
short_name = 'lshtm'
long_name = 'London School of Hygiene and Tropical Medicine Research Online'
url = 'http://researchonline.lshtm.ac.uk'
base_url = 'http://researchonline.lshtm.ac.uk/cgi/oai2'
property_list = ['date', 'type', 'identifier', 'relation', 'setSpec']
timezone_granularity = True
|
|
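The subclass above mostly just points scrapi's OAIHarvester at an OAI-PMH endpoint; the underlying request is the ListRecords verb shown in the docstring, which can be reproduced by hand. A quick sketch using requests:

import requests

params = {'verb': 'ListRecords', 'metadataPrefix': 'oai_dc'}
resp = requests.get('http://researchonline.lshtm.ac.uk/cgi/oai2', params=params)
print(resp.status_code)
print(resp.text[:200])  # start of the OAI-PMH XML envelope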
317aeeec2279f68daf70d0b7bf8e14ee1859f599
|
examples/remote_cluster.py
|
examples/remote_cluster.py
|
# Copyright 2016 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This example demonstrates communication with a remote Kube cluster from a
# server outside of the cluster without the kube client installed on it.
# The communication is secured with the use of Bearer token.
from kubernetes import client, config
def main():
    # Define the bearer token we are going to use to authenticate.
# See here to create the token:
# https://kubernetes.io/docs/tasks/access-application-cluster/access-cluster/
aToken = "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"
# Create a configuration object
configuration = client.Configuration()
# Specify the endpoint of your Kube cluster
configuration.host = "https://XXX.XXX.XXX.XXX:443"
# Security part.
# In this simple example we are not going to verify the SSL certificate of
    # the remote cluster (for simplicity reasons)
configuration.verify_ssl = False
# Nevertheless if you want to do it you can with these 2 parameters
# configuration.verify_ssl=True
# ssl_ca_cert is the filepath to the file that contains the certificate.
# configuration.ssl_ca_cert="certificate"
configuration.api_key = {"authorization": "Bearer " + aToken}
# Use our configuration
client.Configuration.set_default(configuration)
# Do calls
v1 = client.CoreV1Api()
print("Listing pods with their IPs:")
ret = v1.list_pod_for_all_namespaces(watch=False)
for i in ret.items:
print("%s\t%s\t%s" %
(i.status.pod_ip, i.metadata.namespace, i.metadata.name))
if __name__ == '__main__':
main()
|
Add example for remote cluster without kube client on server
|
Add example for remote cluster without kube client on server
|
Python
|
apache-2.0
|
kubernetes-client/python,kubernetes-client/python
|
Add example for remote cluster without kube client on server
|
# Copyright 2016 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This example demonstrates communication with a remote Kube cluster from a
# server outside of the cluster without the kube client installed on it.
# The communication is secured with the use of Bearer token.
from kubernetes import client, config
def main():
    # Define the bearer token we are going to use to authenticate.
# See here to create the token:
# https://kubernetes.io/docs/tasks/access-application-cluster/access-cluster/
aToken = "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"
# Create a configuration object
configuration = client.Configuration()
# Specify the endpoint of your Kube cluster
configuration.host = "https://XXX.XXX.XXX.XXX:443"
# Security part.
# In this simple example we are not going to verify the SSL certificate of
    # the remote cluster (for simplicity reasons)
configuration.verify_ssl = False
# Nevertheless if you want to do it you can with these 2 parameters
# configuration.verify_ssl=True
# ssl_ca_cert is the filepath to the file that contains the certificate.
# configuration.ssl_ca_cert="certificate"
configuration.api_key = {"authorization": "Bearer " + aToken}
# Use our configuration
client.Configuration.set_default(configuration)
# Do calls
v1 = client.CoreV1Api()
print("Listing pods with their IPs:")
ret = v1.list_pod_for_all_namespaces(watch=False)
for i in ret.items:
print("%s\t%s\t%s" %
(i.status.pod_ip, i.metadata.namespace, i.metadata.name))
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add example for remote cluster without kube client on server<commit_after>
|
# Copyright 2016 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This example demonstrates communication with a remote Kube cluster from a
# server outside of the cluster without the kube client installed on it.
# The communication is secured with the use of Bearer token.
from kubernetes import client, config
def main():
    # Define the bearer token we are going to use to authenticate.
# See here to create the token:
# https://kubernetes.io/docs/tasks/access-application-cluster/access-cluster/
aToken = "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"
# Create a configuration object
configuration = client.Configuration()
# Specify the endpoint of your Kube cluster
configuration.host = "https://XXX.XXX.XXX.XXX:443"
# Security part.
# In this simple example we are not going to verify the SSL certificate of
    # the remote cluster (for simplicity reasons)
configuration.verify_ssl = False
# Nevertheless if you want to do it you can with these 2 parameters
# configuration.verify_ssl=True
# ssl_ca_cert is the filepath to the file that contains the certificate.
# configuration.ssl_ca_cert="certificate"
configuration.api_key = {"authorization": "Bearer " + aToken}
# Use our configuration
client.Configuration.set_default(configuration)
# Do calls
v1 = client.CoreV1Api()
print("Listing pods with their IPs:")
ret = v1.list_pod_for_all_namespaces(watch=False)
for i in ret.items:
print("%s\t%s\t%s" %
(i.status.pod_ip, i.metadata.namespace, i.metadata.name))
if __name__ == '__main__':
main()
|
Add example for remote cluster without kube client on server# Copyright 2016 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This example demonstrates communication with a remote Kube cluster from a
# server outside of the cluster without the kube client installed on it.
# The communication is secured with the use of Bearer token.
from kubernetes import client, config
def main():
    # Define the bearer token we are going to use to authenticate.
# See here to create the token:
# https://kubernetes.io/docs/tasks/access-application-cluster/access-cluster/
aToken = "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"
# Create a configuration object
configuration = client.Configuration()
# Specify the endpoint of your Kube cluster
configuration.host = "https://XXX.XXX.XXX.XXX:443"
# Security part.
# In this simple example we are not going to verify the SSL certificate of
    # the remote cluster (for simplicity reasons)
configuration.verify_ssl = False
# Nevertheless if you want to do it you can with these 2 parameters
# configuration.verify_ssl=True
# ssl_ca_cert is the filepath to the file that contains the certificate.
# configuration.ssl_ca_cert="certificate"
configuration.api_key = {"authorization": "Bearer " + aToken}
# Use our configuration
client.Configuration.set_default(configuration)
# Do calls
v1 = client.CoreV1Api()
print("Listing pods with their IPs:")
ret = v1.list_pod_for_all_namespaces(watch=False)
for i in ret.items:
print("%s\t%s\t%s" %
(i.status.pod_ip, i.metadata.namespace, i.metadata.name))
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add example for remote cluster without kube client on server<commit_after># Copyright 2016 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This example demonstrates communication with a remote Kube cluster from a
# server outside of the cluster without the kube client installed on it.
# The communication is secured with the use of Bearer token.
from kubernetes import client, config
def main():
    # Define the bearer token we are going to use to authenticate.
# See here to create the token:
# https://kubernetes.io/docs/tasks/access-application-cluster/access-cluster/
aToken = "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"
# Create a configuration object
configuration = client.Configuration()
# Specify the endpoint of your Kube cluster
configuration.host = "https://XXX.XXX.XXX.XXX:443"
# Security part.
# In this simple example we are not going to verify the SSL certificate of
    # the remote cluster (for simplicity reasons)
configuration.verify_ssl = False
# Nevertheless if you want to do it you can with these 2 parameters
# configuration.verify_ssl=True
# ssl_ca_cert is the filepath to the file that contains the certificate.
# configuration.ssl_ca_cert="certificate"
configuration.api_key = {"authorization": "Bearer " + aToken}
# Use our configuration
client.Configuration.set_default(configuration)
# Do calls
v1 = client.CoreV1Api()
print("Listing pods with their IPs:")
ret = v1.list_pod_for_all_namespaces(watch=False)
for i in ret.items:
print("%s\t%s\t%s" %
(i.status.pod_ip, i.metadata.namespace, i.metadata.name))
if __name__ == '__main__':
main()
|
|
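The example above disables certificate verification for brevity and only hints at the secure variant in comments. Spelled out, the verified setup looks like this; the host, token, and CA path below are placeholders, not values from the example:

from kubernetes import client

aToken = "XXXXXXXX"  # placeholder bearer token, as in the example above
configuration = client.Configuration()
configuration.host = "https://XXX.XXX.XXX.XXX:443"  # placeholder endpoint
configuration.verify_ssl = True
configuration.ssl_ca_cert = "/path/to/ca.crt"  # assumed path to the cluster CA bundle
configuration.api_key = {"authorization": "Bearer " + aToken}
client.Configuration.set_default(configuration)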
163434cb3d1dfd9774def6de9c3b005f9130504b
|
tests/cupy_tests/random_tests/test_random.py
|
tests/cupy_tests/random_tests/test_random.py
|
import unittest
from cupy import random
from cupy import testing
class TestResetSeed(unittest.TestCase):
@testing.for_float_dtypes(no_float16=True)
def test_reset_seed(self, dtype):
rs = random.get_random_state()
rs.seed(0)
l1 = rs.rand(10, dtype=dtype)
rs = random.get_random_state()
rs.seed(0)
l2 = rs.rand(10, dtype=dtype)
testing.assert_array_equal(l1, l2)
|
Write functional test for seed
|
Write functional test for seed
|
Python
|
mit
|
hvy/chainer,wkentaro/chainer,benob/chainer,jnishi/chainer,AlpacaDB/chainer,ronekko/chainer,wkentaro/chainer,cupy/cupy,keisuke-umezawa/chainer,kikusu/chainer,wkentaro/chainer,niboshi/chainer,muupan/chainer,jnishi/chainer,cupy/cupy,jnishi/chainer,sinhrks/chainer,wkentaro/chainer,kashif/chainer,ktnyt/chainer,chainer/chainer,niboshi/chainer,kiyukuta/chainer,niboshi/chainer,ktnyt/chainer,muupan/chainer,anaruse/chainer,keisuke-umezawa/chainer,okuta/chainer,t-abe/chainer,cemoody/chainer,ysekky/chainer,keisuke-umezawa/chainer,tscohen/chainer,truongdq/chainer,keisuke-umezawa/chainer,chainer/chainer,tkerola/chainer,t-abe/chainer,kikusu/chainer,hvy/chainer,pfnet/chainer,minhpqn/chainer,aonotas/chainer,ktnyt/chainer,benob/chainer,truongdq/chainer,okuta/chainer,delta2323/chainer,hvy/chainer,cupy/cupy,cupy/cupy,chainer/chainer,hvy/chainer,sinhrks/chainer,jnishi/chainer,niboshi/chainer,chainer/chainer,okuta/chainer,okuta/chainer,AlpacaDB/chainer,ktnyt/chainer,rezoo/chainer
|
Write functional test for seed
|
import unittest
from cupy import random
from cupy import testing
class TestResetSeed(unittest.TestCase):
@testing.for_float_dtypes(no_float16=True)
def test_reset_seed(self, dtype):
rs = random.get_random_state()
rs.seed(0)
l1 = rs.rand(10, dtype=dtype)
rs = random.get_random_state()
rs.seed(0)
l2 = rs.rand(10, dtype=dtype)
testing.assert_array_equal(l1, l2)
|
<commit_before><commit_msg>Write functional test for seed<commit_after>
|
import unittest
from cupy import random
from cupy import testing
class TestResetSeed(unittest.TestCase):
@testing.for_float_dtypes(no_float16=True)
def test_reset_seed(self, dtype):
rs = random.get_random_state()
rs.seed(0)
l1 = rs.rand(10, dtype=dtype)
rs = random.get_random_state()
rs.seed(0)
l2 = rs.rand(10, dtype=dtype)
testing.assert_array_equal(l1, l2)
|
Write functional test for seedimport unittest
from cupy import random
from cupy import testing
class TestResetSeed(unittest.TestCase):
@testing.for_float_dtypes(no_float16=True)
def test_reset_seed(self, dtype):
rs = random.get_random_state()
rs.seed(0)
l1 = rs.rand(10, dtype=dtype)
rs = random.get_random_state()
rs.seed(0)
l2 = rs.rand(10, dtype=dtype)
testing.assert_array_equal(l1, l2)
|
<commit_before><commit_msg>Write functional test for seed<commit_after>import unittest
from cupy import random
from cupy import testing
class TestResetSeed(unittest.TestCase):
@testing.for_float_dtypes(no_float16=True)
def test_reset_seed(self, dtype):
rs = random.get_random_state()
rs.seed(0)
l1 = rs.rand(10, dtype=dtype)
rs = random.get_random_state()
rs.seed(0)
l2 = rs.rand(10, dtype=dtype)
testing.assert_array_equal(l1, l2)
|
|
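The test pins down one contract: reseeding the shared random state must reproduce the exact same stream. The same determinism check written against NumPy, for comparison (cupy's RandomState mirrors this interface):

import numpy as np

rs = np.random.RandomState()
rs.seed(0)
first = rs.rand(10)
rs.seed(0)
second = rs.rand(10)
np.testing.assert_array_equal(first, second)  # same seed => identical stream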
4e99176621f81b9f115613fe87f8036d07a80ddc
|
encryptit/tests/packets/test_generic_packet_body.py
|
encryptit/tests/packets/test_generic_packet_body.py
|
from io import BytesIO
from nose.tools import assert_equal, assert_raises
from encryptit.packets import GenericPacketBody
from encryptit.exceptions import MalformedPacketError
BODY = bytearray([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])
TESTS = [
(0, 10, bytearray([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])),
(1, 9, bytearray([1, 2, 3, 4, 5, 6, 7, 8, 9])),
(0, 9, bytearray([0, 1, 2, 3, 4, 5, 6, 7, 8])),
]
def _make_body(body_start, body_length):
return GenericPacketBody.from_stream(
BytesIO(BODY), body_start, body_length)
def test_raw_reads_correct_start_and_length():
for body_start, body_length, expected_raw in TESTS:
body = _make_body(body_start, body_length)
assert_equal(expected_raw, body.raw)
def test_that_raw_works_multiple_times():
body = GenericPacketBody.from_stream(BytesIO(BODY), 3, 5)
assert_equal(body.raw, body.raw)
def test_that_reading_too_much_data_raises_malformed_packet_error():
for body_start in range(10):
body_length = 11 - body_start # 1 byte too long
yield assert_detects_malformed_packet, body_start, body_length
def assert_detects_malformed_packet(body_start, body_length):
body = _make_body(body_start, body_length)
def get_raw():
return body.raw
assert_raises(MalformedPacketError, get_raw)
def test_that_body_start_after_end_of_data_raises_malformed_packet_error():
assert_detects_malformed_packet(10, 1)
def test_that_invalid_body_start_or_length_explodes_at_instantiation():
yield assert_raises, ValueError, _make_body, -1, 1
yield assert_raises, ValueError, _make_body, 0, -1
yield assert_raises, ValueError, _make_body, 0, 0 # body can't be zero
|
Add (failing!) tests for GenericPacketBody
|
Add (failing!) tests for GenericPacketBody
See #40
|
Python
|
agpl-3.0
|
paulfurley/encryptit,paulfurley/encryptit
|
Add (failing!) tests for GenericPacketBody
See #40
|
from io import BytesIO
from nose.tools import assert_equal, assert_raises
from encryptit.packets import GenericPacketBody
from encryptit.exceptions import MalformedPacketError
BODY = bytearray([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])
TESTS = [
(0, 10, bytearray([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])),
(1, 9, bytearray([1, 2, 3, 4, 5, 6, 7, 8, 9])),
(0, 9, bytearray([0, 1, 2, 3, 4, 5, 6, 7, 8])),
]
def _make_body(body_start, body_length):
return GenericPacketBody.from_stream(
BytesIO(BODY), body_start, body_length)
def test_raw_reads_correct_start_and_length():
for body_start, body_length, expected_raw in TESTS:
body = _make_body(body_start, body_length)
assert_equal(expected_raw, body.raw)
def test_that_raw_works_multiple_times():
body = GenericPacketBody.from_stream(BytesIO(BODY), 3, 5)
assert_equal(body.raw, body.raw)
def test_that_reading_too_much_data_raises_malformed_packet_error():
for body_start in range(10):
body_length = 11 - body_start # 1 byte too long
yield assert_detects_malformed_packet, body_start, body_length
def assert_detects_malformed_packet(body_start, body_length):
body = _make_body(body_start, body_length)
def get_raw():
return body.raw
assert_raises(MalformedPacketError, get_raw)
def test_that_body_start_after_end_of_data_raises_malformed_packet_error():
assert_detects_malformed_packet(10, 1)
def test_that_invalid_body_start_or_length_explodes_at_instantiation():
yield assert_raises, ValueError, _make_body, -1, 1
yield assert_raises, ValueError, _make_body, 0, -1
yield assert_raises, ValueError, _make_body, 0, 0 # body can't be zero
|
<commit_before><commit_msg>Add (failing!) tests for GenericPacketBody
See #40<commit_after>
|
from io import BytesIO
from nose.tools import assert_equal, assert_raises
from encryptit.packets import GenericPacketBody
from encryptit.exceptions import MalformedPacketError
BODY = bytearray([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])
TESTS = [
(0, 10, bytearray([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])),
(1, 9, bytearray([1, 2, 3, 4, 5, 6, 7, 8, 9])),
(0, 9, bytearray([0, 1, 2, 3, 4, 5, 6, 7, 8])),
]
def _make_body(body_start, body_length):
return GenericPacketBody.from_stream(
BytesIO(BODY), body_start, body_length)
def test_raw_reads_correct_start_and_length():
for body_start, body_length, expected_raw in TESTS:
body = _make_body(body_start, body_length)
assert_equal(expected_raw, body.raw)
def test_that_raw_works_multiple_times():
body = GenericPacketBody.from_stream(BytesIO(BODY), 3, 5)
assert_equal(body.raw, body.raw)
def test_that_reading_too_much_data_raises_malformed_packet_error():
for body_start in range(10):
body_length = 11 - body_start # 1 byte too long
yield assert_detects_malformed_packet, body_start, body_length
def assert_detects_malformed_packet(body_start, body_length):
body = _make_body(body_start, body_length)
def get_raw():
return body.raw
assert_raises(MalformedPacketError, get_raw)
def test_that_body_start_after_end_of_data_raises_malformed_packet_error():
assert_detects_malformed_packet(10, 1)
def test_that_invalid_body_start_or_length_explodes_at_instantiation():
yield assert_raises, ValueError, _make_body, -1, 1
yield assert_raises, ValueError, _make_body, 0, -1
yield assert_raises, ValueError, _make_body, 0, 0 # body can't be zero
|
Add (failing!) tests for GenericPacketBody
See #40from io import BytesIO
from nose.tools import assert_equal, assert_raises
from encryptit.packets import GenericPacketBody
from encryptit.exceptions import MalformedPacketError
BODY = bytearray([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])
TESTS = [
(0, 10, bytearray([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])),
(1, 9, bytearray([1, 2, 3, 4, 5, 6, 7, 8, 9])),
(0, 9, bytearray([0, 1, 2, 3, 4, 5, 6, 7, 8])),
]
def _make_body(body_start, body_length):
return GenericPacketBody.from_stream(
BytesIO(BODY), body_start, body_length)
def test_raw_reads_correct_start_and_length():
for body_start, body_length, expected_raw in TESTS:
body = _make_body(body_start, body_length)
assert_equal(expected_raw, body.raw)
def test_that_raw_works_multiple_times():
body = GenericPacketBody.from_stream(BytesIO(BODY), 3, 5)
assert_equal(body.raw, body.raw)
def test_that_reading_too_much_data_raises_malformed_packet_error():
for body_start in range(10):
body_length = 11 - body_start # 1 byte too long
yield assert_detects_malformed_packet, body_start, body_length
def assert_detects_malformed_packet(body_start, body_length):
body = _make_body(body_start, body_length)
def get_raw():
return body.raw
assert_raises(MalformedPacketError, get_raw)
def test_that_body_start_after_end_of_data_raises_malformed_packet_error():
assert_detects_malformed_packet(10, 1)
def test_that_invalid_body_start_or_length_explodes_at_instantiation():
yield assert_raises, ValueError, _make_body, -1, 1
yield assert_raises, ValueError, _make_body, 0, -1
yield assert_raises, ValueError, _make_body, 0, 0 # body can't be zero
|
<commit_before><commit_msg>Add (failing!) tests for GenericPacketBody
See #40<commit_after>from io import BytesIO
from nose.tools import assert_equal, assert_raises
from encryptit.packets import GenericPacketBody
from encryptit.exceptions import MalformedPacketError
BODY = bytearray([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])
TESTS = [
(0, 10, bytearray([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])),
(1, 9, bytearray([1, 2, 3, 4, 5, 6, 7, 8, 9])),
(0, 9, bytearray([0, 1, 2, 3, 4, 5, 6, 7, 8])),
]
def _make_body(body_start, body_length):
return GenericPacketBody.from_stream(
BytesIO(BODY), body_start, body_length)
def test_raw_reads_correct_start_and_length():
for body_start, body_length, expected_raw in TESTS:
body = _make_body(body_start, body_length)
assert_equal(expected_raw, body.raw)
def test_that_raw_works_multiple_times():
body = GenericPacketBody.from_stream(BytesIO(BODY), 3, 5)
assert_equal(body.raw, body.raw)
def test_that_reading_too_much_data_raises_malformed_packet_error():
for body_start in range(10):
body_length = 11 - body_start # 1 byte too long
yield assert_detects_malformed_packet, body_start, body_length
def assert_detects_malformed_packet(body_start, body_length):
body = _make_body(body_start, body_length)
def get_raw():
return body.raw
assert_raises(MalformedPacketError, get_raw)
def test_that_body_start_after_end_of_data_raises_malformed_packet_error():
assert_detects_malformed_packet(10, 1)
def test_that_invalid_body_start_or_length_explodes_at_instantiation():
yield assert_raises, ValueError, _make_body, -1, 1
yield assert_raises, ValueError, _make_body, 0, -1
yield assert_raises, ValueError, _make_body, 0, 0 # body can't be zero
|
|
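The commit message flags these tests as failing, so the class under test is still to be written. For orientation only, here is one shape of GenericPacketBody that would satisfy them; everything below is an assumption built from the test expectations, not the project's actual implementation, and MalformedPacketError is stubbed in so the sketch runs standalone:

class MalformedPacketError(Exception):
    """Stand-in for encryptit.exceptions.MalformedPacketError."""

class GenericPacketBody(object):
    def __init__(self, stream, body_start, body_length):
        if body_start < 0 or body_length < 1:
            raise ValueError('body_start/body_length out of range')
        self.stream = stream
        self.body_start = body_start
        self.body_length = body_length

    @classmethod
    def from_stream(cls, stream, body_start, body_length):
        return cls(stream, body_start, body_length)

    @property
    def raw(self):
        # Re-seek on every access so .raw can be read repeatedly.
        self.stream.seek(self.body_start)
        data = bytearray(self.stream.read(self.body_length))
        if len(data) != self.body_length:
            raise MalformedPacketError('body extends past the end of the data')
        return data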
5f8f44363cfa133b959e6aa3a4e284362eb53aa8
|
plot_s_curve.py
|
plot_s_curve.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import matplotlib.pyplot as plt
import sys
x = []
y = []
infile = open(sys.argv[1])
for line in infile:
data = line.replace('\n','').split()
print(data)
x.append(float(data[0]))
y.append(float(data[1]))
figManager = plt.get_current_fig_manager()
figManager.window.showMaximized()
plt.xscale('log')
plt.yscale('log')
plt.plot(x,y)
plt.show()
|
Add python script to plot the S curve
|
Add python script to plot the S curve
Rudimentary version to help debugging
|
Python
|
mit
|
M2-AAIS/BAD
|
Add python script to plot the S curve
Rudimentary version to help debugging
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import matplotlib.pyplot as plt
import sys
x = []
y = []
infile = open(sys.argv[1])
for line in infile:
data = line.replace('\n','').split()
print(data)
x.append(float(data[0]))
y.append(float(data[1]))
figManager = plt.get_current_fig_manager()
figManager.window.showMaximized()
plt.xscale('log')
plt.yscale('log')
plt.plot(x,y)
plt.show()
|
<commit_before><commit_msg>Add python script to plot the S curve
Rudimentary version to help debugging<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import matplotlib.pyplot as plt
import sys
x = []
y = []
infile = open(sys.argv[1])
for line in infile:
data = line.replace('\n','').split()
print(data)
x.append(float(data[0]))
y.append(float(data[1]))
figManager = plt.get_current_fig_manager()
figManager.window.showMaximized()
plt.xscale('log')
plt.yscale('log')
plt.plot(x,y)
plt.show()
|
Add python script to plot the S curve
Rudimentary version to help debugging#!/usr/bin/env python
# -*- coding: utf-8 -*-
import matplotlib.pyplot as plt
import sys
x = []
y = []
infile = open(sys.argv[1])
for line in infile:
data = line.replace('\n','').split()
print(data)
x.append(float(data[0]))
y.append(float(data[1]))
figManager = plt.get_current_fig_manager()
figManager.window.showMaximized()
plt.xscale('log')
plt.yscale('log')
plt.plot(x,y)
plt.show()
|
<commit_before><commit_msg>Add python script to plot the S curve
Rudimentary version to help debugging<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import matplotlib.pyplot as plt
import sys
x = []
y = []
infile = open(sys.argv[1])
for line in infile:
data = line.replace('\n','').split()
print(data)
x.append(float(data[0]))
y.append(float(data[1]))
figManager = plt.get_current_fig_manager()
figManager.window.showMaximized()
plt.xscale('log')
plt.yscale('log')
plt.plot(x,y)
plt.show()
|
|
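Since the commit message itself calls the script above rudimentary, it is worth noting that the manual parse loop collapses to a single numpy call. A sketch, assuming numpy is available alongside matplotlib:

#!/usr/bin/env python
# Sketch: same two-column read and log-log plot, via numpy.loadtxt.
import sys
import matplotlib.pyplot as plt
import numpy as np

x, y = np.loadtxt(sys.argv[1], unpack=True)
plt.xscale('log')
plt.yscale('log')
plt.plot(x, y)
plt.show()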
94212e2b02e7e585e815bb659662253dceee9d55
|
tensorflow/contrib/autograph/core/annos.py
|
tensorflow/contrib/autograph/core/annos.py
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Annotations specific to AutoGraph."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from enum import Enum
class NoValue(Enum):
def __repr__(self):
return self.name
class NodeAnno(NoValue):
"""Additional annotations used by AutoGraph converters.
These are in addition to the basic annotations declared in pyct/anno.py and
pyct/static_analysis/annos.py.
"""
# The directives collection - see directives.py
DIRECTIVES = (
'Dict depicting static directive calls. See the directives converter.')
|
Add a set of annotations specific to AutoGraph.
|
Add a set of annotations specific to AutoGraph.
PiperOrigin-RevId: 202972265
|
Python
|
apache-2.0
|
davidzchen/tensorflow,DavidNorman/tensorflow,frreiss/tensorflow-fred,Bismarrck/tensorflow,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,gunan/tensorflow,Intel-Corporation/tensorflow,aldian/tensorflow,jbedorf/tensorflow,jart/tensorflow,ghchinoy/tensorflow,annarev/tensorflow,aam-at/tensorflow,ppwwyyxx/tensorflow,dongjoon-hyun/tensorflow,sarvex/tensorflow,ZhangXinNan/tensorflow,freedomtan/tensorflow,cxxgtxy/tensorflow,petewarden/tensorflow,seanli9jan/tensorflow,hehongliang/tensorflow,brchiu/tensorflow,tensorflow/tensorflow,manipopopo/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,hfp/tensorflow-xsmm,drpngx/tensorflow,jendap/tensorflow,dancingdan/tensorflow,kevin-coder/tensorflow-fork,renyi533/tensorflow,apark263/tensorflow,theflofly/tensorflow,Intel-tensorflow/tensorflow,chemelnucfin/tensorflow,xzturn/tensorflow,ageron/tensorflow,gautam1858/tensorflow,adit-chandra/tensorflow,AnishShah/tensorflow,jalexvig/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,snnn/tensorflow,jhseu/tensorflow,kobejean/tensorflow,alshedivat/tensorflow,caisq/tensorflow,asimshankar/tensorflow,girving/tensorflow,paolodedios/tensorflow,xodus7/tensorflow,aselle/tensorflow,alsrgv/tensorflow,yongtang/tensorflow,arborh/tensorflow
rrck/tensorflow,davidzchen/tensorflow,caisq/tensorflow,ZhangXinNan/tensorflow,ageron/tensorflow,Bismarrck/tensorflow,kevin-coder/tensorflow-fork,arborh/tensorflow,jbedorf/tensorflow,AnishShah/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,chemelnucfin/tensorflow,dancingdan/tensorflow,alshedivat/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,manipopopo/tensorflow,aselle/tensorflow,aselle/tensorflow,davidzchen/tensorflow,davidzchen/tensorflow,alsrgv/tensorflow,cxxgtxy/tensorflow,AnishShah/tensorflow,gunan/tensorflow,ppwwyyxx/tensorflow,xodus7/tensorflow,hehongliang/tensorflow,snnn/tensorflow,renyi533/tensorflow,jhseu/tensorflow,xodus7/tensorflow,adit-chandra/tensorflow,xodus7/tensorflow,Bismarrck/tensorflow,gunan/tensorflow,DavidNorman/tensorflow,ghchinoy/tensorflow,ZhangXinNan/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,davidzchen/tensorflow,apark263/tensorflow,tensorflow/tensorflow,aselle/tensorflow,adit-chandra/tensorflow,girving/tensorflow,drpngx/tensorflow,brchiu/tensorflow,DavidNorman/tensorflow,sarvex/tensorflow,seanli9jan/tensorflow,drpngx/tensorflow,adit-chandra/tensorflow,alsrgv/tensorflow,ppwwyyxx/tensorflow,tensorflow/tensorflow,aam-at/tensorflow,frreiss/tensorflow-fred,brchiu/tensorflow,jart/tensorflow,asimshankar/tensorflow,girving/tensorflow,xzturn/tensorflow,gautam1858/tensorflow,dongjoon-hyun/tensorflow,jhseu/tensorflow,alshedivat/tensorflow,davidzchen/tensorflow,alshedivat/tensorflow,Bismarrck/tensorflow,Intel-tensorflow/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,karllessard/tensorflow,cxxgtxy/tensorflow,dongjoon-hyun/tensorflow,frreiss/tensorflow-fred,xodus7/tensorflow,tensorflow/tensorflow-pywrap_saved_model,ppwwyyxx/tensorflow,Intel-Corporation/tensorflow,yongtang/tensorflow,apark263/tensorflow,kobejean/tensorflow,apark263/tensorflow,aam-at/tensorflow,alsrgv/tensorflow,girving/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,chemelnucfin/tensorflow,alshedivat/tensorflow,jalexvig/tensorflow,ppwwyyxx/tensorflow,aldian/tensorflow,renyi533/tensorflow,gautam1858/tensorflow,seanli9jan/tensorflow,ZhangXinNan/tensorflow,jalexvig/tensorflow,paolodedios/tensorflow,dongjoon-hyun/tensorflow,Bismarrck/tensorflow,jalexvig/tensorflow,caisq/tensorflow,alsrgv/tensorflow,snnn/tensorflow,hehongliang/tensorflow,asimshankar/tensorflow,Intel-Corporation/tensorflow,seanli9jan/tensorflow,aam-at/tensorflow,ghchinoy/tensorflow,jendap/tensorflow,frreiss/tensorflow-fred,petewarden/tensorflow,jbedorf/tensorflow,annarev/tensorflow,kevin-coder/tensorflow-fork,gautam1858/tensorflow,snnn/tensorflow,aam-at/tensorflow,annarev/tensorflow,jhseu/tensorflow,snnn/tensorflow,kevin-coder/tensorflow-fork,AnishShah/tensorflow,arborh/tensorflow,jbedorf/tensorflow,xzturn/tensorflow,aldian/tensorflow,jendap/tensorflow,adit-chandra/tensorflow,hehongliang/tensorflow,renyi533/tensorflow,ppwwyyxx/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,theflofly/tensorflow,annarev/tensorflow,frreiss/tensorflow-fred,theflofly/tensorflow,dongjoon-hyun/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,davidzchen/tensorflow,sarvex/tensorflow
|
Add a set of annotations specific to AutoGraph.
PiperOrigin-RevId: 202972265
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Annotations specific to AutoGraph."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from enum import Enum
class NoValue(Enum):
def __repr__(self):
return self.name
class NodeAnno(NoValue):
"""Additional annotations used by AutoGraph converters.
These are in addition to the basic annotations declared in pyct/anno.py and
pyct/static_analysis/annos.py.
"""
# The directives collection - see directives.py
DIRECTIVES = (
'Dict depicting static directive calls. See the directives converter.')
|
<commit_before><commit_msg>Add a set of annotations specific to AutoGraph.
PiperOrigin-RevId: 202972265<commit_after>
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Annotations specific to AutoGraph."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from enum import Enum
class NoValue(Enum):
def __repr__(self):
return self.name
class NodeAnno(NoValue):
"""Additional annotations used by AutoGraph converters.
These are in addition to the basic annotations declared in pyct/anno.py and
pyct/static_analysis/annos.py.
"""
# The directives collection - see directives.py
DIRECTIVES = (
'Dict depicting static directive calls. See the directives converter.')
|
Add a set of annotations specific to AutoGraph.
PiperOrigin-RevId: 202972265# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Annotations specific to AutoGraph."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from enum import Enum
class NoValue(Enum):
def __repr__(self):
return self.name
class NodeAnno(NoValue):
"""Additional annotations used by AutoGraph converters.
These are in addition to the basic annotations declared in pyct/anno.py and
pyct/static_analysis/annos.py.
"""
# The directives collection - see directives.py
DIRECTIVES = (
'Dict depicting static directive calls. See the directives converter.')
|
<commit_before><commit_msg>Add a set of annotations specific to AutoGraph.
PiperOrigin-RevId: 202972265<commit_after># Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Annotations specific to AutoGraph."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from enum import Enum
class NoValue(Enum):
def __repr__(self):
return self.name
class NodeAnno(NoValue):
"""Additional annotations used by AutoGraph converters.
These are in addition to the basic annotations declared in pyct/anno.py and
pyct/static_analysis/annos.py.
"""
# The directives collection - see directives.py
DIRECTIVES = (
'Dict depicting static directive calls. See the directives converter.')
|
|
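A minimal, self-contained sketch of why NoValue overrides __repr__ in the record above: enum members then print as bare names rather than the default <NodeAnno.DIRECTIVES: '...'> form, which keeps dumps of annotated AST nodes readable. The member value below is shortened purely for illustration.
from enum import Enum

class NoValue(Enum):
    # Members repr() as their bare name instead of the verbose default.
    def __repr__(self):
        return self.name

class NodeAnno(NoValue):
    DIRECTIVES = 'Dict depicting static directive calls.'

print(repr(NodeAnno.DIRECTIVES))  # prints: DIRECTIVES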
6315a38c01b001acd36ddd0a528138903e19ff80
|
blox/compile.py
|
blox/compile.py
|
'''blox/compile.py
Creates an optimized programmatically generated template from an HTML file
Copyright (C) 2015 Timothy Edmund Crosley
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or
substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
'''
from blox.base import Blox, Text, UnsafeText
INDENT = " "
SCRIPT_TEMPLATE = """# WARNING: DON'T EDIT AUTO-GENERATED
from blox.base import Blox, Text, UnsafeText
elementsExpanded = False
%(cacheElements)s
%(staticElements)s
class Template(Blox):
__slots__ = %(accessors)s
def build(factory):
template = Template()
global elementsExpanded
if not elementsExpanded:
products = factory.products
%(defineElements)s
elementsExpanded = True
%(buildTemplate)s
return template"""
|
Add initial file for template compilation
|
Add initial file for template compilation
|
Python
|
mit
|
timothycrosley/blox,timothycrosley/blox,timothycrosley/blox
|
Add initial file for template compilation
|
'''blox/compile.py
Creates an optimized programmatically generated template from an HTML file
Copyright (C) 2015 Timothy Edmund Crosley
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or
substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
'''
from blox.base import Blox, Text, UnsafeText
INDENT = " "
SCRIPT_TEMPLATE = """# WARNING: DON'T EDIT AUTO-GENERATED
from blox.base import Blox, Text, UnsafeText
elementsExpanded = False
%(cacheElements)s
%(staticElements)s
class Template(Blox):
__slots__ = %(accessors)s
def build(factory):
template = Template()
global elementsExpanded
if not elementsExpanded:
products = factory.products
%(defineElements)s
elementsExpanded = True
%(buildTemplate)s
return template"""
|
<commit_before><commit_msg>Add initial file for template compilation<commit_after>
|
'''blox/compile.py
Creates an optimized programmatically generated template from an HTML file
Copyright (C) 2015 Timothy Edmund Crosley
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or
substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
'''
from blox.base import Blox, Text, UnsafeText
INDENT = " "
SCRIPT_TEMPLATE = """# WARNING: DON'T EDIT AUTO-GENERATED
from blox.base import Blox, Text, UnsafeText
elementsExpanded = False
%(cacheElements)s
%(staticElements)s
class Template(Blox):
__slots__ = %(accessors)s
def build(factory):
template = Template()
global elementsExpanded
if not elementsExpanded:
products = factory.products
%(defineElements)s
elementsExpanded = True
%(buildTemplate)s
return template"""
|
Add initial file for template compilation'''blox/compile.py
Creates an optimized programmatically generated template from an HTML file
Copyright (C) 2015 Timothy Edmund Crosley
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or
substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
'''
from blox.base import Blox, Text, UnsafeText
INDENT = " "
SCRIPT_TEMPLATE = """# WARNING: DON'T EDIT AUTO-GENERATED
from blox.base import Blox, Text, UnsafeText
elementsExpanded = False
%(cacheElements)s
%(staticElements)s
class Template(Blox):
__slots__ = %(accessors)s
def build(factory):
template = Template()
global elementsExpanded
if not elementsExpanded:
products = factory.products
%(defineElements)s
elementsExpanded = True
%(buildTemplate)s
return template"""
|
<commit_before><commit_msg>Add initial file for template compilation<commit_after>'''blox/compile.py
Creates an optimized programmatically generated template from an HTML file
Copyright (C) 2015 Timothy Edmund Crosley
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or
substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
'''
from blox.base import Blox, Text, UnsafeText
INDENT = " "
SCRIPT_TEMPLATE = """# WARNING: DON'T EDIT AUTO-GENERATED
from blox.base import Blox, Text, UnsafeText
elementsExpanded = False
%(cacheElements)s
%(staticElements)s
class Template(Blox):
__slots__ = %(accessors)s
def build(factory):
template = Template()
global elementsExpanded
if not elementsExpanded:
products = factory.products
%(defineElements)s
elementsExpanded = True
%(buildTemplate)s
return template"""
|
|
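SCRIPT_TEMPLATE in the record above is filled in with %-style mapping substitution; the toy template and field values below are invented to show only that mechanism, not the real compiler output.
# Stand-in carrying two of the real template's placeholders.
TOY_TEMPLATE = """class Template(Blox):
    __slots__ = %(accessors)s
%(buildTemplate)s"""

print(TOY_TEMPLATE % {
    'accessors': "('title', )",
    'buildTemplate': "    # build steps generated from the parsed HTML",
})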
133bb56417f2ac1080959c2e4d4db7012890d429
|
sympy/categories/tests/test_baseclasses.py
|
sympy/categories/tests/test_baseclasses.py
|
from sympy.categories import Object, Morphism
def test_object():
A = Object("A")
assert A.name == "A"
def test_morphism():
A = Object("A")
B = Object("B")
C = Object("C")
D = Object("D")
f = Morphism(A, B, "f")
g = Morphism(B, C, "g")
h = Morphism(C, D, "h")
assert f.name == "f"
assert f.domain == A
assert f.codomain == B
assert f.components == [f]
assert f * g == None
assert f * f == None
k = g.compose(f, "k")
assert k.domain == A
assert k.codomain == C
assert k.name == "k"
assert k.components == [f, g]
k = g * f
p = h * g
u = h * g * f
assert k.domain == A
assert k.codomain == C
assert k.name == "g f"
assert k.components == [f, g]
assert h * k == u
assert p * f == u
assert u.domain == A
assert u.codomain == D
assert u.name == "h g f"
assert u.components == [f, g, h]
u1 = u.flatten()
assert u1.domain == A
assert u1.codomain == D
assert u1.name == "h g f"
assert u1.components == [u1]
u1 = u.flatten("u")
assert u1.domain == A
assert u1.codomain == D
assert u1.name == "u"
assert u1.components == [u1]
|
Write the tests for categories.Object and categories.Morphism.
|
Write the tests for categories.Object and categories.Morphism.
The tests do not pass at the moment. The goal is to define the desired
behaviour of the methods in Morphism by means of tests.
|
Python
|
bsd-3-clause
|
kaushik94/sympy,moble/sympy,dqnykamp/sympy,shipci/sympy,abhiii5459/sympy,emon10005/sympy,cccfran/sympy,vipulroxx/sympy,ChristinaZografou/sympy,madan96/sympy,vipulroxx/sympy,hrashk/sympy,lidavidm/sympy,madan96/sympy,mafiya69/sympy,atreyv/sympy,Sumith1896/sympy,pandeyadarsh/sympy,farhaanbukhsh/sympy,jamesblunt/sympy,skidzo/sympy,Gadal/sympy,garvitr/sympy,beni55/sympy,sahmed95/sympy,mcdaniel67/sympy,flacjacket/sympy,shipci/sympy,wyom/sympy,lindsayad/sympy,hrashk/sympy,cccfran/sympy,Vishluck/sympy,MridulS/sympy,debugger22/sympy,sahilshekhawat/sympy,Curious72/sympy,souravsingh/sympy,bukzor/sympy,Davidjohnwilson/sympy,oliverlee/sympy,sunny94/temp,aktech/sympy,asm666/sympy,kaichogami/sympy,skidzo/sympy,Shaswat27/sympy,mcdaniel67/sympy,drufat/sympy,yukoba/sympy,jaimahajan1997/sympy,beni55/sympy,sunny94/temp,Shaswat27/sympy,mcdaniel67/sympy,abloomston/sympy,diofant/diofant,vipulroxx/sympy,emon10005/sympy,Vishluck/sympy,liangjiaxing/sympy,sahilshekhawat/sympy,amitjamadagni/sympy,Designist/sympy,toolforger/sympy,oliverlee/sympy,toolforger/sympy,kaushik94/sympy,ga7g08/sympy,atsao72/sympy,bukzor/sympy,ga7g08/sympy,hrashk/sympy,Vishluck/sympy,Titan-C/sympy,jerli/sympy,kaichogami/sympy,Titan-C/sympy,yukoba/sympy,ga7g08/sympy,atsao72/sympy,MridulS/sympy,VaibhavAgarwalVA/sympy,iamutkarshtiwari/sympy,amitjamadagni/sympy,kumarkrishna/sympy,Davidjohnwilson/sympy,jerli/sympy,beni55/sympy,postvakje/sympy,skirpichev/omg,pbrady/sympy,MechCoder/sympy,sunny94/temp,AkademieOlympia/sympy,saurabhjn76/sympy,wanglongqi/sympy,cswiercz/sympy,mafiya69/sympy,sahilshekhawat/sympy,meghana1995/sympy,sahmed95/sympy,rahuldan/sympy,kumarkrishna/sympy,Mitchkoens/sympy,AunShiLord/sympy,souravsingh/sympy,atsao72/sympy,atreyv/sympy,ahhda/sympy,kaushik94/sympy,grevutiu-gabriel/sympy,saurabhjn76/sympy,hargup/sympy,debugger22/sympy,MechCoder/sympy,Arafatk/sympy,asm666/sympy,yashsharan/sympy,jbbskinny/sympy,Gadal/sympy,maniteja123/sympy,jamesblunt/sympy,pandeyadarsh/sympy,postvakje/sympy,meghana1995/sympy,skidzo/sympy,abhiii5459/sympy,yashsharan/sympy,moble/sympy,saurabhjn76/sympy,moble/sympy,aktech/sympy,cswiercz/sympy,jaimahajan1997/sympy,rahuldan/sympy,jamesblunt/sympy,lidavidm/sympy,pbrady/sympy,AkademieOlympia/sympy,Mitchkoens/sympy,Arafatk/sympy,oliverlee/sympy,asm666/sympy,AkademieOlympia/sympy,VaibhavAgarwalVA/sympy,pbrady/sympy,aktech/sympy,meghana1995/sympy,drufat/sympy,srjoglekar246/sympy,Gadal/sympy,chaffra/sympy,AunShiLord/sympy,kevalds51/sympy,pandeyadarsh/sympy,kumarkrishna/sympy,yashsharan/sympy,jbbskinny/sympy,kevalds51/sympy,ChristinaZografou/sympy,Shaswat27/sympy,Sumith1896/sympy,yukoba/sympy,souravsingh/sympy,Davidjohnwilson/sympy,jbbskinny/sympy,wanglongqi/sympy,atreyv/sympy,iamutkarshtiwari/sympy,toolforger/sympy,cswiercz/sympy,sampadsaha5/sympy,VaibhavAgarwalVA/sympy,drufat/sympy,shikil/sympy,kaichogami/sympy,grevutiu-gabriel/sympy,shikil/sympy,ahhda/sympy,Titan-C/sympy,postvakje/sympy,hargup/sympy,bukzor/sympy,garvitr/sympy,hargup/sympy,lindsayad/sympy,jaimahajan1997/sympy,abhiii5459/sympy,Mitchkoens/sympy,ChristinaZografou/sympy,abloomston/sympy,wyom/sympy,madan96/sympy,Arafatk/sympy,MechCoder/sympy,Sumith1896/sympy,sampadsaha5/sympy,jerli/sympy,Curious72/sympy,kmacinnis/sympy,maniteja123/sympy,MridulS/sympy,sampadsaha5/sympy,chaffra/sympy,wanglongqi/sympy,cccfran/sympy,farhaanbukhsh/sympy,lindsayad/sympy,sahmed95/sympy,shipci/sympy,chaffra/sympy,debugger22/sympy,ahhda/sympy,grevutiu-gabriel/sympy,dqnykamp/sympy,maniteja123/sympy,kmacinnis/sympy,shikil/sympy,Designist/sympy,farhaanbukhsh/sympy,emon10005/sympy,lida
vidm/sympy,AunShiLord/sympy,liangjiaxing/sympy,Designist/sympy,kmacinnis/sympy,Curious72/sympy,wyom/sympy,liangjiaxing/sympy,rahuldan/sympy,iamutkarshtiwari/sympy,dqnykamp/sympy,abloomston/sympy,kevalds51/sympy,garvitr/sympy,mafiya69/sympy
|
Write the tests for categories.Object and categories.Morphism.
The tests do not pass at the moment. The goal is to define the desired
behaviour of the methods in Morphism by means of tests.
|
from sympy.categories import Object, Morphism
def test_object():
A = Object("A")
assert A.name == "A"
def test_morphism():
A = Object("A")
B = Object("B")
C = Object("C")
D = Object("D")
f = Morphism(A, B, "f")
g = Morphism(B, C, "g")
h = Morphism(C, D, "h")
assert f.name == "f"
assert f.domain == A
assert f.codomain == B
assert f.components == [f]
assert f * g == None
assert f * f == None
k = g.compose(f, "k")
assert k.domain == A
assert k.codomain == C
assert k.name == "k"
assert k.components == [f, g]
k = g * f
p = h * g
u = h * g * f
assert k.domain == A
assert k.codomain == C
assert k.name == "g f"
assert k.components == [f, g]
assert h * k == u
assert p * f == u
assert u.domain == A
assert u.codomain == D
assert u.name == "h g f"
assert u.components == [f, g, h]
u1 = u.flatten()
assert u1.domain == A
assert u1.codomain == D
assert u1.name == "h g f"
assert u1.components == [u1]
u1 = u.flatten("u")
assert u1.domain == A
assert u1.codomain == D
assert u1.name == "u"
assert u1.components == [u1]
|
<commit_before><commit_msg>Write the tests for categories.Object and categories.Morphism.
The tests do not pass at the moment. The goal is to define the desired
behaviour of the methods in Morphism by means of tests.<commit_after>
|
from sympy.categories import Object, Morphism
def test_object():
A = Object("A")
assert A.name == "A"
def test_morphism():
A = Object("A")
B = Object("B")
C = Object("C")
D = Object("D")
f = Morphism(A, B, "f")
g = Morphism(B, C, "g")
h = Morphism(C, D, "h")
assert f.name == "f"
assert f.domain == A
assert f.codomain == B
assert f.components == [f]
assert f * g == None
assert f * f == None
k = g.compose(f, "k")
assert k.domain == A
assert k.codomain == C
assert k.name == "k"
assert k.components == [f, g]
k = g * f
p = h * g
u = h * g * f
assert k.domain == A
assert k.codomain == C
assert k.name == "g f"
assert k.components == [f, g]
assert h * k == u
assert p * f == u
assert u.domain == A
assert u.codomain == D
assert u.name == "h g f"
assert u.components == [f, g, h]
u1 = u.flatten()
assert u1.domain == A
assert u1.codomain == D
assert u1.name == "h g f"
assert u1.components == [u1]
u1 = u.flatten("u")
assert u1.domain == A
assert u1.codomain == D
assert u1.name == "u"
assert u1.components == [u1]
|
Write the tests for categories.Object and categories.Morphism.
The tests do not pass at the moment. The goal is to define the desired
behaviour of the methods in Morphism by means of tests.from sympy.categories import Object, Morphism
def test_object():
A = Object("A")
assert A.name == "A"
def test_morphism():
A = Object("A")
B = Object("B")
C = Object("C")
D = Object("D")
f = Morphism(A, B, "f")
g = Morphism(B, C, "g")
h = Morphism(C, D, "h")
assert f.name == "f"
assert f.domain == A
assert f.codomain == B
assert f.components == [f]
assert f * g == None
assert f * f == None
k = g.compose(f, "k")
assert k.domain == A
assert k.codomain == C
assert k.name == "k"
assert k.components == [f, g]
k = g * f
p = h * g
u = h * g * f
assert k.domain == A
assert k.codomain == C
assert k.name == "g f"
assert k.components == [f, g]
assert h * k == u
assert p * f == u
assert u.domain == A
assert u.codomain == D
assert u.name == "h g f"
assert u.components == [f, g, h]
u1 = u.flatten()
assert u1.domain == A
assert u1.codomain == D
assert u1.name == "h g f"
assert u1.components == [u1]
u1 = u.flatten("u")
assert u1.domain == A
assert u1.codomain == D
assert u1.name == "u"
assert u1.components == [u1]
|
<commit_before><commit_msg>Write the tests for categories.Object and categories.Morphism.
The tests do not pass at the moment. The goal is to define the desired
behaviour of the methods in Morphism by means of tests.<commit_after>from sympy.categories import Object, Morphism
def test_object():
A = Object("A")
assert A.name == "A"
def test_morphism():
A = Object("A")
B = Object("B")
C = Object("C")
D = Object("D")
f = Morphism(A, B, "f")
g = Morphism(B, C, "g")
h = Morphism(C, D, "h")
assert f.name == "f"
assert f.domain == A
assert f.codomain == B
assert f.components == [f]
assert f * g == None
assert f * f == None
k = g.compose(f, "k")
assert k.domain == A
assert k.codomain == C
assert k.name == "k"
assert k.components == [f, g]
k = g * f
p = h * g
u = h * g * f
assert k.domain == A
assert k.codomain == C
assert k.name == "g f"
assert k.components == [f, g]
assert h * k == u
assert p * f == u
assert u.domain == A
assert u.codomain == D
assert u.name == "h g f"
assert u.components == [f, g, h]
u1 = u.flatten()
assert u1.domain == A
assert u1.codomain == D
assert u1.name == "h g f"
assert u1.components == [u1]
u1 = u.flatten("u")
assert u1.domain == A
assert u1.codomain == D
assert u1.name == "u"
assert u1.components == [u1]
|
|
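A rough, SymPy-independent sketch of the composition behaviour the tests above pin down: g * f means "g after f", mismatched ends yield None, names concatenate right-to-left as "g f", and components accumulate in application order. All class code here is invented for illustration.
class Object:
    def __init__(self, name):
        self.name = name

class Morphism:
    def __init__(self, domain, codomain, name, components=None):
        self.domain = domain
        self.codomain = codomain
        self.name = name
        self.components = components if components is not None else [self]

    def __mul__(self, other):
        # self * other composes "self after other"; a domain/codomain
        # mismatch gives None, matching the `f * g == None` assertions.
        if other.codomain is not self.domain:
            return None
        return Morphism(other.domain, self.codomain,
                        self.name + " " + other.name,
                        other.components + self.components)

A, B, C = Object("A"), Object("B"), Object("C")
f, g = Morphism(A, B, "f"), Morphism(B, C, "g")
k = g * f
print(k.name)                           # g f
print([m.name for m in k.components])   # ['f', 'g']
print(f * g)                            # None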
b27a76a70cc24c403fba85a0c205bd76de1468e4
|
tutorials/pyroot/pyroot003_prettyPrinting.py
|
tutorials/pyroot/pyroot003_prettyPrinting.py
|
## \file
## \ingroup tutorial_pyroot
## \notebook -nodraw
## This tutorial illustrates the pretty printing feature of PyROOT, which reveals
## the content of the object if a string representation is requested, e.g., by
## Python's print statement. The printing behaves similarly to the ROOT prompt
## powered by the C++ interpreter cling.
##
## \macro_code
##
## \date June 2018
## \author Stefan Wunsch
import ROOT
# Create an object with PyROOT
obj = ROOT.std.vector("int")(3)
for i in range(obj.size()):
obj[i] = i
# Print the object, which reveals the content. Note that `print` calls the special
# method `__str__` of the object internally.
print(obj)
# The output can be retrieved as a string by any function that triggers the `__str__`
# special method of the object, e.g., `str` or `format`.
print(str(obj))
print("{}".format(obj))
# Note that the interactive Python prompt does not call `__str__`; it calls
# `__repr__`, which implements a formal and unique string representation of
# the object.
print(repr(obj))
obj
# The print output behaves similarly to the ROOT prompt, e.g., here for a ROOT histogram.
hist = ROOT.TH1F("name", "title", 10, 0, 1)
print(hist)
|
Add tutorial for pretty printing feature
|
[PyROOT] Add tutorial for pretty printing feature
|
Python
|
lgpl-2.1
|
karies/root,root-mirror/root,root-mirror/root,olifre/root,karies/root,olifre/root,root-mirror/root,karies/root,olifre/root,olifre/root,olifre/root,root-mirror/root,karies/root,root-mirror/root,olifre/root,olifre/root,karies/root,olifre/root,root-mirror/root,root-mirror/root,root-mirror/root,root-mirror/root,olifre/root,karies/root,karies/root,karies/root,root-mirror/root,karies/root,karies/root,olifre/root,karies/root,root-mirror/root,olifre/root
|
[PyROOT] Add tutorial for pretty printing feature
|
## \file
## \ingroup tutorial_pyroot
## \notebook -nodraw
## This tutorial illustrates the pretty printing feature of PyROOT, which reveals
## the content of the object if a string representation is requested, e.g., by
## Python's print statement. The printing behaves similarly to the ROOT prompt
## powered by the C++ interpreter cling.
##
## \macro_code
##
## \date June 2018
## \author Stefan Wunsch
import ROOT
# Create an object with PyROOT
obj = ROOT.std.vector("int")(3)
for i in range(obj.size()):
obj[i] = i
# Print the object, which reveals the content. Note that `print` calls the special
# method `__str__` of the object internally.
print(obj)
# The output can be retrieved as a string by any function that triggers the `__str__`
# special method of the object, e.g., `str` or `format`.
print(str(obj))
print("{}".format(obj))
# Note that the interactive Python prompt does not call `__str__`; it calls
# `__repr__`, which implements a formal and unique string representation of
# the object.
print(repr(obj))
obj
# The print output behaves similarly to the ROOT prompt, e.g., here for a ROOT histogram.
hist = ROOT.TH1F("name", "title", 10, 0, 1)
print(hist)
|
<commit_before><commit_msg>[PyROOT] Add tutorial for pretty printing feature<commit_after>
|
## \file
## \ingroup tutorial_pyroot
## \notebook -nodraw
## This tutorial illustrates the pretty printing feature of PyROOT, which reveals
## the content of the object if a string representation is requested, e.g., by
## Python's print statement. The printing behaves similarly to the ROOT prompt
## powered by the C++ interpreter cling.
##
## \macro_code
##
## \date June 2018
## \author Stefan Wunsch
import ROOT
# Create an object with PyROOT
obj = ROOT.std.vector("int")(3)
for i in range(obj.size()):
obj[i] = i
# Print the object, which reveals the content. Note that `print` calls the special
# method `__str__` of the object internally.
print(obj)
# The output can be retrieved as a string by any function that triggers the `__str__`
# special method of the object, e.g., `str` or `format`.
print(str(obj))
print("{}".format(obj))
# Note that the interactive Python prompt does not call `__str__`; it calls
# `__repr__`, which implements a formal and unique string representation of
# the object.
print(repr(obj))
obj
# The print output behaves similarly to the ROOT prompt, e.g., here for a ROOT histogram.
hist = ROOT.TH1F("name", "title", 10, 0, 1)
print(hist)
|
[PyROOT] Add tutorial for pretty printing feature## \file
## \ingroup tutorial_pyroot
## \notebook -nodraw
## This tutorial illustrates the pretty printing feature of PyROOT, which reveals
## the content of the object if a string representation is requested, e.g., by
## Python's print statement. The printing behaves similarly to the ROOT prompt
## powered by the C++ interpreter cling.
##
## \macro_code
##
## \date June 2018
## \author Stefan Wunsch
import ROOT
# Create an object with PyROOT
obj = ROOT.std.vector("int")(3)
for i in range(obj.size()):
obj[i] = i
# Print the object, which reveals the content. Note that `print` calls the special
# method `__str__` of the object internally.
print(obj)
# The output can be retrieved as a string by any function that triggers the `__str__`
# special method of the object, e.g., `str` or `format`.
print(str(obj))
print("{}".format(obj))
# Note that the interactive Python prompt does not call `__str__`; it calls
# `__repr__`, which implements a formal and unique string representation of
# the object.
print(repr(obj))
obj
# The print output behaves similarly to the ROOT prompt, e.g., here for a ROOT histogram.
hist = ROOT.TH1F("name", "title", 10, 0, 1)
print(hist)
|
<commit_before><commit_msg>[PyROOT] Add tutorial for pretty printing feature<commit_after>## \file
## \ingroup tutorial_pyroot
## \notebook -nodraw
## This tutorial illustrates the pretty printing feature of PyROOT, which reveals
## the content of the object if a string representation is requested, e.g., by
## Python's print statement. The printing behaves similarly to the ROOT prompt
## powered by the C++ interpreter cling.
##
## \macro_code
##
## \date June 2018
## \author Stefan Wunsch
import ROOT
# Create an object with PyROOT
obj = ROOT.std.vector("int")(3)
for i in range(obj.size()):
obj[i] = i
# Print the object, which reveals the content. Note that `print` calls the special
# method `__str__` of the object internally.
print(obj)
# The output can be retrieved as a string by any function that triggers the `__str__`
# special method of the object, e.g., `str` or `format`.
print(str(obj))
print("{}".format(obj))
# Note that the interactive Python prompt does not call `__str__`; it calls
# `__repr__`, which implements a formal and unique string representation of
# the object.
print(repr(obj))
obj
# The print output behaves similarly to the ROOT prompt, e.g., here for a ROOT histogram.
hist = ROOT.TH1F("name", "title", 10, 0, 1)
print(hist)
|
|
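The tutorial above rests on a plain-Python mechanism that can be shown without ROOT installed: print(), str() and format() route through __str__, while the interactive prompt routes through __repr__. The class below is invented to demonstrate just that split.
class Pretty:
    def __init__(self, values):
        self.values = values

    def __str__(self):
        # Reached via print(obj), str(obj) and "{}".format(obj).
        return "{ " + ", ".join(str(v) for v in self.values) + " }"

    def __repr__(self):
        # Reached via repr(obj) and a bare `obj` at the prompt.
        return "<Pretty object at %#x>" % id(self)

obj = Pretty([0, 1, 2])
print(obj)        # { 0, 1, 2 }
print(repr(obj))  # <Pretty object at 0x...>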
73b69c0205b29cda9107d71a88e94320be98d09e
|
tests/test_sqlite_db.py
|
tests/test_sqlite_db.py
|
'''
Module for testing the SQLite DB.
Fairly similar to the test_api tests...
'''
import os
import sys
import json
import unittest
CWD = os.path.dirname(os.path.abspath(__file__))
MS_WD = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Allow import of sqlite_driver
if os.path.join(MS_WD, 'storage') not in sys.path:
sys.path.insert(0, os.path.join(MS_WD, 'storage'))
# Use multiscanner in ../
sys.path.insert(0, os.path.dirname(CWD))
from sqlite_driver import Database, Record
TEST_DB_PATH = os.path.join(CWD, 'testing.db')
TEST_UPLOAD_FOLDER = os.path.join(CWD, 'tmp')
TEST_REPORT = {'MD5': '96b47da202ddba8d7a6b91fecbf89a41', 'SHA256': '26d11f0ea5cc77a59b6e47deee859440f26d2d14440beb712dbac8550d35ef1f', 'libmagic': 'a /bin/python script text executable', 'filename': '/opt/other_file'}
class TestRecordSerialization(unittest.TestCase):
def setUp(self):
self.sql_db = Database(TEST_DB_PATH)
self.sql_db.init_sqlite_db()
self.record = Record(
task_id = 1,
task_status = 'Pending',
report_id = None
)
def test_record_dict_serialization(self):
self.assertDictEqual(
{'task_id': 1, 'task_status': 'Pending', 'report_id': None},
self.record.to_dict()
)
def test_record_json_serialization(self):
self.assertEqual(
json.dumps({'task_id': 1, 'task_status': 'Pending', 'report_id': None}),
self.record.to_json()
)
def tearDown(self):
os.remove(TEST_DB_PATH)
|
Add first unit tests for sqlite db
|
Add first unit tests for sqlite db
|
Python
|
mpl-2.0
|
jmlong1027/multiscanner,awest1339/multiscanner,mitre/multiscanner,awest1339/multiscanner,awest1339/multiscanner,jmlong1027/multiscanner,jmlong1027/multiscanner,awest1339/multiscanner,MITRECND/multiscanner,mitre/multiscanner,mitre/multiscanner,MITRECND/multiscanner,jmlong1027/multiscanner
|
Add first unit tests for sqlite db
|
'''
Module for testing the SQLite DB.
Fairly similar to the test_api tests...
'''
import os
import sys
import json
import unittest
CWD = os.path.dirname(os.path.abspath(__file__))
MS_WD = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Allow import of sqlite_driver
if os.path.join(MS_WD, 'storage') not in sys.path:
sys.path.insert(0, os.path.join(MS_WD, 'storage'))
# Use multiscanner in ../
sys.path.insert(0, os.path.dirname(CWD))
from sqlite_driver import Database, Record
TEST_DB_PATH = os.path.join(CWD, 'testing.db')
TEST_UPLOAD_FOLDER = os.path.join(CWD, 'tmp')
TEST_REPORT = {'MD5': '96b47da202ddba8d7a6b91fecbf89a41', 'SHA256': '26d11f0ea5cc77a59b6e47deee859440f26d2d14440beb712dbac8550d35ef1f', 'libmagic': 'a /bin/python script text executable', 'filename': '/opt/other_file'}
class TestRecordSerialization(unittest.TestCase):
def setUp(self):
self.sql_db = Database(TEST_DB_PATH)
self.sql_db.init_sqlite_db()
self.record = Record(
task_id = 1,
task_status = 'Pending',
report_id = None
)
def test_record_dict_serialization(self):
self.assertDictEqual(
{'task_id': 1, 'task_status': 'Pending', 'report_id': None},
self.record.to_dict()
)
def test_record_json_serialization(self):
self.assertEqual(
json.dumps({'task_id': 1, 'task_status': 'Pending', 'report_id': None}),
self.record.to_json()
)
def tearDown(self):
os.remove(TEST_DB_PATH)
|
<commit_before><commit_msg>Add first unit tests for sqlite db<commit_after>
|
'''
Module for testing the SQLite DB.
Fairly similar to the test_api tests...
'''
import os
import sys
import json
import unittest
CWD = os.path.dirname(os.path.abspath(__file__))
MS_WD = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Allow import of sqlite_driver
if os.path.join(MS_WD, 'storage') not in sys.path:
sys.path.insert(0, os.path.join(MS_WD, 'storage'))
# Use multiscanner in ../
sys.path.insert(0, os.path.dirname(CWD))
from sqlite_driver import Database, Record
TEST_DB_PATH = os.path.join(CWD, 'testing.db')
TEST_UPLOAD_FOLDER = os.path.join(CWD, 'tmp')
TEST_REPORT = {'MD5': '96b47da202ddba8d7a6b91fecbf89a41', 'SHA256': '26d11f0ea5cc77a59b6e47deee859440f26d2d14440beb712dbac8550d35ef1f', 'libmagic': 'a /bin/python script text executable', 'filename': '/opt/other_file'}
class TestRecordSerialization(unittest.TestCase):
def setUp(self):
self.sql_db = Database(TEST_DB_PATH)
self.sql_db.init_sqlite_db()
self.record = Record(
task_id = 1,
task_status = 'Pending',
report_id = None
)
def test_record_dict_serialization(self):
self.assertDictEqual(
{'task_id': 1, 'task_status': 'Pending', 'report_id': None},
self.record.to_dict()
)
def test_record_json_serialization(self):
self.assertEqual(
json.dumps({'task_id': 1, 'task_status': 'Pending', 'report_id': None}),
self.record.to_json()
)
def tearDown(self):
os.remove(TEST_DB_PATH)
|
Add first unit tests for sqlite db'''
Module for testing the SQLite DB.
Fairly similar to the test_api tests...
'''
import os
import sys
import json
import unittest
CWD = os.path.dirname(os.path.abspath(__file__))
MS_WD = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Allow import of sqlite_driver
if os.path.join(MS_WD, 'storage') not in sys.path:
sys.path.insert(0, os.path.join(MS_WD, 'storage'))
# Use multiscanner in ../
sys.path.insert(0, os.path.dirname(CWD))
from sqlite_driver import Database, Record
TEST_DB_PATH = os.path.join(CWD, 'testing.db')
TEST_UPLOAD_FOLDER = os.path.join(CWD, 'tmp')
TEST_REPORT = {'MD5': '96b47da202ddba8d7a6b91fecbf89a41', 'SHA256': '26d11f0ea5cc77a59b6e47deee859440f26d2d14440beb712dbac8550d35ef1f', 'libmagic': 'a /bin/python script text executable', 'filename': '/opt/other_file'}
class TestRecordSerialization(unittest.TestCase):
def setUp(self):
self.sql_db = Database(TEST_DB_PATH)
self.sql_db.init_sqlite_db()
self.record = Record(
task_id = 1,
task_status = 'Pending',
report_id = None
)
def test_record_dict_serialization(self):
self.assertDictEqual(
{'task_id': 1, 'task_status': 'Pending', 'report_id': None},
self.record.to_dict()
)
def test_record_json_serialization(self):
self.assertEqual(
json.dumps({'task_id': 1, 'task_status': 'Pending', 'report_id': None}),
self.record.to_json()
)
def tearDown(self):
os.remove(TEST_DB_PATH)
|
<commit_before><commit_msg>Add first unit tests for sqlite db<commit_after>'''
Module for testing the SQLite DB.
Fairly similar to the test_api tests...
'''
import os
import sys
import json
import unittest
CWD = os.path.dirname(os.path.abspath(__file__))
MS_WD = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Allow import of sqlite_driver
if os.path.join(MS_WD, 'storage') not in sys.path:
sys.path.insert(0, os.path.join(MS_WD, 'storage'))
# Use multiscanner in ../
sys.path.insert(0, os.path.dirname(CWD))
from sqlite_driver import Database, Record
TEST_DB_PATH = os.path.join(CWD, 'testing.db')
TEST_UPLOAD_FOLDER = os.path.join(CWD, 'tmp')
TEST_REPORT = {'MD5': '96b47da202ddba8d7a6b91fecbf89a41', 'SHA256': '26d11f0ea5cc77a59b6e47deee859440f26d2d14440beb712dbac8550d35ef1f', 'libmagic': 'a /bin/python script text executable', 'filename': '/opt/other_file'}
class TestRecordSerialization(unittest.TestCase):
def setUp(self):
self.sql_db = Database(TEST_DB_PATH)
self.sql_db.init_sqlite_db()
self.record = Record(
task_id = 1,
task_status = 'Pending',
report_id = None
)
def test_record_dict_serialization(self):
self.assertDictEqual(
{'task_id': 1, 'task_status': 'Pending', 'report_id': None},
self.record.to_dict()
)
def test_record_json_serialization(self):
self.assertEqual(
json.dumps({'task_id': 1, 'task_status': 'Pending', 'report_id': None}),
self.record.to_json()
)
def tearDown(self):
os.remove(TEST_DB_PATH)
|
|
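The sqlite_driver module under test is not included in this record; a hypothetical minimal Record consistent with the serialization assertions above could look like this (field names taken from the tests, everything else assumed).
import json

class Record:
    def __init__(self, task_id, task_status, report_id):
        self.task_id = task_id
        self.task_status = task_status
        self.report_id = report_id

    def to_dict(self):
        # Insertion order matters for the JSON string comparison below.
        return {'task_id': self.task_id,
                'task_status': self.task_status,
                'report_id': self.report_id}

    def to_json(self):
        return json.dumps(self.to_dict())

r = Record(task_id=1, task_status='Pending', report_id=None)
assert r.to_dict() == {'task_id': 1, 'task_status': 'Pending', 'report_id': None}
assert r.to_json() == json.dumps({'task_id': 1, 'task_status': 'Pending', 'report_id': None})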
2f0db51462a3d182602fe5c604afb3c9a31c811a
|
examples/leaderboard_example.py
|
examples/leaderboard_example.py
|
import requests
r = requests.get("https://stepic.org/api/leaders")
print "Current leaders:\n"
for leader in r.json()["leaders"]:
user = requests.get("https://stepic.org/api/users/" + str(leader["user"])).json()["users"][0]
print leader["score"], '\t', user['first_name'], user['last_name']
|
Add a new example, showing the list of leaders
|
Add a new example, showing the list of leaders
|
Python
|
mit
|
StepicOrg/Stepic-API
|
Add a new example, showing the list of leaders
|
import requests
r = requests.get("https://stepic.org/api/leaders")
print "Current leaders:\n"
for leader in r.json()["leaders"]:
user = requests.get("https://stepic.org/api/users/" + str(leader["user"])).json()["users"][0]
print leader["score"], '\t', user['first_name'], user['last_name']
|
<commit_before><commit_msg>Add a new example, showing the list of leaders<commit_after>
|
import requests
r = requests.get("https://stepic.org/api/leaders")
print "Current leaders:\n"
for leader in r.json()["leaders"]:
user = requests.get("https://stepic.org/api/users/" + str(leader["user"])).json()["users"][0]
print leader["score"], '\t', user['first_name'], user['last_name']
|
Add a new example, showing the list of leadersimport requests
r = requests.get("https://stepic.org/api/leaders")
print "Current leaders:\n"
for leader in r.json()["leaders"]:
user = requests.get("https://stepic.org/api/users/" + str(leader["user"])).json()["users"][0]
print leader["score"], '\t', user['first_name'], user['last_name']
|
<commit_before><commit_msg>Add a new example, showing the list of leaders<commit_after>import requests
r = requests.get("https://stepic.org/api/leaders")
print "Current leaders:\n"
for leader in r.json()["leaders"]:
user = requests.get("https://stepic.org/api/users/" + str(leader["user"])).json()["users"][0]
print leader["score"], '\t', user['first_name'], user['last_name']
|
|
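The example above uses Python 2 print statements; a Python 3 port of the same flow would look roughly like this (endpoints taken from the record and may since have changed).
import requests

r = requests.get("https://stepic.org/api/leaders")
print("Current leaders:\n")
for leader in r.json()["leaders"]:
    user = requests.get(
        "https://stepic.org/api/users/" + str(leader["user"])
    ).json()["users"][0]
    print(leader["score"], '\t', user['first_name'], user['last_name'])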
9a145fd5df247d1b018cc8cb66ce89659a39f874
|
pygame/color.py
|
pygame/color.py
|
class Color(object):
def __init__(self, *args):
if len(args) == 4:
r, g, b, a = args
else:
raise NotImplementedError("implement me")
self.r = r
self.g = g
self.b = b
self.a = a
|
Make blitting work (the basics anyway)
|
Make blitting work (the basics anyway)
|
Python
|
lgpl-2.1
|
caseyc37/pygame_cffi,GertBurger/pygame_cffi,CTPUG/pygame_cffi,caseyc37/pygame_cffi,CTPUG/pygame_cffi,GertBurger/pygame_cffi,GertBurger/pygame_cffi,GertBurger/pygame_cffi,CTPUG/pygame_cffi,caseyc37/pygame_cffi
|
Make blitting work (the basics anyway)
|
class Color(object):
def __init__(self, *args):
if len(args) == 4:
r, g, b, a = args
else:
raise NotImplementedError("implement me")
self.r = r
self.g = g
self.b = b
self.a = a
|
<commit_before><commit_msg>Make blitting work (the basics anyway)<commit_after>
|
class Color(object):
def __init__(self, *args):
if len(args) == 4:
r, g, b, a = args
else:
raise NotImplementedError("implement me")
self.r = r
self.g = g
self.b = b
self.a = a
|
Make blitting work (the basics anyway)
class Color(object):
def __init__(self, *args):
if len(args) == 4:
r, g, b, a = args
else:
raise NotImplementedError("implement me")
self.r = r
self.g = g
self.b = b
self.a = a
|
<commit_before><commit_msg>Make blitting work (the basics anyway)<commit_after>
class Color(object):
def __init__(self, *args):
if len(args) == 4:
r, g, b, a = args
else:
raise NotImplementedError("implement me")
self.r = r
self.g = g
self.b = b
self.a = a
|
|
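A quick usage sketch of the Color stub above (the class is copied verbatim so the snippet runs standalone): only the four-argument form is implemented at this stage, and any other arity raises.
class Color(object):
    def __init__(self, *args):
        if len(args) == 4:
            r, g, b, a = args
        else:
            raise NotImplementedError("implement me")
        self.r = r
        self.g = g
        self.b = b
        self.a = a

c = Color(255, 128, 0, 255)
print(c.r, c.g, c.b, c.a)    # 255 128 0 255
try:
    Color("red")             # non-4-argument forms are still stubs
except NotImplementedError as exc:
    print(exc)               # implement me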
9cb8d712798ca3fe8c0585b68995b14fd965a191
|
tests/test_simpleflow/swf/test_helpers.py
|
tests/test_simpleflow/swf/test_helpers.py
|
import json
from mock import patch
import unittest
from sure import expect
from simpleflow.swf.helpers import swf_identity
@patch("socket.gethostname")
@patch("getpass.getuser")
@patch("os.getpid")
class TestSwfHelpers(unittest.TestCase):
def test_swf_identity_standard_case(self, mock_pid, mock_user, mock_host):
mock_host.return_value = "foo.example.com"
mock_user.return_value = "root"
mock_pid.return_value = 1234
expect(
json.loads(swf_identity())
).to.equal({
"hostname": "foo.example.com",
"user": "root",
"pid": 1234,
})
def test_swf_identity_truncated(self, mock_pid, mock_user, mock_host):
"""
The result should be truncated to 256 characters. Producing an invalid
JSON string is better than producing an invalid SWF response (for now).
Later we might externalize this another way (think Data Converters).
"""
mock_host.return_value = "a" * 250
mock_user.return_value = "root"
mock_pid.return_value = 1234
expect(swf_identity()).to.have.length_of(256)
|
Add basic tests for swf_identity() helper
|
Add basic tests for swf_identity() helper
|
Python
|
mit
|
botify-labs/simpleflow,botify-labs/simpleflow
|
Add basic tests for swf_identity() helper
|
import json
from mock import patch
import unittest
from sure import expect
from simpleflow.swf.helpers import swf_identity
@patch("socket.gethostname")
@patch("getpass.getuser")
@patch("os.getpid")
class TestSwfHelpers(unittest.TestCase):
def test_swf_identity_standard_case(self, mock_pid, mock_user, mock_host):
mock_host.return_value = "foo.example.com"
mock_user.return_value = "root"
mock_pid.return_value = 1234
expect(
json.loads(swf_identity())
).to.equal({
"hostname": "foo.example.com",
"user": "root",
"pid": 1234,
})
def test_swf_identity_truncated(self, mock_pid, mock_user, mock_host):
"""
The result should be truncated to 256 characters. Producing an invalid
JSON string is better than producing an invalid SWF response (for now).
Later we might externalize this another way (think Data Converters).
"""
mock_host.return_value = "a" * 250
mock_user.return_value = "root"
mock_pid.return_value = 1234
expect(swf_identity()).to.have.length_of(256)
|
<commit_before><commit_msg>Add basic tests for swf_identity() helper<commit_after>
|
import json
from mock import patch
import unittest
from sure import expect
from simpleflow.swf.helpers import swf_identity
@patch("socket.gethostname")
@patch("getpass.getuser")
@patch("os.getpid")
class TestSwfHelpers(unittest.TestCase):
def test_swf_identity_standard_case(self, mock_pid, mock_user, mock_host):
mock_host.return_value = "foo.example.com"
mock_user.return_value = "root"
mock_pid.return_value = 1234
expect(
json.loads(swf_identity())
).to.equal({
"hostname": "foo.example.com",
"user": "root",
"pid": 1234,
})
def test_swf_identity_truncated(self, mock_pid, mock_user, mock_host):
"""
The result should be truncated to 256 characters. Producing an invalid
JSON string is better than producing an invalid SWF response (for now).
Later we might externalize this another way (think Data Converters).
"""
mock_host.return_value = "a" * 250
mock_user.return_value = "root"
mock_pid.return_value = 1234
expect(swf_identity()).to.have.length_of(256)
|
Add basic tests for swf_identity() helperimport json
from mock import patch
import unittest
from sure import expect
from simpleflow.swf.helpers import swf_identity
@patch("socket.gethostname")
@patch("getpass.getuser")
@patch("os.getpid")
class TestSwfHelpers(unittest.TestCase):
def test_swf_identity_standard_case(self, mock_pid, mock_user, mock_host):
mock_host.return_value = "foo.example.com"
mock_user.return_value = "root"
mock_pid.return_value = 1234
expect(
json.loads(swf_identity())
).to.equal({
"hostname": "foo.example.com",
"user": "root",
"pid": 1234,
})
def test_swf_identity_truncated(self, mock_pid, mock_user, mock_host):
"""
The result should be truncated to 256 characters. Producing an invalid
JSON string is better than producing an invalid SWF response (for now).
Later we might externalize this another way (think Data Converters).
"""
mock_host.return_value = "a" * 250
mock_user.return_value = "root"
mock_pid.return_value = 1234
expect(swf_identity()).to.have.length_of(256)
|
<commit_before><commit_msg>Add basic tests for swf_identity() helper<commit_after>import json
from mock import patch
import unittest
from sure import expect
from simpleflow.swf.helpers import swf_identity
@patch("socket.gethostname")
@patch("getpass.getuser")
@patch("os.getpid")
class TestSwfHelpers(unittest.TestCase):
def test_swf_identity_standard_case(self, mock_pid, mock_user, mock_host):
mock_host.return_value = "foo.example.com"
mock_user.return_value = "root"
mock_pid.return_value = 1234
expect(
json.loads(swf_identity())
).to.equal({
"hostname": "foo.example.com",
"user": "root",
"pid": 1234,
})
def test_swf_identity_truncated(self, mock_pid, mock_user, mock_host):
"""
The result should be truncated to 256 characters. Producing an invalid
JSON string is better than producing an invalid SWF response (for now).
Later we might externalize this another way (think Data Converters).
"""
mock_host.return_value = "a" * 250
mock_user.return_value = "root"
mock_pid.return_value = 1234
expect(swf_identity()).to.have.length_of(256)
|
|
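The helper under test is not included in the record; a hypothetical swf_identity() consistent with both tests above would serialize hostname/user/pid to JSON and truncate to SWF's 256-character identity limit (the real one lives in simpleflow.swf.helpers).
import getpass
import json
import os
import socket

def swf_identity():
    identity = json.dumps({
        "hostname": socket.gethostname(),
        "user": getpass.getuser(),
        "pid": os.getpid(),
    })
    # Crude cap at SWF's 256-character identity limit; truncation may
    # yield invalid JSON, exactly as the second test tolerates.
    return identity[:256]

print(swf_identity())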
06260f50624068915ec84ba9e1e2993370a37471
|
poradnia/events/migrations/0007_reminder.py
|
poradnia/events/migrations/0007_reminder.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-07-26 18:50
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('events', '0006_remove_event_for_client'),
]
operations = [
migrations.CreateModel(
name='Reminder',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('triggered', models.BooleanField(default=False)),
('event', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='user_alarms', to='events.Event')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
options={
'verbose_name': 'Reminder',
'verbose_name_plural': 'Reminders',
},
),
]
|
Add migration with Reminder model
|
Add migration with Reminder model
|
Python
|
mit
|
watchdogpolska/poradnia.siecobywatelska.pl,rwakulszowa/poradnia,rwakulszowa/poradnia,watchdogpolska/poradnia,watchdogpolska/poradnia.siecobywatelska.pl,watchdogpolska/poradnia,rwakulszowa/poradnia,watchdogpolska/poradnia.siecobywatelska.pl,watchdogpolska/poradnia,rwakulszowa/poradnia,watchdogpolska/poradnia
|
Add migration with Reminder model
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-07-26 18:50
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('events', '0006_remove_event_for_client'),
]
operations = [
migrations.CreateModel(
name='Reminder',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('triggered', models.BooleanField(default=False)),
('event', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='user_alarms', to='events.Event')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
options={
'verbose_name': 'Reminder',
'verbose_name_plural': 'Reminders',
},
),
]
|
<commit_before><commit_msg>Add migration with Reminder model<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-07-26 18:50
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('events', '0006_remove_event_for_client'),
]
operations = [
migrations.CreateModel(
name='Reminder',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('triggered', models.BooleanField(default=False)),
('event', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='user_alarms', to='events.Event')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
options={
'verbose_name': 'Reminder',
'verbose_name_plural': 'Reminders',
},
),
]
|
Add migration with Reminder model# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-07-26 18:50
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('events', '0006_remove_event_for_client'),
]
operations = [
migrations.CreateModel(
name='Reminder',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('triggered', models.BooleanField(default=False)),
('event', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='user_alarms', to='events.Event')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
options={
'verbose_name': 'Reminder',
'verbose_name_plural': 'Reminders',
},
),
]
|
<commit_before><commit_msg>Add migration with Reminder model<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-07-26 18:50
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('events', '0006_remove_event_for_client'),
]
operations = [
migrations.CreateModel(
name='Reminder',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('triggered', models.BooleanField(default=False)),
('event', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='user_alarms', to='events.Event')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
options={
'verbose_name': 'Reminder',
'verbose_name_plural': 'Reminders',
},
),
]
|
|
a8574ad9d7b7b933bb70fa47f84b3e396d058033
|
src/escpos/capabilities.py
|
src/escpos/capabilities.py
|
import re
from os import path
import yaml
with open(path.join(path.dirname(__file__), 'capabilities.yml')) as f:
PROFILES = yaml.load(f)
class Profile(object):
profile_data = {}
def __init__(self, columns=None):
self.default_columns = columns
def __getattr__(self, name):
return self.profile_data[name]
def get_columns(self, font):
""" Return the number of columns for the given font.
"""
if self.default_columns:
return self.default_columns
if 'columnConfigs' in self.profile_data:
columns_def = self.columnConfigs[self.defaultColumnConfig]
elif 'columns' in self.profile_data:
columns_def = self.columns
if isinstance(columns_def, int):
return columns_def
return columns_def[font]
def get_profile(name=None, **kwargs):
if isinstance(name, Profile):
return name
clazz = get_profile_class(name or 'default')
return clazz(**kwargs)
CLASS_CACHE = {}
def get_profile_class(name):
    if name not in CLASS_CACHE:
profile_data = resolve_profile_data(name)
class_name = '%sProfile' % clean(name)
new_class = type(class_name, (Profile,), {'profile_data': profile_data})
CLASS_CACHE[name] = new_class
return CLASS_CACHE[name]
def clean(s):
# Remove invalid characters
s = re.sub('[^0-9a-zA-Z_]', '', s)
# Remove leading characters until we find a letter or underscore
s = re.sub('^[^a-zA-Z_]+', '', s)
return str(s)
def resolve_profile_data(name):
data = PROFILES[name]
inherits = data.get('inherits')
if not inherits:
return data
if not isinstance(inherits, (tuple, list)):
inherits = [inherits]
merged = {}
for base in reversed(inherits):
base_data = resolve_profile_data(base)
merged.update(base_data)
merged.update(data)
return merged
|
Support loading capabilities YAML into Python classes.
|
Support loading capabilities YAML into Python classes.
|
Python
|
mit
|
python-escpos/python-escpos,braveheuel/python-escpos,belono/python-escpos
|
Support loading capabilities YAML into Python classes.
|
import re
from os import path
import yaml
with open(path.join(path.dirname(__file__), 'capabilities.yml')) as f:
PROFILES = yaml.load(f)
class Profile(object):
profile_data = {}
def __init__(self, columns=None):
self.default_columns = columns
def __getattr__(self, name):
return self.profile_data[name]
def get_columns(self, font):
""" Return the number of columns for the given font.
"""
if self.default_columns:
return self.default_columns
if 'columnConfigs' in self.profile_data:
columns_def = self.columnConfigs[self.defaultColumnConfig]
elif 'columns' in self.profile_data:
columns_def = self.columns
if isinstance(columns_def, int):
return columns_def
return columns_def[font]
def get_profile(name=None, **kwargs):
if isinstance(name, Profile):
return name
clazz = get_profile_class(name or 'default')
return clazz(**kwargs)
CLASS_CACHE = {}
def get_profile_class(name):
    if name not in CLASS_CACHE:
profile_data = resolve_profile_data(name)
class_name = '%sProfile' % clean(name)
new_class = type(class_name, (Profile,), {'profile_data': profile_data})
CLASS_CACHE[name] = new_class
return CLASS_CACHE[name]
def clean(s):
# Remove invalid characters
s = re.sub('[^0-9a-zA-Z_]', '', s)
# Remove leading characters until we find a letter or underscore
s = re.sub('^[^a-zA-Z_]+', '', s)
return str(s)
def resolve_profile_data(name):
data = PROFILES[name]
inherits = data.get('inherits')
if not inherits:
return data
if not isinstance(inherits, (tuple, list)):
inherits = [inherits]
merged = {}
for base in reversed(inherits):
base_data = resolve_profile_data(base)
merged.update(base_data)
merged.update(data)
return merged
|
<commit_before><commit_msg>Support loading capabilities YAML into Python classes.<commit_after>
|
import re
from os import path
import yaml
with open(path.join(path.dirname(__file__), 'capabilities.yml')) as f:
PROFILES = yaml.load(f)
class Profile(object):
profile_data = {}
def __init__(self, columns=None):
self.default_columns = columns
def __getattr__(self, name):
return self.profile_data[name]
def get_columns(self, font):
""" Return the number of columns for the given font.
"""
if self.default_columns:
return self.default_columns
if 'columnConfigs' in self.profile_data:
columns_def = self.columnConfigs[self.defaultColumnConfig]
elif 'columns' in self.profile_data:
columns_def = self.columns
if isinstance(columns_def, int):
return columns_def
return columns_def[font]
def get_profile(name=None, **kwargs):
if isinstance(name, Profile):
return name
clazz = get_profile_class(name or 'default')
return clazz(**kwargs)
CLASS_CACHE = {}
def get_profile_class(name):
    if name not in CLASS_CACHE:
profile_data = resolve_profile_data(name)
class_name = '%sProfile' % clean(name)
new_class = type(class_name, (Profile,), {'profile_data': profile_data})
CLASS_CACHE[name] = new_class
return CLASS_CACHE[name]
def clean(s):
# Remove invalid characters
s = re.sub('[^0-9a-zA-Z_]', '', s)
# Remove leading characters until we find a letter or underscore
s = re.sub('^[^a-zA-Z_]+', '', s)
return str(s)
def resolve_profile_data(name):
data = PROFILES[name]
inherits = data.get('inherits')
if not inherits:
return data
if not isinstance(inherits, (tuple, list)):
inherits = [inherits]
merged = {}
for base in reversed(inherits):
base_data = resolve_profile_data(base)
merged.update(base_data)
merged.update(data)
return merged
|
Support loading capabilities YAML into Python classes.import re
from os import path
import yaml
with open(path.join(path.dirname(__file__), 'capabilities.yml')) as f:
PROFILES = yaml.load(f)
class Profile(object):
profile_data = {}
def __init__(self, columns=None):
self.default_columns = columns
def __getattr__(self, name):
return self.profile_data[name]
def get_columns(self, font):
""" Return the number of columns for the given font.
"""
if self.default_columns:
return self.default_columns
if 'columnConfigs' in self.profile_data:
columns_def = self.columnConfigs[self.defaultColumnConfig]
elif 'columns' in self.profile_data:
columns_def = self.columns
if isinstance(columns_def, int):
return columns_def
return columns_def[font]
def get_profile(name=None, **kwargs):
if isinstance(name, Profile):
return name
clazz = get_profile_class(name or 'default')
return clazz(**kwargs)
CLASS_CACHE = {}
def get_profile_class(name):
    if name not in CLASS_CACHE:
profile_data = resolve_profile_data(name)
class_name = '%sProfile' % clean(name)
new_class = type(class_name, (Profile,), {'profile_data': profile_data})
CLASS_CACHE[name] = new_class
return CLASS_CACHE[name]
def clean(s):
# Remove invalid characters
s = re.sub('[^0-9a-zA-Z_]', '', s)
# Remove leading characters until we find a letter or underscore
s = re.sub('^[^a-zA-Z_]+', '', s)
return str(s)
def resolve_profile_data(name):
data = PROFILES[name]
inherits = data.get('inherits')
if not inherits:
return data
if not isinstance(inherits, (tuple, list)):
inherits = [inherits]
merged = {}
for base in reversed(inherits):
base_data = resolve_profile_data(base)
merged.update(base_data)
merged.update(data)
return merged
|
<commit_before><commit_msg>Support loading capabilities YAML into Python classes.<commit_after>import re
from os import path
import yaml
with open(path.join(path.dirname(__file__), 'capabilities.yml')) as f:
PROFILES = yaml.load(f)
class Profile(object):
profile_data = {}
def __init__(self, columns=None):
self.default_columns = columns
def __getattr__(self, name):
return self.profile_data[name]
def get_columns(self, font):
""" Return the number of columns for the given font.
"""
if self.default_columns:
return self.default_columns
if 'columnConfigs' in self.profile_data:
columns_def = self.columnConfigs[self.defaultColumnConfig]
elif 'columns' in self.profile_data:
columns_def = self.columns
if isinstance(columns_def, int):
return columns_def
return columns_def[font]
def get_profile(name=None, **kwargs):
if isinstance(name, Profile):
return name
clazz = get_profile_class(name or 'default')
return clazz(**kwargs)
CLASS_CACHE = {}
def get_profile_class(name):
    if name not in CLASS_CACHE:
profile_data = resolve_profile_data(name)
class_name = '%sProfile' % clean(name)
new_class = type(class_name, (Profile,), {'profile_data': profile_data})
CLASS_CACHE[name] = new_class
return CLASS_CACHE[name]
def clean(s):
# Remove invalid characters
s = re.sub('[^0-9a-zA-Z_]', '', s)
# Remove leading characters until we find a letter or underscore
s = re.sub('^[^a-zA-Z_]+', '', s)
return str(s)
def resolve_profile_data(name):
data = PROFILES[name]
inherits = data.get('inherits')
if not inherits:
return data
if not isinstance(inherits, (tuple, list)):
inherits = [inherits]
merged = {}
for base in reversed(inherits):
base_data = resolve_profile_data(base)
merged.update(base_data)
merged.update(data)
return merged
|
|
df30466b43a765bc4771c7541cc8d99cae5f7932
|
examples/safecast_logger.py
|
examples/safecast_logger.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Log measurements to Safecast open-data API (https://api.safecast.org).
Requires the SafecastPy package:
https://github.com/MonsieurV/SafecastPy
You'll also need a Safecast API key:
https://api.safecast.org/en-US/users/sign_up
Released under MIT License. See LICENSE file.
By Yoan Tournade <yoan@ytotech.com>
"""
from PiPocketGeiger import RadiationWatch
import time
import datetime
import SafecastPy
# Safecast API key.
API_KEY = 'your_api_key'
# Log on production or development instance.
SAFECAST_INSTANCE = SafecastPy.DEVELOPMENT_API_URL
# Radiation Watch Pocket Geiger is registered:
# - As device id 90 on development instance
# http://dev.safecast.org/en-US/devices/90
# - As device id 145 on production instance
# https://api.safecast.org/en-US/devices/145
DEVICE_ID = 90
# Your location name.
MY_LOCATION_NAME = "(A Rue du Grand Ferré, Compiègne, France)"
# Your exact location.
MY_LOCATION = {
    'latitude': 49.418683,
'longitude': 2.823469
}
# Period for publishing on Safecast API, in minutes.
# Five minutes is fine for background monitoring.
LOGGING_PERIOD = 5
if __name__ == "__main__":
print("Logging each {0} minutes.".format(LOGGING_PERIOD))
    safecast = SafecastPy.SafecastPy(api_key=API_KEY, api_url=SAFECAST_INSTANCE)
with RadiationWatch(24, 23) as radiationWatch:
while 1:
# Sleep first so we can sample enough data to stabilize results.
            time.sleep(LOGGING_PERIOD * 60)
try:
readings = radiationWatch.status()
print("Logging... {0}.".format(readings))
measurement = safecast.add_measurement(json={
'latitude': MY_LOCATION['latitude'],
'longitude': MY_LOCATION['longitude'],
'value': readings['uSvh'],
'unit': SafecastPy.UNIT_USV,
'captured_at': datetime.datetime.utcnow().isoformat() + '+00:00',
'device_id': DEVICE_ID,
                    'location_name': MY_LOCATION_NAME
})
print("Ok. Measurement published with id {0}".format(
measurement['id']))
except Exception as e:
# A catch-all to keep the thing alive even if we have transient
# network or service errors.
print(e)
|
Add an example to publish data on the Safecast API
|
Add an example to publish data on the Safecast API
|
Python
|
mit
|
MonsieurV/PiPocketGeiger
|
Add an example to publish data on the Safecast API
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Log measurements to Safecast open-data API (https://api.safecast.org).
Requires the SafecastPy package:
https://github.com/MonsieurV/SafecastPy
You'll also need a Safecast API key:
https://api.safecast.org/en-US/users/sign_up
Released under MIT License. See LICENSE file.
By Yoan Tournade <yoan@ytotech.com>
"""
from PiPocketGeiger import RadiationWatch
import time
import datetime
import SafecastPy
# Safecast API key.
API_KEY = 'your_api_key'
# Log on production or development instance.
SAFECAST_INSTANCE = SafecastPy.DEVELOPMENT_API_URL
# Radiation Watch Pocket Geiger is registered:
# - As device id 90 on development instance
# http://dev.safecast.org/en-US/devices/90
# - As device id 145 on production instance
# https://api.safecast.org/en-US/devices/145
DEVICE_ID = 90
# Your location name.
MY_LOCATION_NAME = "(A Rue du Grand Ferré, Compiègne, France)"
# Your exact location.
MY_LOCATION = {
    'latitude': 49.418683,
'longitude': 2.823469
}
# Period for publishing on Safecast API, in minutes.
# Five minutes is fine for background monitoring.
LOGGING_PERIOD = 5
if __name__ == "__main__":
print("Logging each {0} minutes.".format(LOGGING_PERIOD))
    safecast = SafecastPy.SafecastPy(api_key=API_KEY, api_url=SAFECAST_INSTANCE)
with RadiationWatch(24, 23) as radiationWatch:
while 1:
# Sleep first so we can sample enough data to stabilize results.
            time.sleep(LOGGING_PERIOD * 60)
try:
readings = radiationWatch.status()
print("Logging... {0}.".format(readings))
measurement = safecast.add_measurement(json={
'latitude': MY_LOCATION['latitude'],
'longitude': MY_LOCATION['longitude'],
'value': readings['uSvh'],
'unit': SafecastPy.UNIT_USV,
'captured_at': datetime.datetime.utcnow().isoformat() + '+00:00',
'device_id': DEVICE_ID,
                    'location_name': MY_LOCATION_NAME
})
print("Ok. Measurement published with id {0}".format(
measurement['id']))
except Exception as e:
# A catch-all to keep the thing alive even if we have transient
# network or service errors.
print(e)
|
<commit_before><commit_msg>Add an example to publish data on the Safecast API<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Log measurements to Safecast open-data API (https://api.safecast.org).
Requires the SafecastPy package:
https://github.com/MonsieurV/SafecastPy
You'll also need a Safecast API key:
https://api.safecast.org/en-US/users/sign_up
Released under MIT License. See LICENSE file.
By Yoan Tournade <yoan@ytotech.com>
"""
from PiPocketGeiger import RadiationWatch
import time
import datetime
import SafecastPy
# Safecast API key.
API_KEY = 'your_api_key'
# Log on production or development instance.
SAFECAST_INSTANCE = SafecastPy.DEVELOPMENT_API_URL
# Radiation Watch Pocket Geiger is registered:
# - As device id 90 on development instance
# http://dev.safecast.org/en-US/devices/90
# - As device id 145 on production instance
# https://api.safecast.org/en-US/devices/145
DEVICE_ID = 90
# Your location name.
MY_LOCATION_NAME = "(A Rue du Grand Ferré, Compiègne, France)"
# Your exact location.
MY_LOCATION = {
    'latitude': 49.418683,
'longitude': 2.823469
}
# Period for publishing on Safecast API, in minutes.
# Five minutes is fine for background monitoring.
LOGGING_PERIOD = 5
if __name__ == "__main__":
print("Logging each {0} minutes.".format(LOGGING_PERIOD))
    safecast = SafecastPy.SafecastPy(api_key=API_KEY, api_url=SAFECAST_INSTANCE)
with RadiationWatch(24, 23) as radiationWatch:
while 1:
# Sleep first so we can sample enough data to stabilize results.
            time.sleep(LOGGING_PERIOD * 60)
try:
readings = radiationWatch.status()
print("Logging... {0}.".format(readings))
measurement = safecast.add_measurement(json={
'latitude': MY_LOCATION['latitude'],
'longitude': MY_LOCATION['longitude'],
'value': readings['uSvh'],
'unit': SafecastPy.UNIT_USV,
'captured_at': datetime.datetime.utcnow().isoformat() + '+00:00',
'device_id': DEVICE_ID,
                    'location_name': MY_LOCATION_NAME
})
print("Ok. Measurement published with id {0}".format(
measurement['id']))
except Exception as e:
# A catch-all to keep the thing alive even if we have transient
# network or service errors.
print(e)
|
Add an example to publish data on the Safecast API#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Log measurements to Safecast open-data API (https://api.safecast.org).
Requires the SafecastPy package:
https://github.com/MonsieurV/SafecastPy
You'll also need a Safecast API key:
https://api.safecast.org/en-US/users/sign_up
Released under MIT License. See LICENSE file.
By Yoan Tournade <yoan@ytotech.com>
"""
from PiPocketGeiger import RadiationWatch
import time
import datetime
import SafecastPy
# Safecast API key.
API_KEY = 'your_api_key'
# Log on production or development instance.
SAFECAST_INSTANCE = SafecastPy.DEVELOPMENT_API_URL
# Radiation Watch Pocket Geiger is registered:
# - As device id 90 on development instance
# http://dev.safecast.org/en-US/devices/90
# - As device id 145 on production instance
# https://api.safecast.org/en-US/devices/145
DEVICE_ID = 90
# Your location name.
MY_LOCATION_NAME = "(A Rue du Grand Ferré, Compiègne, France)"
# Your exact location.
MY_LOCATION = {
    'latitude': 49.418683,
'longitude': 2.823469
}
# Period for publishing on Safecast API, in minutes.
# Five minutes is fine for background monitoring.
LOGGING_PERIOD = 5
if __name__ == "__main__":
print("Logging each {0} minutes.".format(LOGGING_PERIOD))
    safecast = SafecastPy.SafecastPy(api_key=API_KEY, api_url=SAFECAST_INSTANCE)
with RadiationWatch(24, 23) as radiationWatch:
while 1:
# Sleep first so we can sample enough data to stabilize results.
            time.sleep(LOGGING_PERIOD * 60)
try:
readings = radiationWatch.status()
print("Logging... {0}.".format(readings))
measurement = safecast.add_measurement(json={
'latitude': MY_LOCATION['latitude'],
'longitude': MY_LOCATION['longitude'],
'value': readings['uSvh'],
'unit': SafecastPy.UNIT_USV,
'captured_at': datetime.datetime.utcnow().isoformat() + '+00:00',
'device_id': DEVICE_ID,
                    'location_name': MY_LOCATION_NAME
})
print("Ok. Measurement published with id {0}".format(
measurement['id']))
except Exception as e:
# A catch-all to keep the thing alive even if we have transient
# network or service errors.
print(e)
|
<commit_before><commit_msg>Add an example to publish data on the Safecast API<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Log measurements to Safecast open-data API (https://api.safecast.org).
Requires the SafecastPy package:
https://github.com/MonsieurV/SafecastPy
You'll also need a Safecast API key:
https://api.safecast.org/en-US/users/sign_up
Released under MIT License. See LICENSE file.
By Yoan Tournade <yoan@ytotech.com>
"""
from PiPocketGeiger import RadiationWatch
import time
import datetime
import SafecastPy
# Safecast API key.
API_KEY = 'your_api_key'
# Log on production or development instance.
SAFECAST_INSTANCE = SafecastPy.DEVELOPMENT_API_URL
# Radiation Watch Pocket Geiger is registered:
# - As device id 90 on development instance
# http://dev.safecast.org/en-US/devices/90
# - As device id 145 on production instance
# https://api.safecast.org/en-US/devices/145
DEVICE_ID = 90
# Your location name.
MY_LOCATION_NAME = "(A Rue du Grand Ferré, Compiègne, France)"
# Your exact location.
MY_LOCATION = {
    'latitude': 49.418683,
'longitude': 2.823469
}
# Period for publishing on Safecast API, in minutes.
# Five minutes is fine for background monitoring.
LOGGING_PERIOD = 5
if __name__ == "__main__":
print("Logging each {0} minutes.".format(LOGGING_PERIOD))
    safecast = SafecastPy.SafecastPy(api_key=API_KEY, api_url=SAFECAST_INSTANCE)
with RadiationWatch(24, 23) as radiationWatch:
while 1:
# Sleep first so we can sample enough data to stabilize results.
            time.sleep(LOGGING_PERIOD * 60)
try:
readings = radiationWatch.status()
print("Logging... {0}.".format(readings))
measurement = safecast.add_measurement(json={
'latitude': MY_LOCATION['latitude'],
'longitude': MY_LOCATION['longitude'],
'value': readings['uSvh'],
'unit': SafecastPy.UNIT_USV,
'captured_at': datetime.datetime.utcnow().isoformat() + '+00:00',
'device_id': DEVICE_ID,
                    'location_name': MY_LOCATION_NAME
})
print("Ok. Measurement published with id {0}".format(
measurement['id']))
except Exception as e:
# A catch-all to keep the thing alive even if we have transient
# network or service errors.
print(e)
|
|
79bf320f18db1b1dc89383a1c8e2f1080391c56c
|
tests/zeus/api/resources/test_revision_artifacts.py
|
tests/zeus/api/resources/test_revision_artifacts.py
|
from datetime import timedelta
from zeus import factories
from zeus.models import RepositoryAccess, RepositoryBackend, RepositoryProvider
from zeus.utils import timezone
def test_revision_artifacts(
client, db_session, default_login, default_user, git_repo_config
):
repo = factories.RepositoryFactory.create(
backend=RepositoryBackend.git,
provider=RepositoryProvider.github,
url=git_repo_config.url,
)
db_session.add(RepositoryAccess(user=default_user, repository=repo))
db_session.flush()
revision = factories.RevisionFactory.create(
sha=git_repo_config.commits[0], repository=repo
)
source = factories.SourceFactory.create(revision=revision)
factories.BuildFactory.create(
source=source, date_created=timezone.now() - timedelta(minutes=1)
)
build = factories.BuildFactory.create(source=source, date_created=timezone.now())
job = factories.JobFactory.create(build=build)
artifact = factories.ArtifactFactory.create(job=job)
resp = client.get(
"/api/repos/{}/revisions/{}/artifacts".format(
repo.get_full_name(), revision.sha
)
)
assert resp.status_code == 200
data = resp.json()
assert len(data) == 1
assert data[0]["id"] == str(artifact.id)
|
Add coverage for revision artifacts endpoint
|
test: Add coverage for revision artifacts endpoint
|
Python
|
apache-2.0
|
getsentry/zeus,getsentry/zeus,getsentry/zeus,getsentry/zeus
|
test: Add coverage for revision artifacts endpoint
|
from datetime import timedelta
from zeus import factories
from zeus.models import RepositoryAccess, RepositoryBackend, RepositoryProvider
from zeus.utils import timezone
def test_revision_artifacts(
client, db_session, default_login, default_user, git_repo_config
):
repo = factories.RepositoryFactory.create(
backend=RepositoryBackend.git,
provider=RepositoryProvider.github,
url=git_repo_config.url,
)
db_session.add(RepositoryAccess(user=default_user, repository=repo))
db_session.flush()
revision = factories.RevisionFactory.create(
sha=git_repo_config.commits[0], repository=repo
)
source = factories.SourceFactory.create(revision=revision)
factories.BuildFactory.create(
source=source, date_created=timezone.now() - timedelta(minutes=1)
)
build = factories.BuildFactory.create(source=source, date_created=timezone.now())
job = factories.JobFactory.create(build=build)
artifact = factories.ArtifactFactory.create(job=job)
resp = client.get(
"/api/repos/{}/revisions/{}/artifacts".format(
repo.get_full_name(), revision.sha
)
)
assert resp.status_code == 200
data = resp.json()
assert len(data) == 1
assert data[0]["id"] == str(artifact.id)
|
<commit_before><commit_msg>test: Add coverage for revision artifacts endpoint<commit_after>
|
from datetime import timedelta
from zeus import factories
from zeus.models import RepositoryAccess, RepositoryBackend, RepositoryProvider
from zeus.utils import timezone
def test_revision_artifacts(
client, db_session, default_login, default_user, git_repo_config
):
repo = factories.RepositoryFactory.create(
backend=RepositoryBackend.git,
provider=RepositoryProvider.github,
url=git_repo_config.url,
)
db_session.add(RepositoryAccess(user=default_user, repository=repo))
db_session.flush()
revision = factories.RevisionFactory.create(
sha=git_repo_config.commits[0], repository=repo
)
source = factories.SourceFactory.create(revision=revision)
factories.BuildFactory.create(
source=source, date_created=timezone.now() - timedelta(minutes=1)
)
build = factories.BuildFactory.create(source=source, date_created=timezone.now())
job = factories.JobFactory.create(build=build)
artifact = factories.ArtifactFactory.create(job=job)
resp = client.get(
"/api/repos/{}/revisions/{}/artifacts".format(
repo.get_full_name(), revision.sha
)
)
assert resp.status_code == 200
data = resp.json()
assert len(data) == 1
assert data[0]["id"] == str(artifact.id)
|
test: Add coverage for revision artifacts endpointfrom datetime import timedelta
from zeus import factories
from zeus.models import RepositoryAccess, RepositoryBackend, RepositoryProvider
from zeus.utils import timezone
def test_revision_artifacts(
client, db_session, default_login, default_user, git_repo_config
):
repo = factories.RepositoryFactory.create(
backend=RepositoryBackend.git,
provider=RepositoryProvider.github,
url=git_repo_config.url,
)
db_session.add(RepositoryAccess(user=default_user, repository=repo))
db_session.flush()
revision = factories.RevisionFactory.create(
sha=git_repo_config.commits[0], repository=repo
)
source = factories.SourceFactory.create(revision=revision)
factories.BuildFactory.create(
source=source, date_created=timezone.now() - timedelta(minutes=1)
)
build = factories.BuildFactory.create(source=source, date_created=timezone.now())
job = factories.JobFactory.create(build=build)
artifact = factories.ArtifactFactory.create(job=job)
resp = client.get(
"/api/repos/{}/revisions/{}/artifacts".format(
repo.get_full_name(), revision.sha
)
)
assert resp.status_code == 200
data = resp.json()
assert len(data) == 1
assert data[0]["id"] == str(artifact.id)
|
<commit_before><commit_msg>test: Add coverage for revision artifacts endpoint<commit_after>from datetime import timedelta
from zeus import factories
from zeus.models import RepositoryAccess, RepositoryBackend, RepositoryProvider
from zeus.utils import timezone
def test_revision_artifacts(
client, db_session, default_login, default_user, git_repo_config
):
repo = factories.RepositoryFactory.create(
backend=RepositoryBackend.git,
provider=RepositoryProvider.github,
url=git_repo_config.url,
)
db_session.add(RepositoryAccess(user=default_user, repository=repo))
db_session.flush()
revision = factories.RevisionFactory.create(
sha=git_repo_config.commits[0], repository=repo
)
source = factories.SourceFactory.create(revision=revision)
factories.BuildFactory.create(
source=source, date_created=timezone.now() - timedelta(minutes=1)
)
build = factories.BuildFactory.create(source=source, date_created=timezone.now())
job = factories.JobFactory.create(build=build)
artifact = factories.ArtifactFactory.create(job=job)
resp = client.get(
"/api/repos/{}/revisions/{}/artifacts".format(
repo.get_full_name(), revision.sha
)
)
assert resp.status_code == 200
data = resp.json()
assert len(data) == 1
assert data[0]["id"] == str(artifact.id)
|
|
e738c5caa352f72419bb07ad7673ce1ed3f86039
|
zivinetz/migrations/0002_auto_20140908_1116.py
|
zivinetz/migrations/0002_auto_20140908_1116.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('zivinetz', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='specification',
name='accomodation_throughout',
field=models.BooleanField(default=False, help_text='Accomodation is offered throughout.', verbose_name='accomodation throughout'),
),
migrations.AlterField(
model_name='specification',
name='food_throughout',
field=models.BooleanField(default=False, help_text='Food is offered throughout.', verbose_name='food throughout'),
),
migrations.AlterField(
model_name='specification',
name='with_accomodation',
field=models.BooleanField(default=False, verbose_name='with accomodation'),
),
]
|
Add a migration for the boolean field changes
|
Add a migration for the boolean field changes
|
Python
|
mit
|
matthiask/zivinetz,matthiask/zivinetz,matthiask/zivinetz,matthiask/zivinetz
|
Add a migration for the boolean field changes
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('zivinetz', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='specification',
name='accomodation_throughout',
field=models.BooleanField(default=False, help_text='Accomodation is offered throughout.', verbose_name='accomodation throughout'),
),
migrations.AlterField(
model_name='specification',
name='food_throughout',
field=models.BooleanField(default=False, help_text='Food is offered throughout.', verbose_name='food throughout'),
),
migrations.AlterField(
model_name='specification',
name='with_accomodation',
field=models.BooleanField(default=False, verbose_name='with accomodation'),
),
]
|
<commit_before><commit_msg>Add a migration for the boolean field changes<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('zivinetz', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='specification',
name='accomodation_throughout',
field=models.BooleanField(default=False, help_text='Accomodation is offered throughout.', verbose_name='accomodation throughout'),
),
migrations.AlterField(
model_name='specification',
name='food_throughout',
field=models.BooleanField(default=False, help_text='Food is offered throughout.', verbose_name='food throughout'),
),
migrations.AlterField(
model_name='specification',
name='with_accomodation',
field=models.BooleanField(default=False, verbose_name='with accomodation'),
),
]
|
Add a migration for the boolean field changes# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('zivinetz', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='specification',
name='accomodation_throughout',
field=models.BooleanField(default=False, help_text='Accomodation is offered throughout.', verbose_name='accomodation throughout'),
),
migrations.AlterField(
model_name='specification',
name='food_throughout',
field=models.BooleanField(default=False, help_text='Food is offered throughout.', verbose_name='food throughout'),
),
migrations.AlterField(
model_name='specification',
name='with_accomodation',
field=models.BooleanField(default=False, verbose_name='with accomodation'),
),
]
|
<commit_before><commit_msg>Add a migration for the boolean field changes<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('zivinetz', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='specification',
name='accomodation_throughout',
field=models.BooleanField(default=False, help_text='Accomodation is offered throughout.', verbose_name='accomodation throughout'),
),
migrations.AlterField(
model_name='specification',
name='food_throughout',
field=models.BooleanField(default=False, help_text='Food is offered throughout.', verbose_name='food throughout'),
),
migrations.AlterField(
model_name='specification',
name='with_accomodation',
field=models.BooleanField(default=False, verbose_name='with accomodation'),
),
]
|
|
2011ca3e3b05591dfff3779a58e6ac21b4624be5
|
cluster_viz/upload_data.py
|
cluster_viz/upload_data.py
|
import os, sys, subprocess, tempfile, gzip, cStringIO
from boto.s3.connection import S3Connection
from boto.s3.key import Key
conn = S3Connection()
bucket = conn.get_bucket("openinternet.widgets.sunlightfoundation.com")
base = sys.argv[1]
for j in os.listdir(base):
print "uploading", j
k = Key(bucket)
k.key = os.path.join("tree_data", j)
gzdata = cStringIO.StringIO()
gzfile = gzip.GzipFile(fileobj=gzdata, mode="wb")
gzfile.write(open(os.path.join(base, j)).read())
gzfile.close()
gzdata.seek(0)
k.set_metadata('Content-Type', 'application/json')
k.set_metadata('Content-Encoding', 'gzip')
k.set_contents_from_file(gzdata)
k.set_acl('public-read')
|
Add script for uploading tree_data.
|
Add script for uploading tree_data.
|
Python
|
mit
|
sunlightlabs/fcc-net-neutrality-comments,sunlightlabs/fcc-net-neutrality-comments,sunlightlabs/fcc-net-neutrality-comments,sunlightlabs/fcc-net-neutrality-comments
|
Add script for uploading tree_data.
|
import os, sys, subprocess, tempfile, gzip, cStringIO
from boto.s3.connection import S3Connection
from boto.s3.key import Key
conn = S3Connection()
bucket = conn.get_bucket("openinternet.widgets.sunlightfoundation.com")
base = sys.argv[1]
for j in os.listdir(base):
print "uploading", j
k = Key(bucket)
k.key = os.path.join("tree_data", j)
gzdata = cStringIO.StringIO()
gzfile = gzip.GzipFile(fileobj=gzdata, mode="wb")
gzfile.write(open(os.path.join(base, j)).read())
gzfile.close()
gzdata.seek(0)
k.set_metadata('Content-Type', 'application/json')
k.set_metadata('Content-Encoding', 'gzip')
k.set_contents_from_file(gzdata)
k.set_acl('public-read')
|
<commit_before><commit_msg>Add script for uploading tree_data.<commit_after>
|
import os, sys, subprocess, tempfile, gzip, cStringIO
from boto.s3.connection import S3Connection
from boto.s3.key import Key
conn = S3Connection()
bucket = conn.get_bucket("openinternet.widgets.sunlightfoundation.com")
base = sys.argv[1]
for j in os.listdir(base):
print "uploading", j
k = Key(bucket)
k.key = os.path.join("tree_data", j)
gzdata = cStringIO.StringIO()
gzfile = gzip.GzipFile(fileobj=gzdata, mode="wb")
gzfile.write(open(os.path.join(base, j)).read())
gzfile.close()
gzdata.seek(0)
k.set_metadata('Content-Type', 'application/json')
k.set_metadata('Content-Encoding', 'gzip')
k.set_contents_from_file(gzdata)
k.set_acl('public-read')
|
Add script for uploading tree_data.import os, sys, subprocess, tempfile, gzip, cStringIO
from boto.s3.connection import S3Connection
from boto.s3.key import Key
conn = S3Connection()
bucket = conn.get_bucket("openinternet.widgets.sunlightfoundation.com")
base = sys.argv[1]
for j in os.listdir(base):
print "uploading", j
k = Key(bucket)
k.key = os.path.join("tree_data", j)
gzdata = cStringIO.StringIO()
gzfile = gzip.GzipFile(fileobj=gzdata, mode="wb")
gzfile.write(open(os.path.join(base, j)).read())
gzfile.close()
gzdata.seek(0)
k.set_metadata('Content-Type', 'application/json')
k.set_metadata('Content-Encoding', 'gzip')
k.set_contents_from_file(gzdata)
k.set_acl('public-read')
|
<commit_before><commit_msg>Add script for uploading tree_data.<commit_after>import os, sys, subprocess, tempfile, gzip, cStringIO
from boto.s3.connection import S3Connection
from boto.s3.key import Key
conn = S3Connection()
bucket = conn.get_bucket("openinternet.widgets.sunlightfoundation.com")
base = sys.argv[1]
for j in os.listdir(base):
print "uploading", j
k = Key(bucket)
k.key = os.path.join("tree_data", j)
gzdata = cStringIO.StringIO()
gzfile = gzip.GzipFile(fileobj=gzdata, mode="wb")
gzfile.write(open(os.path.join(base, j)).read())
gzfile.close()
gzdata.seek(0)
k.set_metadata('Content-Type', 'application/json')
k.set_metadata('Content-Encoding', 'gzip')
k.set_contents_from_file(gzdata)
k.set_acl('public-read')
|
|
f7276ddf05c9775fb92706eedf1b1503f01b79e4
|
src/url_json.py
|
src/url_json.py
|
from argparse import ArgumentParser
from time import time
from urllib.request import urlopen
import json
def parse_args():
parser = ArgumentParser()
parser.add_argument('--prefix', default='url_json')
parser.add_argument('--url', default='http://localhost/')
parser.add_argument(
'--key',
action='append',
dest='keys', )
return parser.parse_args()
def main():
args = parse_args()
response = urlopen(args.url)
data = json.loads(response.read().decode('utf-8'))
    template = args.prefix + '.{} {} ' + str(int(time()))
for key, value in data.items():
if key in args.keys or not args.keys:
print(template.format(key, value))
if __name__ == '__main__':
main()
|
Add plugin to request data from JSON url endpoint
|
Add plugin to request data from JSON url endpoint
|
Python
|
mit
|
innogames/igcollect
|
Add plugin to request data from JSON url endpoint
|
from argparse import ArgumentParser
from time import time
from urllib.request import urlopen
import json
def parse_args():
parser = ArgumentParser()
parser.add_argument('--prefix', default='url_json')
parser.add_argument('--url', default='http://localhost/')
parser.add_argument(
'--key',
action='append',
dest='keys', )
return parser.parse_args()
def main():
args = parse_args()
response = urlopen(args.url)
data = json.loads(response.read().decode('utf-8'))
    template = args.prefix + '.{} {} ' + str(int(time()))
for key, value in data.items():
if key in args.keys or not args.keys:
print(template.format(key, value))
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add plugin to request data from JSON url endpoint<commit_after>
|
from argparse import ArgumentParser
from time import time
from urllib.request import urlopen
import json
def parse_args():
parser = ArgumentParser()
parser.add_argument('--prefix', default='url_json')
parser.add_argument('--url', default='http://localhost/')
parser.add_argument(
'--key',
action='append',
dest='keys', )
return parser.parse_args()
def main():
args = parse_args()
response = urlopen(args.url)
data = json.loads(response.read().decode('utf-8'))
    template = args.prefix + '.{} {} ' + str(int(time()))
for key, value in data.items():
if key in args.keys or not args.keys:
print(template.format(key, value))
if __name__ == '__main__':
main()
|
Add plugin to request data from JSON url endpointfrom argparse import ArgumentParser
from time import time
from urllib.request import urlopen
import json
def parse_args():
parser = ArgumentParser()
parser.add_argument('--prefix', default='url_json')
parser.add_argument('--url', default='http://localhost/')
parser.add_argument(
'--key',
action='append',
dest='keys', )
return parser.parse_args()
def main():
args = parse_args()
response = urlopen(args.url)
data = json.loads(response.read().decode('utf-8'))
    template = args.prefix + '.{} {} ' + str(int(time()))
for key, value in data.items():
if key in args.keys or not args.keys:
print(template.format(key, value))
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add plugin to request data from JSON url endpoint<commit_after>from argparse import ArgumentParser
from time import time
from urllib.request import urlopen
import json
def parse_args():
parser = ArgumentParser()
parser.add_argument('--prefix', default='url_json')
parser.add_argument('--url', default='http://localhost/')
parser.add_argument(
'--key',
action='append',
dest='keys', )
return parser.parse_args()
def main():
args = parse_args()
response = urlopen(args.url)
data = json.loads(response.read().decode('utf-8'))
    template = args.prefix + '.{} {} ' + str(int(time()))
for key, value in data.items():
if key in args.keys or not args.keys:
print(template.format(key, value))
if __name__ == '__main__':
main()
|
|
655f8f319b3be348916731a424354dfc4a6cadab
|
utils/repl.py
|
utils/repl.py
|
from nex.state import GlobalState
from nex.reader import Reader, EndOfFile
from nex.lexer import Lexer
from nex.instructioner import Instructioner
from nex.banisher import Banisher
from nex.parsing.command_parser import command_parser
from nex.parsing.utils import ChunkGrabber
from nex.box_writer import write_to_file
from nex.utils import TidyEnd
reader = Reader()
state = GlobalState.from_defaults(font_search_paths=['/Users/ejm/projects/nex/example/fonts'])
font_id = state.define_new_font(file_name='cmr10', at_clause=None)
state.select_font(is_global=True, font_id=font_id)
lexer = Lexer(reader, get_cat_code_func=state.codes.get_cat_code)
instructioner = Instructioner(lexer)
banisher = Banisher(instructioner, state, reader)
command_grabber = ChunkGrabber(banisher, command_parser)
while True:
s = input('In: ')
reader.insert_string(s + '\n')
try:
state.execute_commands(command_grabber, banisher, reader)
except TidyEnd:
break
# out_path = sys.stdout.buffer
write_to_file(state, 'done.dvi')
|
Add simple REPL to test commands
|
Add simple REPL to test commands
|
Python
|
mit
|
eddiejessup/nex
|
Add simple REPL to test commands
|
from nex.state import GlobalState
from nex.reader import Reader, EndOfFile
from nex.lexer import Lexer
from nex.instructioner import Instructioner
from nex.banisher import Banisher
from nex.parsing.command_parser import command_parser
from nex.parsing.utils import ChunkGrabber
from nex.box_writer import write_to_file
from nex.utils import TidyEnd
reader = Reader()
state = GlobalState.from_defaults(font_search_paths=['/Users/ejm/projects/nex/example/fonts'])
font_id = state.define_new_font(file_name='cmr10', at_clause=None)
state.select_font(is_global=True, font_id=font_id)
lexer = Lexer(reader, get_cat_code_func=state.codes.get_cat_code)
instructioner = Instructioner(lexer)
banisher = Banisher(instructioner, state, reader)
command_grabber = ChunkGrabber(banisher, command_parser)
while True:
s = input('In: ')
reader.insert_string(s + '\n')
try:
state.execute_commands(command_grabber, banisher, reader)
except TidyEnd:
break
# out_path = sys.stdout.buffer
write_to_file(state, 'done.dvi')
|
<commit_before><commit_msg>Add simple REPL to test commands<commit_after>
|
from nex.state import GlobalState
from nex.reader import Reader, EndOfFile
from nex.lexer import Lexer
from nex.instructioner import Instructioner
from nex.banisher import Banisher
from nex.parsing.command_parser import command_parser
from nex.parsing.utils import ChunkGrabber
from nex.box_writer import write_to_file
from nex.utils import TidyEnd
reader = Reader()
state = GlobalState.from_defaults(font_search_paths=['/Users/ejm/projects/nex/example/fonts'])
font_id = state.define_new_font(file_name='cmr10', at_clause=None)
state.select_font(is_global=True, font_id=font_id)
lexer = Lexer(reader, get_cat_code_func=state.codes.get_cat_code)
instructioner = Instructioner(lexer)
banisher = Banisher(instructioner, state, reader)
command_grabber = ChunkGrabber(banisher, command_parser)
while True:
s = input('In: ')
reader.insert_string(s + '\n')
try:
state.execute_commands(command_grabber, banisher, reader)
except TidyEnd:
break
# out_path = sys.stdout.buffer
write_to_file(state, 'done.dvi')
|
Add simple REPL to test commandsfrom nex.state import GlobalState
from nex.reader import Reader, EndOfFile
from nex.lexer import Lexer
from nex.instructioner import Instructioner
from nex.banisher import Banisher
from nex.parsing.command_parser import command_parser
from nex.parsing.utils import ChunkGrabber
from nex.box_writer import write_to_file
from nex.utils import TidyEnd
reader = Reader()
state = GlobalState.from_defaults(font_search_paths=['/Users/ejm/projects/nex/example/fonts'])
font_id = state.define_new_font(file_name='cmr10', at_clause=None)
state.select_font(is_global=True, font_id=font_id)
lexer = Lexer(reader, get_cat_code_func=state.codes.get_cat_code)
instructioner = Instructioner(lexer)
banisher = Banisher(instructioner, state, reader)
command_grabber = ChunkGrabber(banisher, command_parser)
while True:
s = input('In: ')
reader.insert_string(s + '\n')
try:
state.execute_commands(command_grabber, banisher, reader)
except TidyEnd:
break
# out_path = sys.stdout.buffer
write_to_file(state, 'done.dvi')
|
<commit_before><commit_msg>Add simple REPL to test commands<commit_after>from nex.state import GlobalState
from nex.reader import Reader, EndOfFile
from nex.lexer import Lexer
from nex.instructioner import Instructioner
from nex.banisher import Banisher
from nex.parsing.command_parser import command_parser
from nex.parsing.utils import ChunkGrabber
from nex.box_writer import write_to_file
from nex.utils import TidyEnd
reader = Reader()
state = GlobalState.from_defaults(font_search_paths=['/Users/ejm/projects/nex/example/fonts'])
font_id = state.define_new_font(file_name='cmr10', at_clause=None)
state.select_font(is_global=True, font_id=font_id)
lexer = Lexer(reader, get_cat_code_func=state.codes.get_cat_code)
instructioner = Instructioner(lexer)
banisher = Banisher(instructioner, state, reader)
command_grabber = ChunkGrabber(banisher, command_parser)
while True:
s = input('In: ')
reader.insert_string(s + '\n')
try:
state.execute_commands(command_grabber, banisher, reader)
except TidyEnd:
break
# out_path = sys.stdout.buffer
write_to_file(state, 'done.dvi')
|
|
4dfb5003994b45d01d451412d12fbf30051f68a8
|
project_euler/library/number_theory/test_pells_equation.py
|
project_euler/library/number_theory/test_pells_equation.py
|
import pytest
from typing import Tuple
from .pells_equation import solve_pells_equation
from ..sqrt import is_square
PELLS_SOLUTIONS = [
(
2,
[
3,
2
]
),
(
3,
[
2,
1
]
),
(
5,
[
9,
4
]
),
(
6,
[
5,
2
]
),
(
7,
[
8,
3
]
),
]
RANGE = [n for n in range(1, 20) if not is_square(n)]
DEPTH = 100
@pytest.mark.parametrize('n,expected_output', PELLS_SOLUTIONS)
def test_first_solution_pells_equation(n: int,
expected_output: Tuple[int, int]) -> \
None:
assert tuple(expected_output) == next(solve_pells_equation(n))
@pytest.mark.parametrize('n', RANGE)
def test_further_solutions_pells_equation(n: int,
number: int=DEPTH) -> None:
solutions = solve_pells_equation(n)
for (x, y), i in zip(solutions, range(number)):
assert x ** 2 - n * y ** 2 == 1
|
Add testing for Pell's equation
|
Add testing for Pell's equation
|
Python
|
mit
|
cryvate/project-euler,cryvate/project-euler
|
Add testing for Pell's equation
|
import pytest
from typing import Tuple
from .pells_equation import solve_pells_equation
from ..sqrt import is_square
PELLS_SOLUTIONS = [
(
2,
[
3,
2
]
),
(
3,
[
2,
1
]
),
(
5,
[
9,
4
]
),
(
6,
[
5,
2
]
),
(
7,
[
8,
3
]
),
]
RANGE = [n for n in range(1, 20) if not is_square(n)]
DEPTH = 100
@pytest.mark.parametrize('n,expected_output', PELLS_SOLUTIONS)
def test_first_solution_pells_equation(n: int,
expected_output: Tuple[int, int]) -> \
None:
assert tuple(expected_output) == next(solve_pells_equation(n))
@pytest.mark.parametrize('n', RANGE)
def test_further_solutions_pells_equation(n: int,
number: int=DEPTH) -> None:
solutions = solve_pells_equation(n)
for (x, y), i in zip(solutions, range(number)):
assert x ** 2 - n * y ** 2 == 1
|
<commit_before><commit_msg>Add testing for Pell's equation<commit_after>
|
import pytest
from typing import Tuple
from .pells_equation import solve_pells_equation
from ..sqrt import is_square
PELLS_SOLUTIONS = [
(
2,
[
3,
2
]
),
(
3,
[
2,
1
]
),
(
5,
[
9,
4
]
),
(
6,
[
5,
2
]
),
(
7,
[
8,
3
]
),
]
RANGE = [n for n in range(1, 20) if not is_square(n)]
DEPTH = 100
@pytest.mark.parametrize('n,expected_output', PELLS_SOLUTIONS)
def test_first_solution_pells_equation(n: int,
expected_output: Tuple[int, int]) -> \
None:
assert tuple(expected_output) == next(solve_pells_equation(n))
@pytest.mark.parametrize('n', RANGE)
def test_further_solutions_pells_equation(n: int,
number: int=DEPTH) -> None:
solutions = solve_pells_equation(n)
for (x, y), i in zip(solutions, range(number)):
assert x ** 2 - n * y ** 2 == 1
|
Add testing for Pell's equationimport pytest
from typing import Tuple
from .pells_equation import solve_pells_equation
from ..sqrt import is_square
PELLS_SOLUTIONS = [
(
2,
[
3,
2
]
),
(
3,
[
2,
1
]
),
(
5,
[
9,
4
]
),
(
6,
[
5,
2
]
),
(
7,
[
8,
3
]
),
]
RANGE = [n for n in range(1, 20) if not is_square(n)]
DEPTH = 100
@pytest.mark.parametrize('n,expected_output', PELLS_SOLUTIONS)
def test_first_solution_pells_equation(n: int,
expected_output: Tuple[int, int]) -> \
None:
assert tuple(expected_output) == next(solve_pells_equation(n))
@pytest.mark.parametrize('n', RANGE)
def test_further_solutions_pells_equation(n: int,
number: int=DEPTH) -> None:
solutions = solve_pells_equation(n)
for (x, y), i in zip(solutions, range(number)):
assert x ** 2 - n * y ** 2 == 1
|
<commit_before><commit_msg>Add testing for Pell's equation<commit_after>import pytest
from typing import Tuple
from .pells_equation import solve_pells_equation
from ..sqrt import is_square
PELLS_SOLUTIONS = [
(
2,
[
3,
2
]
),
(
3,
[
2,
1
]
),
(
5,
[
9,
4
]
),
(
6,
[
5,
2
]
),
(
7,
[
8,
3
]
),
]
RANGE = [n for n in range(1, 20) if not is_square(n)]
DEPTH = 100
@pytest.mark.parametrize('n,expected_output', PELLS_SOLUTIONS)
def test_first_solution_pells_equation(n: int,
expected_output: Tuple[int, int]) -> \
None:
assert tuple(expected_output) == next(solve_pells_equation(n))
@pytest.mark.parametrize('n', RANGE)
def test_further_solutions_pells_equation(n: int,
number: int=DEPTH) -> None:
solutions = solve_pells_equation(n)
for (x, y), i in zip(solutions, range(number)):
assert x ** 2 - n * y ** 2 == 1
|
|
13ba3720f473ec5057a5da2ab804e7a99dcd9d95
|
tools/client.py
|
tools/client.py
|
import hashlib
import RandomIO
# config vars
address = "1CutsncbjcCtZKeRfvQ7bnYFVj28zeU6fo"
byte_size = 1024*1024*10
# lib functions
def sha256(content):
"""Finds the sha256 hash of the content."""
content = content.encode('utf-8')
return hashlib.sha256(content).hexdigest()
def build_seed(height):
"""Deterministically build a seed."""
seed = sha256(address)
for i in range(height):
seed = sha256(seed)
return seed
# code stuff
def build(x):
for i in range(x):
seed = build_seed(i)
print(seed)
# get hash
gen_file = RandomIO.RandomIO(seed).read(byte_size)
file_hash = hashlib.sha256(gen_file).hexdigest()
# save it
RandomIO.RandomIO(seed).genfile(byte_size, 'tmp/'+file_hash)
print(file_hash)
print("")
# run it
build(5)
|
Add Sample Data Generating Client
|
Add Sample Data Generating Client
|
Python
|
mit
|
Storj/dataserv,F483/dataserv,littleskunk/dataserv
|
Add Sample Data Generating Client
|
import hashlib
import RandomIO
# config vars
address = "1CutsncbjcCtZKeRfvQ7bnYFVj28zeU6fo"
byte_size = 1024*1024*10
# lib functions
def sha256(content):
"""Finds the sha256 hash of the content."""
content = content.encode('utf-8')
return hashlib.sha256(content).hexdigest()
def build_seed(height):
"""Deterministically build a seed."""
seed = sha256(address)
for i in range(height):
seed = sha256(seed)
return seed
# code stuff
def build(x):
for i in range(x):
seed = build_seed(i)
print(seed)
# get hash
gen_file = RandomIO.RandomIO(seed).read(byte_size)
file_hash = hashlib.sha256(gen_file).hexdigest()
# save it
RandomIO.RandomIO(seed).genfile(byte_size, 'tmp/'+file_hash)
print(file_hash)
print("")
# run it
build(5)
|
<commit_before><commit_msg>Add Sample Data Generating Client<commit_after>
|
import hashlib
import RandomIO
# config vars
address = "1CutsncbjcCtZKeRfvQ7bnYFVj28zeU6fo"
byte_size = 1024*1024*10
# lib functions
def sha256(content):
"""Finds the sha256 hash of the content."""
content = content.encode('utf-8')
return hashlib.sha256(content).hexdigest()
def build_seed(height):
"""Deterministically build a seed."""
seed = sha256(address)
for i in range(height):
seed = sha256(seed)
return seed
# code stuff
def build(x):
for i in range(x):
seed = build_seed(i)
print(seed)
# get hash
gen_file = RandomIO.RandomIO(seed).read(byte_size)
file_hash = hashlib.sha256(gen_file).hexdigest()
# save it
RandomIO.RandomIO(seed).genfile(byte_size, 'tmp/'+file_hash)
print(file_hash)
print("")
# run it
build(5)
|
Add Sample Data Generating Clientimport hashlib
import RandomIO
# config vars
address = "1CutsncbjcCtZKeRfvQ7bnYFVj28zeU6fo"
byte_size = 1024*1024*10
# lib functions
def sha256(content):
"""Finds the sha256 hash of the content."""
content = content.encode('utf-8')
return hashlib.sha256(content).hexdigest()
def build_seed(height):
"""Deterministically build a seed."""
seed = sha256(address)
for i in range(height):
seed = sha256(seed)
return seed
# code stuff
def build(x):
for i in range(x):
seed = build_seed(i)
print(seed)
# get hash
gen_file = RandomIO.RandomIO(seed).read(byte_size)
file_hash = hashlib.sha256(gen_file).hexdigest()
# save it
RandomIO.RandomIO(seed).genfile(byte_size, 'tmp/'+file_hash)
print(file_hash)
print("")
# run it
build(5)
|
<commit_before><commit_msg>Add Sample Data Generating Client<commit_after>import hashlib
import RandomIO
# config vars
address = "1CutsncbjcCtZKeRfvQ7bnYFVj28zeU6fo"
byte_size = 1024*1024*10
# lib functions
def sha256(content):
"""Finds the sha256 hash of the content."""
content = content.encode('utf-8')
return hashlib.sha256(content).hexdigest()
def build_seed(height):
"""Deterministically build a seed."""
seed = sha256(address)
for i in range(height):
seed = sha256(seed)
return seed
# code stuff
def build(x):
for i in range(x):
seed = build_seed(i)
print(seed)
# get hash
gen_file = RandomIO.RandomIO(seed).read(byte_size)
file_hash = hashlib.sha256(gen_file).hexdigest()
# save it
RandomIO.RandomIO(seed).genfile(byte_size, 'tmp/'+file_hash)
print(file_hash)
print("")
# run it
build(5)
|
|
d8ae8c08324f428618639ef004388b42c27b3bdb
|
jsk_apc2016_common/scripts/list_objects.py
|
jsk_apc2016_common/scripts/list_objects.py
|
#!/usr/bin/env python
import argparse
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--year', type=int, default=2016)
args = parser.parse_args()
if args.year == 2015:
import jsk_apc2015_common
cls_names = ['background'] + jsk_apc2015_common.get_object_list()
elif args.year == 2016:
import jsk_apc2016_common
data = jsk_apc2016_common.get_object_data()
cls_names = ['background'] + [d['name'] for d in data]
else:
raise ValueError
text = []
for cls_id, cls_name in enumerate(cls_names):
text.append('{:2}: {}'.format(cls_id, cls_name))
print('\n'.join(text))
if __name__ == '__main__':
main()
|
Add script to list object names
|
Add script to list object names
|
Python
|
bsd-3-clause
|
pazeshun/jsk_apc,pazeshun/jsk_apc,pazeshun/jsk_apc,pazeshun/jsk_apc,pazeshun/jsk_apc
|
Add script to list object names
|
#!/usr/bin/env python
import argparse
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--year', type=int, default=2016)
args = parser.parse_args()
if args.year == 2015:
import jsk_apc2015_common
cls_names = ['background'] + jsk_apc2015_common.get_object_list()
elif args.year == 2016:
import jsk_apc2016_common
data = jsk_apc2016_common.get_object_data()
cls_names = ['background'] + [d['name'] for d in data]
else:
raise ValueError
text = []
for cls_id, cls_name in enumerate(cls_names):
text.append('{:2}: {}'.format(cls_id, cls_name))
print('\n'.join(text))
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add script to list object names<commit_after>
|
#!/usr/bin/env python
import argparse
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--year', type=int, default=2016)
args = parser.parse_args()
if args.year == 2015:
import jsk_apc2015_common
cls_names = ['background'] + jsk_apc2015_common.get_object_list()
elif args.year == 2016:
import jsk_apc2016_common
data = jsk_apc2016_common.get_object_data()
cls_names = ['background'] + [d['name'] for d in data]
else:
raise ValueError
text = []
for cls_id, cls_name in enumerate(cls_names):
text.append('{:2}: {}'.format(cls_id, cls_name))
print('\n'.join(text))
if __name__ == '__main__':
main()
|
Add script to list object names#!/usr/bin/env python
import argparse
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--year', type=int, default=2016)
args = parser.parse_args()
if args.year == 2015:
import jsk_apc2015_common
cls_names = ['background'] + jsk_apc2015_common.get_object_list()
elif args.year == 2016:
import jsk_apc2016_common
data = jsk_apc2016_common.get_object_data()
cls_names = ['background'] + [d['name'] for d in data]
else:
raise ValueError
text = []
for cls_id, cls_name in enumerate(cls_names):
text.append('{:2}: {}'.format(cls_id, cls_name))
print('\n'.join(text))
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add script to list object names<commit_after>#!/usr/bin/env python
import argparse
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--year', type=int, default=2016)
args = parser.parse_args()
if args.year == 2015:
import jsk_apc2015_common
cls_names = ['background'] + jsk_apc2015_common.get_object_list()
elif args.year == 2016:
import jsk_apc2016_common
data = jsk_apc2016_common.get_object_data()
cls_names = ['background'] + [d['name'] for d in data]
else:
raise ValueError
text = []
for cls_id, cls_name in enumerate(cls_names):
text.append('{:2}: {}'.format(cls_id, cls_name))
print('\n'.join(text))
if __name__ == '__main__':
main()
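# Hedged usage sketch (not part of the committed script): the same
# id -> name listing consumed programmatically as a dict, reusing the
# get_object_data() call shown above; assumes jsk_apc2016_common is importable.
import jsk_apc2016_common
data = jsk_apc2016_common.get_object_data()
cls_names = ['background'] + [d['name'] for d in data]
id_to_name = dict(enumerate(cls_names))
print(id_to_name[0])  # -> 'background'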
|
|
f12334865233518fe128105834ef5fea59c500b2
|
tests/test_web_listener.py
|
tests/test_web_listener.py
|
import pytest
@pytest.fixture
def test_flask_app():
from gitbot import web_listener
web_listener.app.config['TESTING'] = True
return web_listener.app.test_client()
expected_root_html_strings = [
(True, "<h1>GitHub issues bot index page</h1>"),
(True, "<!DOCTYPE html>"),
(True, "Set up the <code>webhook secret</code> to secret defined in file <code>auth.cfg</code>."),
(False, "teststring that should not be in the readme page"),
]
not_expected_root_html_strings = [
]
@pytest.mark.parametrize('expected', expected_root_html_strings)
def test_root_route(test_flask_app, expected):
data = test_flask_app.get('/').data.decode('utf-8')
print("DATA: {}".format(data))
if expected[0]:
assert expected[1] in data
else:
assert expected[1] not in data
|
Add tests for web homepage
|
Add tests for web homepage
|
Python
|
mit
|
melkamar/gitbot,melkamar/gitbot
|
Add tests for web homepage
|
import pytest
@pytest.fixture
def test_flask_app():
from gitbot import web_listener
web_listener.app.config['TESTING'] = True
return web_listener.app.test_client()
expected_root_html_strings = [
(True, "<h1>GitHub issues bot index page</h1>"),
(True, "<!DOCTYPE html>"),
(True, "Set up the <code>webhook secret</code> to secret defined in file <code>auth.cfg</code>."),
(False, "teststring that should not be in the readme page"),
]
not_expected_root_html_strings = [
]
@pytest.mark.parametrize('expected', expected_root_html_strings)
def test_root_route(test_flask_app, expected):
data = test_flask_app.get('/').data.decode('utf-8')
print("DATA: {}".format(data))
if expected[0]:
assert expected[1] in data
else:
assert expected[1] not in data
|
<commit_before><commit_msg>Add tests for web homepage<commit_after>
|
import pytest
@pytest.fixture
def test_flask_app():
from gitbot import web_listener
web_listener.app.config['TESTING'] = True
return web_listener.app.test_client()
expected_root_html_strings = [
(True, "<h1>GitHub issues bot index page</h1>"),
(True, "<!DOCTYPE html>"),
(True, "Set up the <code>webhook secret</code> to secret defined in file <code>auth.cfg</code>."),
(False, "teststring that should not be in the readme page"),
]
not_expected_root_html_strings = [
]
@pytest.mark.parametrize('expected', expected_root_html_strings)
def test_root_route(test_flask_app, expected):
data = test_flask_app.get('/').data.decode('utf-8')
print("DATA: {}".format(data))
if expected[0]:
assert expected[1] in data
else:
assert expected[1] not in data
|
Add tests for web homepageimport pytest
@pytest.fixture
def test_flask_app():
from gitbot import web_listener
web_listener.app.config['TESTING'] = True
return web_listener.app.test_client()
expected_root_html_strings = [
(True, "<h1>GitHub issues bot index page</h1>"),
(True, "<!DOCTYPE html>"),
(True, "Set up the <code>webhook secret</code> to secret defined in file <code>auth.cfg</code>."),
(False, "teststring that should not be in the readme page"),
]
not_expected_root_html_strings = [
]
@pytest.mark.parametrize('expected', expected_root_html_strings)
def test_root_route(test_flask_app, expected):
data = test_flask_app.get('/').data.decode('utf-8')
print("DATA: {}".format(data))
if expected[0]:
assert expected[1] in data
else:
assert expected[1] not in data
|
<commit_before><commit_msg>Add tests for web homepage<commit_after>import pytest
@pytest.fixture
def test_flask_app():
from gitbot import web_listener
web_listener.app.config['TESTING'] = True
return web_listener.app.test_client()
expected_root_html_strings = [
(True, "<h1>GitHub issues bot index page</h1>"),
(True, "<!DOCTYPE html>"),
(True, "Set up the <code>webhook secret</code> to secret defined in file <code>auth.cfg</code>."),
(False, "teststring that should not be in the readme page"),
]
not_expected_root_html_strings = [
]
@pytest.mark.parametrize('expected', expected_root_html_strings)
def test_root_route(test_flask_app, expected):
data = test_flask_app.get('/').data.decode('utf-8')
print("DATA: {}".format(data))
if expected[0]:
assert expected[1] in data
else:
assert expected[1] not in data
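# Hedged alternative sketch (illustrative, not part of the committed tests):
# unpacking the (flag, snippet) tuples in the parametrize signature turns
# the branching assertion into a single expression.
@pytest.mark.parametrize('should_contain,snippet', expected_root_html_strings)
def test_root_route_unpacked(test_flask_app, should_contain, snippet):
    data = test_flask_app.get('/').data.decode('utf-8')
    assert (snippet in data) == should_contain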
|
|
c22fde851dc4e8f3c9c930e1f0151b677eeadb52
|
tensorflow/python/platform/remote_utils.py
|
tensorflow/python/platform/remote_utils.py
|
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Platform-specific helpers for connecting to remote servers."""
def get_default_communication_protocol():
return 'grpc'
def is_remote_path(_):
return False
def get_appendable_file_encoding():
return ''
def coordination_service_type(_):
return None
|
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Platform-specific helpers for connecting to remote servers."""
def get_default_communication_protocol():
return 'grpc'
def is_remote_path(_):
return False
def get_appendable_file_encoding():
return ''
def coordination_service_type(*args, **kwargs):
del args, kwargs
return None
|
Add `args` and `kwargs` to cloud-tpu version of `coordination_service_type()` function
|
Add `args` and `kwargs` to cloud-tpu version of `coordination_service_type()` function
PiperOrigin-RevId: 470776162
|
Python
|
apache-2.0
|
tensorflow/tensorflow-experimental_link_static_libraries_once,karllessard/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,karllessard/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,karllessard/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow,karllessard/tensorflow,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,yongtang/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow,yongtang/tensorflow,yongtang/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,yongtang/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,Intel-tensorflow/tensorflow,paolodedios/tensorflow,yongtang/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,karllessard/tensorflow,yongtang/tensorflow,paolodedios/tensorflow,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow
|
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Platform-specific helpers for connecting to remote servers."""
def get_default_communication_protocol():
return 'grpc'
def is_remote_path(_):
return False
def get_appendable_file_encoding():
return ''
def coordination_service_type(_):
return None
Add `args` and `kwargs` to cloud-tpu version of `coordination_service_type()` function
PiperOrigin-RevId: 470776162
|
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Platform-specific helpers for connecting to remote servers."""
def get_default_communication_protocol():
return 'grpc'
def is_remote_path(_):
return False
def get_appendable_file_encoding():
return ''
def coordination_service_type(*args, **kwargs):
del args, kwargs
return None
|
<commit_before># Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Platform-specific helpers for connecting to remote servers."""
def get_default_communication_protocol():
return 'grpc'
def is_remote_path(_):
return False
def get_appendable_file_encoding():
return ''
def coordination_service_type(_):
return None
<commit_msg>Add `args` and `kwargs` to cloud-tpu version of `coordination_service_type()` function
PiperOrigin-RevId: 470776162<commit_after>
|
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Platform-specific helpers for connecting to remote servers."""
def get_default_communication_protocol():
return 'grpc'
def is_remote_path(_):
return False
def get_appendable_file_encoding():
return ''
def coordination_service_type(*args, **kwargs):
del args, kwargs
return None
|
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Platform-specific helpers for connecting to remote servers."""
def get_default_communication_protocol():
return 'grpc'
def is_remote_path(_):
return False
def get_appendable_file_encoding():
return ''
def coordination_service_type(_):
return None
Add `args` and `kwargs` to cloud-tpu version of `coordination_service_type()` function
PiperOrigin-RevId: 470776162# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Platform-specific helpers for connecting to remote servers."""
def get_default_communication_protocol():
return 'grpc'
def is_remote_path(_):
return False
def get_appendable_file_encoding():
return ''
def coordination_service_type(*args, **kwargs):
del args, kwargs
return None
|
<commit_before># Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Platform-specific helpers for connecting to remote servers."""
def get_default_communication_protocol():
return 'grpc'
def is_remote_path(_):
return False
def get_appendable_file_encoding():
return ''
def coordination_service_type(_):
return None
<commit_msg>Add `args` and `kwargs` to cloud-tpu version of `coordination_service_type()` function
PiperOrigin-RevId: 470776162<commit_after># Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Platform-specific helpers for connecting to remote servers."""
def get_default_communication_protocol():
return 'grpc'
def is_remote_path(_):
return False
def get_appendable_file_encoding():
return ''
def coordination_service_type(*args, **kwargs):
del args, kwargs
return None
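# Hedged illustration of what the widened signature buys: callers may now
# pass any mix of positional and keyword arguments to the cloud-tpu stub
# without a TypeError (the arguments below are invented for the example).
assert coordination_service_type() is None
assert coordination_service_type('grpc', use_tpu=True) is None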
|
eb084516ceca0645024d41ae2a0ea7e04250b13f
|
bluebottle/time_based/migrations/0044_auto_20201224_1120.py
|
bluebottle/time_based/migrations/0044_auto_20201224_1120.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.17 on 2020-12-24 10:20
from __future__ import unicode_literals
from django.db import migrations
def fix_wallpost_ctype(apps, schema_editor):
ContentType = apps.get_model('contenttypes', 'ContentType')
Assignment = apps.get_model('assignments', 'Assignment')
Event = apps.get_model('events', 'Event')
Activity = apps.get_model('activities', 'Activity')
Wallpost = apps.get_model('wallposts', 'Wallpost')
assigment_ctype = ContentType.objects.get_for_model(Assignment)
event_ctype = ContentType.objects.get_for_model(Event)
posts = Wallpost.objects.filter(content_type__in=[event_ctype, assigment_ctype])
for post in posts:
act = Activity.objects.get(pk=post.object_id)
post.content_type = act.polymorphic_ctype
post.save()
class Migration(migrations.Migration):
dependencies = [
('time_based', '0043_auto_20201217_0743'),
]
operations = [
migrations.RunPython(fix_wallpost_ctype, migrations.RunPython.noop)
]
|
Migrate wallposts to right ctype
|
Migrate wallposts to right ctype
|
Python
|
bsd-3-clause
|
onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle
|
Migrate wallposts to right ctype
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.17 on 2020-12-24 10:20
from __future__ import unicode_literals
from django.db import migrations
def fix_wallpost_ctype(apps, schema_editor):
ContentType = apps.get_model('contenttypes', 'ContentType')
Assignment = apps.get_model('assignments', 'Assignment')
Event = apps.get_model('events', 'Event')
Activity = apps.get_model('activities', 'Activity')
Wallpost = apps.get_model('wallposts', 'Wallpost')
assigment_ctype = ContentType.objects.get_for_model(Assignment)
event_ctype = ContentType.objects.get_for_model(Event)
posts = Wallpost.objects.filter(content_type__in=[event_ctype, assigment_ctype])
for post in posts:
act = Activity.objects.get(pk=post.object_id)
post.content_type = act.polymorphic_ctype
post.save()
class Migration(migrations.Migration):
dependencies = [
('time_based', '0043_auto_20201217_0743'),
]
operations = [
migrations.RunPython(fix_wallpost_ctype, migrations.RunPython.noop)
]
|
<commit_before><commit_msg>Migrate wallposts to right ctype<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.17 on 2020-12-24 10:20
from __future__ import unicode_literals
from django.db import migrations
def fix_wallpost_ctype(apps, schema_editor):
ContentType = apps.get_model('contenttypes', 'ContentType')
Assignment = apps.get_model('assignments', 'Assignment')
Event = apps.get_model('events', 'Event')
Activity = apps.get_model('activities', 'Activity')
Wallpost = apps.get_model('wallposts', 'Wallpost')
assigment_ctype = ContentType.objects.get_for_model(Assignment)
event_ctype = ContentType.objects.get_for_model(Event)
posts = Wallpost.objects.filter(content_type__in=[event_ctype, assigment_ctype])
for post in posts:
act = Activity.objects.get(pk=post.object_id)
post.content_type = act.polymorphic_ctype
post.save()
class Migration(migrations.Migration):
dependencies = [
('time_based', '0043_auto_20201217_0743'),
]
operations = [
migrations.RunPython(fix_wallpost_ctype, migrations.RunPython.noop)
]
|
Migrate wallposts to right ctype# -*- coding: utf-8 -*-
# Generated by Django 1.11.17 on 2020-12-24 10:20
from __future__ import unicode_literals
from django.db import migrations
def fix_wallpost_ctype(apps, schema_editor):
ContentType = apps.get_model('contenttypes', 'ContentType')
Assignment = apps.get_model('assignments', 'Assignment')
Event = apps.get_model('events', 'Event')
Activity = apps.get_model('activities', 'Activity')
Wallpost = apps.get_model('wallposts', 'Wallpost')
assigment_ctype = ContentType.objects.get_for_model(Assignment)
event_ctype = ContentType.objects.get_for_model(Event)
posts = Wallpost.objects.filter(content_type__in=[event_ctype, assigment_ctype])
for post in posts:
act = Activity.objects.get(pk=post.object_id)
post.content_type = act.polymorphic_ctype
post.save()
class Migration(migrations.Migration):
dependencies = [
('time_based', '0043_auto_20201217_0743'),
]
operations = [
migrations.RunPython(fix_wallpost_ctype, migrations.RunPython.noop)
]
|
<commit_before><commit_msg>Migrate wallposts to right ctype<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.11.17 on 2020-12-24 10:20
from __future__ import unicode_literals
from django.db import migrations
def fix_wallpost_ctype(apps, schema_editor):
ContentType = apps.get_model('contenttypes', 'ContentType')
Assignment = apps.get_model('assignments', 'Assignment')
Event = apps.get_model('events', 'Event')
Activity = apps.get_model('activities', 'Activity')
Wallpost = apps.get_model('wallposts', 'Wallpost')
assigment_ctype = ContentType.objects.get_for_model(Assignment)
event_ctype = ContentType.objects.get_for_model(Event)
posts = Wallpost.objects.filter(content_type__in=[event_ctype, assigment_ctype])
for post in posts:
act = Activity.objects.get(pk=post.object_id)
post.content_type = act.polymorphic_ctype
post.save()
class Migration(migrations.Migration):
dependencies = [
('time_based', '0043_auto_20201217_0743'),
]
operations = [
migrations.RunPython(fix_wallpost_ctype, migrations.RunPython.noop)
]
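# Hedged variant of the loop inside fix_wallpost_ctype above (a sketch, not
# part of the committed migration): tolerate orphaned wallposts instead of
# letting Activity.DoesNotExist abort the migration mid-way.
for post in posts:
    act = Activity.objects.filter(pk=post.object_id).first()
    if act is None:
        continue  # orphaned wallpost: leave its content_type unchanged
    post.content_type = act.polymorphic_ctype
    post.save()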
|
|
47bff9240beaa1214f945067310fbed32ee16b7c
|
scripts/revert-database.py
|
scripts/revert-database.py
|
#!/usr/bin/python
# This is a small helper script to revert a CATMAID
# database to a recent backup.
# For example, you might call this as:
# revert-database.py /mnt/catmaid-backups/2011-12-10T19-14-47.bz2
# You will need to create a .pgpass file so that your password can be
# found.
# You may need to install psycopg2, e.g. with:
# sudo apt-get install python-psycopg2
# Requires the file .catmaid-db to be present in your
# home directory, with the following format:
#
# host: localhost
# database: catmaid
# username: catmaid_user
# password: password_of_your_catmaid_user
import sys
import os
from common import db_database, db_username, db_password
from subprocess import Popen, check_call, PIPE
import getpass
from psycopg2 import IntegrityError
from datetime import datetime
if len(sys.argv) != 2:
print >> sys.stderr, "Usage: %s <COMPRESSED-BACKUP>" % (sys.argv[0],)
sys.exit(1)
filename = sys.argv[1]
# You must specify your password in ~/.pgpass, as described here:
# http://www.postgresql.org/docs/current/static/libpq-pgpass.html
cat_command = ['bzcat', filename]
restore_command = ['psql',
'-U',
db_username,
'--no-password',
db_database]
p1 = Popen(cat_command, stdout=PIPE)
p2 = Popen(restore_command, stdin=p1.stdout)
p1.stdout.close()
p1.wait()
p2.wait()
|
Add a script to revert the database from an earlier backup
|
Add a script to revert the database from an earlier backup
This script assumes that the database dump was done with
pg_dump --clean. If it wasn't done with the --clean option
you would need to do:
dropdb catmaid
sudo -u postgres psql < docs/createuser.sql
... before running revert-database.py backup.bz2
|
Python
|
agpl-3.0
|
fzadow/CATMAID,fzadow/CATMAID,htem/CATMAID,htem/CATMAID,fzadow/CATMAID,fzadow/CATMAID,htem/CATMAID,htem/CATMAID
|
Add a script to revert the database from an earlier backup
This script assumes that the database dump was done with
pg_dump --clean. If it wasn't done with the --clean option
you would need to do:
dropdb catmaid
sudo -u postgres psql < docs/createuser.sql
... before running revert-database.py backup.bz2
|
#!/usr/bin/python
# This is a small helper script to revert a CATMAID
# database to a recent backup.
# For example, you might call this as:
# revert-database.py /mnt/catmaid-backups/2011-12-10T19-14-47.bz2
# You will need to create a .pgpass file so that your password can be
# found.
# You may need to install psycopg2, e.g. with:
# sudo apt-get install python-psycopg2
# Requires the file .catmaid-db to be present in your
# home directory, with the following format:
#
# host: localhost
# database: catmaid
# username: catmaid_user
# password: password_of_your_catmaid_user
import sys
import os
from common import db_database, db_username, db_password
from subprocess import Popen, check_call, PIPE
import getpass
from psycopg2 import IntegrityError
from datetime import datetime
if len(sys.argv) != 2:
print >> sys.stderr, "Usage: %s <COMPRESSED-BACKUP>" % (sys.argv[0],)
sys.exit(1)
filename = sys.argv[1]
# You must specify your password in ~/.pgpass, as described here:
# http://www.postgresql.org/docs/current/static/libpq-pgpass.html
cat_command = ['bzcat', filename]
restore_command = ['psql',
'-U',
db_username,
'--no-password',
db_database]
p1 = Popen(cat_command, stdout=PIPE)
p2 = Popen(restore_command, stdin=p1.stdout)
p1.stdout.close()
p1.wait()
p2.wait()
|
<commit_before><commit_msg>Add a script to revert the database from an earlier backup
This script assumes that the database dump was done with
pg_dump --clean. If it wasn't done with the --clean option
you would need to do:
dropdb catmaid
sudo -u postgres psql < docs/createuser.sql
... before running revert-database.py backup.bz2<commit_after>
|
#!/usr/bin/python
# This is a small helper script to revert a CATMAID
# database to a recent backup.
# For example, you might call this as:
# revert-database.py /mnt/catmaid-backups/2011-12-10T19-14-47.bz2
# You will need to create a .pgpass file so that your password can be
# found.
# You may need to install psycopg2, e.g. with:
# sudo apt-get install python-psycopg2
# Requires the file .catmaid-db to be present in your
# home directory, with the following format:
#
# host: localhost
# database: catmaid
# username: catmaid_user
# password: password_of_your_catmaid_user
import sys
import os
from common import db_database, db_username, db_password
from subprocess import Popen, check_call, PIPE
import getpass
from psycopg2 import IntegrityError
from datetime import datetime
if len(sys.argv) != 2:
print >> sys.stderr, "Usage: %s <COMPRESSED-BACKUP>" % (sys.argv[0],)
sys.exit(1)
filename = sys.argv[1]
# You must specify your password in ~/.pgpass, as described here:
# http://www.postgresql.org/docs/current/static/libpq-pgpass.html
cat_command = ['bzcat', filename]
restore_command = ['psql',
'-U',
db_username,
'--no-password',
db_database]
p1 = Popen(cat_command, stdout=PIPE)
p2 = Popen(restore_command, stdin=p1.stdout)
p1.stdout.close()
p1.wait()
p2.wait()
|
Add a script to revert the database from an earlier backup
This script assumes that the database dump was done with
pg_dump --clean. If it wasn't done with the --clean option
you would need to do:
dropdb catmaid
sudo -u postgres psql < docs/createuser.sql
... before running revert-database.py backup.bz2#!/usr/bin/python
# This is a small helper script to revert a CATMAID
# database to a recent backup.
# For example, you might call this as:
# revert-database.py /mnt/catmaid-backups/2011-12-10T19-14-47.bz2
# You will need to create a .pgpass file so that your password can be
# found.
# You may need to install psycopg2, e.g. with:
# sudo apt-get install python-psycopg2
# Requires the file .catmaid-db to be present in your
# home directory, with the following format:
#
# host: localhost
# database: catmaid
# username: catmaid_user
# password: password_of_your_catmaid_user
import sys
import os
from common import db_database, db_username, db_password
from subprocess import Popen, check_call, PIPE
import getpass
from psycopg2 import IntegrityError
from datetime import datetime
if len(sys.argv) != 2:
print >> sys.stderr, "Usage: %s <COMPRESSED-BACKUP>" % (sys.argv[0],)
sys.exit(1)
filename = sys.argv[1]
# You must specify your password in ~/.pgpass, as described here:
# http://www.postgresql.org/docs/current/static/libpq-pgpass.html
cat_command = ['bzcat', filename]
restore_command = ['psql',
'-U',
db_username,
'--no-password',
db_database]
p1 = Popen(cat_command, stdout=PIPE)
p2 = Popen(restore_command, stdin=p1.stdout)
p1.stdout.close()
p1.wait()
p2.wait()
|
<commit_before><commit_msg>Add a script to revert the database from an earlier backup
This script assumes that the database dump was done with
pg_dump --clean. If it wasn't done with the --clean option
you would need to do:
dropdb catmaid
sudo -u postgres psql < docs/createuser.sql
... before running revert-database.py backup.bz2<commit_after>#!/usr/bin/python
# This is a small helper script to revert a CATMAID
# database to a recent backup.
# For example, you might call this as:
# revert-database.py /mnt/catmaid-backups/2011-12-10T19-14-47.bz2
# You will need to create a .pgpass file so that your password can be
# found.
# You may need to install psycopg2, e.g. with:
# sudo apt-get install python-psycopg2
# Requires the file .catmaid-db to be present in your
# home directory, with the following format:
#
# host: localhost
# database: catmaid
# username: catmaid_user
# password: password_of_your_catmaid_user
import sys
import os
from common import db_database, db_username, db_password
from subprocess import Popen, check_call, PIPE
import getpass
from psycopg2 import IntegrityError
from datetime import datetime
if len(sys.argv) != 2:
print >> sys.stderr, "Usage: %s <COMPRESSED-BACKUP>" % (sys.argv[0],)
sys.exit(1)
filename = sys.argv[1]
# You must specify your password in ~/.pgpass, as described here:
# http://www.postgresql.org/docs/current/static/libpq-pgpass.html
cat_command = ['bzcat', filename]
restore_command = ['psql',
'-U',
db_username,
'--no-password',
db_database]
p1 = Popen(cat_command, stdout=PIPE)
p2 = Popen(restore_command, stdin=p1.stdout)
p1.stdout.close()
p1.wait()
p2.wait()
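# Hedged addition (not in the committed script): fail loudly if the psql
# restore exits non-zero instead of silently reporting success.
if p2.returncode != 0:
    print >> sys.stderr, "Restore failed: psql exited with code %d" % p2.returncode
    sys.exit(p2.returncode)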
|
|
605323fa8fa4dd8f447e7ba1b1a24cb791efe57e
|
thecut/authorship/views.py
|
thecut/authorship/views.py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
class AuthorshipViewMixin(object):
"""Add the request's ``User`` instance to the form kwargs."""
def get_form_kwargs(self, *args, **kwargs):
form_kwargs = super(AuthorshipViewMixin, self).get_form_kwargs(
*args, **kwargs)
form_kwargs.update({'user': self.request.user})
return form_kwargs
|
Add a view mixin to pass the ``User`` instance to the view's form.
|
Add a view mixin to pass the ``User`` instance to the view's form.
|
Python
|
apache-2.0
|
thecut/thecut-authorship
|
Add a view mixin to pass the ``User`` instance to the view's form.
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
class AuthorshipViewMixin(object):
"""Add the request's ``User`` instance to the form kwargs."""
def get_form_kwargs(self, *args, **kwargs):
form_kwargs = super(AuthorshipViewMixin, self).get_form_kwargs(
*args, **kwargs)
form_kwargs.update({'user': self.request.user})
return form_kwargs
|
<commit_before><commit_msg>Add a view mixin to pass the ``User`` instance to the view's form.<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
class AuthorshipViewMixin(object):
"""Add the request's ``User`` instance to the form kwargs."""
def get_form_kwargs(self, *args, **kwargs):
form_kwargs = super(AuthorshipViewMixin, self).get_form_kwargs(
*args, **kwargs)
form_kwargs.update({'user': self.request.user})
return form_kwargs
|
Add a view mixin to pass the ``User`` instance to the view's form.# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
class AuthorshipViewMixin(object):
"""Add the request's ``User`` instance to the form kwargs."""
def get_form_kwargs(self, *args, **kwargs):
form_kwargs = super(AuthorshipViewMixin, self).get_form_kwargs(
*args, **kwargs)
form_kwargs.update({'user': self.request.user})
return form_kwargs
|
<commit_before><commit_msg>Add a view mixin to pass the ``User`` instance to the view's form.<commit_after># -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
class AuthorshipViewMixin(object):
"""Add the request's ``User`` instance to the form kwargs."""
def get_form_kwargs(self, *args, **kwargs):
form_kwargs = super(AuthorshipViewMixin, self).get_form_kwargs(
*args, **kwargs)
form_kwargs.update({'user': self.request.user})
return form_kwargs
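# Hedged sketch of the form side of this contract (AuthorshipForm is a
# hypothetical name, not part of this commit): the form must pop the
# 'user' kwarg the mixin injects before calling the parent constructor.
from django import forms

class AuthorshipForm(forms.Form):
    def __init__(self, *args, **kwargs):
        self.user = kwargs.pop('user')  # injected by AuthorshipViewMixin
        super(AuthorshipForm, self).__init__(*args, **kwargs)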
|
|
f73709f8a542a768c5a96e9f8ce7504a4c8f839f
|
coffin/template/response.py
|
coffin/template/response.py
|
from coffin.template import loader
from django.template import response as django_response
class SimpleTemplateResponse(django_response.SimpleTemplateResponse):
def resolve_template(self, template):
if isinstance(template, (list, tuple)):
return loader.select_template(template)
elif isinstance(template, basestring):
return loader.get_template(template)
else:
return template
class TemplateResponse(django_response.TemplateResponse,
SimpleTemplateResponse):
pass
|
Implement a wrapper for the SimpleTemplateResponse and the TemplateResponse which use jinja2
|
Implement a wrapper for the SimpleTemplateResponse and the TemplateResponse which use jinja2
|
Python
|
bsd-3-clause
|
rossowl/coffin,rossowl/coffin,akx/coffin
|
Implement a wrapper for the SimpleTemplateResponse and the TemplateResponse which use jinja2
|
from coffin.template import loader
from django.template import response as django_response
class SimpleTemplateResponse(django_response.SimpleTemplateResponse):
def resolve_template(self, template):
if isinstance(template, (list, tuple)):
return loader.select_template(template)
elif isinstance(template, basestring):
return loader.get_template(template)
else:
return template
class TemplateResponse(django_response.TemplateResponse,
SimpleTemplateResponse):
pass
|
<commit_before><commit_msg>Implement a wrapper for the SimpleTemplateResponse and the TemplateResponse which use jinja2<commit_after>
|
from coffin.template import loader
from django.template import response as django_response
class SimpleTemplateResponse(django_response.SimpleTemplateResponse):
def resolve_template(self, template):
if isinstance(template, (list, tuple)):
return loader.select_template(template)
elif isinstance(template, basestring):
return loader.get_template(template)
else:
return template
class TemplateResponse(django_response.TemplateResponse,
SimpleTemplateResponse):
pass
|
Implement a wrapper for the SimpleTemplateResponse and the TemplateResponse which use jinja2from coffin.template import loader
from django.template import response as django_response
class SimpleTemplateResponse(django_response.SimpleTemplateResponse):
def resolve_template(self, template):
if isinstance(template, (list, tuple)):
return loader.select_template(template)
elif isinstance(template, basestring):
return loader.get_template(template)
else:
return template
class TemplateResponse(django_response.TemplateResponse,
SimpleTemplateResponse):
pass
|
<commit_before><commit_msg>Implement a wrapper for the SimpleTemplateResponse and the TemplateResponse which use jinja2<commit_after>from coffin.template import loader
from django.template import response as django_response
class SimpleTemplateResponse(django_response.SimpleTemplateResponse):
def resolve_template(self, template):
if isinstance(template, (list, tuple)):
return loader.select_template(template)
elif isinstance(template, basestring):
return loader.get_template(template)
else:
return template
class TemplateResponse(django_response.TemplateResponse,
SimpleTemplateResponse):
pass
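# Hedged usage sketch (view and template names are illustrative): returning
# the Jinja2-backed TemplateResponse from a plain Django view; templates are
# resolved through coffin's loader when the response is rendered.
def detail(request):
    return TemplateResponse(request, 'detail.html', {'object': None})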
|
|
05952797e4be473bf68ba5f2f6710c35379b59f5
|
scripts/stream_iss_data.py
|
scripts/stream_iss_data.py
|
#!/usr/bin/python
import socket
import logging
import json
import requests
import time
import datetime
logger = logging.getLogger(__name__)
class Socket(socket.socket):
def __init__(self, x=socket.AF_INET, y=socket.SOCK_DGRAM, *args, **kwargs):
super(Socket, self).__init__(x, y, *args, **kwargs)
if __name__ == '__main__':
logging.basicConfig(level=logging.INFO)
sock = Socket() #socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
while True:
response = requests.get('http://api.open-notify.org/iss-now.json')
print response.content
sock.sendto(response.content, ('127.0.0.1', 9999))
time.sleep(5)
|
Add script to populate the database periodically with geocoordinates of the International Space Station
|
Add script to populate the database periodically with geocoordinates of the International Space Station
|
Python
|
apache-2.0
|
opentrv/iotlaunchpad,opentrv/iotlaunchpad
|
Add script to populate the database periodically with geocoordinates of the International Space Station
|
#!/usr/bin/python
import socket
import logging
import json
import requests
import time
import datetime
logger = logging.getLogger(__name__)
class Socket(socket.socket):
def __init__(self, x=socket.AF_INET, y=socket.SOCK_DGRAM, *args, **kwargs):
super(Socket, self).__init__(x, y, *args, **kwargs)
if __name__ == '__main__':
logging.basicConfig(level=logging.INFO)
sock = Socket() #socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
while True:
response = requests.get('http://api.open-notify.org/iss-now.json')
print response.content
sock.sendto(response.content, ('127.0.0.1', 9999))
time.sleep(5)
|
<commit_before><commit_msg>Add script to populate the database periodically with geocoordinates of the International Space Station<commit_after>
|
#!/usr/bin/python
import socket
import logging
import json
import requests
import time
import datetime
logger = logging.getLogger(__name__)
class Socket(socket.socket):
def __init__(self, x=socket.AF_INET, y=socket.SOCK_DGRAM, *args, **kwargs):
super(Socket, self).__init__(x, y, *args, **kwargs)
if __name__ == '__main__':
logging.basicConfig(level=logging.INFO)
sock = Socket() #socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
while True:
response = requests.get('http://api.open-notify.org/iss-now.json')
print response.content
sock.sendto(response.content, ('127.0.0.1', 9999))
time.sleep(5)
|
Add script to populate the database periodically with geocoordinates of the International Space Station#!/usr/bin/python
import socket
import logging
import json
import requests
import time
import datetime
logger = logging.getLogger(__name__)
class Socket(socket.socket):
def __init__(self, x=socket.AF_INET, y=socket.SOCK_DGRAM, *args, **kwargs):
super(Socket, self).__init__(x, y, *args, **kwargs)
if __name__ == '__main__':
logging.basicConfig(level=logging.INFO)
sock = Socket() #socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
while True:
response = requests.get('http://api.open-notify.org/iss-now.json')
print response.content
sock.sendto(response.content, ('127.0.0.1', 9999))
time.sleep(5)
|
<commit_before><commit_msg>Add script to populate the database periodically with geocoordinates of the International Space Station<commit_after>#!/usr/bin/python
import socket
import logging
import json
import requests
import time
import datetime
logger = logging.getLogger(__name__)
class Socket(socket.socket):
def __init__(self, x=socket.AF_INET, y=socket.SOCK_DGRAM, *args, **kwargs):
super(Socket, self).__init__(x, y, *args, **kwargs)
if __name__ == '__main__':
logging.basicConfig(level=logging.INFO)
sock = Socket() #socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
while True:
response = requests.get('http://api.open-notify.org/iss-now.json')
print response.content
sock.sendto(response.content, ('127.0.0.1', 9999))
time.sleep(5)
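# Hedged sketch of the receiving end (not part of the committed script, and
# meant to run in a separate process): a UDP listener bound to the same
# 127.0.0.1:9999 endpoint the sender targets; the 4096-byte buffer is arbitrary.
import socket
recv_sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
recv_sock.bind(('127.0.0.1', 9999))
payload, addr = recv_sock.recvfrom(4096)
print(payload)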
|
|
5f3be583d945d04ef9f259ee77d21933756209bd
|
components/table_fetcher.py
|
components/table_fetcher.py
|
"""Fetches data from GitHub API, store and return the data in a SgTable."""
import table
import inspect
class SgTableFetcher:
"""Fetches data from GitHub API, store and return the data in a SgTable."""
def __init__(self, github):
self._github = github
def _Parse(self, label):
tmp = label.split(".")
if len(tmp) == 1: # no dots
return label, None
else:
return tmp[0], tmp[1]
def _GetKeys(self, cls):
return [key for key, val in inspect.getmembers(cls, lambda m: not inspect.ismethod(m)) if not key.startswith("_")]
def _GetVals(self, cls):
return [val for key, val in inspect.getmembers(cls, lambda m: not inspect.ismethod(m)) if not key.startswith("_")]
def Fetch(self, label):
ret = table.SgTable()
org_name, sub_name = self._Parse(label)
org = self._github.get_organization(org_name)
if sub_name == None: # eg. "google"
ret.SetFields(self._GetKeys(org))
ret.Append(self._GetVals(org))
elif sub_name == "repos":
repos = org.get_repos()
for repo in repos:
if not ret.GetFields():
ret.SetFields(self._GetKeys(repo))
ret.Append(self._GetVals(repo))
elif sub_name == "issues":
repos = org.get_repos()
for repo in repos:
issues = repo.get_issues(state="all")
for issue in issues:
if not ret.GetFields():
ret.SetFields(self._GetKeys(repo))
ret.Append(self._GetVals(issue))
return ret
|
Add SgTableFetcher - automatically infers keys/fields and values from object
|
Add SgTableFetcher - automatically infers keys/fields and values from object
|
Python
|
mit
|
lnishan/SQLGitHub
|
Add SgTableFetcher - automatically infers keys/fields and values from object
|
"""Fetches data from GitHub API, store and return the data in a SgTable."""
import table
import inspect
class SgTableFetcher:
"""Fetches data from GitHub API, store and return the data in a SgTable."""
def __init__(self, github):
self._github = github
def _Parse(self, label):
tmp = label.split(".")
if len(tmp) == 1: # no dots
return label, None
else:
return tmp[0], tmp[1]
def _GetKeys(self, cls):
return [key for key, val in inspect.getmembers(cls, lambda m: not inspect.ismethod(m)) if not key.startswith("_")]
def _GetVals(self, cls):
return [val for key, val in inspect.getmembers(cls, lambda m: not inspect.ismethod(m)) if not key.startswith("_")]
def Fetch(self, label):
ret = table.SgTable()
org_name, sub_name = self._Parse(label)
org = self._github.get_organization(org_name)
if sub_name == None: # eg. "google"
ret.SetFields(self._GetKeys(org))
ret.Append(self._GetVals(org))
elif sub_name == "repos":
repos = org.get_repos()
for repo in repos:
if not ret.GetFields():
ret.SetFields(self._GetKeys(repo))
ret.Append(self._GetVals(repo))
elif sub_name == "issues":
repos = org.get_repos()
for repo in repos:
issues = repo.get_issues(state="all")
for issue in issues:
if not ret.GetFields():
ret.SetFields(self._GetKeys(repo))
ret.Append(self._GetVals(issue))
return ret
|
<commit_before><commit_msg>Add SgTableFetcher - automatically infers keys/fields and values from object<commit_after>
|
"""Fetches data from GitHub API, store and return the data in a SgTable."""
import table
import inspect
class SgTableFetcher:
"""Fetches data from GitHub API, store and return the data in a SgTable."""
def __init__(self, github):
self._github = github
def _Parse(self, label):
tmp = label.split(".")
if len(tmp) == 1: # no dots
return label, None
else:
return tmp[0], tmp[1]
def _GetKeys(self, cls):
return [key for key, val in inspect.getmembers(cls, lambda m: not inspect.ismethod(m)) if not key.startswith("_")]
def _GetVals(self, cls):
return [val for key, val in inspect.getmembers(cls, lambda m: not inspect.ismethod(m)) if not key.startswith("_")]
def Fetch(self, label):
ret = table.SgTable()
org_name, sub_name = self._Parse(label)
org = self._github.get_organization(org_name)
if sub_name == None: # eg. "google"
ret.SetFields(self._GetKeys(org))
ret.Append(self._GetVals(org))
elif sub_name == "repos":
repos = org.get_repos()
for repo in repos:
if not ret.GetFields():
ret.SetFields(self._GetKeys(repo))
ret.Append(self._GetVals(repo))
elif sub_name == "issues":
repos = org.get_repos()
for repo in repos:
issues = repo.get_issues(state="all")
for issue in issues:
if not ret.GetFields():
ret.SetFields(self._GetKeys(repo))
ret.Append(self._GetVals(issue))
return ret
|
Add SgTableFetcher - automatically infers keys/fields and values from object"""Fetches data from GitHub API, store and return the data in a SgTable."""
import table
import inspect
class SgTableFetcher:
"""Fetches data from GitHub API, store and return the data in a SgTable."""
def __init__(self, github):
self._github = github
def _Parse(self, label):
tmp = label.split(".")
if len(tmp) == 1: # no dots
return label, None
else:
return tmp[0], tmp[1]
def _GetKeys(self, cls):
return [key for key, val in inspect.getmembers(cls, lambda m: not inspect.ismethod(m)) if not key.startswith("_")]
def _GetVals(self, cls):
return [val for key, val in inspect.getmembers(cls, lambda m: not inspect.ismethod(m)) if not key.startswith("_")]
def Fetch(self, label):
ret = table.SgTable()
org_name, sub_name = self._Parse(label)
org = self._github.get_organization(org_name)
if sub_name == None: # eg. "google"
ret.SetFields(self._GetKeys(org))
ret.Append(self._GetVals(org))
elif sub_name == "repos":
repos = org.get_repos()
for repo in repos:
if not ret.GetFields():
ret.SetFields(self._GetKeys(repo))
ret.Append(self._GetVals(repo))
elif sub_name == "issues":
repos = org.get_repos()
for repo in repos:
issues = repo.get_issues(state="all")
for issue in issues:
if not ret.GetFields():
ret.SetFields(self._GetKeys(repo))
ret.Append(self._GetVals(issue))
return ret
|
<commit_before><commit_msg>Add SgTableFetcher - automatically infers keys/fields and values from object<commit_after>"""Fetches data from GitHub API, store and return the data in a SgTable."""
import table
import inspect
class SgTableFetcher:
"""Fetches data from GitHub API, store and return the data in a SgTable."""
def __init__(self, github):
self._github = github
def _Parse(self, label):
tmp = label.split(".")
if len(tmp) == 1: # no dots
return label, None
else:
return tmp[0], tmp[1]
def _GetKeys(self, cls):
return [key for key, val in inspect.getmembers(cls, lambda m: not inspect.ismethod(m)) if not key.startswith("_")]
def _GetVals(self, cls):
return [val for key, val in inspect.getmembers(cls, lambda m: not inspect.ismethod(m)) if not key.startswith("_")]
def Fetch(self, label):
ret = table.SgTable()
org_name, sub_name = self._Parse(label)
org = self._github.get_organization(org_name)
if sub_name == None: # eg. "google"
ret.SetFields(self._GetKeys(org))
ret.Append(self._GetVals(org))
elif sub_name == "repos":
repos = org.get_repos()
for repo in repos:
if not ret.GetFields():
ret.SetFields(self._GetKeys(repo))
ret.Append(self._GetVals(repo))
elif sub_name == "issues":
repos = org.get_repos()
for repo in repos:
issues = repo.get_issues(state="all")
for issue in issues:
if not ret.GetFields():
ret.SetFields(self._GetKeys(repo))
ret.Append(self._GetVals(issue))
return ret
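# Hedged usage sketch, assuming PyGithub is installed and 'table' is the
# local SgTable module imported above; the token string is a placeholder.
from github import Github

fetcher = SgTableFetcher(Github("YOUR_ACCESS_TOKEN"))
org_table = fetcher.Fetch("google")          # the organization itself
repos_table = fetcher.Fetch("google.repos")  # one row per repository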
|
|
39848dcbaae0648e42108c720eeb008d23bf51e4
|
import/bde-metadata-extractor.py
|
import/bde-metadata-extractor.py
|
from __future__ import print_function
import os
import sys
import yaml
# Scan the local directory (or one provided as the first command line argument)
# for directories that contain 'package' directories. Extract BDE metadata from
# these and print a YAML document.
def readDeps(filename):
f = open(filename, "rt")
deps = []
while True:
l = f.readline()
if l == "": return deps
if len(l)>0 and l[0] != '#':
deps.append(l.strip())
def processPackage(packageName, directory):
metaDataDirectory = os.path.join(directory, "package")
# Find the packages we depend on
    deps = []
    if os.path.exists(os.path.join(metaDataDirectory, packageName + ".dep")):
        deps = readDeps(os.path.join(metaDataDirectory, packageName + ".dep"))
    package = { '@id': "id:"+packageName,
                '@type': "sw:Package",
                'name': packageName }
    # Record any dependencies so they appear in the YAML output
    if len(deps)>0: package['requires'] = map(lambda x: "id:"+x, deps)
return package
def findSourceFiles(packagename, directory):
sourceFiles = []
# Scan for files. By default, our build tool only considers .c, .cpp and .f files
# in the top level directory, so there's no need to search subdirectories.
print("Scanning %s for source files"%directory, file=sys.stderr)
files = os.listdir(directory)
for f in files:
(root,ext) = os.path.splitext(f)
if ext in [ ".f", ".c", ".cpp" ]:
print("Source file %s found"%f, file=sys.stderr)
fileYaml = { '@id': "id:"+f,
'@type': "sw:SourceFile",
'name': f }
sourceFiles.append(fileYaml)
print("Returning %d entries in sourceFiles"%len(sourceFiles), file=sys.stderr)
return sourceFiles
def main():
if len(sys.argv) > 1:
scanDirectory = sys.argv[1]
else:
scanDirectory = "."
dirs = os.listdir(scanDirectory)
nodes = []
for d in dirs:
if os.path.exists(os.path.join(scanDirectory, d, "package")):
package = processPackage(d, os.path.join(scanDirectory, d))
sourceFiles = findSourceFiles(d, os.path.join(scanDirectory, d))
if len(sourceFiles)>0: package['contains'] = map(lambda x: "id:"+x, sourceFiles)
nodes.append(package)
nodes.extend(sourceFiles)
if len(nodes) == 0:
print("Nothing found!", file=sys.stderr)
else:
print(yaml.dump({ "@context": ["https://raw.githubusercontent.com/ssssam/software-integration-ontology/master/context.jsonld"],
"@graph": nodes }))
if __name__=="__main__":
main()
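# Hedged portability note (not part of the commit): under Python 3, map()
# returns a lazy iterator that yaml.dump serializes as an opaque object, so
# inside processPackage the 'requires' assignment would need a list() wrapper
# to produce identical output on both interpreters:
#     package['requires'] = list(map(lambda x: "id:" + x, deps))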
|
Add prototype BDE Metadata scanner in import/extract-yaml.pl
|
Add prototype BDE Metadata scanner in import/extract-yaml.pl
|
Python
|
apache-2.0
|
CodethinkLabs/software-dependency-visualizer,CodethinkLabs/software-dependency-visualizer,CodethinkLabs/software-dependency-visualizer,CodethinkLabs/software-dependency-visualizer
|
Add prototype BDE Metadata scanner in import/extract-yaml.pl
|
from __future__ import print_function
import os
import sys
import yaml
# Scan the local directory (or one provided as the first command line argument)
# for directories that contain 'package' directories. Extract BDE metadata from
# these and print a YAML document.
def readDeps(filename):
f = open(filename, "rt")
deps = []
while True:
l = f.readline()
if l == "": return deps
if len(l)>0 and l[0] != '#':
deps.append(l.strip())
def processPackage(packageName, directory):
metaDataDirectory = os.path.join(directory, "package")
# Find the packages we depend on
    deps = []
    if os.path.exists(os.path.join(metaDataDirectory, packageName + ".dep")):
        deps = readDeps(os.path.join(metaDataDirectory, packageName + ".dep"))
    package = { '@id': "id:"+packageName,
                '@type': "sw:Package",
                'name': packageName }
    # Record any dependencies so they appear in the YAML output
    if len(deps)>0: package['requires'] = map(lambda x: "id:"+x, deps)
return package
def findSourceFiles(packagename, directory):
sourceFiles = []
# Scan for files. By default, our build tool only considers .c, .cpp and .f files
# in the top level directory, so there's no need to search subdirectories.
print("Scanning %s for source files"%directory, file=sys.stderr)
files = os.listdir(directory)
for f in files:
(root,ext) = os.path.splitext(f)
if ext in [ ".f", ".c", ".cpp" ]:
print("Source file %s found"%f, file=sys.stderr)
fileYaml = { '@id': "id:"+f,
'@type': "sw:SourceFile",
'name': f }
sourceFiles.append(fileYaml)
print("Returning %d entries in sourceFiles"%len(sourceFiles), file=sys.stderr)
return sourceFiles
def main():
if len(sys.argv) > 1:
scanDirectory = sys.argv[1]
else:
scanDirectory = "."
dirs = os.listdir(scanDirectory)
nodes = []
for d in dirs:
if os.path.exists(os.path.join(scanDirectory, d, "package")):
package = processPackage(d, os.path.join(scanDirectory, d))
sourceFiles = findSourceFiles(d, os.path.join(scanDirectory, d))
if len(sourceFiles)>0: package['contains'] = map(lambda x: "id:"+x, sourceFiles)
nodes.append(package)
nodes.extend(sourceFiles)
if len(nodes) == 0:
print("Nothing found!", file=sys.stderr)
else:
print(yaml.dump({ "@context": ["https://raw.githubusercontent.com/ssssam/software-integration-ontology/master/context.jsonld"],
"@graph": nodes }))
if __name__=="__main__":
main()
|
<commit_before><commit_msg>Add prototype BDE Metadata scanner in import/extract-yaml.pl<commit_after>
|
from __future__ import print_function
import os
import sys
import yaml
# Scan the local directory (or one provided as the first command line argument)
# for directories that contain 'package' directories. Extract BDE metadata from
# these and print a YAML document.
def readDeps(filename):
f = open(filename, "rt")
deps = []
while True:
l = f.readline()
if l == "": return deps
if len(l)>0 and l[0] != '#':
deps.append(l.strip())
def processPackage(packageName, directory):
metaDataDirectory = os.path.join(directory, "package")
# Find the packages we depend on
    deps = []
    if os.path.exists(os.path.join(metaDataDirectory, packageName + ".dep")):
        deps = readDeps(os.path.join(metaDataDirectory, packageName + ".dep"))
    package = { '@id': "id:"+packageName,
                '@type': "sw:Package",
                'name': packageName }
    # Record any dependencies so they appear in the YAML output
    if len(deps)>0: package['requires'] = map(lambda x: "id:"+x, deps)
return package
def findSourceFiles(packagename, directory):
sourceFiles = []
# Scan for files. By default, our build tool only considers .c, .cpp and .f files
# in the top level directory, so there's no need to search subdirectories.
print("Scanning %s for source files"%directory, file=sys.stderr)
files = os.listdir(directory)
for f in files:
(root,ext) = os.path.splitext(f)
if ext in [ ".f", ".c", ".cpp" ]:
print("Source file %s found"%f, file=sys.stderr)
fileYaml = { '@id': "id:"+f,
'@type': "sw:SourceFile",
'name': f }
sourceFiles.append(fileYaml)
print("Returning %d entries in sourceFiles"%len(sourceFiles), file=sys.stderr)
return sourceFiles
def main():
if len(sys.argv) > 1:
scanDirectory = sys.argv[1]
else:
scanDirectory = "."
dirs = os.listdir(scanDirectory)
nodes = []
for d in dirs:
if os.path.exists(os.path.join(scanDirectory, d, "package")):
package = processPackage(d, os.path.join(scanDirectory, d))
sourceFiles = findSourceFiles(d, os.path.join(scanDirectory, d))
if len(sourceFiles)>0: package['contains'] = map(lambda x: "id:"+x, sourceFiles)
nodes.append(package)
nodes.extend(sourceFiles)
if len(nodes) == 0:
print("Nothing found!", file=sys.stderr)
else:
print(yaml.dump({ "@context": ["https://raw.githubusercontent.com/ssssam/software-integration-ontology/master/context.jsonld"],
"@graph": nodes }))
if __name__=="__main__":
main()
|
Add prototype BDE Metadata scanner in import/extract-yaml.plfrom __future__ import print_function
import os
import sys
import yaml
# Scan the local directory (or one provided as the first command line argument)
# for directories that contain 'package' directories. Extract BDE metadata from
# these and print a YAML document.
def readDeps(filename):
f = open(filename, "rt")
deps = []
while True:
l = f.readline()
if l == "": return deps
if len(l)>0 and l[0] != '#':
deps.append(l.strip())
def processPackage(packageName, directory):
metaDataDirectory = os.path.join(directory, "package")
# Find the packages we depend on
    deps = []
    if os.path.exists(os.path.join(metaDataDirectory, packageName + ".dep")):
        deps = readDeps(os.path.join(metaDataDirectory, packageName + ".dep"))
    package = { '@id': "id:"+packageName,
                '@type': "sw:Package",
                'name': packageName }
    # Record any dependencies so they appear in the YAML output
    if len(deps)>0: package['requires'] = map(lambda x: "id:"+x, deps)
return package
def findSourceFiles(packagename, directory):
sourceFiles = []
# Scan for files. By default, our build tool only considers .c, .cpp and .f files
# in the top level directory, so there's no need to search subdirectories.
print("Scanning %s for source files"%directory, file=sys.stderr)
files = os.listdir(directory)
for f in files:
(root,ext) = os.path.splitext(f)
if ext in [ ".f", ".c", ".cpp" ]:
print("Source file %s found"%f, file=sys.stderr)
fileYaml = { '@id': "id:"+f,
'@type': "sw:SourceFile",
'name': f }
sourceFiles.append(fileYaml)
print("Returning %d entries in sourceFiles"%len(sourceFiles), file=sys.stderr)
return sourceFiles
def main():
if len(sys.argv) > 1:
scanDirectory = sys.argv[1]
else:
scanDirectory = "."
dirs = os.listdir(scanDirectory)
nodes = []
for d in dirs:
if os.path.exists(os.path.join(scanDirectory, d, "package")):
package = processPackage(d, os.path.join(scanDirectory, d))
sourceFiles = findSourceFiles(d, os.path.join(scanDirectory, d))
            if len(sourceFiles)>0: package['contains'] = map(lambda x: x['@id'], sourceFiles)
nodes.append(package)
nodes.extend(sourceFiles)
if len(nodes) == 0:
print("Nothing found!", file=sys.stderr)
else:
print(yaml.dump({ "@context": ["https://raw.githubusercontent.com/ssssam/software-integration-ontology/master/context.jsonld"],
"@graph": nodes }))
if __name__=="__main__":
main()
|
<commit_before><commit_msg>Add prototype BDE Metadata scanner in import/extract-yaml.pl<commit_after>from __future__ import print_function
import os
import sys
import yaml
# Scan the local directory (or one provided as the first command line argument)
# for directories that contain 'package' directories. Extract BDE metadata from
# these and print a YAML document.
def readDeps(filename):
f = open(filename, "rt")
deps = []
while True:
l = f.readline()
if l == "": return deps
if len(l)>0 and l[0] != '#':
deps.append(l.strip())
def processPackage(packageName, directory):
metaDataDirectory = os.path.join(directory, "package")
# Find the packages we depend on
    deps = []
if os.path.exists(os.path.join(metaDataDirectory, packageName + ".dep")):
deps = readDeps(os.path.join(metaDataDirectory, packageName + ".dep"))
package = { '@id': "id:"+packageName,
'@type': "sw:Package",
'name': packageName }
# Now output the YAML
if len(deps)>0: package['requires'] = map(lambda x: "id:"+x, deps)
return package
def findSourceFiles(packagename, directory):
sourceFiles = []
# Scan for files. By default, our build tool only considers .c, .cpp and .f files
# in the top level directory, so there's no need to search subdirectories.
print("Scanning %s for source files"%directory, file=sys.stderr)
files = os.listdir(directory)
for f in files:
(root,ext) = os.path.splitext(f)
if ext in [ ".f", ".c", ".cpp" ]:
print("Source file %s found"%f, file=sys.stderr)
fileYaml = { '@id': "id:"+f,
'@type': "sw:SourceFile",
'name': f }
sourceFiles.append(fileYaml)
print("Returning %d entries in sourceFiles"%len(sourceFiles), file=sys.stderr)
return sourceFiles
def main():
if len(sys.argv) > 1:
scanDirectory = sys.argv[1]
else:
scanDirectory = "."
dirs = os.listdir(scanDirectory)
nodes = []
for d in dirs:
if os.path.exists(os.path.join(scanDirectory, d, "package")):
package = processPackage(d, os.path.join(scanDirectory, d))
sourceFiles = findSourceFiles(d, os.path.join(scanDirectory, d))
            if len(sourceFiles)>0: package['contains'] = map(lambda x: x['@id'], sourceFiles)
nodes.append(package)
nodes.extend(sourceFiles)
if len(nodes) == 0:
print("Nothing found!", file=sys.stderr)
else:
print(yaml.dump({ "@context": ["https://raw.githubusercontent.com/ssssam/software-integration-ontology/master/context.jsonld"],
"@graph": nodes }))
if __name__=="__main__":
main()
|
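For reference, a document the scanner emits for a package foo that depends on bar and contains one source file would look roughly like this (package and file names are invented for illustration):

# Illustrative shape of the emitted document, as the Python dict passed to yaml.dump:
{
    "@context": ["https://raw.githubusercontent.com/ssssam/software-integration-ontology/master/context.jsonld"],
    "@graph": [
        {"@id": "id:foo", "@type": "sw:Package", "name": "foo",
         "requires": ["id:bar"], "contains": ["id:foo_main.c"]},
        {"@id": "id:foo_main.c", "@type": "sw:SourceFile", "name": "foo_main.c"},
    ],
}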
|
c44df13321a7f8b844b708ea90dbccc89579dc95
|
examples/widgets/focus_behavior.py
|
examples/widgets/focus_behavior.py
|
from kivy.app import App
from kivy.uix.gridlayout import GridLayout
from kivy.uix.boxlayout import BoxLayout
from kivy.uix.button import Button
from kivy.uix.behaviors import FocusBehavior
class FocusButton(FocusBehavior, Button):
'''A button, which when focused, turns red and sets the keyboard
input to the text of the button.
'''
def on_focused(self, instance, value, *largs):
self.background_color = [1, 0, 0, 1] if value else [1, 1, 1, 1]
def keyboard_on_key_down(self, window, keycode, text, modifiers):
'''We call super before doing anything else to enable tab cycling
by FocusBehavior. If we wanted to use tab for ourselves, we could just
not call it, or call it if we didn't need tab.
'''
if super(FocusButton, self).keyboard_on_key_down(window, keycode,
text, modifiers):
return True
self.text = keycode[1]
return True
class FocusApp(App):
def build(self):
root = BoxLayout()
self.grid1 = grid1 = GridLayout(cols=4)
self.grid2 = grid2 = GridLayout(cols=4)
root.add_widget(grid1)
root.add_widget(grid2)
for i in range(40):
grid1.add_widget(FocusButton(text='l' + str(i)))
for i in range(40):
grid2.add_widget(FocusButton(text='r' + str(i)))
# make elements 29, 9 un-focusable. The widgets are displayed in
# reverse order, so 9 = 39 - 10
grid2.children[10].is_focusable = False
grid2.children[30].is_focusable = False
# similarly, make 39 - 14 = 25, and 5 un-focusable
grid1.children[14].is_focusable = False
grid1.children[34].is_focusable = False
# exchange the links between the sides so that it'll skip to the other
# side in the middle. Remember that children are displayed reversed
# in layouts.
grid1.children[10].link_focus(next=grid2.children[9])
grid2.children[10].link_focus(next=grid1.children[9])
# autopopulate the rest, and complete the loop
FocusBehavior.autopopulate_focus(grid1, previous=grid2.children[-1])
# autopopulate the rest
FocusBehavior.autopopulate_focus(grid2)
# but now complete the loop directly, children[0] is the last element
grid2.children[0].link_focus(next=grid1.children[-1])
return root
if __name__ == '__main__':
FocusApp().run()
|
Add focus behavior example to examples.
|
Add focus behavior example to examples.
|
Python
|
mit
|
matham/kivy,rnixx/kivy,Cheaterman/kivy,LogicalDash/kivy,angryrancor/kivy,angryrancor/kivy,bob-the-hamster/kivy,niavlys/kivy,ernstp/kivy,jegger/kivy,manthansharma/kivy,jegger/kivy,gonzafirewall/kivy,angryrancor/kivy,arcticshores/kivy,niavlys/kivy,janssen/kivy,JohnHowland/kivy,arlowhite/kivy,KeyWeeUsr/kivy,rafalo1333/kivy,adamkh/kivy,LogicalDash/kivy,MiyamotoAkira/kivy,Shyam10/kivy,JohnHowland/kivy,manthansharma/kivy,vitorio/kivy,janssen/kivy,CuriousLearner/kivy,ehealthafrica-ci/kivy,adamkh/kivy,manthansharma/kivy,jkankiewicz/kivy,darkopevec/kivy,manashmndl/kivy,eHealthAfrica/kivy,edubrunaldi/kivy,MiyamotoAkira/kivy,iamutkarshtiwari/kivy,ernstp/kivy,vitorio/kivy,jkankiewicz/kivy,cbenhagen/kivy,arcticshores/kivy,aron-bordin/kivy,KeyWeeUsr/kivy,jffernandez/kivy,arcticshores/kivy,bionoid/kivy,iamutkarshtiwari/kivy,VinGarcia/kivy,bliz937/kivy,Shyam10/kivy,rafalo1333/kivy,matham/kivy,habibmasuro/kivy,VinGarcia/kivy,Cheaterman/kivy,cbenhagen/kivy,denys-duchier/kivy,aron-bordin/kivy,ernstp/kivy,Ramalus/kivy,CuriousLearner/kivy,viralpandey/kivy,akshayaurora/kivy,Farkal/kivy,akshayaurora/kivy,bliz937/kivy,denys-duchier/kivy,tony/kivy,kived/kivy,mSenyor/kivy,matham/kivy,JohnHowland/kivy,bliz937/kivy,vipulroxx/kivy,aron-bordin/kivy,tony/kivy,denys-duchier/kivy,dirkjot/kivy,kivy/kivy,darkopevec/kivy,aron-bordin/kivy,LogicalDash/kivy,Shyam10/kivy,yoelk/kivy,rafalo1333/kivy,autosportlabs/kivy,niavlys/kivy,arlowhite/kivy,inclement/kivy,bionoid/kivy,dirkjot/kivy,el-ethan/kivy,adamkh/kivy,jegger/kivy,kivy/kivy,janssen/kivy,yoelk/kivy,ernstp/kivy,youprofit/kivy,youprofit/kivy,thezawad/kivy,bob-the-hamster/kivy,VinGarcia/kivy,manashmndl/kivy,xpndlabs/kivy,thezawad/kivy,jkankiewicz/kivy,youprofit/kivy,arlowhite/kivy,viralpandey/kivy,LogicalDash/kivy,andnovar/kivy,vitorio/kivy,kived/kivy,jegger/kivy,ehealthafrica-ci/kivy,xiaoyanit/kivy,tony/kivy,mSenyor/kivy,andnovar/kivy,eHealthAfrica/kivy,adamkh/kivy,denys-duchier/kivy,yoelk/kivy,habibmasuro/kivy,jehutting/kivy,janssen/kivy,darkopevec/kivy,thezawad/kivy,Farkal/kivy,edubrunaldi/kivy,kivy/kivy,Farkal/kivy,xpndlabs/kivy,habibmasuro/kivy,bob-the-hamster/kivy,el-ethan/kivy,edubrunaldi/kivy,jkankiewicz/kivy,KeyWeeUsr/kivy,Ramalus/kivy,ehealthafrica-ci/kivy,inclement/kivy,niavlys/kivy,jehutting/kivy,jffernandez/kivy,bhargav2408/kivy,bionoid/kivy,xpndlabs/kivy,JohnHowland/kivy,dirkjot/kivy,matham/kivy,rnixx/kivy,bhargav2408/kivy,KeyWeeUsr/kivy,jffernandez/kivy,viralpandey/kivy,rnixx/kivy,gonzafirewall/kivy,Shyam10/kivy,jffernandez/kivy,eHealthAfrica/kivy,inclement/kivy,akshayaurora/kivy,gonzafirewall/kivy,iamutkarshtiwari/kivy,arcticshores/kivy,bob-the-hamster/kivy,gonzafirewall/kivy,vipulroxx/kivy,bionoid/kivy,dirkjot/kivy,vipulroxx/kivy,MiyamotoAkira/kivy,Cheaterman/kivy,autosportlabs/kivy,andnovar/kivy,xiaoyanit/kivy,angryrancor/kivy,CuriousLearner/kivy,cbenhagen/kivy,manashmndl/kivy,MiyamotoAkira/kivy,ehealthafrica-ci/kivy,manthansharma/kivy,jehutting/kivy,yoelk/kivy,vipulroxx/kivy,Cheaterman/kivy,Farkal/kivy,bhargav2408/kivy,xiaoyanit/kivy,eHealthAfrica/kivy,autosportlabs/kivy,Ramalus/kivy,darkopevec/kivy,kived/kivy,mSenyor/kivy,el-ethan/kivy
|
Add focus behavior example to examples.
|
from kivy.app import App
from kivy.uix.gridlayout import GridLayout
from kivy.uix.boxlayout import BoxLayout
from kivy.uix.button import Button
from kivy.uix.behaviors import FocusBehavior
class FocusButton(FocusBehavior, Button):
'''A button, which when focused, turns red and sets the keyboard
input to the text of the button.
'''
def on_focused(self, instance, value, *largs):
self.background_color = [1, 0, 0, 1] if value else [1, 1, 1, 1]
def keyboard_on_key_down(self, window, keycode, text, modifiers):
'''We call super before doing anything else to enable tab cycling
by FocusBehavior. If we wanted to use tab for ourselves, we could just
not call it, or call it if we didn't need tab.
'''
if super(FocusButton, self).keyboard_on_key_down(window, keycode,
text, modifiers):
return True
self.text = keycode[1]
return True
class FocusApp(App):
def build(self):
root = BoxLayout()
self.grid1 = grid1 = GridLayout(cols=4)
self.grid2 = grid2 = GridLayout(cols=4)
root.add_widget(grid1)
root.add_widget(grid2)
for i in range(40):
grid1.add_widget(FocusButton(text='l' + str(i)))
for i in range(40):
grid2.add_widget(FocusButton(text='r' + str(i)))
# make elements 29, 9 un-focusable. The widgets are displayed in
# reverse order, so 9 = 39 - 10
grid2.children[10].is_focusable = False
grid2.children[30].is_focusable = False
# similarly, make 39 - 14 = 25, and 5 un-focusable
grid1.children[14].is_focusable = False
grid1.children[34].is_focusable = False
# exchange the links between the sides so that it'll skip to the other
# side in the middle. Remember that children are displayed reversed
# in layouts.
grid1.children[10].link_focus(next=grid2.children[9])
grid2.children[10].link_focus(next=grid1.children[9])
# autopopulate the rest, and complete the loop
FocusBehavior.autopopulate_focus(grid1, previous=grid2.children[-1])
# autopopulate the rest
FocusBehavior.autopopulate_focus(grid2)
# but now complete the loop directly, children[0] is the last element
grid2.children[0].link_focus(next=grid1.children[-1])
return root
if __name__ == '__main__':
FocusApp().run()
|
<commit_before><commit_msg>Add focus behavior example to examples.<commit_after>
|
from kivy.app import App
from kivy.uix.gridlayout import GridLayout
from kivy.uix.boxlayout import BoxLayout
from kivy.uix.button import Button
from kivy.uix.behaviors import FocusBehavior
class FocusButton(FocusBehavior, Button):
'''A button, which when focused, turns red and sets the keyboard
input to the text of the button.
'''
def on_focused(self, instance, value, *largs):
self.background_color = [1, 0, 0, 1] if value else [1, 1, 1, 1]
def keyboard_on_key_down(self, window, keycode, text, modifiers):
'''We call super before doing anything else to enable tab cycling
by FocusBehavior. If we wanted to use tab for ourselves, we could just
not call it, or call it if we didn't need tab.
'''
if super(FocusButton, self).keyboard_on_key_down(window, keycode,
text, modifiers):
return True
self.text = keycode[1]
return True
class FocusApp(App):
def build(self):
root = BoxLayout()
self.grid1 = grid1 = GridLayout(cols=4)
self.grid2 = grid2 = GridLayout(cols=4)
root.add_widget(grid1)
root.add_widget(grid2)
for i in range(40):
grid1.add_widget(FocusButton(text='l' + str(i)))
for i in range(40):
grid2.add_widget(FocusButton(text='r' + str(i)))
# make elements 29, 9 un-focusable. The widgets are displayed in
# reverse order, so 9 = 39 - 10
grid2.children[10].is_focusable = False
grid2.children[30].is_focusable = False
# similarly, make 39 - 14 = 25, and 5 un-focusable
grid1.children[14].is_focusable = False
grid1.children[34].is_focusable = False
# exchange the links between the sides so that it'll skip to the other
# side in the middle. Remember that children are displayed reversed
# in layouts.
grid1.children[10].link_focus(next=grid2.children[9])
grid2.children[10].link_focus(next=grid1.children[9])
# autopopulate the rest, and complete the loop
FocusBehavior.autopopulate_focus(grid1, previous=grid2.children[-1])
# autopopulate the rest
FocusBehavior.autopopulate_focus(grid2)
# but now complete the loop directly, children[0] is the last element
grid2.children[0].link_focus(next=grid1.children[-1])
return root
if __name__ == '__main__':
FocusApp().run()
|
Add focus behavior example to examples.from kivy.app import App
from kivy.uix.gridlayout import GridLayout
from kivy.uix.boxlayout import BoxLayout
from kivy.uix.button import Button
from kivy.uix.behaviors import FocusBehavior
class FocusButton(FocusBehavior, Button):
'''A button, which when focused, turns red and sets the keyboard
input to the text of the button.
'''
def on_focused(self, instance, value, *largs):
self.background_color = [1, 0, 0, 1] if value else [1, 1, 1, 1]
def keyboard_on_key_down(self, window, keycode, text, modifiers):
'''We call super before doing anything else to enable tab cycling
by FocusBehavior. If we wanted to use tab for ourselves, we could just
not call it, or call it if we didn't need tab.
'''
if super(FocusButton, self).keyboard_on_key_down(window, keycode,
text, modifiers):
return True
self.text = keycode[1]
return True
class FocusApp(App):
def build(self):
root = BoxLayout()
self.grid1 = grid1 = GridLayout(cols=4)
self.grid2 = grid2 = GridLayout(cols=4)
root.add_widget(grid1)
root.add_widget(grid2)
for i in range(40):
grid1.add_widget(FocusButton(text='l' + str(i)))
for i in range(40):
grid2.add_widget(FocusButton(text='r' + str(i)))
# make elements 29, 9 un-focusable. The widgets are displayed in
# reverse order, so 9 = 39 - 10
grid2.children[10].is_focusable = False
grid2.children[30].is_focusable = False
# similarly, make 39 - 14 = 25, and 5 un-focusable
grid1.children[14].is_focusable = False
grid1.children[34].is_focusable = False
# exchange the links between the sides so that it'll skip to the other
# side in the middle. Remember that children are displayed reversed
# in layouts.
grid1.children[10].link_focus(next=grid2.children[9])
grid2.children[10].link_focus(next=grid1.children[9])
# autopopulate the rest, and complete the loop
FocusBehavior.autopopulate_focus(grid1, previous=grid2.children[-1])
# autopopulate the rest
FocusBehavior.autopopulate_focus(grid2)
# but now complete the loop directly, children[0] is the last element
grid2.children[0].link_focus(next=grid1.children[-1])
return root
if __name__ == '__main__':
FocusApp().run()
|
<commit_before><commit_msg>Add focus behavior example to examples.<commit_after>from kivy.app import App
from kivy.uix.gridlayout import GridLayout
from kivy.uix.boxlayout import BoxLayout
from kivy.uix.button import Button
from kivy.uix.behaviors import FocusBehavior
class FocusButton(FocusBehavior, Button):
'''A button, which when focused, turns red and sets the keyboard
input to the text of the button.
'''
def on_focused(self, instance, value, *largs):
self.background_color = [1, 0, 0, 1] if value else [1, 1, 1, 1]
def keyboard_on_key_down(self, window, keycode, text, modifiers):
'''We call super before doing anything else to enable tab cycling
by FocusBehavior. If we wanted to use tab for ourselves, we could just
not call it, or call it if we didn't need tab.
'''
if super(FocusButton, self).keyboard_on_key_down(window, keycode,
text, modifiers):
return True
self.text = keycode[1]
return True
class FocusApp(App):
def build(self):
root = BoxLayout()
self.grid1 = grid1 = GridLayout(cols=4)
self.grid2 = grid2 = GridLayout(cols=4)
root.add_widget(grid1)
root.add_widget(grid2)
for i in range(40):
grid1.add_widget(FocusButton(text='l' + str(i)))
for i in range(40):
grid2.add_widget(FocusButton(text='r' + str(i)))
# make elements 29, 9 un-focusable. The widgets are displayed in
# reverse order, so 9 = 39 - 10
grid2.children[10].is_focusable = False
grid2.children[30].is_focusable = False
# similarly, make 39 - 14 = 25, and 5 un-focusable
grid1.children[14].is_focusable = False
grid1.children[34].is_focusable = False
# exchange the links between the sides so that it'll skip to the other
# side in the middle. Remember that children are displayed reversed
# in layouts.
grid1.children[10].link_focus(next=grid2.children[9])
grid2.children[10].link_focus(next=grid1.children[9])
# autopopulate the rest, and complete the loop
FocusBehavior.autopopulate_focus(grid1, previous=grid2.children[-1])
# autopopulate the rest
FocusBehavior.autopopulate_focus(grid2)
# but now complete the loop directly, children[0] is the last element
grid2.children[0].link_focus(next=grid1.children[-1])
return root
if __name__ == '__main__':
FocusApp().run()
|
|
9b870f8a94cd232ecc5fddc155f9056fdcdfe9b1
|
runreport.py
|
runreport.py
|
import os
import json
import saulify.sitespec as sitespec
SPEC_DIRECTORY = "sitespecs"
if __name__ == "__main__":
for fname in os.listdir(SPEC_DIRECTORY):
fpath = os.path.join(SPEC_DIRECTORY, fname)
test_cases = sitespec.load_testcases(fpath)
for test_case in test_cases:
result = test_case.run()
print(json.dumps(result))
|
Add script to generate report on scraper tests
|
Add script to generate report on scraper tests
|
Python
|
agpl-3.0
|
asm-products/saulify-web,asm-products/saulify-web,asm-products/saulify-web
|
Add script to generate report on scraper tests
|
import os
import json
import saulify.sitespec as sitespec
SPEC_DIRECTORY = "sitespecs"
if __name__ == "__main__":
for fname in os.listdir(SPEC_DIRECTORY):
fpath = os.path.join(SPEC_DIRECTORY, fname)
test_cases = sitespec.load_testcases(fpath)
for test_case in test_cases:
result = test_case.run()
print(json.dumps(result))
|
<commit_before><commit_msg>Add script to generate report on scraper tests<commit_after>
|
import os
import json
import saulify.sitespec as sitespec
SPEC_DIRECTORY = "sitespecs"
if __name__ == "__main__":
for fname in os.listdir(SPEC_DIRECTORY):
fpath = os.path.join(SPEC_DIRECTORY, fname)
test_cases = sitespec.load_testcases(fpath)
for test_case in test_cases:
result = test_case.run()
print(json.dumps(result))
|
Add script to generate report on scraper testsimport os
import json
import saulify.sitespec as sitespec
SPEC_DIRECTORY = "sitespecs"
if __name__ == "__main__":
for fname in os.listdir(SPEC_DIRECTORY):
fpath = os.path.join(SPEC_DIRECTORY, fname)
test_cases = sitespec.load_testcases(fpath)
for test_case in test_cases:
result = test_case.run()
print(json.dumps(result))
|
<commit_before><commit_msg>Add script to generate report on scraper tests<commit_after>import os
import json
import saulify.sitespec as sitespec
SPEC_DIRECTORY = "sitespecs"
if __name__ == "__main__":
for fname in os.listdir(SPEC_DIRECTORY):
fpath = os.path.join(SPEC_DIRECTORY, fname)
test_cases = sitespec.load_testcases(fpath)
for test_case in test_cases:
result = test_case.run()
print(json.dumps(result))
|
|
b8259551a8d147e676cc7fe34fd511a38861f294
|
Week01/Problem04/cyu_04.py
|
Week01/Problem04/cyu_04.py
|
#!/usr/bin/env python3
"""This script is written by Chuanping Yu, on Jul 24, 2017,
for the Assignment#1 in IDEaS workshop"""
#Problem 4
S = 0
A = 0
B = 0
for a in range(100, 1000):
for b in range(100, 1000):
num = a*b
if str(num) == str(num)[::-1] and S < num:
S = num
A = a
B = b
print(S, "=", A, "*", B)
|
Add Chuanping Yu's solutions to Problem04
|
Add Chuanping Yu's solutions to Problem04
|
Python
|
bsd-3-clause
|
GT-IDEaS/SkillsWorkshop2017,GT-IDEaS/SkillsWorkshop2017,GT-IDEaS/SkillsWorkshop2017
|
Add Chuanping Yu's solutions to Problem04
|
#!/usr/bin/env python3
"""This script is written by Chuanping Yu, on Jul 24, 2017,
for the Assignment#1 in IDEaS workshop"""
#Problem 4
S = 0
A = 0
B = 0
for a in range(100, 1000):
for b in range(100, 1000):
num = a*b
if str(num) == str(num)[::-1] and S < num:
S = num
A = a
B = b
print(S, "=", A, "*", B)
|
<commit_before><commit_msg>Add Chuanping Yu's solutions to Problem04<commit_after>
|
#!/usr/bin/env python3
"""This script is written by Chuanping Yu, on Jul 24, 2017,
for the Assignment#1 in IDEaS workshop"""
#Problem 4
S = 0
A = 0
B = 0
for a in range(100, 1000):
for b in range(100, 1000):
num = a*b
if str(num) == str(num)[::-1] and S < num:
S = num
A = a
B = b
print(S, "=", A, "*", B)
|
Add Chuanping Yu's solutions to Problem04#!/usr/bin/env python3
"""This script is written by Chuanping Yu, on Jul 24, 2017,
for the Assignment#1 in IDEaS workshop"""
#Problem 4
S = 0
A = 0
B = 0
for a in range(100, 1000):
for b in range(100, 1000):
num = a*b
if str(num) == str(num)[::-1] and S < num:
S = num
A = a
B = b
print(S, "=", A, "*", B)
|
<commit_before><commit_msg>Add Chuanping Yu's solutions to Problem04<commit_after>#!/usr/bin/env python3
"""This script is written by Chuanping Yu, on Jul 24, 2017,
for the Assignment#1 in IDEaS workshop"""
#Problem 4
S = 0
A = 0
B = 0
for a in range(100, 1000):
for b in range(100, 1000):
num = a*b
if str(num) == str(num)[::-1] and S < num:
S = num
A = a
B = b
print(S, "=", A, "*", B)
|
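The brute-force scan above checks every ordered pair; a slightly tighter variant (illustrative, not part of the commit) starts the inner loop at a, so each unordered pair is tried only once:

S = A = B = 0
for a in range(100, 1000):
    for b in range(a, 1000):  # b >= a: multiplication is commutative
        num = a * b
        if num > S and str(num) == str(num)[::-1]:
            S, A, B = num, a, b
print(S, "=", A, "*", B)  # 906609 = 913 * 993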
|
b6e8a75077d4b1667861d307d352828f3a3fbd60
|
stoic/log.py
|
stoic/log.py
|
import logging
import datetime
class Iso8601Formatter(logging.Formatter):
def formatTime(self, record, datefmt=None):
record_time = datetime.datetime.utcfromtimestamp(record.created)
return record_time.strftime("%Y-%m-%dT%H:%M:%S.%fZ")
|
Add formatter for outputting iso8601 dates
|
Add formatter for outputting iso8601 dates
|
Python
|
apache-2.0
|
NegativeMjark/stoic
|
Add formatter for outputting iso8601 dates
|
import logging
import datetime
class Iso8601Formatter(logging.Formatter):
def formatTime(self, record, datefmt=None):
record_time = datetime.datetime.utcfromtimestamp(record.created)
return record_time.strftime("%Y-%m-%dT%H:%M:%S.%fZ")
|
<commit_before><commit_msg>Add formatter for outputting iso8601 dates<commit_after>
|
import logging
import datetime
class Iso8601Formatter(logging.Formatter):
def formatTime(self, record, datefmt=None):
record_time = datetime.datetime.utcfromtimestamp(record.created)
return record_time.strftime("%Y-%m-%dT%H:%M:%S.%fZ")
|
Add formatter for outputting iso8601 datesimport logging
import datetime
class Iso8601Formatter(logging.Formatter):
def formatTime(self, record, datefmt=None):
record_time = datetime.datetime.utcfromtimestamp(record.created)
return record_time.strftime("%Y-%m-%dT%H:%M:%S.%fZ")
|
<commit_before><commit_msg>Add formatter for outputting iso8601 dates<commit_after>import logging
import datetime
class Iso8601Formatter(logging.Formatter):
def formatTime(self, record, datefmt=None):
record_time = datetime.datetime.utcfromtimestamp(record.created)
return record_time.strftime("%Y-%m-%dT%H:%M:%S.%fZ")
|
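A minimal sketch of wiring such a formatter into the standard logging module; the handler setup below is illustrative and not part of the commit:

import logging
import datetime

class Iso8601Formatter(logging.Formatter):
    def formatTime(self, record, datefmt=None):
        record_time = datetime.datetime.utcfromtimestamp(record.created)
        return record_time.strftime("%Y-%m-%dT%H:%M:%S.%fZ")

handler = logging.StreamHandler()
# %(asctime)s is rendered via formatTime, so timestamps come out as ISO 8601 UTC.
handler.setFormatter(Iso8601Formatter("%(asctime)s %(levelname)s %(message)s"))
logger = logging.getLogger("stoic")
logger.addHandler(handler)
logger.warning("hello")  # e.g. 2015-01-01T12:00:00.000000Z WARNING hello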
|
51cad1634ed9f6b8e40264f027a3a891db45e729
|
benchmarks/expand2_sage.py
|
benchmarks/expand2_sage.py
|
from timeit import default_timer as clock
from sage.all import var
var("x y z w")
e = (x+y+z+w)**15
f = e*(e+w)
print f
t1 = clock()
g = f.expand()
t2 = clock()
print "Total time:", t2-t1, "s"
|
Add the expand2 benchmark using Sage
|
Add the expand2 benchmark using Sage
|
Python
|
mit
|
symengine/symengine.py,bjodah/symengine.py,bjodah/symengine.py,symengine/symengine.py,bjodah/symengine.py,symengine/symengine.py
|
Add the expand2 benchmark using Sage
|
from timeit import default_timer as clock
from sage.all import var
var("x y z w")
e = (x+y+z+w)**15
f = e*(e+w)
print f
t1 = clock()
g = f.expand()
t2 = clock()
print "Total time:", t2-t1, "s"
|
<commit_before><commit_msg>Add the expand2 benchmark using Sage<commit_after>
|
from timeit import default_timer as clock
from sage.all import var
var("x y z w")
e = (x+y+z+w)**15
f = e*(e+w)
print f
t1 = clock()
g = f.expand()
t2 = clock()
print "Total time:", t2-t1, "s"
|
Add the expand2 benchmark using Sagefrom timeit import default_timer as clock
from sage.all import var
var("x y z w")
e = (x+y+z+w)**15
f = e*(e+w)
print f
t1 = clock()
g = f.expand()
t2 = clock()
print "Total time:", t2-t1, "s"
|
<commit_before><commit_msg>Add the expand2 benchmark using Sage<commit_after>from timeit import default_timer as clock
from sage.all import var
var("x y z w")
e = (x+y+z+w)**15
f = e*(e+w)
print f
t1 = clock()
g = f.expand()
t2 = clock()
print "Total time:", t2-t1, "s"
|
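Given the target repos (symengine.py), a rough symengine counterpart of the same benchmark is sketched below; it assumes symengine's var and expand mirror SymPy's API, which this commit does not show:

from timeit import default_timer as clock
from symengine import var

x, y, z, w = var("x y z w")  # assumed to return the created symbols, as in SymPy
e = (x + y + z + w)**15
f = e*(e + w)
t1 = clock()
g = f.expand()
t2 = clock()
print("Total time:", t2 - t1, "s")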
|
4b6e1b2426efa4f96dc1120718b4acbfbcdbee98
|
numba/tests/test_caching.py
|
numba/tests/test_caching.py
|
from __future__ import print_function, absolute_import, division
import sys
import os
import multiprocessing as mp
import traceback
from numba import njit
from .support import (
TestCase,
temp_directory,
override_env_config,
captured_stdout,
captured_stderr,
)
def constant_unicode_cache():
c = "abcd"
return hash(c), c
def check_constant_unicode_cache():
pyfunc = constant_unicode_cache
cfunc = njit(cache=True)(pyfunc)
exp_hv, exp_str = pyfunc()
got_hv, got_str = cfunc()
assert exp_hv == got_hv
assert exp_str == got_str
def dict_cache():
return {'a': 1, 'b': 2}
def check_dict_cache():
pyfunc = dict_cache
cfunc = njit(cache=True)(pyfunc)
exp = pyfunc()
got = cfunc()
assert exp == got
class TestCaching(TestCase):
def run_test(self, func):
func()
ctx = mp.get_context('spawn')
qout = ctx.Queue()
cache_dir = temp_directory(__name__)
with override_env_config('NUMBA_CACHE_DIR', cache_dir):
proc = ctx.Process(target=_remote_runner, args=[func, qout])
proc.start()
stdout = qout.get()
stderr = qout.get()
if stdout.strip():
print()
print('STDOUT'.center(80, '-'))
print(stdout)
if stderr.strip():
print()
print('STDERR'.center(80, '-'))
print(stderr)
proc.join()
self.assertEqual(proc.exitcode, 0)
# The following is used to auto populate test methods into this class
def _make_test(fn):
def udt(self):
self.run_test(fn)
return udt
for k, v in globals().items():
prefix = 'check_'
if k.startswith(prefix):
locals()['test_' + k[len(prefix):]] = _make_test(v)
def _remote_runner(fn, qout):
with captured_stderr() as stderr:
with captured_stdout() as stdout:
try:
fn()
except Exception:
print(traceback.format_exc(), file=sys.stderr)
exitcode = 1
else:
exitcode = 0
qout.put(stdout.getvalue())
qout.put(stderr.getvalue())
sys.exit(exitcode)
|
Add test for unicode cache
|
Add test for unicode cache
|
Python
|
bsd-2-clause
|
jriehl/numba,IntelLabs/numba,jriehl/numba,cpcloud/numba,IntelLabs/numba,gmarkall/numba,IntelLabs/numba,gmarkall/numba,jriehl/numba,sklam/numba,sklam/numba,seibert/numba,numba/numba,stuartarchibald/numba,seibert/numba,cpcloud/numba,stonebig/numba,seibert/numba,cpcloud/numba,gmarkall/numba,numba/numba,gmarkall/numba,stuartarchibald/numba,sklam/numba,stonebig/numba,numba/numba,stonebig/numba,IntelLabs/numba,stonebig/numba,stuartarchibald/numba,stuartarchibald/numba,seibert/numba,stuartarchibald/numba,sklam/numba,IntelLabs/numba,gmarkall/numba,jriehl/numba,cpcloud/numba,sklam/numba,cpcloud/numba,numba/numba,jriehl/numba,seibert/numba,numba/numba,stonebig/numba
|
Add test for unicode cache
|
from __future__ import print_function, absolute_import, division
import sys
import os
import multiprocessing as mp
import traceback
from numba import njit
from .support import (
TestCase,
temp_directory,
override_env_config,
captured_stdout,
captured_stderr,
)
def constant_unicode_cache():
c = "abcd"
return hash(c), c
def check_constant_unicode_cache():
pyfunc = constant_unicode_cache
cfunc = njit(cache=True)(pyfunc)
exp_hv, exp_str = pyfunc()
got_hv, got_str = cfunc()
assert exp_hv == got_hv
assert exp_str == got_str
def dict_cache():
return {'a': 1, 'b': 2}
def check_dict_cache():
pyfunc = dict_cache
cfunc = njit(cache=True)(pyfunc)
exp = pyfunc()
got = cfunc()
assert exp == got
class TestCaching(TestCase):
def run_test(self, func):
func()
ctx = mp.get_context('spawn')
qout = ctx.Queue()
cache_dir = temp_directory(__name__)
with override_env_config('NUMBA_CACHE_DIR', cache_dir):
proc = ctx.Process(target=_remote_runner, args=[func, qout])
proc.start()
stdout = qout.get()
stderr = qout.get()
if stdout.strip():
print()
print('STDOUT'.center(80, '-'))
print(stdout)
if stderr.strip():
print()
print('STDERR'.center(80, '-'))
print(stderr)
proc.join()
self.assertEqual(proc.exitcode, 0)
# The following is used to auto populate test methods into this class
def _make_test(fn):
def udt(self):
self.run_test(fn)
return udt
for k, v in globals().items():
prefix = 'check_'
if k.startswith(prefix):
locals()['test_' + k[len(prefix):]] = _make_test(v)
def _remote_runner(fn, qout):
with captured_stderr() as stderr:
with captured_stdout() as stdout:
try:
fn()
except Exception:
print(traceback.format_exc(), file=sys.stderr)
exitcode = 1
else:
exitcode = 0
qout.put(stdout.getvalue())
qout.put(stderr.getvalue())
sys.exit(exitcode)
|
<commit_before><commit_msg>Add test for unicode cache<commit_after>
|
from __future__ import print_function, absolute_import, division
import sys
import os
import multiprocessing as mp
import traceback
from numba import njit
from .support import (
TestCase,
temp_directory,
override_env_config,
captured_stdout,
captured_stderr,
)
def constant_unicode_cache():
c = "abcd"
return hash(c), c
def check_constant_unicode_cache():
pyfunc = constant_unicode_cache
cfunc = njit(cache=True)(pyfunc)
exp_hv, exp_str = pyfunc()
got_hv, got_str = cfunc()
assert exp_hv == got_hv
assert exp_str == got_str
def dict_cache():
return {'a': 1, 'b': 2}
def check_dict_cache():
pyfunc = dict_cache
cfunc = njit(cache=True)(pyfunc)
exp = pyfunc()
got = cfunc()
assert exp == got
class TestCaching(TestCase):
def run_test(self, func):
func()
ctx = mp.get_context('spawn')
qout = ctx.Queue()
cache_dir = temp_directory(__name__)
with override_env_config('NUMBA_CACHE_DIR', cache_dir):
proc = ctx.Process(target=_remote_runner, args=[func, qout])
proc.start()
stdout = qout.get()
stderr = qout.get()
if stdout.strip():
print()
print('STDOUT'.center(80, '-'))
print(stdout)
if stderr.strip():
print()
print('STDERR'.center(80, '-'))
print(stderr)
proc.join()
self.assertEqual(proc.exitcode, 0)
# The following is used to auto populate test methods into this class
def _make_test(fn):
def udt(self):
self.run_test(fn)
return udt
for k, v in globals().items():
prefix = 'check_'
if k.startswith(prefix):
locals()['test_' + k[len(prefix):]] = _make_test(v)
def _remote_runner(fn, qout):
with captured_stderr() as stderr:
with captured_stdout() as stdout:
try:
fn()
except Exception:
print(traceback.format_exc(), file=sys.stderr)
exitcode = 1
else:
exitcode = 0
qout.put(stdout.getvalue())
qout.put(stderr.getvalue())
sys.exit(exitcode)
|
Add test for unicode cachefrom __future__ import print_function, absolute_import, division
import sys
import os
import multiprocessing as mp
import traceback
from numba import njit
from .support import (
TestCase,
temp_directory,
override_env_config,
captured_stdout,
captured_stderr,
)
def constant_unicode_cache():
c = "abcd"
return hash(c), c
def check_constant_unicode_cache():
pyfunc = constant_unicode_cache
cfunc = njit(cache=True)(pyfunc)
exp_hv, exp_str = pyfunc()
got_hv, got_str = cfunc()
assert exp_hv == got_hv
assert exp_str == got_str
def dict_cache():
return {'a': 1, 'b': 2}
def check_dict_cache():
pyfunc = dict_cache
cfunc = njit(cache=True)(pyfunc)
exp = pyfunc()
got = cfunc()
assert exp == got
class TestCaching(TestCase):
def run_test(self, func):
func()
ctx = mp.get_context('spawn')
qout = ctx.Queue()
cache_dir = temp_directory(__name__)
with override_env_config('NUMBA_CACHE_DIR', cache_dir):
proc = ctx.Process(target=_remote_runner, args=[func, qout])
proc.start()
stdout = qout.get()
stderr = qout.get()
if stdout.strip():
print()
print('STDOUT'.center(80, '-'))
print(stdout)
if stderr.strip():
print()
print('STDERR'.center(80, '-'))
print(stderr)
proc.join()
self.assertEqual(proc.exitcode, 0)
# The following is used to auto populate test methods into this class
def _make_test(fn):
def udt(self):
self.run_test(fn)
return udt
for k, v in globals().items():
prefix = 'check_'
if k.startswith(prefix):
locals()['test_' + k[len(prefix):]] = _make_test(v)
def _remote_runner(fn, qout):
with captured_stderr() as stderr:
with captured_stdout() as stdout:
try:
fn()
except Exception:
print(traceback.format_exc(), file=sys.stderr)
exitcode = 1
else:
exitcode = 0
qout.put(stdout.getvalue())
qout.put(stderr.getvalue())
sys.exit(exitcode)
|
<commit_before><commit_msg>Add test for unicode cache<commit_after>from __future__ import print_function, absolute_import, division
import sys
import os
import multiprocessing as mp
import traceback
from numba import njit
from .support import (
TestCase,
temp_directory,
override_env_config,
captured_stdout,
captured_stderr,
)
def constant_unicode_cache():
c = "abcd"
return hash(c), c
def check_constant_unicode_cache():
pyfunc = constant_unicode_cache
cfunc = njit(cache=True)(pyfunc)
exp_hv, exp_str = pyfunc()
got_hv, got_str = cfunc()
assert exp_hv == got_hv
assert exp_str == got_str
def dict_cache():
return {'a': 1, 'b': 2}
def check_dict_cache():
pyfunc = dict_cache
cfunc = njit(cache=True)(pyfunc)
exp = pyfunc()
got = cfunc()
assert exp == got
class TestCaching(TestCase):
def run_test(self, func):
func()
ctx = mp.get_context('spawn')
qout = ctx.Queue()
cache_dir = temp_directory(__name__)
with override_env_config('NUMBA_CACHE_DIR', cache_dir):
proc = ctx.Process(target=_remote_runner, args=[func, qout])
proc.start()
stdout = qout.get()
stderr = qout.get()
if stdout.strip():
print()
print('STDOUT'.center(80, '-'))
print(stdout)
if stderr.strip():
print()
print('STDERR'.center(80, '-'))
print(stderr)
proc.join()
self.assertEqual(proc.exitcode, 0)
# The following is used to auto populate test methods into this class
def _make_test(fn):
def udt(self):
self.run_test(fn)
return udt
for k, v in globals().items():
prefix = 'check_'
if k.startswith(prefix):
locals()['test_' + k[len(prefix):]] = _make_test(v)
def _remote_runner(fn, qout):
with captured_stderr() as stderr:
with captured_stdout() as stdout:
try:
fn()
except Exception:
print(traceback.format_exc(), file=sys.stderr)
exitcode = 1
else:
exitcode = 0
qout.put(stdout.getvalue())
qout.put(stderr.getvalue())
sys.exit(exitcode)
|
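The check_*-to-test_* auto-population used in TestCaching is a reusable pattern; a standalone sketch of it (module and names are illustrative):

import unittest

def check_addition():
    assert 1 + 1 == 2

def _make_test(fn):
    def udt(self):
        fn()
    return udt

class TestAuto(unittest.TestCase):
    pass

# Turn every module-level check_* function into a TestAuto.test_* method.
for k, v in list(globals().items()):
    if k.startswith('check_'):
        setattr(TestAuto, 'test_' + k[len('check_'):], _make_test(v))

if __name__ == '__main__':
    unittest.main()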
|
6a89500514073906df88216048ea0afe1c817e33
|
tests/test_brann_bronzebeard.py
|
tests/test_brann_bronzebeard.py
|
from utils import *
BRANN_BRONZEBEARD = "LOE_077"
def _prepare_game():
game = prepare_game()
brann = game.player1.give(BRANN_BRONZEBEARD)
brann.play()
game.end_turn(); game.end_turn()
game.player1.discard_hand()
return game, brann
def test_brann_abusive_sergeant():
game, brann = _prepare_game()
abusive = game.player1.give("CS2_188")
abusive.play(target=brann)
assert brann.atk == 2 + (2 * 2)
def test_brann_injured_blademaster():
game, brann = _prepare_game()
blademaster = game.player1.give("CS2_181")
blademaster.play()
assert blademaster.dead
def test_brann_novice_engineer():
game, brann = _prepare_game()
novice = game.player1.give("EX1_015")
novice.play()
assert len(game.player1.hand) == 2
def test_brann_youthful_brewmaster():
game, brann = _prepare_game()
brewmaster = game.player1.give("EX1_049")
brewmaster.play(target=brann)
assert brann in game.player1.hand
|
Add some tests for Brann Bronzebeard
|
Add some tests for Brann Bronzebeard
|
Python
|
agpl-3.0
|
Ragowit/fireplace,Ragowit/fireplace,NightKev/fireplace,amw2104/fireplace,smallnamespace/fireplace,amw2104/fireplace,smallnamespace/fireplace,beheh/fireplace,jleclanche/fireplace
|
Add some tests for Brann Bronzebeard
|
from utils import *
BRANN_BRONZEBEARD = "LOE_077"
def _prepare_game():
game = prepare_game()
brann = game.player1.give(BRANN_BRONZEBEARD)
brann.play()
game.end_turn(); game.end_turn()
game.player1.discard_hand()
return game, brann
def test_brann_abusive_sergeant():
game, brann = _prepare_game()
abusive = game.player1.give("CS2_188")
abusive.play(target=brann)
assert brann.atk == 2 + (2 * 2)
def test_brann_injured_blademaster():
game, brann = _prepare_game()
blademaster = game.player1.give("CS2_181")
blademaster.play()
assert blademaster.dead
def test_brann_novice_engineer():
game, brann = _prepare_game()
novice = game.player1.give("EX1_015")
novice.play()
assert len(game.player1.hand) == 2
def test_brann_youthful_brewmaster():
game, brann = _prepare_game()
brewmaster = game.player1.give("EX1_049")
brewmaster.play(target=brann)
assert brann in game.player1.hand
|
<commit_before><commit_msg>Add some tests for Brann Bronzebeard<commit_after>
|
from utils import *
BRANN_BRONZEBEARD = "LOE_077"
def _prepare_game():
game = prepare_game()
brann = game.player1.give(BRANN_BRONZEBEARD)
brann.play()
game.end_turn(); game.end_turn()
game.player1.discard_hand()
return game, brann
def test_brann_abusive_sergeant():
game, brann = _prepare_game()
abusive = game.player1.give("CS2_188")
abusive.play(target=brann)
assert brann.atk == 2 + (2 * 2)
def test_brann_injured_blademaster():
game, brann = _prepare_game()
blademaster = game.player1.give("CS2_181")
blademaster.play()
assert blademaster.dead
def test_brann_novice_engineer():
game, brann = _prepare_game()
novice = game.player1.give("EX1_015")
novice.play()
assert len(game.player1.hand) == 2
def test_brann_youthful_brewmaster():
game, brann = _prepare_game()
brewmaster = game.player1.give("EX1_049")
brewmaster.play(target=brann)
assert brann in game.player1.hand
|
Add some tests for Brann Bronzebeardfrom utils import *
BRANN_BRONZEBEARD = "LOE_077"
def _prepare_game():
game = prepare_game()
brann = game.player1.give(BRANN_BRONZEBEARD)
brann.play()
game.end_turn(); game.end_turn()
game.player1.discard_hand()
return game, brann
def test_brann_abusive_sergeant():
game, brann = _prepare_game()
abusive = game.player1.give("CS2_188")
abusive.play(target=brann)
assert brann.atk == 2 + (2 * 2)
def test_brann_injured_blademaster():
game, brann = _prepare_game()
blademaster = game.player1.give("CS2_181")
blademaster.play()
assert blademaster.dead
def test_brann_novice_engineer():
game, brann = _prepare_game()
novice = game.player1.give("EX1_015")
novice.play()
assert len(game.player1.hand) == 2
def test_brann_youthful_brewmaster():
game, brann = _prepare_game()
brewmaster = game.player1.give("EX1_049")
brewmaster.play(target=brann)
assert brann in game.player1.hand
|
<commit_before><commit_msg>Add some tests for Brann Bronzebeard<commit_after>from utils import *
BRANN_BRONZEBEARD = "LOE_077"
def _prepare_game():
game = prepare_game()
brann = game.player1.give(BRANN_BRONZEBEARD)
brann.play()
game.end_turn(); game.end_turn()
game.player1.discard_hand()
return game, brann
def test_brann_abusive_sergeant():
game, brann = _prepare_game()
abusive = game.player1.give("CS2_188")
abusive.play(target=brann)
assert brann.atk == 2 + (2 * 2)
def test_brann_injured_blademaster():
game, brann = _prepare_game()
blademaster = game.player1.give("CS2_181")
blademaster.play()
assert blademaster.dead
def test_brann_novice_engineer():
game, brann = _prepare_game()
novice = game.player1.give("EX1_015")
novice.play()
assert len(game.player1.hand) == 2
def test_brann_youthful_brewmaster():
game, brann = _prepare_game()
brewmaster = game.player1.give("EX1_049")
brewmaster.play(target=brann)
assert brann in game.player1.hand
|
|
ea4b0cece818a3d615fa116936f7067bb9c13832
|
tests/test_spatial_relations.py
|
tests/test_spatial_relations.py
|
## Imports
import numpy as np
import matplotlib.pyplot as plt
from pySpatialTools.Retrieve import GridSpatialDisc, CircRetriever
from pySpatialTools.Spatial_Relations.region_spatial_relations import regions_relation_points
## Parameters
n = 10000
ngx, ngy = 100, 100
## Artificial distribution in space
locs = np.random.random((n, 2))
locs2 = np.array((locs[:, 0]*np.cos(locs[:, 1]*2*np.pi), locs[:, 0]*np.sin(locs[:, 1]*np.pi*2))).T
# Test distributions
fig1 = plt.plot(locs[:,0], locs[:, 1], '.')
fig2 = plt.plot(locs2[:,0], locs2[:, 1], '.')
## Discretization
disc = GridSpatialDisc((ngx, ngy), xlim=(0, 1), ylim=(0, 1))
regions = disc.map2id(locs)
disc2 = GridSpatialDisc((ngx, ngy), xlim=(-1, 1), ylim=(-1, 1))
regions2 = disc2.map2id(locs2)
## Spatial relations
retriever = CircRetriever(locs, True)
radis = [0.01, 0.05, 0.1, 0.25, 0.5]
n_radis = len(radis)
n_reg1, n_reg2 = np.unique(regions).shape[0], np.unique(regions2).shape[0]
relation1 = np.zeros((n_reg1, n_reg1, n_radis))
relation2 = np.zeros((n_reg2, n_reg2, n_radis))
for i in range(n_radis):
info_ret = np.ones(n)*radis[i]
relation1[:, :, i] = regions_relation_points(locs, regions, retriever, info_ret)
relation2[:, :, i] = regions_relation_points(locs2, regions2, retriever, info_ret)
|
Test for spatial relations between regions.
|
Test for spatial relations between regions.
|
Python
|
mit
|
tgquintela/pySpatialTools,tgquintela/pySpatialTools
|
Test for spatial relations between regions.
|
## Imports
import numpy as np
import matplotlib.pyplot as plt
from pySpatialTools.Retrieve import GridSpatialDisc, CircRetriever
from pySpatialTools.Spatial_Relations.region_spatial_relations import regions_relation_points
## Parameters
n = 10000
ngx, ngy = 100, 100
## Artificial distribution in space
locs = np.random.random((n, 2))
locs2 = np.array((locs[:, 0]*np.cos(locs[:, 1]*2*np.pi), locs[:, 0]*np.sin(locs[:, 1]*np.pi*2))).T
# Test distributions
fig1 = plt.plot(locs[:,0], locs[:, 1], '.')
fig2 = plt.plot(locs2[:,0], locs2[:, 1], '.')
## Discretization
disc = GridSpatialDisc((ngx, ngy), xlim=(0, 1), ylim=(0, 1))
regions = disc.map2id(locs)
disc2 = GridSpatialDisc((ngx, ngy), xlim=(-1, 1), ylim=(-1, 1))
regions2 = disc2.map2id(locs2)
## Spatial relations
retriever = CircRetriever(locs, True)
radis = [0.01, 0.05, 0.1, 0.25, 0.5]
n_radis = len(radis)
n_reg1, n_reg2 = np.unique(regions).shape[0], np.unique(regions2).shape[0]
relation1 = np.zeros((n_reg1, n_reg1, n_radis))
relation2 = np.zeros((n_reg2, n_reg2, n_radis))
for i in range(n_radis):
info_ret = np.ones(n)*radis[i]
relation1[:, :, i] = regions_relation_points(locs, regions, retriever, info_ret)
relation2[:, :, i] = regions_relation_points(locs2, regions2, retriever, info_ret)
|
<commit_before><commit_msg>Test for spatial relations between regions.<commit_after>
|
## Imports
import numpy as np
import matplotlib.pyplot as plt
from pySpatialTools.Retrieve import GridSpatialDisc, CircRetriever
from pySpatialTools.Spatial_Relations.region_spatial_relations import regions_relation_points
## Parameters
n = 10000
ngx, ngy = 100, 100
## Artificial distribution in space
locs = np.random.random((n, 2))
locs2 = np.array((locs[:, 0]*np.cos(locs[:, 1]*2*np.pi), locs[:, 0]*np.sin(locs[:, 1]*np.pi*2))).T
# Test distributions
fig1 = plt.plot(locs[:,0], locs[:, 1], '.')
fig2 = plt.plot(locs2[:,0], locs2[:, 1], '.')
## Discretization
disc = GridSpatialDisc((ngx, ngy), xlim=(0, 1), ylim=(0, 1))
regions = disc.map2id(locs)
disc2 = GridSpatialDisc((ngx, ngy), xlim=(-1, 1), ylim=(-1, 1))
regions2 = disc2.map2id(locs2)
## Spatial relations
retriever = CircRetriever(locs, True)
radis = [0.01, 0.05, 0.1, 0.25, 0.5]
n_radis = len(radis)
n_reg1, n_reg2 = np.unique(regions).shape[0], np.unique(regions2).shape[0]
relation1 = np.zeros((n_reg1, n_reg1, n_radis))
relation2 = np.zeros((n_reg2, n_reg2, n_radis))
for i in range(n_radis):
info_ret = np.ones(n)*radis[i]
relation1[:, :, i] = regions_relation_points(locs, regions, retriever, info_ret)
relation2[:, :, i] = regions_relation_points(locs2, regions2, retriever, info_ret)
|
Test for spatial relations between regions.
## Imports
import numpy as np
import matplotlib.pyplot as plt
from pySpatialTools.Retrieve import GridSpatialDisc, CircRetriever
from pySpatialTools.Spatial_Relations.region_spatial_relations import regions_relation_points
## Parameters
n = 10000
ngx, ngy = 100, 100
## Artificial distribution in space
locs = np.random.random((n, 2))
locs2 = np.array((locs[:, 0]*np.cos(locs[:, 1]*2*np.pi), locs[:, 0]*np.sin(locs[:, 1]*np.pi*2))).T
# Test distributions
fig1 = plt.plot(locs[:,0], locs[:, 1], '.')
fig2 = plt.plot(locs2[:,0], locs2[:, 1], '.')
## Discretization
disc = GridSpatialDisc((ngx, ngy), xlim=(0, 1), ylim=(0, 1))
regions = disc.map2id(locs)
disc2 = GridSpatialDisc((ngx, ngy), xlim=(-1, 1), ylim=(-1, 1))
regions2 = disc2.map2id(locs2)
## Spatial relations
retriever = CircRetriever(locs, True)
radis = [0.01, 0.05, 0.1, 0.25, 0.5]
n_radis = len(radis)
n_reg1, n_reg2 = np.unique(regions).shape[0], np.unique(regions2).shape[0]
relation1 = np.zeros((n_reg1, n_reg1, n_radis))
relation2 = np.zeros((n_reg2, n_reg2, n_radis))
for i in range(n_radis):
info_ret = np.ones(n)*radis[i]
relation1[:, :, i] = regions_relation_points(locs, regions, retriever, info_ret)
relation2[:, :, i] = regions_relation_points(locs2, regions2, retriever, info_ret)
|
<commit_before><commit_msg>Test for spatial relations between regions.<commit_after>
## Imports
import numpy as np
import matplotlib.pyplot as plt
from pySpatialTools.Retrieve import GridSpatialDisc, CircRetriever
from pySpatialTools.Spatial_Relations.region_spatial_relations import regions_relation_points
## Parameters
n = 10000
ngx, ngy = 100, 100
## Artificial distribution in space
locs = np.random.random((n, 2))
locs2 = np.array((locs[:, 0]*np.cos(locs[:, 1]*2*np.pi), locs[:, 0]*np.sin(locs[:, 1]*np.pi*2))).T
# Test distributions
fig1 = plt.plot(locs[:,0], locs[:, 1], '.')
fig2 = plt.plot(locs2[:,0], locs2[:, 1], '.')
## Discretization
disc = GridSpatialDisc((ngx, ngy), xlim=(0, 1), ylim=(0, 1))
regions = disc.map2id(locs)
disc2 = GridSpatialDisc((ngx, ngy), xlim=(-1, 1), ylim=(-1, 1))
regions2 = disc2.map2id(locs2)
## Spatial relations
retriever = CircRetriever(locs, True)
radis = [0.01, 0.05, 0.1, 0.25, 0.5]
n_radis = len(radis)
n_reg1, n_reg2 = np.unique(regions).shape[0], np.unique(regions2).shape[0]
relation1 = np.zeros((n_reg1, n_reg1, n_radis))
relation2 = np.zeros((n_reg2, n_reg2, n_radis))
for i in range(n_radis):
info_ret = np.ones(n)*radis[i]
relation1[:, :, i] = regions_relation_points(locs, regions, retriever, info_ret)
relation2[:, :, i] = regions_relation_points(locs2, regions2, retriever, info_ret)
|
|
89ed35bf6215ba2fa71640eb6d5117f3a5fe5018
|
stix2matcher/test/test_comparison_exprs.py
|
stix2matcher/test/test_comparison_exprs.py
|
import pytest
from stix2matcher.matcher import match
_observations = [
{
"type": "observed-data",
"number_observed": 1,
"first_observed": "2004-11-26T11:42:29Z",
"objects": {
"0": {
"type": u"person",
"name": u"alice",
"age": 10
},
"1": {
"type": u"person",
"name": u"bob",
"age": 15
}
}
}
]
@pytest.mark.parametrize("pattern", [
"[person:name = 'alice' and person:age < 20]",
"[person:name = 'alice' or person:age > 20]",
"[person:name = 'alice' or person:age > 1000 and person:age < 0]",
"[(person:name = 'carol' or person:name = 'bob') and person:age > 10]",
"[(person:name = 'darlene' or person:name = 'carol') and person:age < 0 or person:age > 5]"
])
def test_comparison_and_or_match(pattern):
assert match(pattern, _observations)
@pytest.mark.parametrize("pattern", [
"[person:name = 'alice' and person:age > 10]",
"[person:name = 'carol' or person:age > 20]",
"[(person:age = 'alice' or person:age > 1000) and person:age < 0]",
"[(person:name = 'darlene' or person:name = 'carol') and (person:age < 0 or person:age > 5)]"
])
def test_comparison_and_or_nomatch(pattern):
assert not match(pattern, _observations)
|
Add some tests for comparison expressions: and, or, and order of operations.
|
Add some tests for comparison expressions: and, or, and order
of operations.
|
Python
|
bsd-3-clause
|
chisholm/cti-pattern-matcher,oasis-open/cti-pattern-matcher
|
Add some tests for comparison expressions: and, or, and order
of operations.
|
import pytest
from stix2matcher.matcher import match
_observations = [
{
"type": "observed-data",
"number_observed": 1,
"first_observed": "2004-11-26T11:42:29Z",
"objects": {
"0": {
"type": u"person",
"name": u"alice",
"age": 10
},
"1": {
"type": u"person",
"name": u"bob",
"age": 15
}
}
}
]
@pytest.mark.parametrize("pattern", [
"[person:name = 'alice' and person:age < 20]",
"[person:name = 'alice' or person:age > 20]",
"[person:name = 'alice' or person:age > 1000 and person:age < 0]",
"[(person:name = 'carol' or person:name = 'bob') and person:age > 10]",
"[(person:name = 'darlene' or person:name = 'carol') and person:age < 0 or person:age > 5]"
])
def test_comparison_and_or_match(pattern):
assert match(pattern, _observations)
@pytest.mark.parametrize("pattern", [
"[person:name = 'alice' and person:age > 10]",
"[person:name = 'carol' or person:age > 20]",
"[(person:age = 'alice' or person:age > 1000) and person:age < 0]",
"[(person:name = 'darlene' or person:name = 'carol') and (person:age < 0 or person:age > 5)]"
])
def test_comparison_and_or_nomatch(pattern):
assert not match(pattern, _observations)
|
<commit_before><commit_msg>Add some tests for comparison expressions: and, or, and order
of operations.<commit_after>
|
import pytest
from stix2matcher.matcher import match
_observations = [
{
"type": "observed-data",
"number_observed": 1,
"first_observed": "2004-11-26T11:42:29Z",
"objects": {
"0": {
"type": u"person",
"name": u"alice",
"age": 10
},
"1": {
"type": u"person",
"name": u"bob",
"age": 15
}
}
}
]
@pytest.mark.parametrize("pattern", [
"[person:name = 'alice' and person:age < 20]",
"[person:name = 'alice' or person:age > 20]",
"[person:name = 'alice' or person:age > 1000 and person:age < 0]",
"[(person:name = 'carol' or person:name = 'bob') and person:age > 10]",
"[(person:name = 'darlene' or person:name = 'carol') and person:age < 0 or person:age > 5]"
])
def test_comparison_and_or_match(pattern):
assert match(pattern, _observations)
@pytest.mark.parametrize("pattern", [
"[person:name = 'alice' and person:age > 10]",
"[person:name = 'carol' or person:age > 20]",
"[(person:age = 'alice' or person:age > 1000) and person:age < 0]",
"[(person:name = 'darlene' or person:name = 'carol') and (person:age < 0 or person:age > 5)]"
])
def test_comparison_and_or_nomatch(pattern):
assert not match(pattern, _observations)
|
Add some tests for comparison expressions: and, or, and order
of operations.import pytest
from stix2matcher.matcher import match
_observations = [
{
"type": "observed-data",
"number_observed": 1,
"first_observed": "2004-11-26T11:42:29Z",
"objects": {
"0": {
"type": u"person",
"name": u"alice",
"age": 10
},
"1": {
"type": u"person",
"name": u"bob",
"age": 15
}
}
}
]
@pytest.mark.parametrize("pattern", [
"[person:name = 'alice' and person:age < 20]",
"[person:name = 'alice' or person:age > 20]",
"[person:name = 'alice' or person:age > 1000 and person:age < 0]",
"[(person:name = 'carol' or person:name = 'bob') and person:age > 10]",
"[(person:name = 'darlene' or person:name = 'carol') and person:age < 0 or person:age > 5]"
])
def test_comparison_and_or_match(pattern):
assert match(pattern, _observations)
@pytest.mark.parametrize("pattern", [
"[person:name = 'alice' and person:age > 10]",
"[person:name = 'carol' or person:age > 20]",
"[(person:age = 'alice' or person:age > 1000) and person:age < 0]",
"[(person:name = 'darlene' or person:name = 'carol') and (person:age < 0 or person:age > 5)]"
])
def test_comparison_and_or_nomatch(pattern):
assert not match(pattern, _observations)
|
<commit_before><commit_msg>Add some tests for comparison expressions: and, or, and order
of operations.<commit_after>import pytest
from stix2matcher.matcher import match
_observations = [
{
"type": "observed-data",
"number_observed": 1,
"first_observed": "2004-11-26T11:42:29Z",
"objects": {
"0": {
"type": u"person",
"name": u"alice",
"age": 10
},
"1": {
"type": u"person",
"name": u"bob",
"age": 15
}
}
}
]
@pytest.mark.parametrize("pattern", [
"[person:name = 'alice' and person:age < 20]",
"[person:name = 'alice' or person:age > 20]",
"[person:name = 'alice' or person:age > 1000 and person:age < 0]",
"[(person:name = 'carol' or person:name = 'bob') and person:age > 10]",
"[(person:name = 'darlene' or person:name = 'carol') and person:age < 0 or person:age > 5]"
])
def test_comparison_and_or_match(pattern):
assert match(pattern, _observations)
@pytest.mark.parametrize("pattern", [
"[person:name = 'alice' and person:age > 10]",
"[person:name = 'carol' or person:age > 20]",
"[(person:age = 'alice' or person:age > 1000) and person:age < 0]",
"[(person:name = 'darlene' or person:name = 'carol') and (person:age < 0 or person:age > 5)]"
])
def test_comparison_and_or_nomatch(pattern):
assert not match(pattern, _observations)
|
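Several of these patterns hinge on AND binding tighter than OR. A small sketch of that equivalence, reusing match and the _observations fixture above (the explicitly grouped pattern is invented for illustration):

from stix2matcher.matcher import match  # _observations as defined in the test above

implicit = "[person:name = 'alice' or person:age > 1000 and person:age < 0]"
explicit = "[person:name = 'alice' or (person:age > 1000 and person:age < 0)]"
assert match(implicit, _observations)  # 'alice' satisfies the OR's left side
assert match(explicit, _observations)  # explicit grouping, same result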
|
c7ba3c4340c73ef5765bc883b500dc5188d43189
|
android/runtime/v8/tools/genRequireIndex.py
|
android/runtime/v8/tools/genRequireIndex.py
|
#! /usr/bin/env python
# Generates an index file used by require
# to test if a file exists in the assets folder.
# Usage: genRequireIndex.py <rootDirectory> <outputFile>
# rootDirectory = path to the directory being indexed
# (Should be path to the app's assets folder)
# outputFile = path where the JSON index file should be written.
import json
from os.path import join, relpath
from os import walk
import sys
rootDirectory = sys.argv[1]
outputFilename = sys.argv[2]
index = {}
for dirpath, dirnames, filenames in walk(rootDirectory):
for name in filenames:
index[join(relpath(dirpath, rootDirectory), name)] = 1
json.dump(index, open(outputFilename, "w"))
|
Add script to generate file index for Require.
|
Add script to generate file index for Require.
|
Python
|
apache-2.0
|
ashcoding/titanium_mobile,KoketsoMabuela92/titanium_mobile,taoger/titanium_mobile,csg-coder/titanium_mobile,emilyvon/titanium_mobile,KangaCoders/titanium_mobile,perdona/titanium_mobile,linearhub/titanium_mobile,formalin14/titanium_mobile,cheekiatng/titanium_mobile,bright-sparks/titanium_mobile,pinnamur/titanium_mobile,KangaCoders/titanium_mobile,KoketsoMabuela92/titanium_mobile,pinnamur/titanium_mobile,perdona/titanium_mobile,pec1985/titanium_mobile,taoger/titanium_mobile,openbaoz/titanium_mobile,hieupham007/Titanium_Mobile,KoketsoMabuela92/titanium_mobile,KangaCoders/titanium_mobile,pinnamur/titanium_mobile,ashcoding/titanium_mobile,emilyvon/titanium_mobile,bright-sparks/titanium_mobile,cheekiatng/titanium_mobile,peymanmortazavi/titanium_mobile,taoger/titanium_mobile,rblalock/titanium_mobile,csg-coder/titanium_mobile,csg-coder/titanium_mobile,jhaynie/titanium_mobile,pinnamur/titanium_mobile,sriks/titanium_mobile,pinnamur/titanium_mobile,AngelkPetkov/titanium_mobile,pec1985/titanium_mobile,KangaCoders/titanium_mobile,KoketsoMabuela92/titanium_mobile,sriks/titanium_mobile,openbaoz/titanium_mobile,falkolab/titanium_mobile,shopmium/titanium_mobile,rblalock/titanium_mobile,rblalock/titanium_mobile,csg-coder/titanium_mobile,jvkops/titanium_mobile,FokkeZB/titanium_mobile,falkolab/titanium_mobile,falkolab/titanium_mobile,csg-coder/titanium_mobile,bhatfield/titanium_mobile,smit1625/titanium_mobile,ashcoding/titanium_mobile,bhatfield/titanium_mobile,formalin14/titanium_mobile,mvitr/titanium_mobile,openbaoz/titanium_mobile,mvitr/titanium_mobile,indera/titanium_mobile,shopmium/titanium_mobile,indera/titanium_mobile,formalin14/titanium_mobile,kopiro/titanium_mobile,kopiro/titanium_mobile,mvitr/titanium_mobile,mvitr/titanium_mobile,sriks/titanium_mobile,rblalock/titanium_mobile,pinnamur/titanium_mobile,kopiro/titanium_mobile,smit1625/titanium_mobile,cheekiatng/titanium_mobile,pinnamur/titanium_mobile,taoger/titanium_mobile,shopmium/titanium_mobile,smit1625/titanium_mobile,jvkops/titanium_mobile,smit1625/titanium_mobile,emilyvon/titanium_mobile,perdona/titanium_mobile,sriks/titanium_mobile,falkolab/titanium_mobile,openbaoz/titanium_mobile,sriks/titanium_mobile,rblalock/titanium_mobile,indera/titanium_mobile,formalin14/titanium_mobile,peymanmortazavi/titanium_mobile,linearhub/titanium_mobile,KangaCoders/titanium_mobile,csg-coder/titanium_mobile,AngelkPetkov/titanium_mobile,jhaynie/titanium_mobile,cheekiatng/titanium_mobile,FokkeZB/titanium_mobile,benbahrenburg/titanium_mobile,shopmium/titanium_mobile,indera/titanium_mobile,linearhub/titanium_mobile,bright-sparks/titanium_mobile,falkolab/titanium_mobile,collinprice/titanium_mobile,smit1625/titanium_mobile,collinprice/titanium_mobile,peymanmortazavi/titanium_mobile,FokkeZB/titanium_mobile,collinprice/titanium_mobile,FokkeZB/titanium_mobile,hieupham007/Titanium_Mobile,jhaynie/titanium_mobile,kopiro/titanium_mobile,pec1985/titanium_mobile,formalin14/titanium_mobile,ashcoding/titanium_mobile,benbahrenburg/titanium_mobile,bhatfield/titanium_mobile,indera/titanium_mobile,ashcoding/titanium_mobile,bhatfield/titanium_mobile,formalin14/titanium_mobile,perdona/titanium_mobile,csg-coder/titanium_mobile,AngelkPetkov/titanium_mobile,mvitr/titanium_mobile,AngelkPetkov/titanium_mobile,mano-mykingdom/titanium_mobile,emilyvon/titanium_mobile,linearhub/titanium_mobile,perdona/titanium_mobile,jvkops/titanium_mobile,csg-coder/titanium_mobile,pec1985/titanium_mobile,mano-mykingdom/titanium_mobile,FokkeZB/titanium_mobile,openbaoz/titanium_mobile,prop/titanium_mobile,collinprice
/titanium_mobile,collinprice/titanium_mobile,linearhub/titanium_mobile,pec1985/titanium_mobile,benbahrenburg/titanium_mobile,bright-sparks/titanium_mobile,sriks/titanium_mobile,mano-mykingdom/titanium_mobile,perdona/titanium_mobile,formalin14/titanium_mobile,shopmium/titanium_mobile,openbaoz/titanium_mobile,AngelkPetkov/titanium_mobile,prop/titanium_mobile,taoger/titanium_mobile,emilyvon/titanium_mobile,AngelkPetkov/titanium_mobile,benbahrenburg/titanium_mobile,FokkeZB/titanium_mobile,linearhub/titanium_mobile,formalin14/titanium_mobile,peymanmortazavi/titanium_mobile,perdona/titanium_mobile,hieupham007/Titanium_Mobile,kopiro/titanium_mobile,jhaynie/titanium_mobile,cheekiatng/titanium_mobile,pec1985/titanium_mobile,falkolab/titanium_mobile,kopiro/titanium_mobile,kopiro/titanium_mobile,falkolab/titanium_mobile,KoketsoMabuela92/titanium_mobile,bright-sparks/titanium_mobile,jvkops/titanium_mobile,FokkeZB/titanium_mobile,emilyvon/titanium_mobile,bhatfield/titanium_mobile,emilyvon/titanium_mobile,benbahrenburg/titanium_mobile,bright-sparks/titanium_mobile,hieupham007/Titanium_Mobile,collinprice/titanium_mobile,jhaynie/titanium_mobile,bright-sparks/titanium_mobile,indera/titanium_mobile,collinprice/titanium_mobile,cheekiatng/titanium_mobile,hieupham007/Titanium_Mobile,AngelkPetkov/titanium_mobile,rblalock/titanium_mobile,prop/titanium_mobile,FokkeZB/titanium_mobile,peymanmortazavi/titanium_mobile,jhaynie/titanium_mobile,taoger/titanium_mobile,indera/titanium_mobile,taoger/titanium_mobile,mano-mykingdom/titanium_mobile,pec1985/titanium_mobile,KangaCoders/titanium_mobile,openbaoz/titanium_mobile,peymanmortazavi/titanium_mobile,ashcoding/titanium_mobile,prop/titanium_mobile,ashcoding/titanium_mobile,linearhub/titanium_mobile,perdona/titanium_mobile,collinprice/titanium_mobile,mvitr/titanium_mobile,pinnamur/titanium_mobile,shopmium/titanium_mobile,openbaoz/titanium_mobile,mvitr/titanium_mobile,peymanmortazavi/titanium_mobile,bhatfield/titanium_mobile,jvkops/titanium_mobile,pec1985/titanium_mobile,mano-mykingdom/titanium_mobile,indera/titanium_mobile,benbahrenburg/titanium_mobile,rblalock/titanium_mobile,jhaynie/titanium_mobile,mano-mykingdom/titanium_mobile,hieupham007/Titanium_Mobile,jvkops/titanium_mobile,hieupham007/Titanium_Mobile,hieupham007/Titanium_Mobile,jvkops/titanium_mobile,smit1625/titanium_mobile,bhatfield/titanium_mobile,mvitr/titanium_mobile,falkolab/titanium_mobile,jvkops/titanium_mobile,peymanmortazavi/titanium_mobile,shopmium/titanium_mobile,sriks/titanium_mobile,pec1985/titanium_mobile,mano-mykingdom/titanium_mobile,kopiro/titanium_mobile,KoketsoMabuela92/titanium_mobile,AngelkPetkov/titanium_mobile,bhatfield/titanium_mobile,jhaynie/titanium_mobile,KoketsoMabuela92/titanium_mobile,prop/titanium_mobile,pinnamur/titanium_mobile,mano-mykingdom/titanium_mobile,linearhub/titanium_mobile,prop/titanium_mobile,KoketsoMabuela92/titanium_mobile,bright-sparks/titanium_mobile,KangaCoders/titanium_mobile,smit1625/titanium_mobile,ashcoding/titanium_mobile,prop/titanium_mobile,shopmium/titanium_mobile,prop/titanium_mobile,KangaCoders/titanium_mobile,sriks/titanium_mobile,benbahrenburg/titanium_mobile,smit1625/titanium_mobile,rblalock/titanium_mobile,cheekiatng/titanium_mobile,emilyvon/titanium_mobile,cheekiatng/titanium_mobile,benbahrenburg/titanium_mobile,taoger/titanium_mobile
|
Add script to generate file index for Require.
|
#! /usr/bin/env python
# Generates an index file used by require
# to test if a file exists in the assets folder.
# Usage: genRequireIndex.py <rootDirectory> <outputFile>
# rootDirectory = path to the directory being indexed
# (Should be path to the app's assets folder)
# outputFile = path where the JSON index file should be written.
import json
from os.path import join, relpath
from os import walk
import sys
rootDirectory = sys.argv[1]
outputFilename = sys.argv[2]
index = {}
for dirpath, dirnames, filenames in walk(rootDirectory):
for name in filenames:
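        # value is a dummy; require only tests key membership, so the dict works as a set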
index[join(relpath(dirpath, rootDirectory), name)] = 1
json.dump(index, open(outputFilename, "w"))
|
<commit_before><commit_msg>Add script to generate file index for Require.<commit_after>
|
#! /usr/bin/env python
# Generates an index file used by require
# to test if a file exists in the assets folder.
# Usage: genRequireIndex.py <rootDirectory> <outputFile>
# rootDirectory = path to the directory being indexed
# (Should be path to the app's assets folder)
# outputFile = path where the JSON index file should be written.
import json
from os.path import join, relpath
from os import walk
import sys
rootDirectory = sys.argv[1]
outputFilename = sys.argv[2]
index = {}
for dirpath, dirnames, filenames in walk(rootDirectory):
for name in filenames:
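        # value is a dummy; require only tests key membership, so the dict works as a set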
index[join(relpath(dirpath, rootDirectory), name)] = 1
json.dump(index, open(outputFilename, "w"))
|
Add script to generate file index for Require.#! /usr/bin/env python
# Generates an index file used by require
# to test if a file exists in the assets folder.
# Usage: genRequireIndex.py <rootDirectory> <outputFile>
# rootDirectory = path to the directory being indexed
# (Should be path to the app's assets folder)
# outputFile = path where the JSON index file should be written.
import json
from os.path import join, relpath
from os import walk
import sys
rootDirectory = sys.argv[1]
outputFilename = sys.argv[2]
index = {}
for dirpath, dirnames, filenames in walk(rootDirectory):
for name in filenames:
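        # value is a dummy; require only tests key membership, so the dict works as a set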
index[join(relpath(dirpath, rootDirectory), name)] = 1
json.dump(index, open(outputFilename, "w"))
|
<commit_before><commit_msg>Add script to generate file index for Require.<commit_after>#! /usr/bin/env python
# Generates an index file used by require
# to test if a file exists in the assets folder.
# Usage: genRequireIndex.py <rootDirectory> <outputFile>
# rootDirectory = path to the directory being indexed
# (Should be path to the app's assets folder)
# outputFile = path where the JSON index file should be written.
import json
from os.path import join, relpath
from os import walk
import sys
rootDirectory = sys.argv[1]
outputFilename = sys.argv[2]
index = {}
for dirpath, dirnames, filenames in walk(rootDirectory):
for name in filenames:
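        # value is a dummy; require only tests key membership, so the dict works as a set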
index[join(relpath(dirpath, rootDirectory), name)] = 1
json.dump(index, open(outputFilename, "w"))
|
|
033d46a8157146027f2fe4bc94c6e1ae6ba49f6a
|
tri/photo.py
|
tri/photo.py
|
#!/usr/bin/python
import re
from os import listdir, makedirs, link, walk, rename
from os.path import isdir,isfile
cam_directory = '/home/poclement/Dropbox/Camera Uploads'
file_list = listdir(cam_directory)
archive_directory = ('/home/poclement/Dropbox/Images/Archive_Camera')
for f in file_list:
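    # camera uploads are named like "2015-03-01 12.34.56.jpg"; group 1 (YYYY-MM) names the archive subfolder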
    m = re.search(r'^(\d{4}-\d{2})-\d{2} \d{2}\.\d{2}\.\d{2}\.(jpg|jpeg|png|mp4)$', f)
if m:
folder = ''.join((archive_directory,'/',m.group(1),'/'))
if not isdir(folder):
makedirs(folder)
dst= ''.join((folder,m.group(0)))
src = ''.join((cam_directory,'/',f))
print(src,' ',dst)
if not isfile(dst):
rename(src,dst)
|
Add Camera upload sorting script
|
Add Camera upload sorting script
|
Python
|
mit
|
riyoth/Script,riyoth/Script
|
Add Camera upload sorting script
|
#!/usr/bin/python
import re
from os import listdir, makedirs, link, walk, rename
from os.path import isdir,isfile
cam_directory = '/home/poclement/Dropbox/Camera Uploads'
file_list = listdir(cam_directory)
archive_directory = ('/home/poclement/Dropbox/Images/Archive_Camera')
for f in file_list:
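    # camera uploads are named like "2015-03-01 12.34.56.jpg"; group 1 (YYYY-MM) names the archive subfolder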
    m = re.search(r'^(\d{4}-\d{2})-\d{2} \d{2}\.\d{2}\.\d{2}\.(jpg|jpeg|png|mp4)$', f)
if m:
folder = ''.join((archive_directory,'/',m.group(1),'/'))
if not isdir(folder):
makedirs(folder)
dst= ''.join((folder,m.group(0)))
src = ''.join((cam_directory,'/',f))
print(src,' ',dst)
if not isfile(dst):
rename(src,dst)
|
<commit_before><commit_msg>Add Camera upload sorting script<commit_after>
|
#!/usr/bin/python
import re
from os import listdir, makedirs, link, walk, rename
from os.path import isdir,isfile
cam_directory = '/home/poclement/Dropbox/Camera Uploads'
file_list = listdir(cam_directory)
archive_directory = ('/home/poclement/Dropbox/Images/Archive_Camera')
for f in file_list:
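    # camera uploads are named like "2015-03-01 12.34.56.jpg"; group 1 (YYYY-MM) names the archive subfolder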
    m = re.search(r'^(\d{4}-\d{2})-\d{2} \d{2}\.\d{2}\.\d{2}\.(jpg|jpeg|png|mp4)$', f)
if m:
folder = ''.join((archive_directory,'/',m.group(1),'/'))
if not isdir(folder):
makedirs(folder)
dst= ''.join((folder,m.group(0)))
src = ''.join((cam_directory,'/',f))
print(src,' ',dst)
if not isfile(dst):
rename(src,dst)
|
Add Camera upload sorting script#!/usr/bin/python
import re
from os import listdir, makedirs, link, walk, rename
from os.path import isdir,isfile
cam_directory = '/home/poclement/Dropbox/Camera Uploads'
file_list = listdir(cam_directory)
archive_directory = ('/home/poclement/Dropbox/Images/Archive_Camera')
for f in file_list:
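    # camera uploads are named like "2015-03-01 12.34.56.jpg"; group 1 (YYYY-MM) names the archive subfolder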
    m = re.search(r'^(\d{4}-\d{2})-\d{2} \d{2}\.\d{2}\.\d{2}\.(jpg|jpeg|png|mp4)$', f)
if m:
folder = ''.join((archive_directory,'/',m.group(1),'/'))
if not isdir(folder):
makedirs(folder)
dst= ''.join((folder,m.group(0)))
src = ''.join((cam_directory,'/',f))
print(src,' ',dst)
if not isfile(dst):
rename(src,dst)
|
<commit_before><commit_msg>Add Camera upload sorting script<commit_after>#!/usr/bin/python
import re
from os import listdir, makedirs, link, walk, rename
from os.path import isdir,isfile
cam_directory = '/home/poclement/Dropbox/Camera Uploads'
file_list = listdir(cam_directory)
archive_directory = ('/home/poclement/Dropbox/Images/Archive_Camera')
for f in file_list:
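    # camera uploads are named like "2015-03-01 12.34.56.jpg"; group 1 (YYYY-MM) names the archive subfolder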
    m = re.search(r'^(\d{4}-\d{2})-\d{2} \d{2}\.\d{2}\.\d{2}\.(jpg|jpeg|png|mp4)$', f)
if m:
folder = ''.join((archive_directory,'/',m.group(1),'/'))
if not isdir(folder):
makedirs(folder)
dst= ''.join((folder,m.group(0)))
src = ''.join((cam_directory,'/',f))
print(src,' ',dst)
if not isfile(dst):
rename(src,dst)
|
|
c948e8cc3fde47fac0ed9604baba741ee961043c
|
projecteuler/numbertypes.py
|
projecteuler/numbertypes.py
|
def pentagonal(n):
"""Returns the n-th pentagonal number"""
return n*(3*n-1)/2
def ispentagonal(P):
"""Returns true if P is pentagonal.
    Solving P(n) = P for n (quadratic formula) gives
    n = (sqrt(1+24P)+1)/6
    If P is pentagonal, n is an int.
    1. sqrt must return an integer
    2. sqrt + 1 must be divisible by 6
"""
from gmpy2 import is_square, sqrt
sq = 1 + 24*P
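    # e.g. P = 22: sq = 529, sqrt(sq) = 23, (23 + 1) % 6 == 0, so 22 is pentagonal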
return is_square(sq) and int(sqrt(sq)+1) % 6 == 0
def istriangular(t):
"""Returns true if t is triangular.
    See ispentagonal for an analogous explanation"""
from gmpy2 import is_square, sqrt
sq = 1 + 8*t
return is_square(sq) and int(sqrt(sq)-1) % 2 == 0
def triangular(n):
"""Gives the n-th triangle number."""
return n*(n+1)/2
def hexagonal(n):
"""Returns the n-th hexagonal number"""
return n*(2*n-1)
def ishexagonal(H):
"""Returns true if H is hexagonal
    See ispentagonal for an analogous explanation"""
from gmpy2 import is_square, sqrt
sq = 1 + 8*H
return is_square(sq) and int(sqrt(sq)+1) % 4 == 0
def palindrome(s):
"""Returns true if string s is a palindrome"""
l = len(s)
even = int(l % 2 == 0)
return s[:l//2] == s[-1:l//2-even:-1]
|
Rename number module to not crash NumPy
|
Rename number module to not crash NumPy
Ended up overriding a NumPy module called numbers.
|
Python
|
mit
|
bsamseth/project-euler,bsamseth/project-euler
|
Rename number module to not crash NumPy
Ended up overriding a NumPy module called numbers.
|
def pentagonal(n):
"""Returns the n-th pentagonal number"""
return n*(3*n-1)/2
def ispentagonal(P):
"""Returns true if P is pentagonal.
    Solving P(n) = P for n (quadratic formula) gives
    n = (sqrt(1+24P)+1)/6
    If P is pentagonal, n is an int.
    1. sqrt must return an integer
    2. sqrt + 1 must be divisible by 6
"""
from gmpy2 import is_square, sqrt
sq = 1 + 24*P
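    # e.g. P = 22: sq = 529, sqrt(sq) = 23, (23 + 1) % 6 == 0, so 22 is pentagonal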
return is_square(sq) and int(sqrt(sq)+1) % 6 == 0
def istriangular(t):
"""Returns true if t is triangular.
    See ispentagonal for an analogous explanation"""
from gmpy2 import is_square, sqrt
sq = 1 + 8*t
return is_square(sq) and int(sqrt(sq)-1) % 2 == 0
def triangular(n):
"""Gives the n-th triangle number."""
return n*(n+1)/2
def hexagonal(n):
"""Returns the n-th hexagonal number"""
return n*(2*n-1)
def ishexagonal(H):
"""Returns true if H is hexagonal
    See ispentagonal for an analogous explanation"""
from gmpy2 import is_square, sqrt
sq = 1 + 8*H
return is_square(sq) and int(sqrt(sq)+1) % 4 == 0
def palindrome(s):
"""Returns true if string s is a palindrome"""
l = len(s)
even = int(l % 2 == 0)
return s[:l//2] == s[-1:l//2-even:-1]
|
<commit_before><commit_msg>Rename number module to not crash NumPy
Ended up overriding a NumPy module called numbers.<commit_after>
|
def pentagonal(n):
"""Returns the n-th pentagonal number"""
return n*(3*n-1)/2
def ispentagonal(P):
"""Returns true if P is pentagonal.
    Solving P(n) = P for n (quadratic formula) gives
    n = (sqrt(1+24P)+1)/6
    If P is pentagonal, n is an int.
    1. sqrt must return an integer
    2. sqrt + 1 must be divisible by 6
"""
from gmpy2 import is_square, sqrt
sq = 1 + 24*P
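    # e.g. P = 22: sq = 529, sqrt(sq) = 23, (23 + 1) % 6 == 0, so 22 is pentagonal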
return is_square(sq) and int(sqrt(sq)+1) % 6 == 0
def istriangular(t):
"""Returns true if t is triangular.
    See ispentagonal for an analogous explanation"""
from gmpy2 import is_square, sqrt
sq = 1 + 8*t
return is_square(sq) and int(sqrt(sq)-1) % 2 == 0
def triangular(n):
"""Gives the n-th triangle number."""
return n*(n+1)/2
def hexagonal(n):
"""Returns the n-th hexagonal number"""
return n*(2*n-1)
def ishexagonal(H):
"""Returns true if H is hexagonal
    See ispentagonal for an analogous explanation"""
from gmpy2 import is_square, sqrt
sq = 1 + 8*H
return is_square(sq) and int(sqrt(sq)+1) % 4 == 0
def palindrome(s):
"""Returns true if string s is a palindrome"""
l = len(s)
even = int(l % 2 == 0)
return s[:l//2] == s[-1:l//2-even:-1]
|
Rename number module to not crash NumPy
Ended up overriding a NumPy module called numbers.def pentagonal(n):
"""Returns the n-th pentagonal number"""
return n*(3*n-1)/2
def ispentagonal(P):
"""Returns true if P is pentagonal.
    Solving P(n) = P for n (quadratic formula) gives
    n = (sqrt(1+24P)+1)/6
    If P is pentagonal, n is an int.
    1. sqrt must return an integer
    2. sqrt + 1 must be divisible by 6
"""
from gmpy2 import is_square, sqrt
sq = 1 + 24*P
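    # e.g. P = 22: sq = 529, sqrt(sq) = 23, (23 + 1) % 6 == 0, so 22 is pentagonal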
return is_square(sq) and int(sqrt(sq)+1) % 6 == 0
def istriangular(t):
"""Returns true if t is triangular.
    See ispentagonal for an analogous explanation"""
from gmpy2 import is_square, sqrt
sq = 1 + 8*t
return is_square(sq) and int(sqrt(sq)-1) % 2 == 0
def triangular(n):
"""Gives the n-th triangle number."""
return n*(n+1)/2
def hexagonal(n):
"""Returns the n-th hexagonal number"""
return n*(2*n-1)
def ishexagonal(H):
"""Returns true if H is hexagonal
    See ispentagonal for an analogous explanation"""
from gmpy2 import is_square, sqrt
sq = 1 + 8*H
return is_square(sq) and int(sqrt(sq)+1) % 4 == 0
def palindrome(s):
"""Returns true if string s is a palindrome"""
l = len(s)
even = int(l % 2 == 0)
return s[:l//2] == s[-1:l//2-even:-1]
|
<commit_before><commit_msg>Rename number module to not crash NumPy
Ended up overriding a NumPy module called numbers.<commit_after>def pentagonal(n):
"""Returns the n-th pentagonal number"""
return n*(3*n-1)/2
def ispentagonal(P):
"""Returns true if P is pentagonal.
    Solving P(n) = P for n (quadratic formula) gives
    n = (sqrt(1+24P)+1)/6
    If P is pentagonal, n is an int.
    1. sqrt must return an integer
    2. sqrt + 1 must be divisible by 6
"""
from gmpy2 import is_square, sqrt
sq = 1 + 24*P
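    # e.g. P = 22: sq = 529, sqrt(sq) = 23, (23 + 1) % 6 == 0, so 22 is pentagonal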
return is_square(sq) and int(sqrt(sq)+1) % 6 == 0
def istriangular(t):
"""Returns true if t is triangular.
    See ispentagonal for an analogous explanation"""
from gmpy2 import is_square, sqrt
sq = 1 + 8*t
return is_square(sq) and int(sqrt(sq)-1) % 2 == 0
def triangular(n):
"""Gives the n-th triangle number."""
return n*(n+1)/2
def hexagonal(n):
"""Returns the n-th hexagonal number"""
return n*(2*n-1)
def ishexagonal(H):
"""Returns true if H is hexagonal
    See ispentagonal for an analogous explanation"""
from gmpy2 import is_square, sqrt
sq = 1 + 8*H
return is_square(sq) and int(sqrt(sq)+1) % 4 == 0
def palindrome(s):
"""Returns true if string s is a palindrome"""
l = len(s)
even = int(l % 2 == 0)
return s[:l//2] == s[-1:l//2-even:-1]
|
|
2284acc100e1690bc649574f0116934e7f3bd689
|
py/poor-pigs.py
|
py/poor-pigs.py
|
from itertools import count
class Solution(object):
def poorPigs(self, buckets, minutesToDie, minutesToTest):
"""
:type buckets: int
:type minutesToDie: int
:type minutesToTest: int
:rtype: int
"""
        t = minutesToTest // minutesToDie
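        # each pig encodes one base-(t+1) digit: it can die after test 1..t or survive, so p pigs cover (t+1)**p buckets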
for pigs in count(0):
if (t + 1) ** pigs >= buckets:
return pigs
|
Add py solution for 458. Poor Pigs
|
Add py solution for 458. Poor Pigs
458. Poor Pigs: https://leetcode.com/problems/poor-pigs/
|
Python
|
apache-2.0
|
ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode
|
Add py solution for 458. Poor Pigs
458. Poor Pigs: https://leetcode.com/problems/poor-pigs/
|
from itertools import count
class Solution(object):
def poorPigs(self, buckets, minutesToDie, minutesToTest):
"""
:type buckets: int
:type minutesToDie: int
:type minutesToTest: int
:rtype: int
"""
        t = minutesToTest // minutesToDie
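        # each pig encodes one base-(t+1) digit: it can die after test 1..t or survive, so p pigs cover (t+1)**p buckets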
for pigs in count(0):
if (t + 1) ** pigs >= buckets:
return pigs
|
<commit_before><commit_msg>Add py solution for 458. Poor Pigs
458. Poor Pigs: https://leetcode.com/problems/poor-pigs/<commit_after>
|
from itertools import count
class Solution(object):
def poorPigs(self, buckets, minutesToDie, minutesToTest):
"""
:type buckets: int
:type minutesToDie: int
:type minutesToTest: int
:rtype: int
"""
        t = minutesToTest // minutesToDie
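        # each pig encodes one base-(t+1) digit: it can die after test 1..t or survive, so p pigs cover (t+1)**p buckets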
for pigs in count(0):
if (t + 1) ** pigs >= buckets:
return pigs
|
Add py solution for 458. Poor Pigs
458. Poor Pigs: https://leetcode.com/problems/poor-pigs/from itertools import count
class Solution(object):
def poorPigs(self, buckets, minutesToDie, minutesToTest):
"""
:type buckets: int
:type minutesToDie: int
:type minutesToTest: int
:rtype: int
"""
        t = minutesToTest // minutesToDie
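        # each pig encodes one base-(t+1) digit: it can die after test 1..t or survive, so p pigs cover (t+1)**p buckets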
for pigs in count(0):
if (t + 1) ** pigs >= buckets:
return pigs
|
<commit_before><commit_msg>Add py solution for 458. Poor Pigs
458. Poor Pigs: https://leetcode.com/problems/poor-pigs/<commit_after>from itertools import count
class Solution(object):
def poorPigs(self, buckets, minutesToDie, minutesToTest):
"""
:type buckets: int
:type minutesToDie: int
:type minutesToTest: int
:rtype: int
"""
        t = minutesToTest // minutesToDie
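        # each pig encodes one base-(t+1) digit: it can die after test 1..t or survive, so p pigs cover (t+1)**p buckets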
for pigs in count(0):
if (t + 1) ** pigs >= buckets:
return pigs
|
|
32b242cab9d3e9a83c69c90b155920a77a318ed7
|
programming/python/matplotlib/sin_and_cos_1.py
|
programming/python/matplotlib/sin_and_cos_1.py
|
# from http://www.labri.fr/perso/nrougier/teaching/matplotlib/
from pylab import *
X = np.linspace(-np.pi, np.pi, 256, endpoint=True)
C, S = np.cos(X), np.sin(X)
plot(X,C)
plot(X,S)
show()
|
Add another, simpler, matplotlib example
|
Add another, simpler, matplotlib example
|
Python
|
mit
|
claremacrae/raspi_code,claremacrae/raspi_code,claremacrae/raspi_code
|
Add another, simpler, matplotlib example
|
# from http://www.labri.fr/perso/nrougier/teaching/matplotlib/
from pylab import *
X = np.linspace(-np.pi, np.pi, 256, endpoint=True)
C, S = np.cos(X), np.sin(X)
plot(X,C)
plot(X,S)
show()
|
<commit_before><commit_msg>Add another, simpler, matplotlib example<commit_after>
|
# from http://www.labri.fr/perso/nrougier/teaching/matplotlib/
from pylab import *
X = np.linspace(-np.pi, np.pi, 256, endpoint=True)
C, S = np.cos(X), np.sin(X)
plot(X,C)
plot(X,S)
show()
|
Add another, simpler, matplotlib example# from http://www.labri.fr/perso/nrougier/teaching/matplotlib/
from pylab import *
X = np.linspace(-np.pi, np.pi, 256, endpoint=True)
C, S = np.cos(X), np.sin(X)
plot(X,C)
plot(X,S)
show()
|
<commit_before><commit_msg>Add another, simpler, matplotlib example<commit_after># from http://www.labri.fr/perso/nrougier/teaching/matplotlib/
from pylab import *
X = np.linspace(-np.pi, np.pi, 256, endpoint=True)
C, S = np.cos(X), np.sin(X)
plot(X,C)
plot(X,S)
show()
|
|
838201e987ea420cdb2726477f08fc93aae295e7
|
hs_core/management/commands/set_shareable.py
|
hs_core/management/commands/set_shareable.py
|
"""Set the shareable bit for a resource to True or False
This is a workaround for the fact that published, unshareable
resources can't be added to collections.
"""
from django.core.management.base import BaseCommand
from hs_core.models import BaseResource
from hs_core.hydroshare.utils import get_resource_by_shortkey
def check_shareable(rid, options):
try:
resource = get_resource_by_shortkey(rid, or_404=False)
except BaseResource.DoesNotExist:
print("{}: does not exist".format(rid))
return
print("{}: shareable bit is now {}".format(rid, resource.raccess.shareable))
if options['on'] and not options['off']:
print("{}: changing sharable bit to True".format(rid))
resource.raccess.shareable = True
resource.raccess.save()
elif options['off'] and not options['on']:
print("{}: changing sharable bit to False".format(rid))
resource.raccess.shareable = False
resource.raccess.save()
elif options['off'] and options['on']:
print("{}: conflicting options for shareable bit. No action taken.".format(rid))
class Command(BaseCommand):
help = "edit the shareable bit of a resource."
def add_arguments(self, parser):
# a list of resource id's: none does nothing.
parser.add_argument('resource_ids', nargs='*', type=str)
# Named (optional) arguments
parser.add_argument(
'--on',
action='store_true', # True for presence, False for absence
dest='on', # value is options['on']
help='turn shareable on',
)
parser.add_argument(
'--off',
action='store_true', # True for presence, False for absence
dest='off', # value is options['off']
help='turn shareable off'
)
def handle(self, *args, **options):
if len(options['resource_ids']) > 0: # an array of resource short_id to check.
for rid in options['resource_ids']:
check_shareable(rid, options)
else:
for resource in BaseResource.objects.all():
                check_shareable(resource.short_id, options)
|
Add command to set shareable bit
|
Add command to set shareable bit
|
Python
|
bsd-3-clause
|
hydroshare/hydroshare,hydroshare/hydroshare,hydroshare/hydroshare,hydroshare/hydroshare,hydroshare/hydroshare
|
Add command to set shareable bit
|
"""Set the shareable bit for a resource to True or False
This is a workaround for the fact that published, unshareable
resources can't be added to collections.
"""
from django.core.management.base import BaseCommand
from hs_core.models import BaseResource
from hs_core.hydroshare.utils import get_resource_by_shortkey
def check_shareable(rid, options):
try:
resource = get_resource_by_shortkey(rid, or_404=False)
except BaseResource.DoesNotExist:
print("{}: does not exist".format(rid))
return
print("{}: shareable bit is now {}".format(rid, resource.raccess.shareable))
if options['on'] and not options['off']:
print("{}: changing sharable bit to True".format(rid))
resource.raccess.shareable = True
resource.raccess.save()
elif options['off'] and not options['on']:
print("{}: changing sharable bit to False".format(rid))
resource.raccess.shareable = False
resource.raccess.save()
elif options['off'] and options['on']:
print("{}: conflicting options for shareable bit. No action taken.".format(rid))
class Command(BaseCommand):
help = "edit the shareable bit of a resource."
def add_arguments(self, parser):
# a list of resource id's: none does nothing.
parser.add_argument('resource_ids', nargs='*', type=str)
# Named (optional) arguments
parser.add_argument(
'--on',
action='store_true', # True for presence, False for absence
dest='on', # value is options['on']
help='turn shareable on',
)
parser.add_argument(
'--off',
action='store_true', # True for presence, False for absence
dest='off', # value is options['off']
help='turn shareable off'
)
def handle(self, *args, **options):
if len(options['resource_ids']) > 0: # an array of resource short_id to check.
for rid in options['resource_ids']:
check_shareable(rid, options)
else:
for resource in BaseResource.objects.all():
                check_shareable(resource.short_id, options)
|
<commit_before><commit_msg>Add command to set shareable bit<commit_after>
|
"""Set the shareable bit for a resource to True or False
This is a workaround for the fact that published, unshareable
resources can't be added to collections.
"""
from django.core.management.base import BaseCommand
from hs_core.models import BaseResource
from hs_core.hydroshare.utils import get_resource_by_shortkey
def check_shareable(rid, options):
try:
resource = get_resource_by_shortkey(rid, or_404=False)
except BaseResource.DoesNotExist:
print("{}: does not exist".format(rid))
return
print("{}: shareable bit is now {}".format(rid, resource.raccess.shareable))
if options['on'] and not options['off']:
print("{}: changing sharable bit to True".format(rid))
resource.raccess.shareable = True
resource.raccess.save()
elif options['off'] and not options['on']:
print("{}: changing sharable bit to False".format(rid))
resource.raccess.shareable = False
resource.raccess.save()
elif options['off'] and options['on']:
print("{}: conflicting options for shareable bit. No action taken.".format(rid))
class Command(BaseCommand):
help = "edit the shareable bit of a resource."
def add_arguments(self, parser):
# a list of resource id's: none does nothing.
parser.add_argument('resource_ids', nargs='*', type=str)
# Named (optional) arguments
parser.add_argument(
'--on',
action='store_true', # True for presence, False for absence
dest='on', # value is options['on']
help='turn shareable on',
)
parser.add_argument(
'--off',
action='store_true', # True for presence, False for absence
dest='off', # value is options['off']
help='turn shareable off'
)
def handle(self, *args, **options):
if len(options['resource_ids']) > 0: # an array of resource short_id to check.
for rid in options['resource_ids']:
check_shareable(rid, options)
else:
for resource in BaseResource.objects.all():
                check_shareable(resource.short_id, options)
|
Add command to set shareable bit"""Set the shareable bit for a resource to True or False
This is a workaround for the fact that published, unshareable
resources can't be added to collections.
"""
from django.core.management.base import BaseCommand
from hs_core.models import BaseResource
from hs_core.hydroshare.utils import get_resource_by_shortkey
def check_shareable(rid, options):
try:
resource = get_resource_by_shortkey(rid, or_404=False)
except BaseResource.DoesNotExist:
print("{}: does not exist".format(rid))
return
print("{}: shareable bit is now {}".format(rid, resource.raccess.shareable))
if options['on'] and not options['off']:
print("{}: changing sharable bit to True".format(rid))
resource.raccess.shareable = True
resource.raccess.save()
elif options['off'] and not options['on']:
print("{}: changing sharable bit to False".format(rid))
resource.raccess.shareable = False
resource.raccess.save()
elif options['off'] and options['on']:
print("{}: conflicting options for shareable bit. No action taken.".format(rid))
class Command(BaseCommand):
help = "edit the shareable bit of a resource."
def add_arguments(self, parser):
# a list of resource id's: none does nothing.
parser.add_argument('resource_ids', nargs='*', type=str)
# Named (optional) arguments
parser.add_argument(
'--on',
action='store_true', # True for presence, False for absence
dest='on', # value is options['on']
help='turn shareable on',
)
parser.add_argument(
'--off',
action='store_true', # True for presence, False for absence
dest='off', # value is options['off']
help='turn shareable off'
)
def handle(self, *args, **options):
if len(options['resource_ids']) > 0: # an array of resource short_id to check.
for rid in options['resource_ids']:
check_shareable(rid, options)
else:
for resource in BaseResource.objects.all():
                check_shareable(resource.short_id, options)
|
<commit_before><commit_msg>Add command to set shareable bit<commit_after>"""Set the shareable bit for a resource to True or False
This is a workaround for the fact that published, unshareable
resources can't be added to collections.
"""
from django.core.management.base import BaseCommand
from hs_core.models import BaseResource
from hs_core.hydroshare.utils import get_resource_by_shortkey
def check_shareable(rid, options):
try:
resource = get_resource_by_shortkey(rid, or_404=False)
except BaseResource.DoesNotExist:
print("{}: does not exist".format(rid))
return
print("{}: shareable bit is now {}".format(rid, resource.raccess.shareable))
if options['on'] and not options['off']:
print("{}: changing sharable bit to True".format(rid))
resource.raccess.shareable = True
resource.raccess.save()
elif options['off'] and not options['on']:
print("{}: changing sharable bit to False".format(rid))
resource.raccess.shareable = False
resource.raccess.save()
elif options['off'] and options['on']:
print("{}: conflicting options for shareable bit. No action taken.".format(rid))
class Command(BaseCommand):
help = "edit the shareable bit of a resource."
def add_arguments(self, parser):
# a list of resource id's: none does nothing.
parser.add_argument('resource_ids', nargs='*', type=str)
# Named (optional) arguments
parser.add_argument(
'--on',
action='store_true', # True for presence, False for absence
dest='on', # value is options['on']
help='turn shareable on',
)
parser.add_argument(
'--off',
action='store_true', # True for presence, False for absence
dest='off', # value is options['off']
help='turn shareable off'
)
def handle(self, *args, **options):
if len(options['resource_ids']) > 0: # an array of resource short_id to check.
for rid in options['resource_ids']:
check_shareable(rid, options)
else:
for resource in BaseResource.objects.all():
                check_shareable(resource.short_id, options)
|
|
f46a162ea225d4a962977120ad3f4bdb01777cad
|
giftwrap/tests/test_buildspec.py
|
giftwrap/tests/test_buildspec.py
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2014, John Dewey
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import tempfile
import unittest2 as unittest
import yaml
from giftwrap import build_spec
from giftwrap.settings import Settings
class TestBuildSpec(unittest.TestCase):
def test_build_spec(self):
manifest = {
'settings': {},
}
with tempfile.TemporaryFile(mode='w+') as tf:
version = '0'
yaml.safe_dump(manifest, tf)
tf.flush()
tf.seek(0)
bs = build_spec.BuildSpec(tf, version)
self.assertTrue(isinstance(bs.settings, Settings))
def test_build_spec_projects(self):
manifest = {
'settings': {},
'projects': [
{
'name': 'project1',
},
{
'name': 'project2',
},
],
}
with tempfile.TemporaryFile(mode='w+') as tf:
version = '99'
yaml.safe_dump(manifest, tf)
tf.flush()
tf.seek(0)
bs = build_spec.BuildSpec(tf, version)
self.assertEqual(2, len(bs.projects))
for project in bs.projects:
self.assertEqual('99', project.version)
|
Add basic test coverage for build_spec
|
Add basic test coverage for build_spec
This asserts the basic public surface area of the module
Change-Id: I97f5f228ed55dd0d0b748a47bd88c72c55500102
|
Python
|
apache-2.0
|
j2sol/giftwrap,j2sol/giftwrap
|
Add basic test coverage for build_spec
This asserts the basic public surface area of the module
Change-Id: I97f5f228ed55dd0d0b748a47bd88c72c55500102
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2014, John Dewey
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import tempfile
import unittest2 as unittest
import yaml
from giftwrap import build_spec
from giftwrap.settings import Settings
class TestBuildSpec(unittest.TestCase):
def test_build_spec(self):
manifest = {
'settings': {},
}
with tempfile.TemporaryFile(mode='w+') as tf:
version = '0'
yaml.safe_dump(manifest, tf)
tf.flush()
tf.seek(0)
bs = build_spec.BuildSpec(tf, version)
self.assertTrue(isinstance(bs.settings, Settings))
def test_build_spec_projects(self):
manifest = {
'settings': {},
'projects': [
{
'name': 'project1',
},
{
'name': 'project2',
},
],
}
with tempfile.TemporaryFile(mode='w+') as tf:
version = '99'
yaml.safe_dump(manifest, tf)
tf.flush()
tf.seek(0)
bs = build_spec.BuildSpec(tf, version)
self.assertEqual(2, len(bs.projects))
for project in bs.projects:
self.assertEqual('99', project.version)
|
<commit_before><commit_msg>Add basic test coverage for build_spec
This asserts the basic public surface area of the module
Change-Id: I97f5f228ed55dd0d0b748a47bd88c72c55500102<commit_after>
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2014, John Dewey
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import tempfile
import unittest2 as unittest
import yaml
from giftwrap import build_spec
from giftwrap.settings import Settings
class TestBuildSpec(unittest.TestCase):
def test_build_spec(self):
manifest = {
'settings': {},
}
with tempfile.TemporaryFile(mode='w+') as tf:
version = '0'
yaml.safe_dump(manifest, tf)
tf.flush()
tf.seek(0)
bs = build_spec.BuildSpec(tf, version)
self.assertTrue(isinstance(bs.settings, Settings))
def test_build_spec_projects(self):
manifest = {
'settings': {},
'projects': [
{
'name': 'project1',
},
{
'name': 'project2',
},
],
}
with tempfile.TemporaryFile(mode='w+') as tf:
version = '99'
yaml.safe_dump(manifest, tf)
tf.flush()
tf.seek(0)
bs = build_spec.BuildSpec(tf, version)
self.assertEqual(2, len(bs.projects))
for project in bs.projects:
self.assertEqual('99', project.version)
|
Add basic test coverage for build_spec
This asserts the basic public surface area of the module
Change-Id: I97f5f228ed55dd0d0b748a47bd88c72c55500102# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2014, John Dewey
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import tempfile
import unittest2 as unittest
import yaml
from giftwrap import build_spec
from giftwrap.settings import Settings
class TestBuildSpec(unittest.TestCase):
def test_build_spec(self):
manifest = {
'settings': {},
}
with tempfile.TemporaryFile(mode='w+') as tf:
version = '0'
yaml.safe_dump(manifest, tf)
tf.flush()
tf.seek(0)
bs = build_spec.BuildSpec(tf, version)
self.assertTrue(isinstance(bs.settings, Settings))
def test_build_spec_projects(self):
manifest = {
'settings': {},
'projects': [
{
'name': 'project1',
},
{
'name': 'project2',
},
],
}
with tempfile.TemporaryFile(mode='w+') as tf:
version = '99'
yaml.safe_dump(manifest, tf)
tf.flush()
tf.seek(0)
bs = build_spec.BuildSpec(tf, version)
self.assertEqual(2, len(bs.projects))
for project in bs.projects:
self.assertEqual('99', project.version)
|
<commit_before><commit_msg>Add basic test coverage for build_spec
This asserts the basic public surface area of the module
Change-Id: I97f5f228ed55dd0d0b748a47bd88c72c55500102<commit_after># vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2014, John Dewey
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import tempfile
import unittest2 as unittest
import yaml
from giftwrap import build_spec
from giftwrap.settings import Settings
class TestBuildSpec(unittest.TestCase):
def test_build_spec(self):
manifest = {
'settings': {},
}
with tempfile.TemporaryFile(mode='w+') as tf:
version = '0'
yaml.safe_dump(manifest, tf)
tf.flush()
tf.seek(0)
bs = build_spec.BuildSpec(tf, version)
self.assertTrue(isinstance(bs.settings, Settings))
def test_build_spec_projects(self):
manifest = {
'settings': {},
'projects': [
{
'name': 'project1',
},
{
'name': 'project2',
},
],
}
with tempfile.TemporaryFile(mode='w+') as tf:
version = '99'
yaml.safe_dump(manifest, tf)
tf.flush()
tf.seek(0)
bs = build_spec.BuildSpec(tf, version)
self.assertEqual(2, len(bs.projects))
for project in bs.projects:
self.assertEqual('99', project.version)
|
|
9738e06267d35adc96fa9344f752ea4ddb8f3272
|
pipeline/build-clusters.py
|
pipeline/build-clusters.py
|
import MySQLdb as mdb
input_file = "phylota_184_trees.tre"
out_dir = "clusters/"
db_host = "localhost"
db_user = "root"
db_password = "reelab14"
db_database = "phylota"
### Read in and parse the tree file output from the trees file
treefile = open(input_file, 'r')
raw_id = []
ids = []
for t in treefile:
tree_id = t.split("\t")
tree_id = tree_id[0].split("_")
tree_id[0] = tree_id[0][2:]
tree_id[1] = tree_id[1][2:]
ids.append(tree_id)
## connect to the database and set a cursor
database = mdb.connect(host = db_host, # your host, usually localhost
user = db_user, # your username
passwd = db_password, # your password
db = db_database) # name of the data base
cluster_db = database.cursor() # define the db cursor
count = 0 # status count
for i in ids:
# sql query to find the sequences that belong to the cluster / taxon that are present in the given tree
sql = "".join(["SELECT seqs.gi,seqs.seq FROM seqs LEFT JOIN ci_gi_184 ON seqs.gi=ci_gi_184.gi WHERE ci_gi_184.ti=", ids[count][0], " AND ci_gi_184.clustid = ", ids[count][1], " AND ci_gi_184.cl_type='subtree';"])
cluster_db.execute(sql) # execute the above sql query
record = cluster_db.fetchall() # fetch all records that meet the query criteria and place them in the list record
record = list(record) # convert tuple to list
# create a string that contains the appropriate filename for each FASTA file
filename = "".join([out_dir, "ti", ids[count][0], "_ci", ids[count][1], ".fas"])
f = open(filename, 'w+')
for r in record:
cur_record = list(r) # convert this element of the record list from tuple to a list
cur_record[0] = str(cur_record[0]) # convert the GID value from long to string
# join all elements of the cur_record list into a single string, added formatting for FASTA style
cur_record = "".join([">gi|", str(cur_record[0]), "\n", cur_record[1]])
f.write("%s\n" % cur_record)
print "Sequences for tree " + str(count) + " successfully written to " + filename + "."
count += 1
f.close()
|
Refactor from get-clusters. Changes for speed, clarity and use simplification.
|
Refactor from get-clusters.
Changes for speed, clarity and use simplification.
|
Python
|
bsd-3-clause
|
lcoghill/phyloboost,lcoghill/phyloboost,lcoghill/phyloboost
|
Refactor from get-clusters.
Changes for speed, clarity and use simplification.
|
import MySQLdb as mdb
input_file = "phylota_184_trees.tre"
out_dir = "clusters/"
db_host = "localhost"
db_user = "root"
db_password = "reelab14"
db_database = "phylota"
### Read in and parse the tree file output from the trees file
treefile = open(input_file, 'r')
raw_id = []
ids = []
for t in treefile:
tree_id = t.split("\t")
tree_id = tree_id[0].split("_")
tree_id[0] = tree_id[0][2:]
tree_id[1] = tree_id[1][2:]
ids.append(tree_id)
## connect to the database and set a cursor
database = mdb.connect(host = db_host, # your host, usually localhost
user = db_user, # your username
passwd = db_password, # your password
db = db_database) # name of the data base
cluster_db = database.cursor() # define the db cursor
count = 0 # status count
for i in ids:
# sql query to find the sequences that belong to the cluster / taxon that are present in the given tree
sql = "".join(["SELECT seqs.gi,seqs.seq FROM seqs LEFT JOIN ci_gi_184 ON seqs.gi=ci_gi_184.gi WHERE ci_gi_184.ti=", ids[count][0], " AND ci_gi_184.clustid = ", ids[count][1], " AND ci_gi_184.cl_type='subtree';"])
cluster_db.execute(sql) # execute the above sql query
record = cluster_db.fetchall() # fetch all records that meet the query criteria and place them in the list record
record = list(record) # convert tuple to list
# create a string that contains the appropriate filename for each FASTA file
filename = "".join([out_dir, "ti", ids[count][0], "_ci", ids[count][1], ".fas"])
f = open(filename, 'w+')
for r in record:
cur_record = list(r) # convert this element of the record list from tuple to a list
cur_record[0] = str(cur_record[0]) # convert the GID value from long to string
# join all elements of the cur_record list into a single string, added formatting for FASTA style
cur_record = "".join([">gi|", str(cur_record[0]), "\n", cur_record[1]])
f.write("%s\n" % cur_record)
print "Sequences for tree " + str(count) + " successfully written to " + filename + "."
count += 1
f.close()
|
<commit_before><commit_msg>Refactor from get-clusters.
Changes for speed, clarity and use simplification.<commit_after>
|
import MySQLdb as mdb
input_file = "phylota_184_trees.tre"
out_dir = "clusters/"
db_host = "localhost"
db_user = "root"
db_password = "reelab14"
db_database = "phylota"
### Read in and parse the tree file output from the trees file
treefile = open(input_file, 'r')
raw_id = []
ids = []
for t in treefile:
tree_id = t.split("\t")
tree_id = tree_id[0].split("_")
tree_id[0] = tree_id[0][2:]
tree_id[1] = tree_id[1][2:]
ids.append(tree_id)
## connect to the database and set a cursor
database = mdb.connect(host = db_host, # your host, usually localhost
user = db_user, # your username
passwd = db_password, # your password
db = db_database) # name of the data base
cluster_db = database.cursor() # define the db cursor
count = 0 # status count
for i in ids:
# sql query to find the sequences that belong to the cluster / taxon that are present in the given tree
sql = "".join(["SELECT seqs.gi,seqs.seq FROM seqs LEFT JOIN ci_gi_184 ON seqs.gi=ci_gi_184.gi WHERE ci_gi_184.ti=", ids[count][0], " AND ci_gi_184.clustid = ", ids[count][1], " AND ci_gi_184.cl_type='subtree';"])
cluster_db.execute(sql) # execute the above sql query
record = cluster_db.fetchall() # fetch all records that meet the query criteria and place them in the list record
record = list(record) # convert tuple to list
# create a string that contains the appropriate filename for each FASTA file
filename = "".join([out_dir, "ti", ids[count][0], "_ci", ids[count][1], ".fas"])
f = open(filename, 'w+')
for r in record:
cur_record = list(r) # convert this element of the record list from tuple to a list
cur_record[0] = str(cur_record[0]) # convert the GID value from long to string
# join all elements of the cur_record list into a single string, added formatting for FASTA style
cur_record = "".join([">gi|", str(cur_record[0]), "\n", cur_record[1]])
f.write("%s\n" % cur_record)
print "Sequences for tree " + str(count) + " successfully written to " + filename + "."
count += 1
f.close()
|
Refactor from get-clusters.
Changes for speed, clarity and use simplification.import MySQLdb as mdb
input_file = "phylota_184_trees.tre"
out_dir = "clusters/"
db_host = "localhost"
db_user = "root"
db_password = "reelab14"
db_database = "phylota"
### Read in and parse the tree file output from the trees file
treefile = open(input_file, 'r')
raw_id = []
ids = []
for t in treefile:
tree_id = t.split("\t")
tree_id = tree_id[0].split("_")
tree_id[0] = tree_id[0][2:]
tree_id[1] = tree_id[1][2:]
ids.append(tree_id)
## connect to the database and set a cursor
database = mdb.connect(host = db_host, # your host, usually localhost
user = db_user, # your username
passwd = db_password, # your password
db = db_database) # name of the data base
cluster_db = database.cursor() # define the db cursor
count = 0 # status count
for i in ids:
# sql query to find the sequences that belong to the cluster / taxon that are present in the given tree
sql = "".join(["SELECT seqs.gi,seqs.seq FROM seqs LEFT JOIN ci_gi_184 ON seqs.gi=ci_gi_184.gi WHERE ci_gi_184.ti=", ids[count][0], " AND ci_gi_184.clustid = ", ids[count][1], " AND ci_gi_184.cl_type='subtree';"])
cluster_db.execute(sql) # execute the above sql query
record = cluster_db.fetchall() # fetch all records that meet the query criteria and place them in the list record
record = list(record) # convert tuple to list
# create a string that contains the appropriate filename for each FASTA file
filename = "".join([out_dir, "ti", ids[count][0], "_ci", ids[count][1], ".fas"])
f = open(filename, 'w+')
for r in record:
cur_record = list(r) # convert this element of the record list from tuple to a list
cur_record[0] = str(cur_record[0]) # convert the GID value from long to string
# join all elements of the cur_record list into a single string, added formatting for FASTA style
cur_record = "".join([">gi|", str(cur_record[0]), "\n", cur_record[1]])
f.write("%s\n" % cur_record)
print "Sequences for tree " + str(count) + " successfully written to " + filename + "."
count += 1
f.close()
|
<commit_before><commit_msg>Refactor from get-clusters.
Changes for speed, clarity and use simplification.<commit_after>import MySQLdb as mdb
input_file = "phylota_184_trees.tre"
out_dir = "clusters/"
db_host = "localhost"
db_user = "root"
db_password = "reelab14"
db_database = "phylota"
### Read in and parse the tree file output from the trees file
treefile = open(input_file, 'r')
raw_id = []
ids = []
for t in treefile:
tree_id = t.split("\t")
tree_id = tree_id[0].split("_")
tree_id[0] = tree_id[0][2:]
tree_id[1] = tree_id[1][2:]
ids.append(tree_id)
## connect to the database and set a cursor
database = mdb.connect(host = db_host, # your host, usually localhost
user = db_user, # your username
passwd = db_password, # your password
db = db_database) # name of the data base
cluster_db = database.cursor() # define the db cursor
count = 0 # status count
for i in ids:
# sql query to find the sequences that belong to the cluster / taxon that are present in the given tree
sql = "".join(["SELECT seqs.gi,seqs.seq FROM seqs LEFT JOIN ci_gi_184 ON seqs.gi=ci_gi_184.gi WHERE ci_gi_184.ti=", ids[count][0], " AND ci_gi_184.clustid = ", ids[count][1], " AND ci_gi_184.cl_type='subtree';"])
cluster_db.execute(sql) # execute the above sql query
record = cluster_db.fetchall() # fetch all records that meet the query criteria and place them in the list record
record = list(record) # convert tuple to list
# create a string that contains the appropriate filename for each FASTA file
filename = "".join([out_dir, "ti", ids[count][0], "_ci", ids[count][1], ".fas"])
f = open(filename, 'w+')
for r in record:
cur_record = list(r) # convert this element of the record list from tuple to a list
cur_record[0] = str(cur_record[0]) # convert the GID value from long to string
# join all elements of the cur_record list into a single string, added formatting for FASTA style
cur_record = "".join([">gi|", str(cur_record[0]), "\n", cur_record[1]])
f.write("%s\n" % cur_record)
print "Sequences for tree " + str(count) + " successfully written to " + filename + "."
count += 1
f.close()
|
|
ce542e0bd75a8f93d1be69be6d4c2ad3f05e85db
|
cybox/test/objects/win_network_share_test.py
|
cybox/test/objects/win_network_share_test.py
|
# Copyright (c) 2015, The MITRE Corporation. All rights reserved.
# See LICENSE.txt for complete terms.
import unittest
from mixbox.vendor.six import u
from cybox.objects.win_network_share_object import WinNetworkShare
from cybox.test.objects import ObjectTestCase
class TestWinNetworkShare(ObjectTestCase, unittest.TestCase):
object_type = "WindowsNetworkShareObjectType"
klass = WinNetworkShare
_full_dict = {
'access_read': True,
'access_write': False,
'access_create': True,
'access_exec': False,
'access_delete': True,
'access_atrib': False,
'access_perm': True,
'access_all': False,
'current_uses': 1,
'local_path': u("Z:/"),
'max_uses': 10,
'netname': u("shared drive"),
'type': u("Folder"),
'xsi:type': object_type,
}
if __name__ == "__main__":
unittest.main()
|
Add tests for WinNetworkShare object.
|
Add tests for WinNetworkShare object.
|
Python
|
bsd-3-clause
|
CybOXProject/python-cybox
|
Add tests for WinNetworkShare object.
|
# Copyright (c) 2015, The MITRE Corporation. All rights reserved.
# See LICENSE.txt for complete terms.
import unittest
from mixbox.vendor.six import u
from cybox.objects.win_network_share_object import WinNetworkShare
from cybox.test.objects import ObjectTestCase
class TestWinNetworkShare(ObjectTestCase, unittest.TestCase):
object_type = "WindowsNetworkShareObjectType"
klass = WinNetworkShare
_full_dict = {
'access_read': True,
'access_write': False,
'access_create': True,
'access_exec': False,
'access_delete': True,
'access_atrib': False,
'access_perm': True,
'access_all': False,
'current_uses': 1,
'local_path': u("Z:/"),
'max_uses': 10,
'netname': u("shared drive"),
'type': u("Folder"),
'xsi:type': object_type,
}
if __name__ == "__main__":
unittest.main()
|
<commit_before><commit_msg>Add tests for WinNetworkShare object.<commit_after>
|
# Copyright (c) 2015, The MITRE Corporation. All rights reserved.
# See LICENSE.txt for complete terms.
import unittest
from mixbox.vendor.six import u
from cybox.objects.win_network_share_object import WinNetworkShare
from cybox.test.objects import ObjectTestCase
class TestWinNetworkShare(ObjectTestCase, unittest.TestCase):
object_type = "WindowsNetworkShareObjectType"
klass = WinNetworkShare
_full_dict = {
'access_read': True,
'access_write': False,
'access_create': True,
'access_exec': False,
'access_delete': True,
'access_atrib': False,
'access_perm': True,
'access_all': False,
'current_uses': 1,
'local_path': u("Z:/"),
'max_uses': 10,
'netname': u("shared drive"),
'type': u("Folder"),
'xsi:type': object_type,
}
if __name__ == "__main__":
unittest.main()
|
Add tests for WinNetworkShare object.# Copyright (c) 2015, The MITRE Corporation. All rights reserved.
# See LICENSE.txt for complete terms.
import unittest
from mixbox.vendor.six import u
from cybox.objects.win_network_share_object import WinNetworkShare
from cybox.test.objects import ObjectTestCase
class TestWinNetworkShare(ObjectTestCase, unittest.TestCase):
object_type = "WindowsNetworkShareObjectType"
klass = WinNetworkShare
_full_dict = {
'access_read': True,
'access_write': False,
'access_create': True,
'access_exec': False,
'access_delete': True,
'access_atrib': False,
'access_perm': True,
'access_all': False,
'current_uses': 1,
'local_path': u("Z:/"),
'max_uses': 10,
'netname': u("shared drive"),
'type': u("Folder"),
'xsi:type': object_type,
}
if __name__ == "__main__":
unittest.main()
|
<commit_before><commit_msg>Add tests for WinNetworkShare object.<commit_after># Copyright (c) 2015, The MITRE Corporation. All rights reserved.
# See LICENSE.txt for complete terms.
import unittest
from mixbox.vendor.six import u
from cybox.objects.win_network_share_object import WinNetworkShare
from cybox.test.objects import ObjectTestCase
class TestWinNetworkShare(ObjectTestCase, unittest.TestCase):
object_type = "WindowsNetworkShareObjectType"
klass = WinNetworkShare
_full_dict = {
'access_read': True,
'access_write': False,
'access_create': True,
'access_exec': False,
'access_delete': True,
'access_atrib': False,
'access_perm': True,
'access_all': False,
'current_uses': 1,
'local_path': u("Z:/"),
'max_uses': 10,
'netname': u("shared drive"),
'type': u("Folder"),
'xsi:type': object_type,
}
if __name__ == "__main__":
unittest.main()
|
|
3602699e1eb45e96ce6227ed6739dab49207b4bf
|
etc/gen-test-dump.py
|
etc/gen-test-dump.py
|
#!/usr/bin/env python
import random
WORLDS = [None] * 8 + ['world', 'world_nether', 'creative', 'hardcore']
REGIONS = [None] * 20 + ['Region%d' % i for i in range(10)]
NUM_PLAYERS = 100
NUM_PERMISSIONS_PER_PLAYER = 50
NUM_GROUPS = (3, 13, 23, 31, 41)
NUM_PERMISSIONS_PER_GROUP = 50
NUM_PLAYERS_PER_GROUP = 50
PLAYER_MEMBER_POOL_SIZE = 1000
groups_at_depth = []
for i in range(len(NUM_GROUPS)):
groups_at_depth.append([])
def generate_permissions(name, is_group, count):
for i in range(count):
region = REGIONS[random.randint(0, len(REGIONS) - 1)]
if region is None:
region = ''
else:
region += '/'
world = WORLDS[random.randint(0, len(WORLDS) - 1)]
if world is None:
world = ''
else:
world += ':'
print('permissions %s %s set %s%spermission.%s.%d true' % (
is_group and 'group' or 'player',
name,
region,
world,
name,
i))
def generate_group(name, depth):
if depth == 0:
# Nothing special
print('permissions group %s create' % name)
else:
print('permissions group %s create' % name)
# Pick random parent of previous depth
potential_parents = groups_at_depth[depth - 1]
parent = potential_parents[random.randint(0, len(potential_parents) - 1)]
print('permissions group %s setparent %s' % (name, parent))
assert name not in groups_at_depth[depth]
groups_at_depth[depth].append(name)
def generate_members(name, count):
for i in range(count):
p = random.randint(0, PLAYER_MEMBER_POOL_SIZE - 1)
print('permissions group %s add TestPlayer%d' % (name, p))
def main():
for p in range(NUM_PLAYERS):
generate_permissions('TestPlayer%d' % p, False,
NUM_PERMISSIONS_PER_PLAYER)
group_count = 0
for depth, num_at_depth in enumerate(NUM_GROUPS):
for g in range(num_at_depth):
name = 'TestGroup%d' % group_count
group_count += 1
generate_group(name, depth)
generate_permissions(name, True, NUM_PERMISSIONS_PER_GROUP)
generate_members(name, NUM_PLAYERS_PER_GROUP)
if __name__ == '__main__':
main()
|
Add Python script to generate (huge) test dumps.
|
Add Python script to generate (huge) test dumps.
|
Python
|
apache-2.0
|
MineYourMind/zPermissions,MineYourMind/zPermissions,MrWisski/zPermissions,MrWisski/zPermissions
|
Add Python script to generate (huge) test dumps.
|
#!/usr/bin/env python
import random
WORLDS = [None] * 8 + ['world', 'world_nether', 'creative', 'hardcore']
REGIONS = [None] * 20 + ['Region%d' % i for i in range(10)]
NUM_PLAYERS = 100
NUM_PERMISSIONS_PER_PLAYER = 50
NUM_GROUPS = (3, 13, 23, 31, 41)
NUM_PERMISSIONS_PER_GROUP = 50
NUM_PLAYERS_PER_GROUP = 50
PLAYER_MEMBER_POOL_SIZE = 1000
groups_at_depth = []
for i in range(len(NUM_GROUPS)):
groups_at_depth.append([])
def generate_permissions(name, is_group, count):
for i in range(count):
region = REGIONS[random.randint(0, len(REGIONS) - 1)]
if region is None:
region = ''
else:
region += '/'
world = WORLDS[random.randint(0, len(WORLDS) - 1)]
if world is None:
world = ''
else:
world += ':'
print('permissions %s %s set %s%spermission.%s.%d true' % (
is_group and 'group' or 'player',
name,
region,
world,
name,
i))
def generate_group(name, depth):
if depth == 0:
# Nothing special
print('permissions group %s create' % name)
else:
print('permissions group %s create' % name)
# Pick random parent of previous depth
potential_parents = groups_at_depth[depth - 1]
parent = potential_parents[random.randint(0, len(potential_parents) - 1)]
print('permissions group %s setparent %s' % (name, parent))
assert name not in groups_at_depth[depth]
groups_at_depth[depth].append(name)
def generate_members(name, count):
for i in range(count):
p = random.randint(0, PLAYER_MEMBER_POOL_SIZE - 1)
print('permissions group %s add TestPlayer%d' % (name, p))
def main():
for p in range(NUM_PLAYERS):
generate_permissions('TestPlayer%d' % p, False,
NUM_PERMISSIONS_PER_PLAYER)
group_count = 0
for depth, num_at_depth in enumerate(NUM_GROUPS):
for g in range(num_at_depth):
name = 'TestGroup%d' % group_count
group_count += 1
generate_group(name, depth)
generate_permissions(name, True, NUM_PERMISSIONS_PER_GROUP)
generate_members(name, NUM_PLAYERS_PER_GROUP)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add Python script to generate (huge) test dumps.<commit_after>
|
#!/usr/bin/env python
import random
WORLDS = [None] * 8 + ['world', 'world_nether', 'creative', 'hardcore']
REGIONS = [None] * 20 + ['Region%d' % i for i in range(10)]
NUM_PLAYERS = 100
NUM_PERMISSIONS_PER_PLAYER = 50
NUM_GROUPS = (3, 13, 23, 31, 41)
NUM_PERMISSIONS_PER_GROUP = 50
NUM_PLAYERS_PER_GROUP = 50
PLAYER_MEMBER_POOL_SIZE = 1000
groups_at_depth = []
for i in range(len(NUM_GROUPS)):
groups_at_depth.append([])
def generate_permissions(name, is_group, count):
for i in range(count):
region = REGIONS[random.randint(0, len(REGIONS) - 1)]
if region is None:
region = ''
else:
region += '/'
world = WORLDS[random.randint(0, len(WORLDS) - 1)]
if world is None:
world = ''
else:
world += ':'
print('permissions %s %s set %s%spermission.%s.%d true' % (
is_group and 'group' or 'player',
name,
region,
world,
name,
i))
def generate_group(name, depth):
# Creation is identical at every depth; only non-root groups get a parent.
print('permissions group %s create' % name)
if depth > 0:
# Pick random parent of previous depth
potential_parents = groups_at_depth[depth - 1]
parent = potential_parents[random.randint(0, len(potential_parents) - 1)]
print('permissions group %s setparent %s' % (name, parent))
assert name not in groups_at_depth[depth]
groups_at_depth[depth].append(name)
def generate_members(name, count):
for i in range(count):
p = random.randint(0, PLAYER_MEMBER_POOL_SIZE - 1)
print('permissions group %s add TestPlayer%d' % (name, p))
def main():
for p in range(NUM_PLAYERS):
generate_permissions('TestPlayer%d' % p, False,
NUM_PERMISSIONS_PER_PLAYER)
group_count = 0
for depth, num_at_depth in enumerate(NUM_GROUPS):
for g in range(num_at_depth):
name = 'TestGroup%d' % group_count
group_count += 1
generate_group(name, depth)
generate_permissions(name, True, NUM_PERMISSIONS_PER_GROUP)
generate_members(name, NUM_PLAYERS_PER_GROUP)
if __name__ == '__main__':
main()
|
Add Python script to generate (huge) test dumps.#!/usr/bin/env python
import random
WORLDS = [None] * 8 + ['world', 'world_nether', 'creative', 'hardcore']
REGIONS = [None] * 20 + ['Region%d' % i for i in range(10)]
NUM_PLAYERS = 100
NUM_PERMISSIONS_PER_PLAYER = 50
NUM_GROUPS = (3, 13, 23, 31, 41)
NUM_PERMISSIONS_PER_GROUP = 50
NUM_PLAYERS_PER_GROUP = 50
PLAYER_MEMBER_POOL_SIZE = 1000
groups_at_depth = []
for i in range(len(NUM_GROUPS)):
groups_at_depth.append([])
def generate_permissions(name, is_group, count):
for i in range(count):
region = REGIONS[random.randint(0, len(REGIONS) - 1)]
if region is None:
region = ''
else:
region += '/'
world = WORLDS[random.randint(0, len(WORLDS) - 1)]
if world is None:
world = ''
else:
world += ':'
print('permissions %s %s set %s%spermission.%s.%d true' % (
is_group and 'group' or 'player',
name,
region,
world,
name,
i))
def generate_group(name, depth):
# Creation is identical at every depth; only non-root groups get a parent.
print('permissions group %s create' % name)
if depth > 0:
# Pick random parent of previous depth
potential_parents = groups_at_depth[depth - 1]
parent = potential_parents[random.randint(0, len(potential_parents) - 1)]
print('permissions group %s setparent %s' % (name, parent))
assert name not in groups_at_depth[depth]
groups_at_depth[depth].append(name)
def generate_members(name, count):
for i in range(count):
p = random.randint(0, PLAYER_MEMBER_POOL_SIZE - 1)
print('permissions group %s add TestPlayer%d' % (name, p))
def main():
for p in range(NUM_PLAYERS):
generate_permissions('TestPlayer%d' % p, False,
NUM_PERMISSIONS_PER_PLAYER)
group_count = 0
for depth, num_at_depth in enumerate(NUM_GROUPS):
for g in range(num_at_depth):
name = 'TestGroup%d' % group_count
group_count += 1
generate_group(name, depth)
generate_permissions(name, True, NUM_PERMISSIONS_PER_GROUP)
generate_members(name, NUM_PLAYERS_PER_GROUP)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add Python script to generate (huge) test dumps.<commit_after>#!/usr/bin/env python
import random
WORLDS = [None] * 8 + ['world', 'world_nether', 'creative', 'hardcore']
REGIONS = [None] * 20 + ['Region%d' % i for i in range(10)]
NUM_PLAYERS = 100
NUM_PERMISSIONS_PER_PLAYER = 50
NUM_GROUPS = (3, 13, 23, 31, 41)
NUM_PERMISSIONS_PER_GROUP = 50
NUM_PLAYERS_PER_GROUP = 50
PLAYER_MEMBER_POOL_SIZE = 1000
groups_at_depth = []
for i in range(len(NUM_GROUPS)):
groups_at_depth.append([])
def generate_permissions(name, is_group, count):
for i in range(count):
region = REGIONS[random.randint(0, len(REGIONS) - 1)]
if region is None:
region = ''
else:
region += '/'
world = WORLDS[random.randint(0, len(WORLDS) - 1)]
if world is None:
world = ''
else:
world += ':'
print('permissions %s %s set %s%spermission.%s.%d true' % (
is_group and 'group' or 'player',
name,
region,
world,
name,
i))
def generate_group(name, depth):
# Creation is identical at every depth; only non-root groups get a parent.
print('permissions group %s create' % name)
if depth > 0:
# Pick random parent of previous depth
potential_parents = groups_at_depth[depth - 1]
parent = potential_parents[random.randint(0, len(potential_parents) - 1)]
print('permissions group %s setparent %s' % (name, parent))
assert name not in groups_at_depth[depth]
groups_at_depth[depth].append(name)
def generate_members(name, count):
for i in range(count):
p = random.randint(0, PLAYER_MEMBER_POOL_SIZE - 1)
print('permissions group %s add TestPlayer%d' % (name, p))
def main():
for p in range(NUM_PLAYERS):
generate_permissions('TestPlayer%d' % p, False,
NUM_PERMISSIONS_PER_PLAYER)
group_count = 0
for depth, num_at_depth in enumerate(NUM_GROUPS):
for g in range(num_at_depth):
name = 'TestGroup%d' % group_count
group_count += 1
generate_group(name, depth)
generate_permissions(name, True, NUM_PERMISSIONS_PER_GROUP)
generate_members(name, NUM_PLAYERS_PER_GROUP)
if __name__ == '__main__':
main()
|
|
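A quick aside on the zPermissions generator above: the expression "is_group and 'group' or 'player'" is the pre-Python-2.5 stand-in for a conditional expression, and it only works because the string 'group' is truthy. A minimal standalone sketch of the modern equivalent follows; the variable value is illustrative, not taken from the record.

is_group = True  # illustrative value, not from the record
# Old idiom used in generate_permissions(): relies on 'group' being truthy;
# it would silently fall through to 'player' if the first branch were falsy.
kind = is_group and 'group' or 'player'
print(kind)  # -> group
# Conditional expression (Python 2.5+): same result without the pitfall.
kind = 'group' if is_group else 'player'
print(kind)  # -> group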
ec4296bf19f85502f947d278ce58c27e7179a1fc
|
random_particle_movement.py
|
random_particle_movement.py
|
import math
import pygame
import random
pygame.init()
Pi = math.pi
#-- SCREEN CHARACTERISTICS ------------------------->>>
background_color = (255,255,255)
(width, height) = (300, 200)
#-- PARTICLE DEFINITION ---------------------------->>>
class Particle:
def __init__(self, (x, y), radius):
self.x = x
self.y = y
self.radius = radius
self.color = (255, 0, 0)
self.thickness = 1
self.speed = 0.1
self.angle = 0
def move(self):
self.x += math.cos(self.angle) * self.speed
self.y += math.sin(self.angle) * self.speed
def display(self):
pygame.draw.circle(screen, self.color, (int(self.x), int(self.y)), self.radius, self.thickness)
#-- RENDER SCREEN ---------------------------------->>>
screen = pygame.display.set_mode((width, height))
screen.fill(background_color)
number_of_particles = 20
particles = []
for n in range(number_of_particles):
radius = random.randint(5, 20)
x = random.randint(radius, width - radius)
y = random.randint(radius, height - radius)
particle = Particle((x, y), radius)
particle.speed = random.random()
particle.angle = random.uniform(0, Pi*2)
particles.append(particle)
#-- RUN LOOP --------------------------------------->>>
running = True
while running:
#-- move particles -------------------------->>>
'''
A particle.angle of 0 moves particles horizontally at a given speed.
Directions follow from particle.move() (cosine drives x, sine drives y)
and are subject to change; because pygame's y axis points down, the unit
circle maps onto the screen as:
right => 0 rad.
down => Pi/2 rad.
left => Pi rad.
up => 3 * Pi/2 rad.
... and all the angles betwixt
'''
for particle in particles:
particle.move()
particle.display()
pygame.display.flip()
screen.fill(background_color)
#-- detect pygame events -------------------->>>
for event in pygame.event.get():
if event.type == pygame.QUIT:
running = False
|
Add module where particles randomly disperse
|
Add module where particles randomly disperse
|
Python
|
mit
|
withtwoemms/pygame-explorations
|
Add module where particles randomly disperse
|
import math
import pygame
import random
pygame.init()
Pi = math.pi
#-- SCREEN CHARACTERISTICS ------------------------->>>
background_color = (255,255,255)
(width, height) = (300, 200)
#-- PARTICLE DEFINITION ---------------------------->>>
class Particle:
def __init__(self, (x, y), radius):
self.x = x
self.y = y
self.radius = radius
self.color = (255, 0, 0)
self.thickness = 1
self.speed = 0.1
self.angle = 0
def move(self):
self.x += math.cos(self.angle) * self.speed
self.y += math.sin(self.angle) * self.speed
def display(self):
pygame.draw.circle(screen, self.color, (int(self.x), int(self.y)), self.radius, self.thickness)
#-- RENDER SCREEN ---------------------------------->>>
screen = pygame.display.set_mode((width, height))
screen.fill(background_color)
number_of_particles = 20
particles = []
for n in range(number_of_particles):
radius = random.randint(5, 20)
x = random.randint(radius, width - radius)
y = random.randint(radius, height - radius)
particle = Particle((x, y), radius)
particle.speed = random.random()
particle.angle = random.uniform(0, Pi*2)
particles.append(particle)
#-- RUN LOOP --------------------------------------->>>
running = True
while running:
#-- move particles -------------------------->>>
'''
A particle.angle of 0 moves particles horizontally at a given speed.
Directions follow from particle.move() (cosine drives x, sine drives y)
and are subject to change; because pygame's y axis points down, the unit
circle maps onto the screen as:
right => 0 rad.
down => Pi/2 rad.
left => Pi rad.
up => 3 * Pi/2 rad.
... and all the angles betwixt
'''
for particle in particles:
particle.move()
particle.display()
pygame.display.flip()
screen.fill(background_color)
#-- detect pygame events -------------------->>>
for event in pygame.event.get():
if event.type == pygame.QUIT:
running = False
|
<commit_before><commit_msg>Add module where particles randomly disperse<commit_after>
|
import math
import pygame
import random
pygame.init()
Pi = math.pi
#-- SCREEN CHARACTERISTICS ------------------------->>>
background_color = (255,255,255)
(width, height) = (300, 200)
#-- PARTICLE DEFINITION ---------------------------->>>
class Particle:
def __init__(self, (x, y), radius):
self.x = x
self.y = y
self.radius = radius
self.color = (255, 0, 0)
self.thickness = 1
self.speed = 0.1
self.angle = 0
def move(self):
self.x += math.cos(self.angle) * self.speed
self.y += math.sin(self.angle) * self.speed
def display(self):
pygame.draw.circle(screen, self.color, (int(self.x), int(self.y)), self.radius, self.thickness)
#-- RENDER SCREEN ---------------------------------->>>
screen = pygame.display.set_mode((width, height))
screen.fill(background_color)
number_of_particles = 20
particles = []
for n in range(number_of_particles):
radius = random.randint(5, 20)
x = random.randint(radius, width - radius)
y = random.randint(radius, height - radius)
particle = Particle((x, y), radius)
particle.speed = random.random()
particle.angle = random.uniform(0, Pi*2)
particles.append(particle)
#-- RUN LOOP --------------------------------------->>>
running = True
while running:
#-- move particles -------------------------->>>
'''
A particle.angle of 0 moves particles horizontally at a given speed.
Directions follow from particle.move() (cosine drives x, sine drives y)
and are subject to change; because pygame's y axis points down, the unit
circle maps onto the screen as:
right => 0 rad.
down => Pi/2 rad.
left => Pi rad.
up => 3 * Pi/2 rad.
... and all the angles betwixt
'''
for particle in particles:
particle.move()
particle.display()
pygame.display.flip()
screen.fill(background_color)
#-- detect pygame events -------------------->>>
for event in pygame.event.get():
if event.type == pygame.QUIT:
running = False
|
Add module where particles randomly disperseimport math
import pygame
import random
pygame.init()
Pi = math.pi
#-- SCREEN CHARACTERISTICS ------------------------->>>
background_color = (255,255,255)
(width, height) = (300, 200)
#-- PARTICLE DEFINITION ---------------------------->>>
class Particle:
def __init__(self, (x, y), radius):
self.x = x
self.y = y
self.radius = radius
self.color = (255, 0, 0)
self.thickness = 1
self.speed = 0.1
self.angle = 0
def move(self):
self.x += math.cos(self.angle) * self.speed
self.y += math.sin(self.angle) * self.speed
def display(self):
pygame.draw.circle(screen, self.color, (int(self.x), int(self.y)), self.radius, self.thickness)
#-- RENDER SCREEN ---------------------------------->>>
screen = pygame.display.set_mode((width, height))
screen.fill(background_color)
number_of_particles = 20
particles = []
for n in range(number_of_particles):
radius = random.randint(5, 20)
x = random.randint(radius, width - radius)
y = random.randint(radius, height - radius)
particle = Particle((x, y), radius)
particle.speed = random.random()
particle.angle = random.uniform(0, Pi*2)
particles.append(particle)
#-- RUN LOOP --------------------------------------->>>
running = True
while running:
#-- move particles -------------------------->>>
'''
A particle.angle of 0 moves particles horizontally at a given speed.
Directions follow from particle.move() (cosine drives x, sine drives y)
and are subject to change; because pygame's y axis points down, the unit
circle maps onto the screen as:
right => 0 rad.
down => Pi/2 rad.
left => Pi rad.
up => 3 * Pi/2 rad.
... and all the angles betwixt
'''
for particle in particles:
particle.move()
particle.display()
pygame.display.flip()
screen.fill(background_color)
#-- detect pygame events -------------------->>>
for event in pygame.event.get():
if event.type == pygame.QUIT:
running = False
|
<commit_before><commit_msg>Add module where particles randomly disperse<commit_after>import math
import pygame
import random
pygame.init()
Pi = math.pi
#-- SCREEN CHARACTERISTICS ------------------------->>>
background_color = (255,255,255)
(width, height) = (300, 200)
#-- PARTICLE DEFINITION ---------------------------->>>
class Particle:
def __init__(self, (x, y), radius):
self.x = x
self.y = y
self.radius = radius
self.color = (255, 0, 0)
self.thickness = 1
self.speed = 0.1
self.angle = 0
def move(self):
self.x += math.cos(self.angle) * self.speed
self.y += math.sin(self.angle) * self.speed
def display(self):
pygame.draw.circle(screen, self.color, (int(self.x), int(self.y)), self.radius, self.thickness)
#-- RENDER SCREEN ---------------------------------->>>
screen = pygame.display.set_mode((width, height))
screen.fill(background_color)
number_of_particles = 20
particles = []
for n in range(number_of_particles):
radius = random.randint(5, 20)
x = random.randint(radius, width - radius)
y = random.randint(radius, height - radius)
particle = Particle((x, y), radius)
particle.speed = random.random()
particle.angle = random.uniform(0, Pi*2)
particles.append(particle)
#-- RUN LOOP --------------------------------------->>>
running = True
while running:
#-- move particles -------------------------->>>
'''
A particle.angle of 0 moves particles horizontally at a given speed.
Directions follow from particle.move() (cosine drives x, sine drives y)
and are subject to change; because pygame's y axis points down, the unit
circle maps onto the screen as:
right => 0 rad.
down => Pi/2 rad.
left => Pi rad.
up => 3 * Pi/2 rad.
... and all the angles betwixt
'''
for particle in particles:
particle.move()
particle.display()
pygame.display.flip()
screen.fill(background_color)
#-- detect pygame events -------------------->>>
for event in pygame.event.get():
if event.type == pygame.QUIT:
running = False
|
|
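A sanity check on the particle script's direction notes: Particle.move() adds cos(angle) to x and sin(angle) to y, and pygame's y axis points down, so angle 0 moves a particle right and Pi/2 moves it down. The standalone sketch below (no pygame needed; note the script itself targets Python 2, as the tuple-parameter __init__ shows) prints the unit displacement for each cardinal angle.

import math

# Unit displacement per cardinal angle, matching Particle.move():
# x grows with cos(angle), y grows with sin(angle); +y is down in pygame.
for label, angle in [('right', 0.0), ('down', math.pi / 2),
                     ('left', math.pi), ('up', 3 * math.pi / 2)]:
    dx = round(math.cos(angle), 6) + 0.0  # "+ 0.0" normalizes -0.0
    dy = round(math.sin(angle), 6) + 0.0
    print('%5s: dx=%s dy=%s' % (label, dx, dy))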
447fba372fd7e438edd6119c5e59342958f9ec8d
|
genome_designer/debug/transactions_debug.py
|
genome_designer/debug/transactions_debug.py
|
"""
Experimenting with Django transactions.
"""
import random
# Since this script is intended to be used from the terminal, setup the
# environment first so that django and model imports work.
from util import setup_django_env
setup_django_env()
from django.contrib.auth.models import User
from django.db import transaction
from main.models import Project
from main.models import ReferenceGenome
from main.models import Variant
from scripts.bootstrap_data import REF_GENOME_1_LABEL
from scripts.bootstrap_data import TEST_PROJECT_NAME
from scripts.bootstrap_data import TEST_USERNAME
# NOTE: TEST_PASSWORD and TEST_EMAIL are used below but were never imported
# in the original; they are assumed to live in scripts.bootstrap_data too.
from scripts.bootstrap_data import TEST_PASSWORD, TEST_EMAIL
def main():
try:
user = User.objects.get(username=TEST_USERNAME)
except User.DoesNotExist:
user = User.objects.create_user(
TEST_USERNAME, password=TEST_PASSWORD, email=TEST_EMAIL)
ref_genome, ref_genome_created = ReferenceGenome.objects.get_or_create(
label=REF_GENOME_1_LABEL)
test_project = Project.objects.create(
title='deleteme', owner=user.get_profile())
# var_list = []
# for pos in range(3):
# var_list.append(Variant(
# type=Variant.TYPE.TRANSITION,
# reference_genome=ref_genome,
# chromosome='chrom',
# position=pos,
# ref_value='A'))
# Variant.objects.bulk_create(var_list)
# with transaction.commit_on_success():
# for pos in range(3):
# Variant.objects.create(
# type=Variant.TYPE.TRANSITION,
# reference_genome=ref_genome,
# chromosome='chrom',
# position=pos,
# ref_value='A')
if __name__ == '__main__':
main()
|
Save code for debugging transactions.
|
Save code for debugging transactions.
|
Python
|
mit
|
woodymit/millstone,churchlab/millstone,woodymit/millstone,woodymit/millstone,churchlab/millstone,woodymit/millstone_accidental_source,churchlab/millstone,woodymit/millstone_accidental_source,woodymit/millstone_accidental_source,woodymit/millstone,churchlab/millstone,woodymit/millstone_accidental_source
|
Save code for debugging transactions.
|
"""
Experimenting with Django transactions.
"""
import random
# Since this script is intended to be used from the terminal, setup the
# environment first so that django and model imports work.
from util import setup_django_env
setup_django_env()
from django.contrib.auth.models import User
from django.db import transaction
from main.models import Project
from main.models import ReferenceGenome
from main.models import Variant
from scripts.bootstrap_data import REF_GENOME_1_LABEL
from scripts.bootstrap_data import TEST_PROJECT_NAME
from scripts.bootstrap_data import TEST_USERNAME
# NOTE: TEST_PASSWORD and TEST_EMAIL are used below but were never imported
# in the original; they are assumed to live in scripts.bootstrap_data too.
from scripts.bootstrap_data import TEST_PASSWORD, TEST_EMAIL
def main():
try:
user = User.objects.get(username=TEST_USERNAME)
except User.DoesNotExist:
user = User.objects.create_user(
TEST_USERNAME, password=TEST_PASSWORD, email=TEST_EMAIL)
ref_genome, ref_genome_created = ReferenceGenome.objects.get_or_create(
label=REF_GENOME_1_LABEL)
test_project = Project.objects.create(
title='deleteme', owner=user.get_profile())
# var_list = []
# for pos in range(3):
# var_list.append(Variant(
# type=Variant.TYPE.TRANSITION,
# reference_genome=ref_genome,
# chromosome='chrom',
# position=pos,
# ref_value='A'))
# Variant.objects.bulk_create(var_list)
# with transaction.commit_on_success():
# for pos in range(3):
# Variant.objects.create(
# type=Variant.TYPE.TRANSITION,
# reference_genome=ref_genome,
# chromosome='chrom',
# position=pos,
# ref_value='A')
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Save code for debugging transactions.<commit_after>
|
"""
Experimenting with Django transactions.
"""
import random
# Since this script is intended to be used from the terminal, setup the
# environment first so that django and model imports work.
from util import setup_django_env
setup_django_env()
from django.contrib.auth.models import User
from django.db import transaction
from main.models import Project
from main.models import ReferenceGenome
from main.models import Variant
from scripts.bootstrap_data import REF_GENOME_1_LABEL
from scripts.bootstrap_data import TEST_PROJECT_NAME
from scripts.bootstrap_data import TEST_USERNAME
# NOTE: TEST_PASSWORD and TEST_EMAIL are used below but were never imported
# in the original; they are assumed to live in scripts.bootstrap_data too.
from scripts.bootstrap_data import TEST_PASSWORD, TEST_EMAIL
def main():
try:
user = User.objects.get(username=TEST_USERNAME)
except User.DoesNotExist:
user = User.objects.create_user(
TEST_USERNAME, password=TEST_PASSWORD, email=TEST_EMAIL)
ref_genome, ref_genome_created = ReferenceGenome.objects.get_or_create(
label=REF_GENOME_1_LABEL)
test_project = Project.objects.create(
title='deleteme', owner=user.get_profile())
# var_list = []
# for pos in range(3):
# var_list.append(Variant(
# type=Variant.TYPE.TRANSITION,
# reference_genome=ref_genome,
# chromosome='chrom',
# position=pos,
# ref_value='A'))
# Variant.objects.bulk_create(var_list)
# with transaction.commit_on_success():
# for pos in range(3):
# Variant.objects.create(
# type=Variant.TYPE.TRANSITION,
# reference_genome=ref_genome,
# chromosome='chrom',
# position=pos,
# ref_value='A')
if __name__ == '__main__':
main()
|
Save code for debugging transactions."""
Experimenting with Django transactions.
"""
import random
# Since this script is intended to be used from the terminal, setup the
# environment first so that django and model imports work.
from util import setup_django_env
setup_django_env()
from django.contrib.auth.models import User
from django.db import transaction
from main.models import Project
from main.models import ReferenceGenome
from main.models import Variant
from scripts.bootstrap_data import REF_GENOME_1_LABEL
from scripts.bootstrap_data import TEST_PROJECT_NAME
from scripts.bootstrap_data import TEST_USERNAME
# NOTE: TEST_PASSWORD and TEST_EMAIL are used below but were never imported
# in the original; they are assumed to live in scripts.bootstrap_data too.
from scripts.bootstrap_data import TEST_PASSWORD, TEST_EMAIL
def main():
try:
user = User.objects.get(username=TEST_USERNAME)
except User.DoesNotExist:
user = User.objects.create_user(
TEST_USERNAME, password=TEST_PASSWORD, email=TEST_EMAIL)
ref_genome, ref_genome_created = ReferenceGenome.objects.get_or_create(
label=REF_GENOME_1_LABEL)
test_project = Project.objects.create(
title='deleteme', owner=user.get_profile())
# var_list = []
# for pos in range(3):
# var_list.append(Variant(
# type=Variant.TYPE.TRANSITION,
# reference_genome=ref_genome,
# chromosome='chrom',
# position=pos,
# ref_value='A'))
# Variant.objects.bulk_create(var_list)
# with transaction.commit_on_success():
# for pos in range(3):
# Variant.objects.create(
# type=Variant.TYPE.TRANSITION,
# reference_genome=ref_genome,
# chromosome='chrom',
# position=pos,
# ref_value='A')
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Save code for debugging transactions.<commit_after>"""
Experimenting with Django transactions.
"""
import random
# Since this script is intended to be used from the terminal, setup the
# environment first so that django and model imports work.
from util import setup_django_env
setup_django_env()
from django.contrib.auth.models import User
from django.db import transaction
from main.models import Project
from main.models import ReferenceGenome
from main.models import Variant
from scripts.bootstrap_data import REF_GENOME_1_LABEL
from scripts.bootstrap_data import TEST_PROJECT_NAME
from scripts.bootstrap_data import TEST_USERNAME
# NOTE: TEST_PASSWORD and TEST_EMAIL are used below but were never imported
# in the original; they are assumed to live in scripts.bootstrap_data too.
from scripts.bootstrap_data import TEST_PASSWORD, TEST_EMAIL
def main():
try:
user = User.objects.get(username=TEST_USERNAME)
except User.DoesNotExist:
user = User.objects.create_user(
TEST_USERNAME, password=TEST_PASSWORD, email=TEST_EMAIL)
ref_genome, ref_genome_created = ReferenceGenome.objects.get_or_create(
label=REF_GENOME_1_LABEL)
test_project = Project.objects.create(
title='deleteme', owner=user.get_profile())
# var_list = []
# for pos in range(3):
# var_list.append(Variant(
# type=Variant.TYPE.TRANSITION,
# reference_genome=ref_genome,
# chromosome='chrom',
# position=pos,
# ref_value='A'))
# Variant.objects.bulk_create(var_list)
# with transaction.commit_on_success():
# for pos in range(3):
# Variant.objects.create(
# type=Variant.TYPE.TRANSITION,
# reference_genome=ref_genome,
# chromosome='chrom',
# position=pos,
# ref_value='A')
if __name__ == '__main__':
main()
|
|
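One caveat on the transactions experiment above: transaction.commit_on_success() was deprecated in Django 1.6 and removed in 1.8. A hedged sketch of the commented-out block on modern Django follows; it assumes the same Django setup, Variant model, and ref_genome instance as the script, so it is not runnable on its own.

from django.db import transaction

# atomic() is the modern replacement for commit_on_success():
# all three creates commit together or roll back together.
with transaction.atomic():
    for pos in range(3):
        Variant.objects.create(
            type=Variant.TYPE.TRANSITION,
            reference_genome=ref_genome,
            chromosome='chrom',
            position=pos,
            ref_value='A')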
a206807606130d9eb1d55b9b54ec03b3fa3f1816
|
test/integrationtests/skills/single_test.py
|
test/integrationtests/skills/single_test.py
|
# Copyright 2017 Mycroft AI Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
""" Test a single skill
python single_test.py PATH_TO_SKILL
"""
import glob
import unittest
import os
from test.integrationtests.skills.skill_tester import MockSkillsLoader
from test.integrationtests.skills.skill_tester import SkillTest
import sys
d = sys.argv.pop() + '/'
HOME_DIR = os.path.dirname(d)
def discover_tests():
"""Find skills whith test files
For all skills with test files, starten from current directory,
find the test files in subdirectory test/intent.
:return: skills and corresponding test case files found
"""
tests = {}
skills = [HOME_DIR]
for skill in skills:
test_intent_files = [
f for f
in glob.glob(os.path.join(skill, 'test/intent/*.intent.json'))
]
if len(test_intent_files) > 0:
tests[skill] = test_intent_files
return tests
class IntentTestSequenceMeta(type):
def __new__(mcs, name, bases, d):
def gen_test(a, b):
def test(self):
if not SkillTest(a, b, self.emitter).run(self.loader):
assert False
return test
tests = discover_tests()
for skill in tests.keys():
skill_name = os.path.basename(skill) # Name of the skill (basename of its path)
for example in tests[skill]:
# Name of the intent
example_name = os.path.basename(
os.path.splitext(os.path.splitext(example)[0])[0])
test_name = "test_IntentValidation[%s:%s]" % (skill_name,
example_name)
d[test_name] = gen_test(skill, example)
return type.__new__(mcs, name, bases, d)
class IntentTestSequence(unittest.TestCase):
"""This is the TestCase class that pythons unit tester can execute.
"""
__metaclass__ = IntentTestSequenceMeta
loader = None
@classmethod
def setUpClass(cls):
cls.loader = MockSkillsLoader(HOME_DIR)
cls.emitter = cls.loader.load_skills()
@classmethod
def tearDownClass(cls):
cls.loader.unload_skills()
if __name__ == '__main__':
unittest.main()
|
Add simple script for running a single skill
|
Add simple script for running a single skill
Based on the skill_developers_testrunner.py, takes the path to a skill
as argument and runs any tests for that skill.
|
Python
|
apache-2.0
|
MycroftAI/mycroft-core,linuxipho/mycroft-core,forslund/mycroft-core,Dark5ide/mycroft-core,forslund/mycroft-core,MycroftAI/mycroft-core,Dark5ide/mycroft-core,linuxipho/mycroft-core
|
Add simple script for running a single skill
Based on the skill_developers_testrunner.py, takes the path to a skill
as argument and runs any tests for that skill.
|
# Copyright 2017 Mycroft AI Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
""" Test a single skill
python single_test.py PATH_TO_SKILL
"""
import glob
import unittest
import os
from test.integrationtests.skills.skill_tester import MockSkillsLoader
from test.integrationtests.skills.skill_tester import SkillTest
import sys
d = sys.argv.pop() + '/'
HOME_DIR = os.path.dirname(d)
def discover_tests():
"""Find skills whith test files
For all skills with test files, starten from current directory,
find the test files in subdirectory test/intent.
:return: skills and corresponding test case files found
"""
tests = {}
skills = [HOME_DIR]
for skill in skills:
test_intent_files = [
f for f
in glob.glob(os.path.join(skill, 'test/intent/*.intent.json'))
]
if len(test_intent_files) > 0:
tests[skill] = test_intent_files
return tests
class IntentTestSequenceMeta(type):
def __new__(mcs, name, bases, d):
def gen_test(a, b):
def test(self):
if not SkillTest(a, b, self.emitter).run(self.loader):
assert False
return test
tests = discover_tests()
for skill in tests.keys():
skill_name = os.path.basename(skill) # Name of the skill (basename of its path)
for example in tests[skill]:
# Name of the intent
example_name = os.path.basename(
os.path.splitext(os.path.splitext(example)[0])[0])
test_name = "test_IntentValidation[%s:%s]" % (skill_name,
example_name)
d[test_name] = gen_test(skill, example)
return type.__new__(mcs, name, bases, d)
class IntentTestSequence(unittest.TestCase):
"""This is the TestCase class that pythons unit tester can execute.
"""
__metaclass__ = IntentTestSequenceMeta
loader = None
@classmethod
def setUpClass(cls):
cls.loader = MockSkillsLoader(HOME_DIR)
cls.emitter = cls.loader.load_skills()
@classmethod
def tearDownClass(cls):
cls.loader.unload_skills()
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add simple script for running a single skill
Based on the skill_developers_testrunner.py, takes the path to a skill
as argument and runs any tests for that skill.<commit_after>
|
# Copyright 2017 Mycroft AI Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
""" Test a single skill
python single_test.py PATH_TO_SKILL
"""
import glob
import unittest
import os
from test.integrationtests.skills.skill_tester import MockSkillsLoader
from test.integrationtests.skills.skill_tester import SkillTest
import sys
d = sys.argv.pop() + '/'
HOME_DIR = os.path.dirname(d)
def discover_tests():
"""Find skills whith test files
For all skills with test files, starten from current directory,
find the test files in subdirectory test/intent.
:return: skills and corresponding test case files found
"""
tests = {}
skills = [HOME_DIR]
for skill in skills:
test_intent_files = [
f for f
in glob.glob(os.path.join(skill, 'test/intent/*.intent.json'))
]
if len(test_intent_files) > 0:
tests[skill] = test_intent_files
return tests
class IntentTestSequenceMeta(type):
def __new__(mcs, name, bases, d):
def gen_test(a, b):
def test(self):
if not SkillTest(a, b, self.emitter).run(self.loader):
assert False
return test
tests = discover_tests()
for skill in tests.keys():
skill_name = os.path.basename(skill) # Name of the skill (basename of its path)
for example in tests[skill]:
# Name of the intent
example_name = os.path.basename(
os.path.splitext(os.path.splitext(example)[0])[0])
test_name = "test_IntentValidation[%s:%s]" % (skill_name,
example_name)
d[test_name] = gen_test(skill, example)
return type.__new__(mcs, name, bases, d)
class IntentTestSequence(unittest.TestCase):
"""This is the TestCase class that pythons unit tester can execute.
"""
__metaclass__ = IntentTestSequenceMeta
loader = None
@classmethod
def setUpClass(cls):
cls.loader = MockSkillsLoader(HOME_DIR)
cls.emitter = cls.loader.load_skills()
@classmethod
def tearDownClass(cls):
cls.loader.unload_skills()
if __name__ == '__main__':
unittest.main()
|
Add simple script for running a single skill
Based on the skill_developers_testrunner.py, takes the path to a skill
as argument and runs any tests for that skill.# Copyright 2017 Mycroft AI Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
""" Test a single skill
python single_test.py PATH_TO_SKILL
"""
import glob
import unittest
import os
from test.integrationtests.skills.skill_tester import MockSkillsLoader
from test.integrationtests.skills.skill_tester import SkillTest
import sys
d = sys.argv.pop() + '/'
HOME_DIR = os.path.dirname(d)
def discover_tests():
"""Find skills whith test files
For all skills with test files, starten from current directory,
find the test files in subdirectory test/intent.
:return: skills and corresponding test case files found
"""
tests = {}
skills = [HOME_DIR]
for skill in skills:
test_intent_files = [
f for f
in glob.glob(os.path.join(skill, 'test/intent/*.intent.json'))
]
if len(test_intent_files) > 0:
tests[skill] = test_intent_files
return tests
class IntentTestSequenceMeta(type):
def __new__(mcs, name, bases, d):
def gen_test(a, b):
def test(self):
if not SkillTest(a, b, self.emitter).run(self.loader):
assert False
return test
tests = discover_tests()
for skill in tests.keys():
skill_name = os.path.basename(skill) # Name of the skill (basename of its path)
for example in tests[skill]:
# Name of the intent
example_name = os.path.basename(
os.path.splitext(os.path.splitext(example)[0])[0])
test_name = "test_IntentValidation[%s:%s]" % (skill_name,
example_name)
d[test_name] = gen_test(skill, example)
return type.__new__(mcs, name, bases, d)
class IntentTestSequence(unittest.TestCase):
"""This is the TestCase class that pythons unit tester can execute.
"""
__metaclass__ = IntentTestSequenceMeta
loader = None
@classmethod
def setUpClass(cls):
cls.loader = MockSkillsLoader(HOME_DIR)
cls.emitter = cls.loader.load_skills()
@classmethod
def tearDownClass(cls):
cls.loader.unload_skills()
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add simple script for running a single skill
Based on the skill_developers_testrunner.py, takes the path to a skill
as argument and runs any tests for that skill.<commit_after># Copyright 2017 Mycroft AI Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
""" Test a single skill
python single_test.py PATH_TO_SKILL
"""
import glob
import unittest
import os
from test.integrationtests.skills.skill_tester import MockSkillsLoader
from test.integrationtests.skills.skill_tester import SkillTest
import sys
d = sys.argv.pop() + '/'
HOME_DIR = os.path.dirname(d)
def discover_tests():
"""Find skills whith test files
For all skills with test files, starten from current directory,
find the test files in subdirectory test/intent.
:return: skills and corresponding test case files found
"""
tests = {}
skills = [HOME_DIR]
for skill in skills:
test_intent_files = [
f for f
in glob.glob(os.path.join(skill, 'test/intent/*.intent.json'))
]
if len(test_intent_files) > 0:
tests[skill] = test_intent_files
return tests
class IntentTestSequenceMeta(type):
def __new__(mcs, name, bases, d):
def gen_test(a, b):
def test(self):
if not SkillTest(a, b, self.emitter).run(self.loader):
assert False
return test
tests = discover_tests()
for skill in tests.keys():
skill_name = os.path.basename(skill) # Name of the skill (basename of its path)
for example in tests[skill]:
# Name of the intent
example_name = os.path.basename(
os.path.splitext(os.path.splitext(example)[0])[0])
test_name = "test_IntentValidation[%s:%s]" % (skill_name,
example_name)
d[test_name] = gen_test(skill, example)
return type.__new__(mcs, name, bases, d)
class IntentTestSequence(unittest.TestCase):
"""This is the TestCase class that pythons unit tester can execute.
"""
__metaclass__ = IntentTestSequenceMeta
loader = None
@classmethod
def setUpClass(cls):
cls.loader = MockSkillsLoader(HOME_DIR)
cls.emitter = cls.loader.load_skills()
@classmethod
def tearDownClass(cls):
cls.loader.unload_skills()
if __name__ == '__main__':
unittest.main()
|
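The IntentTestSequenceMeta class above illustrates a reusable pattern: generating one unittest method per discovered data file at class-creation time, with a closure capturing each file. A stripped-down, self-contained sketch of the same idea follows, using hypothetical names and Python 3 metaclass syntax (the record itself uses Python 2's __metaclass__ attribute).

import unittest


class ParamMeta(type):
    def __new__(mcs, name, bases, d):
        def gen_test(value):
            # The inner function closes over 'value', so every generated
            # method keeps its own parameter.
            def test(self):
                self.assertGreaterEqual(value, 0)
            return test

        for i, value in enumerate([0, 1, 2]):  # stand-in for discovered files
            d['test_sample_%d' % i] = gen_test(value)
        return type.__new__(mcs, name, bases, d)


class SampleTests(unittest.TestCase, metaclass=ParamMeta):
    pass


if __name__ == '__main__':
    unittest.main()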
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.