| column | dtype | stats |
|---|---|---|
| commit | string | lengths 40–40 |
| old_file | string | lengths 4–118 |
| new_file | string | lengths 4–118 |
| old_contents | string | lengths 0–2.94k |
| new_contents | string | lengths 1–4.43k |
| subject | string | lengths 15–444 |
| message | string | lengths 16–3.45k |
| lang | string | 1 class |
| license | string | 13 classes |
| repos | string | lengths 5–43.2k |
| prompt | string | lengths 17–4.58k |
| response | string | lengths 1–4.43k |
| prompt_tagged | string | lengths 58–4.62k |
| response_tagged | string | lengths 1–4.43k |
| text | string | lengths 132–7.29k |
| text_tagged | string | lengths 173–7.33k |
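Each row pairs one file's contents before and after a commit (`old_contents`, `new_contents`) with the commit message (`subject`, `message`); the `prompt`/`response` and `*_tagged` columns are renderings derived from those base fields, with the tagged variants wrapping them in `<commit_before>`, `<commit_msg>`, and `<commit_after>` markers. Below is a minimal sketch of loading and inspecting a dump with this schema using the Hugging Face `datasets` library; the dataset name `user/python-commit-pairs` is a hypothetical placeholder, not a published name.

```python
# A sketch, assuming the dataset is published on the Hugging Face Hub;
# "user/python-commit-pairs" is a hypothetical placeholder name.
from datasets import load_dataset

ds = load_dataset("user/python-commit-pairs", split="train")

# Inspect the base fields of a single row.
row = ds[0]
print(row["commit"], row["old_file"], row["subject"])

# The tagged columns appear to be plain concatenations of the base
# fields (inferred from the preview rows shown below).
def render_tagged(old: str, subject: str, new: str) -> str:
    return ("<commit_before>" + old
            + "<commit_msg>" + subject
            + "<commit_after>" + new)

print(render_tagged(row["old_contents"], row["subject"],
                    row["new_contents"]) == row["text_tagged"])
```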
commit: e6d6b39a2ec03c992f450fd99b42122bc5d9249f
old_file: aospy/test/test_objs/runs.py
new_file: aospy/test/test_objs/runs.py
old_contents:
from aospy.run import Run
test_am2 = Run(
name='test_am2',
description=(
'Preindustrial control simulation.'
),
data_in_direc=('/archive/Yi.Ming/sm2.1_fixed/'
'SM2.1U_Control-1860_lm2_aie_rerun6.YIM/pp'),
data_in_dur=5,
data_in_start_date='0001-01-01',
data_in_end_date='0080-12-31',
default_date_range=('0021-01-01', '0080-12-31')
)
test_idealized_moist = Run(
name='test_idealized_moist',
description=(
'Control case at T42 spectral resolution'
),
data_in_direc='/archive/skc/idealized_moist_alb_T42/control_gaussian_T42/'
'gfdl.ncrc2-default-prod/1x0m720d_32pe/history',
data_in_dir_struc='one_dir',
data_in_files={'20-day': {v: '00000.1x20days.nc'
for v in ['olr', 'temp']}},
)
new_contents:
from aospy.run import Run
test_am2 = Run(
name='test_am2',
description=(
'Preindustrial control simulation.'
),
data_in_direc=('/archive/Yi.Ming/sm2.1_fixed/'
'SM2.1U_Control-1860_lm2_aie_rerun6.YIM/pp'),
data_in_dur=5,
data_in_start_date='0001-01-01',
data_in_end_date='0080-12-31',
default_date_range=('0021-01-01', '0080-12-31')
)
test_idealized_moist = Run(
name='test_idealized_moist',
description=(
'Control case at T42 spectral resolution'
),
data_in_direc='/archive/skc/idealized_moist_alb_T42/control_gaussian_T42/'
'gfdl.ncrc2-default-prod/1x0m720d_32pe/history',
data_in_dir_struc='one_dir',
data_in_files={'20-day': {v: '00000.1x20days.nc'
for v in ['olr', 'temp', 'ps']}},
)
subject: TEST Add 'ps' variable to idealized run
message: TEST Add 'ps' variable to idealized run
lang: Python
license: apache-2.0
repos: spencerkclark/aospy,spencerahill/aospy

---

commit: ee93f680c911f6879e12f7ce7ee3a6c1cfda8379
old_file: app/taskqueue/tasks/stats.py
new_file: app/taskqueue/tasks/stats.py
old_contents:
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Tasks to calculate statistics."""
import taskqueue.celery as taskc
import utils
import utils.db
import utils.stats.daily
@taskc.app.task(
name="calculate-daily-statistics", ack_late=True, track_started=True)
def calculate_daily_statistics():
db_options = taskc.app.conf.DB_OPTIONS
daily_stats = utils.stats.daily.calculate_daily_stats(db_options)
database = utils.db.get_db_connection(db_options)
ret_val, doc_id = utils.db.save(database, daily_stats, manipulate=True)
return ret_val, doc_id
new_contents:
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Tasks to calculate statistics."""
import taskqueue.celery as taskc
import utils
import utils.db
import utils.stats.daily
@taskc.app.task(
name="calculate-daily-statistics", ack_late=True, track_started=True)
def calculate_daily_statistics():
"""Collect daily statistics on the data stored."""
db_options = taskc.app.conf.DB_OPTIONS
daily_stats = utils.stats.daily.calculate_daily_stats(db_options)
database = utils.db.get_db_connection(db_options)
ret_val, doc_id = utils.db.save(database, daily_stats, manipulate=True)
return ret_val, doc_id
subject: Add missing function doc string.
message: Add missing function doc string.
lang: Python
license: lgpl-2.1
repos: kernelci/kernelci-backend,kernelci/kernelci-backend

---

commit: 4be85164344defc9d348594a26c4dab147a8de4a
old_file: dvol/test_plugin.py
new_file: dvol/test_plugin.py
old_contents:
"""
log of integration tests to write:
write test_switch_branches_restarts_containers
command:
docker-compose up -d (in a directory with appropriate docker-compose.yml file)
expected behaviour:
docker containers are started with dvol accordingly
XXX this doesn't seem to work at the moment
command:
docker run -ti --volume-driver dvol -v hello:/data busybox sh
expected output:
Error response from daemon: Voluminous 'hello2' does not exist, create it with: dvol init hello2
command:
dvol init hello2
expected output:
Created volume hello2
Created branch hello2/master
command:
dvol commit ...
expected behaviour:
a container which only persists its in-memory state to disk occasionally (e.g. on shutdown) has correctly written out its state
command:
dvol reset...
expected behaviour:
a container which caches disk state in memory has correctly updated its state (IOW, containers get restarted around rollbacks)
command:
run a container using a dvol volume
dvol list
container names shows up in output
"""
new_contents:
"""
log of integration tests to write:
write test_switch_branches_restarts_containers
command:
docker-compose up -d (in a directory with appropriate docker-compose.yml file)
expected behaviour:
docker containers are started with dvol accordingly
command:
docker run -ti --volume-driver dvol -v hello:/data busybox sh
expected output:
dvol volume is created on-demand
command:
dvol init hello2
expected output:
Created volume hello2
Created branch hello2/master
command:
dvol commit ...
expected behaviour:
a container which only persists its in-memory state to disk occasionally (e.g. on shutdown) has correctly written out its state
command:
dvol reset...
expected behaviour:
a container which caches disk state in memory has correctly updated its state (IOW, containers get restarted around rollbacks)
command:
run a container using a dvol volume
expected behaviour:
dvol list
container names shows up in output
"""
subject: Tidy up list of tests to write.
message: Tidy up list of tests to write.
lang: Python
license: apache-2.0
repos: ClusterHQ/dvol,ClusterHQ/dvol,ClusterHQ/dvol

---

commit: a7311b1cd9a184af2a98130ba288157f62220da1
old_file: src/formatter.py
new_file: src/formatter.py
old_contents:
import json
from collections import OrderedDict
from .command import ShellCommand
from .settings import FormatterSettings
class Formatter():
def __init__(self, name, command=None, args=None, formatter=None):
self.__name = name
self.__format = formatter
self.__settings = FormatterSettings(name.lower())
if not formatter:
command = command.split(' ') if command else []
options = self.__settings.options
args = args.split(' ') if args else []
self.__format = ShellCommand(command + options + args).run
@property
def name(self):
return self.__name
@property
def sources(self):
return self.__settings.sources
@property
def format_on_save(self):
return self.__settings.format_on_save
@format_on_save.setter
def format_on_save(self, value):
self.__settings.format_on_save = value
def format(self, input):
return self.__format(input)
class JsonFormatter(Formatter):
def __init__(self):
def format_json(input):
try:
data = json.loads(input, object_pairs_hook=OrderedDict)
return json.dumps(data, indent=4), None
except ValueError:
return None, 'Invalid JSON'
super().__init__(name='JSON', formatter=format_json)
new_contents:
import json
from collections import OrderedDict
from .command import ShellCommand
from .settings import FormatterSettings
class Formatter():
def __init__(self, name, command='', args='', formatter=None):
self.__name = name
self.__settings = FormatterSettings(name.lower())
if formatter:
self.__format = formatter
else:
command = command.split(' ')
options = self.__settings.options
args = args.split(' ')
self.__format = ShellCommand(command + options + args).run
@property
def name(self):
return self.__name
@property
def sources(self):
return self.__settings.sources
@property
def format_on_save(self):
return self.__settings.format_on_save
@format_on_save.setter
def format_on_save(self, value):
self.__settings.format_on_save = value
def format(self, input):
return self.__format(input)
class JsonFormatter(Formatter):
def __init__(self):
def format_json(input):
try:
data = json.loads(input, object_pairs_hook=OrderedDict)
return json.dumps(data, indent=4), None
except ValueError:
return None, 'Invalid JSON'
super().__init__(name='JSON', formatter=format_json)
subject: Set default values for command and args
message: Set default values for command and args
lang: Python
license: mit
repos: Rypac/sublime-format

---

commit: e8a0e7c3714445577851c5a84ecf7a036937725a
old_file: clang_corpus/__init__.py
new_file: clang_corpus/__init__.py
old_contents:
from os import listdir
from os.path import abspath, isfile, join, splitext
# C, C++, Obj-C, & Obj-C++
SOURCE_EXTENSIONS = ('.h', '.hh', '.hpp', '.c', '.cpp', '.cxx', '.m', '.mm')
class SourceFile(object):
""" A simple object which wraps a text file.
"""
def __init__(self, path):
self._path = abspath(path)
@property
def path(self):
return self._path
@property
def bytes(self):
with open(self._path, "rb") as fp:
bytes = fp.read()
return bytes
def create_package_modules(package_path, module_dict):
""" Populate a module dictionary with `SourceFile` objects for each source
file in a directory.
"""
package_path = abspath(package_path)
for filename in listdir(package_path):
file_path = join(package_path, filename)
key, ext = splitext(filename)
if isfile(file_path) and ext in SOURCE_EXTENSIONS:
key, ext = splitext(filename)
module_dict[key] = SourceFile(file_path)
new_contents:
from os import listdir
from os.path import abspath, isfile, join, split, splitext
# C, C++, Obj-C, & Obj-C++
SOURCE_EXTENSIONS = ('.h', '.hh', '.hpp', '.c', '.cpp', '.cxx', '.m', '.mm')
class SourceFile(object):
""" A simple object which wraps a text file.
"""
def __init__(self, path):
self._path = abspath(path)
@property
def path(self):
return self._path
@property
def include_paths(self):
return [split(self._path)[0]]
@property
def bytes(self):
with open(self._path, "rb") as fp:
bytes = fp.read()
return bytes
def create_package_modules(package_path, module_dict):
""" Populate a module dictionary with `SourceFile` objects for each source
file in a directory.
"""
package_path = abspath(package_path)
for filename in listdir(package_path):
file_path = join(package_path, filename)
key, ext = splitext(filename)
if isfile(file_path) and ext in SOURCE_EXTENSIONS:
key, ext = splitext(filename)
module_dict[key] = SourceFile(file_path)
subject: Add an include_paths property to the SourceFile class.
message: Add an include_paths property to the SourceFile class.
lang: Python
license: unlicense
repos: jwiggins/clang-corpus,jwiggins/clang-corpus,jwiggins/clang-corpus

---

commit: 2402afe296191d3fddc98212564fb0158cfdcb51
old_file: upload_redirects.py
new_file: upload_redirects.py
old_contents:
import json
from backend.app import app, db
from backend.models import *
from flask import url_for
# read in json redirect dump
with open('data/prod_url_alias.json', 'r') as f:
redirects = json.loads(f.read())
print len(redirects)
old_urls = []
error_count = 0
for i in range(len(redirects)):
nid = None
try:
nid = int(redirects[i]['nid'])
except ValueError as e:
tmp = redirects[i]['nid']
if not 'user' in tmp:
tmp = tmp.split('/')
for item in tmp:
try:
nid = int(item)
break
except ValueError:
pass
url = redirects[i]['url']
if nid and not url in old_urls:
redirect = Redirect(nid=nid, old_url=url)
old_urls.append(url)
db.session.add(redirect)
else:
error_count += 1
print nid, redirects[i]['url']
if i % 500 == 0:
print "saving 500 redirects (" + str(i) + " out of " + str(len(redirects)) + ")"
db.session.commit()
db.session.commit()
print "Error count:", str(error_count)
new_contents:
import json
from backend.app import app, db
from backend.models import *
from flask import url_for
# read in json redirect dump
with open('data/nid_url.json', 'r') as f:
redirects = json.loads(f.read())
print len(redirects)
old_urls = []
existing_redirects = Redirect.query.all()
for redirect in existing_redirects:
old_urls.append(redirect.old_url)
error_count = 0
for i in range(len(redirects)):
nid = None
try:
nid = int(redirects[i]['nid'])
except ValueError as e:
tmp = redirects[i]['nid']
if not 'user' in tmp:
tmp = tmp.split('/')
for item in tmp:
try:
nid = int(item)
break
except ValueError:
pass
url = redirects[i]['url']
if nid and not url in old_urls:
redirect = Redirect(nid=nid, old_url=url)
old_urls.append(url)
db.session.add(redirect)
else:
error_count += 1
print nid, redirects[i]['url'].encode('utf8')
if i % 500 == 0:
print "saving 500 redirects (" + str(i) + " out of " + str(len(redirects)) + ")"
db.session.commit()
db.session.commit()
print "Error count:", str(error_count)
subject: Check for duplicates in existing dataset. Fix reference to dump file.
message: Check for duplicates in existing dataset. Fix reference to dump file.
lang: Python
license: apache-2.0
repos: Code4SA/pmg-cms-2,Code4SA/pmg-cms-2,Code4SA/pmg-cms-2
print len(redirects)
old_urls = []
existing_redirects = Redirect.query.all()
for redirect in existing_redirects:
old_urls.append(redirect.old_url)
error_count = 0
for i in range(len(redirects)):
nid = None
try:
nid = int(redirects[i]['nid'])
except ValueError as e:
tmp = redirects[i]['nid']
if not 'user' in tmp:
tmp = tmp.split('/')
for item in tmp:
try:
nid = int(item)
break
except ValueError:
pass
url = redirects[i]['url']
if nid and not url in old_urls:
redirect = Redirect(nid=nid, old_url=url)
old_urls.append(url)
db.session.add(redirect)
else:
error_count += 1
print nid, redirects[i]['url'].encode('utf8')
if i % 500 == 0:
print "saving 500 redirects (" + str(i) + " out of " + str(len(redirects)) + ")"
db.session.commit()
db.session.commit()
print "Error count:", str(error_count)
|
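Editor's note: the nid parsing in the record above is duplicated inline, and the duplicate check scans a Python list, which costs O(n) per lookup. A minimal sketch of the same logic factored out, assuming only the data shapes visible above (the name extract_nid is hypothetical, not part of the repo):
def extract_nid(raw):
    # Try the whole field first, then each '/'-separated part,
    # mirroring the fallback logic in the script above.
    try:
        return int(raw)
    except ValueError:
        if 'user' in raw:
            return None
        for part in raw.split('/'):
            try:
                return int(part)
            except ValueError:
                continue
    return None

old_urls = set()  # O(1) membership tests instead of scanning a list
|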
1a761c9360f185d6bd07be9f16ea2cfa239f4bdd
|
groupy/api/base.py
|
groupy/api/base.py
|
from groupy import utils
class Manager:
"""Class for interacting with the endpoint for a resource.
:param session: the requests session
:type session: :class:`~groupy.session.Session`
:param str path: path relative to the base URL
"""
#: the base URL
base_url = 'https://api.groupme.com/v3/'
def __init__(self, session, path=None):
self.session = session
self.url = utils.urljoin(self.base_url, path)
class Resource:
def __init__(self, **data):
self.data = data
def __getattr__(self, attr):
if attr not in self.data:
error_message = 'this {!s} resource does not have a {!r} field'
raise AttributeError(error_message.format(self.__class__.__name__,
attr))
return self.data[attr]
class ManagedResource(Resource):
"""Class to represent an API object."""
def __init__(self, manager, **data):
"""Create an instance of the resource.
:param manager: the resource's manager
:type manager: :class:`~groupy.api.base.Manager`
:param kwargs data: the resource data
"""
super().__init__(**data)
self.manager = manager
|
from groupy import utils
class Manager:
"""Class for interacting with the endpoint for a resource.
:param session: the requests session
:type session: :class:`~groupy.session.Session`
:param str path: path relative to the base URL
"""
#: the base URL
base_url = 'https://api.groupme.com/v3/'
def __init__(self, session, path=None):
self.session = session
self.url = utils.urljoin(self.base_url, path)
class Resource:
def __init__(self, **data):
self.data = data
def __getattr__(self, attr):
if attr not in self.data:
error_message = 'this {!s} resource does not have a {!r} field'
raise AttributeError(error_message.format(self.__class__.__name__,
attr))
return self.data[attr]
def __getstate__(self):
return self.__dict__
def __setstate__(self, d):
self.__dict__.update(d)
class ManagedResource(Resource):
"""Class to represent an API object."""
def __init__(self, manager, **data):
"""Create an instance of the resource.
:param manager: the resource's manager
:type manager: :class:`~groupy.api.base.Manager`
:param kwargs data: the resource data
"""
super().__init__(**data)
self.manager = manager
|
Fix pickling/unpickling of Resource objects
|
Fix pickling/unpickling of Resource objects
Add __getstate__ and __setstate__ methods to the Resource class to avoid hitting the recursion limit when trying to pickle/unpickle Resource objects.
A similar issue/solution can be found here: https://stackoverflow.com/a/12102691
|
Python
|
apache-2.0
|
rhgrant10/Groupy
|
from groupy import utils
class Manager:
"""Class for interacting with the endpoint for a resource.
:param session: the requests session
:type session: :class:`~groupy.session.Session`
:param str path: path relative to the base URL
"""
#: the base URL
base_url = 'https://api.groupme.com/v3/'
def __init__(self, session, path=None):
self.session = session
self.url = utils.urljoin(self.base_url, path)
class Resource:
def __init__(self, **data):
self.data = data
def __getattr__(self, attr):
if attr not in self.data:
error_message = 'this {!s} resource does not have a {!r} field'
raise AttributeError(error_message.format(self.__class__.__name__,
attr))
return self.data[attr]
class ManagedResource(Resource):
"""Class to represent an API object."""
def __init__(self, manager, **data):
"""Create an instance of the resource.
:param manager: the resource's manager
:type manager: :class:`~groupy.api.base.Manager`
:param kwargs data: the resource data
"""
super().__init__(**data)
self.manager = manager
Fix pickling/unpickling of Resource objects
Add __getstate__ and __setstate__ methods to the Resource class to avoid hitting the recursion limit when trying to pickle/unpickle Resource objects.
A similar issue/solution can be found here: https://stackoverflow.com/a/12102691
|
from groupy import utils
class Manager:
"""Class for interacting with the endpoint for a resource.
:param session: the requests session
:type session: :class:`~groupy.session.Session`
:param str path: path relative to the base URL
"""
#: the base URL
base_url = 'https://api.groupme.com/v3/'
def __init__(self, session, path=None):
self.session = session
self.url = utils.urljoin(self.base_url, path)
class Resource:
def __init__(self, **data):
self.data = data
def __getattr__(self, attr):
if attr not in self.data:
error_message = 'this {!s} resource does not have a {!r} field'
raise AttributeError(error_message.format(self.__class__.__name__,
attr))
return self.data[attr]
def __getstate__(self):
return self.__dict__
def __setstate__(self, d):
self.__dict__.update(d)
class ManagedResource(Resource):
"""Class to represent an API object."""
def __init__(self, manager, **data):
"""Create an instance of the resource.
:param manager: the resource's manager
:type manager: :class:`~groupy.api.base.Manager`
:param kwargs data: the resource data
"""
super().__init__(**data)
self.manager = manager
|
<commit_before>from groupy import utils
class Manager:
"""Class for interacting with the endpoint for a resource.
:param session: the requests session
:type session: :class:`~groupy.session.Session`
:param str path: path relative to the base URL
"""
#: the base URL
base_url = 'https://api.groupme.com/v3/'
def __init__(self, session, path=None):
self.session = session
self.url = utils.urljoin(self.base_url, path)
class Resource:
def __init__(self, **data):
self.data = data
def __getattr__(self, attr):
if attr not in self.data:
error_message = 'this {!s} resource does not have a {!r} field'
raise AttributeError(error_message.format(self.__class__.__name__,
attr))
return self.data[attr]
class ManagedResource(Resource):
"""Class to represent an API object."""
def __init__(self, manager, **data):
"""Create an instance of the resource.
:param manager: the resource's manager
:type manager: :class:`~groupy.api.base.Manager`
:param kwargs data: the resource data
"""
super().__init__(**data)
self.manager = manager
<commit_msg>Fix pickling/unpickling of Resource objects
Add __getstate__ and __setstate__ methods to the Resource class to avoid hitting the recursion limit when trying to pickle/unpickle Resource objects.
A similar issue/solution can be found here: https://stackoverflow.com/a/12102691<commit_after>
|
from groupy import utils
class Manager:
"""Class for interacting with the endpoint for a resource.
:param session: the requests session
:type session: :class:`~groupy.session.Session`
:param str path: path relative to the base URL
"""
#: the base URL
base_url = 'https://api.groupme.com/v3/'
def __init__(self, session, path=None):
self.session = session
self.url = utils.urljoin(self.base_url, path)
class Resource:
def __init__(self, **data):
self.data = data
def __getattr__(self, attr):
if attr not in self.data:
error_message = 'this {!s} resource does not have a {!r} field'
raise AttributeError(error_message.format(self.__class__.__name__,
attr))
return self.data[attr]
def __getstate__(self):
return self.__dict__
def __setstate__(self, d):
self.__dict__.update(d)
class ManagedResource(Resource):
"""Class to represent an API object."""
def __init__(self, manager, **data):
"""Create an instance of the resource.
:param manager: the resource's manager
:type manager: :class:`~groupy.api.base.Manager`
:param kwargs data: the resource data
"""
super().__init__(**data)
self.manager = manager
|
from groupy import utils
class Manager:
"""Class for interacting with the endpoint for a resource.
:param session: the requests session
:type session: :class:`~groupy.session.Session`
:param str path: path relative to the base URL
"""
#: the base URL
base_url = 'https://api.groupme.com/v3/'
def __init__(self, session, path=None):
self.session = session
self.url = utils.urljoin(self.base_url, path)
class Resource:
def __init__(self, **data):
self.data = data
def __getattr__(self, attr):
if attr not in self.data:
error_message = 'this {!s} resource does not have a {!r} field'
raise AttributeError(error_message.format(self.__class__.__name__,
attr))
return self.data[attr]
class ManagedResource(Resource):
"""Class to represent an API object."""
def __init__(self, manager, **data):
"""Create an instance of the resource.
:param manager: the resource's manager
:type manager: :class:`~groupy.api.base.Manager`
:param kwargs data: the resource data
"""
super().__init__(**data)
self.manager = manager
Fix pickling/unpickling of Resource objects
Add __getstate__ and __setstate__ methods to the Resource class to avoid hitting the recursion limit when trying to pickle/unpickle Resource objects.
A similar issue/solution can be found here: https://stackoverflow.com/a/12102691
from groupy import utils
class Manager:
"""Class for interacting with the endpoint for a resource.
:param session: the requests session
:type session: :class:`~groupy.session.Session`
:param str path: path relative to the base URL
"""
#: the base URL
base_url = 'https://api.groupme.com/v3/'
def __init__(self, session, path=None):
self.session = session
self.url = utils.urljoin(self.base_url, path)
class Resource:
def __init__(self, **data):
self.data = data
def __getattr__(self, attr):
if attr not in self.data:
error_message = 'this {!s} resource does not have a {!r} field'
raise AttributeError(error_message.format(self.__class__.__name__,
attr))
return self.data[attr]
def __getstate__(self):
return self.__dict__
def __setstate__(self, d):
self.__dict__.update(d)
class ManagedResource(Resource):
"""Class to represent an API object."""
def __init__(self, manager, **data):
"""Create an instance of the resource.
:param manager: the resource's manager
:type manager: :class:`~groupy.api.base.Manager`
:param kwargs data: the resource data
"""
super().__init__(**data)
self.manager = manager
|
<commit_before>from groupy import utils
class Manager:
"""Class for interacting with the endpoint for a resource.
:param session: the requests session
:type session: :class:`~groupy.session.Session`
:param str path: path relative to the base URL
"""
#: the base URL
base_url = 'https://api.groupme.com/v3/'
def __init__(self, session, path=None):
self.session = session
self.url = utils.urljoin(self.base_url, path)
class Resource:
def __init__(self, **data):
self.data = data
def __getattr__(self, attr):
if attr not in self.data:
error_message = 'this {!s} resource does not have a {!r} field'
raise AttributeError(error_message.format(self.__class__.__name__,
attr))
return self.data[attr]
class ManagedResource(Resource):
"""Class to represent an API object."""
def __init__(self, manager, **data):
"""Create an instance of the resource.
:param manager: the resource's manager
:type manager: :class:`~groupy.api.base.Manager`
:param kwargs data: the resource data
"""
super().__init__(**data)
self.manager = manager
<commit_msg>Fix pickling/unpickling of Resource objects
Add __getstate__ and __setstate__ methods to the Resource class to avoid hitting the recursion limit when trying to pickle/unpickle Resource objects.
A similar issue/solution can be found here: https://stackoverflow.com/a/12102691<commit_after>from groupy import utils
class Manager:
"""Class for interacting with the endpoint for a resource.
:param session: the requests session
:type session: :class:`~groupy.session.Session`
:param str path: path relative to the base URL
"""
#: the base URL
base_url = 'https://api.groupme.com/v3/'
def __init__(self, session, path=None):
self.session = session
self.url = utils.urljoin(self.base_url, path)
class Resource:
def __init__(self, **data):
self.data = data
def __getattr__(self, attr):
if attr not in self.data:
error_message = 'this {!s} resource does not have a {!r} field'
raise AttributeError(error_message.format(self.__class__.__name__,
attr))
return self.data[attr]
def __getstate__(self):
return self.__dict__
def __setstate__(self, d):
self.__dict__.update(d)
class ManagedResource(Resource):
"""Class to represent an API object."""
def __init__(self, manager, **data):
"""Create an instance of the resource.
:param manager: the resource's manager
:type manager: :class:`~groupy.api.base.Manager`
:param kwargs data: the resource data
"""
super().__init__(**data)
self.manager = manager
|
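Editor's note: the recursion this commit fixes arises because pickle probes special methods such as __setstate__ on a bare instance, before __dict__ contains 'data'; Resource.__getattr__ then reads self.data, which triggers __getattr__ again, and so on. With __getstate__/__setstate__ defined on the class, attribute lookup never falls through to __getattr__. A hedged round-trip sketch, relying on the Resource class shown in the record above:
import pickle

r = Resource(id=1, name='example')   # Resource as defined in groupy/api/base.py above
blob = pickle.dumps(r)               # __getstate__ returns self.__dict__
restored = pickle.loads(blob)        # __setstate__ repopulates __dict__
assert restored.name == 'example'    # __getattr__ now finds self.data
|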
8626733d3a4960013189ffa90fe8e496dd8cc90a
|
calexicon/constants.py
|
calexicon/constants.py
|
from datetime import date as vanilla_date
from dates.base import BasicBCEDate
first_julian_date = BasicBCEDate(-45, 1, 1)
first_vanilla_date = vanilla_date(1, 1, 1)
last_vanilla_date = vanilla_date(9999, 12, 31)
julian_day_number_of_first_vanilla_date = 1721423
julian_day_number_of_last_vanilla_date = (
julian_day_number_of_first_vanilla_date
+ (last_vanilla_date - first_vanilla_date).days
)
|
from datetime import date as vanilla_date
from dates.base import BasicBCEDate
first_julian_date = BasicBCEDate(-45, 1, 1)
first_vanilla_date = vanilla_date(1, 1, 1)
last_vanilla_date = vanilla_date(9999, 12, 31)
julian_day_number_of_first_vanilla_date = 1721423
julian_day_number_of_last_vanilla_date = (
julian_day_number_of_first_vanilla_date
+ (last_vanilla_date - first_vanilla_date).days
)
number_of_vanilla_dates = (last_vanilla_date - first_vanilla_date).days
|
Add another constant for the number of vanilla dates.
|
Add another constant for the number of vanilla dates.
|
Python
|
apache-2.0
|
jwg4/calexicon,jwg4/qual
|
from datetime import date as vanilla_date
from dates.base import BasicBCEDate
first_julian_date = BasicBCEDate(-45, 1, 1)
first_vanilla_date = vanilla_date(1, 1, 1)
last_vanilla_date = vanilla_date(9999, 12, 31)
julian_day_number_of_first_vanilla_date = 1721423
julian_day_number_of_last_vanilla_date = (
julian_day_number_of_first_vanilla_date
+ (last_vanilla_date - first_vanilla_date).days
)
Add another constant for the number of vanilla dates.
|
from datetime import date as vanilla_date
from dates.base import BasicBCEDate
first_julian_date = BasicBCEDate(-45, 1, 1)
first_vanilla_date = vanilla_date(1, 1, 1)
last_vanilla_date = vanilla_date(9999, 12, 31)
julian_day_number_of_first_vanilla_date = 1721423
julian_day_number_of_last_vanilla_date = (
julian_day_number_of_first_vanilla_date
+ (last_vanilla_date - first_vanilla_date).days
)
number_of_vanilla_dates = (last_vanilla_date - first_vanilla_date).days
|
<commit_before>from datetime import date as vanilla_date
from dates.base import BasicBCEDate
first_julian_date = BasicBCEDate(-45, 1, 1)
first_vanilla_date = vanilla_date(1, 1, 1)
last_vanilla_date = vanilla_date(9999, 12, 31)
julian_day_number_of_first_vanilla_date = 1721423
julian_day_number_of_last_vanilla_date = (
julian_day_number_of_first_vanilla_date
+ (last_vanilla_date - first_vanilla_date).days
)
<commit_msg>Add another constant for the number of vanilla dates.<commit_after>
|
from datetime import date as vanilla_date
from dates.base import BasicBCEDate
first_julian_date = BasicBCEDate(-45, 1, 1)
first_vanilla_date = vanilla_date(1, 1, 1)
last_vanilla_date = vanilla_date(9999, 12, 31)
julian_day_number_of_first_vanilla_date = 1721423
julian_day_number_of_last_vanilla_date = (
julian_day_number_of_first_vanilla_date
+ (last_vanilla_date - first_vanilla_date).days
)
number_of_vanilla_dates = (last_vanilla_date - first_vanilla_date).days
|
from datetime import date as vanilla_date
from dates.base import BasicBCEDate
first_julian_date = BasicBCEDate(-45, 1, 1)
first_vanilla_date = vanilla_date(1, 1, 1)
last_vanilla_date = vanilla_date(9999, 12, 31)
julian_day_number_of_first_vanilla_date = 1721423
julian_day_number_of_last_vanilla_date = (
julian_day_number_of_first_vanilla_date
+ (last_vanilla_date - first_vanilla_date).days
)
Add another constant for the number of vanilla dates.
from datetime import date as vanilla_date
from dates.base import BasicBCEDate
first_julian_date = BasicBCEDate(-45, 1, 1)
first_vanilla_date = vanilla_date(1, 1, 1)
last_vanilla_date = vanilla_date(9999, 12, 31)
julian_day_number_of_first_vanilla_date = 1721423
julian_day_number_of_last_vanilla_date = (
julian_day_number_of_first_vanilla_date
+ (last_vanilla_date - first_vanilla_date).days
)
number_of_vanilla_dates = (last_vanilla_date - first_vanilla_date).days
|
<commit_before>from datetime import date as vanilla_date
from dates.base import BasicBCEDate
first_julian_date = BasicBCEDate(-45, 1, 1)
first_vanilla_date = vanilla_date(1, 1, 1)
last_vanilla_date = vanilla_date(9999, 12, 31)
julian_day_number_of_first_vanilla_date = 1721423
julian_day_number_of_last_vanilla_date = (
julian_day_number_of_first_vanilla_date
+ (last_vanilla_date - first_vanilla_date).days
)
<commit_msg>Add another constant for the number of vanilla dates.<commit_after>from datetime import date as vanilla_date
from dates.base import BasicBCEDate
first_julian_date = BasicBCEDate(-45, 1, 1)
first_vanilla_date = vanilla_date(1, 1, 1)
last_vanilla_date = vanilla_date(9999, 12, 31)
julian_day_number_of_first_vanilla_date = 1721423
julian_day_number_of_last_vanilla_date = (
julian_day_number_of_first_vanilla_date
+ (last_vanilla_date - first_vanilla_date).days
)
number_of_vanilla_dates = (last_vanilla_date - first_vanilla_date).days
|
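Editor's note: the new constant can be cross-checked with the standard library alone, since datetime.date uses proleptic-Gregorian ordinals. The literal 3652058 below is just (9999-12-31) minus (0001-01-01) in days, and adding the first Julian day number 1721423 gives the last one:
from datetime import date

span = (date(9999, 12, 31) - date(1, 1, 1)).days
assert span == 3652058            # number_of_vanilla_dates
assert 1721423 + span == 5373481  # julian_day_number_of_last_vanilla_date
|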
195f4d25bdf30f355d029697675481ba3193ad77
|
gatt/gatt.py
|
gatt/gatt.py
|
import os
import platform
if platform.system() == 'Darwin':
if os.environ.get('LINUX_WITHOUT_DBUS', '0') == '0':
from .gatt_linux import *
else:
from .gatt_stubs import *
else:
# TODO: Add support for more platforms
from .gatt_stubs import *
|
import os
import platform
if platform.system() == 'Linux':
if os.environ.get('LINUX_WITHOUT_DBUS', '0') == '0':
from .gatt_linux import *
else:
from .gatt_stubs import *
else:
# TODO: Add support for more platforms
from .gatt_stubs import *
|
Fix testing for wrong platform name
|
Fix testing for wrong platform name
|
Python
|
mit
|
getsenic/gatt-python
|
import os
import platform
if platform.system() == 'Darwin':
if os.environ.get('LINUX_WITHOUT_DBUS', '0') == '0':
from .gatt_linux import *
else:
from .gatt_stubs import *
else:
# TODO: Add support for more platforms
from .gatt_stubs import *
Fix testing for wrong platform name
|
import os
import platform
if platform.system() == 'Linux':
if os.environ.get('LINUX_WITHOUT_DBUS', '0') == '0':
from .gatt_linux import *
else:
from .gatt_stubs import *
else:
# TODO: Add support for more platforms
from .gatt_stubs import *
|
<commit_before>import os
import platform
if platform.system() == 'Darwin':
if os.environ.get('LINUX_WITHOUT_DBUS', '0') == '0':
from .gatt_linux import *
else:
from .gatt_stubs import *
else:
# TODO: Add support for more platforms
from .gatt_stubs import *
<commit_msg>Fix testing for wrong platform name<commit_after>
|
import os
import platform
if platform.system() == 'Linux':
if os.environ.get('LINUX_WITHOUT_DBUS', '0') == '0':
from .gatt_linux import *
else:
from .gatt_stubs import *
else:
# TODO: Add support for more platforms
from .gatt_stubs import *
|
import os
import platform
if platform.system() == 'Darwin':
if os.environ.get('LINUX_WITHOUT_DBUS', '0') == '0':
from .gatt_linux import *
else:
from .gatt_stubs import *
else:
# TODO: Add support for more platforms
from .gatt_stubs import *
Fix testing for wrong platform name
import os
import platform
if platform.system() == 'Linux':
if os.environ.get('LINUX_WITHOUT_DBUS', '0') == '0':
from .gatt_linux import *
else:
from .gatt_stubs import *
else:
# TODO: Add support for more platforms
from .gatt_stubs import *
|
<commit_before>import os
import platform
if platform.system() == 'Darwin':
if os.environ.get('LINUX_WITHOUT_DBUS', '0') == '0':
from .gatt_linux import *
else:
from .gatt_stubs import *
else:
# TODO: Add support for more platforms
from .gatt_stubs import *
<commit_msg>Fix testing for wrong platform name<commit_after>import os
import platform
if platform.system() == 'Linux':
if os.environ.get('LINUX_WITHOUT_DBUS', '0') == '0':
from .gatt_linux import *
else:
from .gatt_stubs import *
else:
# TODO: Add support for more platforms
from .gatt_stubs import *
|
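Editor's note: a Darwin/Linux mix-up like the one fixed here is easy to miss because the branch only runs at import time. One way to make the condition unit-testable is to lift it into a function; a sketch under the assumption that the module layout stays as above (use_dbus_backend is a hypothetical name):
import os
import platform

def use_dbus_backend():
    # True only on Linux when DBus support has not been disabled.
    return (platform.system() == 'Linux'
            and os.environ.get('LINUX_WITHOUT_DBUS', '0') == '0')
The import block then reduces to a single if use_dbus_backend():, and a test can patch platform.system and the environment without re-importing the package.
|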
67717116a2585975e0e773956d6a102be9dc11d6
|
ggplot/geoms/geom_boxplot.py
|
ggplot/geoms/geom_boxplot.py
|
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib.cbook as cbook
from .geom import geom
from ggplot.utils import is_string
from ggplot.utils import is_categorical
class geom_boxplot(geom):
DEFAULT_AES = {'y': None, 'color': 'black', 'flier_marker': '+'}
REQUIRED_AES = {'x'}
DEFAULT_PARAMS = {'stat': 'identity', 'position': 'identity'}
def __group(self, x, y):
out = {}
for xx, yy in zip(x,y):
if yy not in out: out[yy] = []
out[yy].append(xx)
return out
def _plot_unit(self, pinfo, ax):
x = pinfo.pop('x')
y = pinfo.pop('y')
color = pinfo.pop('color')
fliermarker = pinfo.pop('flier_marker')
if y is not None:
g = self.__group(x,y)
l = sorted(g.keys())
x = [g[k] for k in l]
plt.setp(ax, yticklabels=l)
q = ax.boxplot(x, vert=False)
plt.setp(q['boxes'], color=color)
plt.setp(q['whiskers'], color=color)
plt.setp(q['fliers'], color=color, marker=fliermarker)
|
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib.cbook as cbook
from .geom import geom
from ggplot.utils import is_string
from ggplot.utils import is_categorical
class geom_boxplot(geom):
DEFAULT_AES = {'y': None, 'color': 'black', 'flier_marker': '+'}
REQUIRED_AES = {'x'}
DEFAULT_PARAMS = {'stat': 'identity', 'position': 'identity'}
def __group(self, x, y):
out = {}
for xx, yy in zip(x,y):
if yy not in out: out[yy] = []
out[yy].append(xx)
return out
def _plot_unit(self, pinfo, ax):
x = pinfo.pop('x')
y = pinfo.pop('y')
color = pinfo.pop('color')
fliermarker = pinfo.pop('flier_marker')
if y is not None:
g = self.__group(x,y)
l = sorted(g.keys())
x = [g[k] for k in l]
q = ax.boxplot(x, vert=False)
plt.setp(q['boxes'], color=color)
plt.setp(q['whiskers'], color=color)
plt.setp(q['fliers'], color=color, marker=fliermarker)
if l:
plt.setp(ax, yticklabels=l)
|
Fix y axis tick labels for boxplot
|
Fix y axis tick labels for boxplot
|
Python
|
bsd-2-clause
|
xguse/ggplot,benslice/ggplot,smblance/ggplot,Cophy08/ggplot,mizzao/ggplot,xguse/ggplot,ricket1978/ggplot,mizzao/ggplot,andnovar/ggplot,bitemyapp/ggplot,kmather73/ggplot,udacity/ggplot,ricket1978/ggplot,benslice/ggplot,wllmtrng/ggplot,assad2012/ggplot
|
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib.cbook as cbook
from .geom import geom
from ggplot.utils import is_string
from ggplot.utils import is_categorical
class geom_boxplot(geom):
DEFAULT_AES = {'y': None, 'color': 'black', 'flier_marker': '+'}
REQUIRED_AES = {'x'}
DEFAULT_PARAMS = {'stat': 'identity', 'position': 'identity'}
def __group(self, x, y):
out = {}
for xx, yy in zip(x,y):
if yy not in out: out[yy] = []
out[yy].append(xx)
return out
def _plot_unit(self, pinfo, ax):
x = pinfo.pop('x')
y = pinfo.pop('y')
color = pinfo.pop('color')
fliermarker = pinfo.pop('flier_marker')
if y is not None:
g = self.__group(x,y)
l = sorted(g.keys())
x = [g[k] for k in l]
plt.setp(ax, yticklabels=l)
q = ax.boxplot(x, vert=False)
plt.setp(q['boxes'], color=color)
plt.setp(q['whiskers'], color=color)
plt.setp(q['fliers'], color=color, marker=fliermarker)
Fix y axis tick labels for boxplot
|
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib.cbook as cbook
from .geom import geom
from ggplot.utils import is_string
from ggplot.utils import is_categorical
class geom_boxplot(geom):
DEFAULT_AES = {'y': None, 'color': 'black', 'flier_marker': '+'}
REQUIRED_AES = {'x'}
DEFAULT_PARAMS = {'stat': 'identity', 'position': 'identity'}
def __group(self, x, y):
out = {}
for xx, yy in zip(x,y):
if yy not in out: out[yy] = []
out[yy].append(xx)
return out
def _plot_unit(self, pinfo, ax):
x = pinfo.pop('x')
y = pinfo.pop('y')
color = pinfo.pop('color')
fliermarker = pinfo.pop('flier_marker')
if y is not None:
g = self.__group(x,y)
l = sorted(g.keys())
x = [g[k] for k in l]
q = ax.boxplot(x, vert=False)
plt.setp(q['boxes'], color=color)
plt.setp(q['whiskers'], color=color)
plt.setp(q['fliers'], color=color, marker=fliermarker)
if l:
plt.setp(ax, yticklabels=l)
|
<commit_before>from __future__ import (absolute_import, division, print_function,
unicode_literals)
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib.cbook as cbook
from .geom import geom
from ggplot.utils import is_string
from ggplot.utils import is_categorical
class geom_boxplot(geom):
DEFAULT_AES = {'y': None, 'color': 'black', 'flier_marker': '+'}
REQUIRED_AES = {'x'}
DEFAULT_PARAMS = {'stat': 'identity', 'position': 'identity'}
def __group(self, x, y):
out = {}
for xx, yy in zip(x,y):
if yy not in out: out[yy] = []
out[yy].append(xx)
return out
def _plot_unit(self, pinfo, ax):
x = pinfo.pop('x')
y = pinfo.pop('y')
color = pinfo.pop('color')
fliermarker = pinfo.pop('flier_marker')
if y is not None:
g = self.__group(x,y)
l = sorted(g.keys())
x = [g[k] for k in l]
plt.setp(ax, yticklabels=l)
q = ax.boxplot(x, vert=False)
plt.setp(q['boxes'], color=color)
plt.setp(q['whiskers'], color=color)
plt.setp(q['fliers'], color=color, marker=fliermarker)
<commit_msg>Fix y axis tick labels for boxplot<commit_after>
|
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib.cbook as cbook
from .geom import geom
from ggplot.utils import is_string
from ggplot.utils import is_categorical
class geom_boxplot(geom):
DEFAULT_AES = {'y': None, 'color': 'black', 'flier_marker': '+'}
REQUIRED_AES = {'x'}
DEFAULT_PARAMS = {'stat': 'identity', 'position': 'identity'}
def __group(self, x, y):
out = {}
for xx, yy in zip(x,y):
if yy not in out: out[yy] = []
out[yy].append(xx)
return out
def _plot_unit(self, pinfo, ax):
x = pinfo.pop('x')
y = pinfo.pop('y')
color = pinfo.pop('color')
fliermarker = pinfo.pop('flier_marker')
if y is not None:
g = self.__group(x,y)
l = sorted(g.keys())
x = [g[k] for k in l]
q = ax.boxplot(x, vert=False)
plt.setp(q['boxes'], color=color)
plt.setp(q['whiskers'], color=color)
plt.setp(q['fliers'], color=color, marker=fliermarker)
if l:
plt.setp(ax, yticklabels=l)
|
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib.cbook as cbook
from .geom import geom
from ggplot.utils import is_string
from ggplot.utils import is_categorical
class geom_boxplot(geom):
DEFAULT_AES = {'y': None, 'color': 'black', 'flier_marker': '+'}
REQUIRED_AES = {'x'}
DEFAULT_PARAMS = {'stat': 'identity', 'position': 'identity'}
def __group(self, x, y):
out = {}
for xx, yy in zip(x,y):
if yy not in out: out[yy] = []
out[yy].append(xx)
return out
def _plot_unit(self, pinfo, ax):
x = pinfo.pop('x')
y = pinfo.pop('y')
color = pinfo.pop('color')
fliermarker = pinfo.pop('flier_marker')
if y is not None:
g = self.__group(x,y)
l = sorted(g.keys())
x = [g[k] for k in l]
plt.setp(ax, yticklabels=l)
q = ax.boxplot(x, vert=False)
plt.setp(q['boxes'], color=color)
plt.setp(q['whiskers'], color=color)
plt.setp(q['fliers'], color=color, marker=fliermarker)
Fix y axis tick labels for boxplot
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib.cbook as cbook
from .geom import geom
from ggplot.utils import is_string
from ggplot.utils import is_categorical
class geom_boxplot(geom):
DEFAULT_AES = {'y': None, 'color': 'black', 'flier_marker': '+'}
REQUIRED_AES = {'x'}
DEFAULT_PARAMS = {'stat': 'identity', 'position': 'identity'}
def __group(self, x, y):
out = {}
for xx, yy in zip(x,y):
if yy not in out: out[yy] = []
out[yy].append(xx)
return out
def _plot_unit(self, pinfo, ax):
x = pinfo.pop('x')
y = pinfo.pop('y')
color = pinfo.pop('color')
fliermarker = pinfo.pop('flier_marker')
if y is not None:
g = self.__group(x,y)
l = sorted(g.keys())
x = [g[k] for k in l]
q = ax.boxplot(x, vert=False)
plt.setp(q['boxes'], color=color)
plt.setp(q['whiskers'], color=color)
plt.setp(q['fliers'], color=color, marker=fliermarker)
if l:
plt.setp(ax, yticklabels=l)
|
<commit_before>from __future__ import (absolute_import, division, print_function,
unicode_literals)
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib.cbook as cbook
from .geom import geom
from ggplot.utils import is_string
from ggplot.utils import is_categorical
class geom_boxplot(geom):
DEFAULT_AES = {'y': None, 'color': 'black', 'flier_marker': '+'}
REQUIRED_AES = {'x'}
DEFAULT_PARAMS = {'stat': 'identity', 'position': 'identity'}
def __group(self, x, y):
out = {}
for xx, yy in zip(x,y):
if yy not in out: out[yy] = []
out[yy].append(xx)
return out
def _plot_unit(self, pinfo, ax):
x = pinfo.pop('x')
y = pinfo.pop('y')
color = pinfo.pop('color')
fliermarker = pinfo.pop('flier_marker')
if y is not None:
g = self.__group(x,y)
l = sorted(g.keys())
x = [g[k] for k in l]
plt.setp(ax, yticklabels=l)
q = ax.boxplot(x, vert=False)
plt.setp(q['boxes'], color=color)
plt.setp(q['whiskers'], color=color)
plt.setp(q['fliers'], color=color, marker=fliermarker)
<commit_msg>Fix y axis tick labels for boxplot<commit_after>from __future__ import (absolute_import, division, print_function,
unicode_literals)
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib.cbook as cbook
from .geom import geom
from ggplot.utils import is_string
from ggplot.utils import is_categorical
class geom_boxplot(geom):
DEFAULT_AES = {'y': None, 'color': 'black', 'flier_marker': '+'}
REQUIRED_AES = {'x'}
DEFAULT_PARAMS = {'stat': 'identity', 'position': 'identity'}
def __group(self, x, y):
out = {}
for xx, yy in zip(x,y):
if yy not in out: out[yy] = []
out[yy].append(xx)
return out
def _plot_unit(self, pinfo, ax):
x = pinfo.pop('x')
y = pinfo.pop('y')
color = pinfo.pop('color')
fliermarker = pinfo.pop('flier_marker')
if y is not None:
g = self.__group(x,y)
l = sorted(g.keys())
x = [g[k] for k in l]
q = ax.boxplot(x, vert=False)
plt.setp(q['boxes'], color=color)
plt.setp(q['whiskers'], color=color)
plt.setp(q['fliers'], color=color, marker=fliermarker)
if l:
plt.setp(ax, yticklabels=l)
|
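Editor's note: the fix works because Axes.boxplot installs its own y ticks, so labels applied beforehand are overwritten; applying them after the call (and only when labels exist) keeps them. A standalone matplotlib sketch of the ordering:
import matplotlib.pyplot as plt

fig, ax = plt.subplots()
ax.boxplot([[1, 2, 3], [2, 4, 6]], vert=False)  # boxplot resets the y ticks
plt.setp(ax, yticklabels=['a', 'b'])            # so set the labels afterwards
Depending on the matplotlib version, boxplot can also take the labels directly (labels=, later renamed tick_labels=), which sidesteps the ordering concern altogether.
|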
468e2369a2b1af203d8a00abbfb3b01af26ae89a
|
bot/multithreading/worker.py
|
bot/multithreading/worker.py
|
import queue
from bot.multithreading.work import Work
class Worker:
def run(self):
raise NotImplementedError()
def post(self, work: Work):
raise NotImplementedError()
def shutdown(self):
raise NotImplementedError()
class AbstractWorker(Worker):
def __init__(self, name: str, error_handler: callable):
self.name = name
self.error_handler = error_handler
def _work(self, work: Work):
try:
work.do_work()
except BaseException as e:
self._error(e, work)
def _error(self, e: BaseException, work: Work):
try:
self.error_handler(e, work, self)
except:
pass
def run(self):
raise NotImplementedError()
def post(self, work: Work):
raise NotImplementedError()
def shutdown(self):
raise NotImplementedError()
class QueueWorker(AbstractWorker):
def __init__(self, name: str, work_queue: queue.Queue, error_handler: callable):
super().__init__(name, error_handler)
self.queue = work_queue
def run(self):
while True:
work = self.queue.get()
self._work(work)
self.queue.task_done()
def post(self, work: Work):
self.queue.put(work)
def shutdown(self):
self.queue.join()
|
import queue
from bot.multithreading.work import Work
class Worker:
def run(self):
raise NotImplementedError()
def post(self, work: Work):
raise NotImplementedError()
def shutdown(self):
raise NotImplementedError()
class AbstractWorker(Worker):
def __init__(self, name: str, error_handler: callable):
self.name = name
self.error_handler = error_handler
def _work(self, work: Work):
try:
work.do_work()
except BaseException as e:
self._error(e, work)
def _error(self, e: BaseException, work: Work):
try:
self.error_handler(e, work, self)
except:
pass
def run(self):
raise NotImplementedError()
def post(self, work: Work):
raise NotImplementedError()
def shutdown(self):
raise NotImplementedError()
class QueueWorker(AbstractWorker):
def __init__(self, name: str, work_queue: queue.Queue, error_handler: callable):
super().__init__(name, error_handler)
self.queue = work_queue
def run(self):
while True:
work = self.queue.get()
self._work(work)
self.queue.task_done()
def post(self, work: Work):
self.queue.put(work)
def shutdown(self):
self.queue.join()
class ImmediateWorker(AbstractWorker):
def __init__(self, error_handler: callable):
super().__init__("immediate", error_handler)
def run(self):
pass
def post(self, work: Work):
self._work(work)
def shutdown(self):
pass
|
Create a ImmediateWorker that executes the posted jobs on the same thread synchronously
|
Create a ImmediateWorker that executes the posted jobs on the same thread synchronously
|
Python
|
agpl-3.0
|
alvarogzp/telegram-bot,alvarogzp/telegram-bot
|
import queue
from bot.multithreading.work import Work
class Worker:
def run(self):
raise NotImplementedError()
def post(self, work: Work):
raise NotImplementedError()
def shutdown(self):
raise NotImplementedError()
class AbstractWorker(Worker):
def __init__(self, name: str, error_handler: callable):
self.name = name
self.error_handler = error_handler
def _work(self, work: Work):
try:
work.do_work()
except BaseException as e:
self._error(e, work)
def _error(self, e: BaseException, work: Work):
try:
self.error_handler(e, work, self)
except:
pass
def run(self):
raise NotImplementedError()
def post(self, work: Work):
raise NotImplementedError()
def shutdown(self):
raise NotImplementedError()
class QueueWorker(AbstractWorker):
def __init__(self, name: str, work_queue: queue.Queue, error_handler: callable):
super().__init__(name, error_handler)
self.queue = work_queue
def run(self):
while True:
work = self.queue.get()
self._work(work)
self.queue.task_done()
def post(self, work: Work):
self.queue.put(work)
def shutdown(self):
self.queue.join()
Create a ImmediateWorker that executes the posted jobs on the same thread synchronously
|
import queue
from bot.multithreading.work import Work
class Worker:
def run(self):
raise NotImplementedError()
def post(self, work: Work):
raise NotImplementedError()
def shutdown(self):
raise NotImplementedError()
class AbstractWorker(Worker):
def __init__(self, name: str, error_handler: callable):
self.name = name
self.error_handler = error_handler
def _work(self, work: Work):
try:
work.do_work()
except BaseException as e:
self._error(e, work)
def _error(self, e: BaseException, work: Work):
try:
self.error_handler(e, work, self)
except:
pass
def run(self):
raise NotImplementedError()
def post(self, work: Work):
raise NotImplementedError()
def shutdown(self):
raise NotImplementedError()
class QueueWorker(AbstractWorker):
def __init__(self, name: str, work_queue: queue.Queue, error_handler: callable):
super().__init__(name, error_handler)
self.queue = work_queue
def run(self):
while True:
work = self.queue.get()
self._work(work)
self.queue.task_done()
def post(self, work: Work):
self.queue.put(work)
def shutdown(self):
self.queue.join()
class ImmediateWorker(AbstractWorker):
def __init__(self, error_handler: callable):
super().__init__("immediate", error_handler)
def run(self):
pass
def post(self, work: Work):
self._work(work)
def shutdown(self):
pass
|
<commit_before>import queue
from bot.multithreading.work import Work
class Worker:
def run(self):
raise NotImplementedError()
def post(self, work: Work):
raise NotImplementedError()
def shutdown(self):
raise NotImplementedError()
class AbstractWorker(Worker):
def __init__(self, name: str, error_handler: callable):
self.name = name
self.error_handler = error_handler
def _work(self, work: Work):
try:
work.do_work()
except BaseException as e:
self._error(e, work)
def _error(self, e: BaseException, work: Work):
try:
self.error_handler(e, work, self)
except:
pass
def run(self):
raise NotImplementedError()
def post(self, work: Work):
raise NotImplementedError()
def shutdown(self):
raise NotImplementedError()
class QueueWorker(AbstractWorker):
def __init__(self, name: str, work_queue: queue.Queue, error_handler: callable):
super().__init__(name, error_handler)
self.queue = work_queue
def run(self):
while True:
work = self.queue.get()
self._work(work)
self.queue.task_done()
def post(self, work: Work):
self.queue.put(work)
def shutdown(self):
self.queue.join()
<commit_msg>Create a ImmediateWorker that executes the posted jobs on the same thread synchronously<commit_after>
|
import queue
from bot.multithreading.work import Work
class Worker:
def run(self):
raise NotImplementedError()
def post(self, work: Work):
raise NotImplementedError()
def shutdown(self):
raise NotImplementedError()
class AbstractWorker(Worker):
def __init__(self, name: str, error_handler: callable):
self.name = name
self.error_handler = error_handler
def _work(self, work: Work):
try:
work.do_work()
except BaseException as e:
self._error(e, work)
def _error(self, e: BaseException, work: Work):
try:
self.error_handler(e, work, self)
except:
pass
def run(self):
raise NotImplementedError()
def post(self, work: Work):
raise NotImplementedError()
def shutdown(self):
raise NotImplementedError()
class QueueWorker(AbstractWorker):
def __init__(self, name: str, work_queue: queue.Queue, error_handler: callable):
super().__init__(name, error_handler)
self.queue = work_queue
def run(self):
while True:
work = self.queue.get()
self._work(work)
self.queue.task_done()
def post(self, work: Work):
self.queue.put(work)
def shutdown(self):
self.queue.join()
class ImmediateWorker(AbstractWorker):
def __init__(self, error_handler: callable):
super().__init__("immediate", error_handler)
def run(self):
pass
def post(self, work: Work):
self._work(work)
def shutdown(self):
pass
|
import queue
from bot.multithreading.work import Work
class Worker:
def run(self):
raise NotImplementedError()
def post(self, work: Work):
raise NotImplementedError()
def shutdown(self):
raise NotImplementedError()
class AbstractWorker(Worker):
def __init__(self, name: str, error_handler: callable):
self.name = name
self.error_handler = error_handler
def _work(self, work: Work):
try:
work.do_work()
except BaseException as e:
self._error(e, work)
def _error(self, e: BaseException, work: Work):
try:
self.error_handler(e, work, self)
except:
pass
def run(self):
raise NotImplementedError()
def post(self, work: Work):
raise NotImplementedError()
def shutdown(self):
raise NotImplementedError()
class QueueWorker(AbstractWorker):
def __init__(self, name: str, work_queue: queue.Queue, error_handler: callable):
super().__init__(name, error_handler)
self.queue = work_queue
def run(self):
while True:
work = self.queue.get()
self._work(work)
self.queue.task_done()
def post(self, work: Work):
self.queue.put(work)
def shutdown(self):
self.queue.join()
Create a ImmediateWorker that executes the posted jobs on the same thread synchronously
import queue
from bot.multithreading.work import Work
class Worker:
def run(self):
raise NotImplementedError()
def post(self, work: Work):
raise NotImplementedError()
def shutdown(self):
raise NotImplementedError()
class AbstractWorker(Worker):
def __init__(self, name: str, error_handler: callable):
self.name = name
self.error_handler = error_handler
def _work(self, work: Work):
try:
work.do_work()
except BaseException as e:
self._error(e, work)
def _error(self, e: BaseException, work: Work):
try:
self.error_handler(e, work, self)
except:
pass
def run(self):
raise NotImplementedError()
def post(self, work: Work):
raise NotImplementedError()
def shutdown(self):
raise NotImplementedError()
class QueueWorker(AbstractWorker):
def __init__(self, name: str, work_queue: queue.Queue, error_handler: callable):
super().__init__(name, error_handler)
self.queue = work_queue
def run(self):
while True:
work = self.queue.get()
self._work(work)
self.queue.task_done()
def post(self, work: Work):
self.queue.put(work)
def shutdown(self):
self.queue.join()
class ImmediateWorker(AbstractWorker):
def __init__(self, error_handler: callable):
super().__init__("immediate", error_handler)
def run(self):
pass
def post(self, work: Work):
self._work(work)
def shutdown(self):
pass
|
<commit_before>import queue
from bot.multithreading.work import Work
class Worker:
def run(self):
raise NotImplementedError()
def post(self, work: Work):
raise NotImplementedError()
def shutdown(self):
raise NotImplementedError()
class AbstractWorker(Worker):
def __init__(self, name: str, error_handler: callable):
self.name = name
self.error_handler = error_handler
def _work(self, work: Work):
try:
work.do_work()
except BaseException as e:
self._error(e, work)
def _error(self, e: BaseException, work: Work):
try:
self.error_handler(e, work, self)
except:
pass
def run(self):
raise NotImplementedError()
def post(self, work: Work):
raise NotImplementedError()
def shutdown(self):
raise NotImplementedError()
class QueueWorker(AbstractWorker):
def __init__(self, name: str, work_queue: queue.Queue, error_handler: callable):
super().__init__(name, error_handler)
self.queue = work_queue
def run(self):
while True:
work = self.queue.get()
self._work(work)
self.queue.task_done()
def post(self, work: Work):
self.queue.put(work)
def shutdown(self):
self.queue.join()
<commit_msg>Create a ImmediateWorker that executes the posted jobs on the same thread synchronously<commit_after>import queue
from bot.multithreading.work import Work
class Worker:
def run(self):
raise NotImplementedError()
def post(self, work: Work):
raise NotImplementedError()
def shutdown(self):
raise NotImplementedError()
class AbstractWorker(Worker):
def __init__(self, name: str, error_handler: callable):
self.name = name
self.error_handler = error_handler
def _work(self, work: Work):
try:
work.do_work()
except BaseException as e:
self._error(e, work)
def _error(self, e: BaseException, work: Work):
try:
self.error_handler(e, work, self)
except:
pass
def run(self):
raise NotImplementedError()
def post(self, work: Work):
raise NotImplementedError()
def shutdown(self):
raise NotImplementedError()
class QueueWorker(AbstractWorker):
def __init__(self, name: str, work_queue: queue.Queue, error_handler: callable):
super().__init__(name, error_handler)
self.queue = work_queue
def run(self):
while True:
work = self.queue.get()
self._work(work)
self.queue.task_done()
def post(self, work: Work):
self.queue.put(work)
def shutdown(self):
self.queue.join()
class ImmediateWorker(AbstractWorker):
def __init__(self, error_handler: callable):
super().__init__("immediate", error_handler)
def run(self):
pass
def post(self, work: Work):
self._work(work)
def shutdown(self):
pass
|
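Editor's note: the new worker is handy for tests and for call sites that must not cross threads. Because AbstractWorker._work only requires a do_work() method, a duck-typed stand-in avoids guessing Work's constructor signature, which the record does not show. A hedged usage sketch:
class EchoWork:
    # Minimal stand-in for bot.multithreading.work.Work; only the
    # do_work() interface used by AbstractWorker._work is implemented.
    def __init__(self, fn):
        self.fn = fn
    def do_work(self):
        self.fn()

results = []
worker = ImmediateWorker(lambda e, work, w: results.append(e))
worker.post(EchoWork(lambda: results.append('ran')))
assert results == ['ran']  # executed synchronously, on the calling thread
|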
423288b4cc8cf1506285913558b3fcff9e7788fa
|
gitlab/urls.py
|
gitlab/urls.py
|
from django.conf.urls import patterns, url
from django.views.generic.base import RedirectView
from gitlab import views
urlpatterns = patterns('',
url(r'^favicon\.ico$', RedirectView.as_view(url='/static/app/favicon.ico')),
url(r'^push_event/hv$', views.push_event_hv),
url(r'^push_event/web$', views.push_event_web)
)
|
from django.conf.urls import patterns, url
from django.views.generic.base import RedirectView
from gitlab import views
urlpatterns = patterns('',
url(r'^favicon\.ico$', RedirectView.as_view(url='/static/app/favicon.ico')),
url(r'^push_event/hv$', views.push_event_hv),
url(r'^push_event/web$', views.push_event_web),
url(r'^push_event/monitoring$', views.push_event_monitoring),
)
|
Add URL for monitoring hook
|
Add URL for monitoring hook
|
Python
|
apache-2.0
|
ReanGD/web-work-fitnesse,ReanGD/web-work-fitnesse,ReanGD/web-work-fitnesse
|
from django.conf.urls import patterns, url
from django.views.generic.base import RedirectView
from gitlab import views
urlpatterns = patterns('',
url(r'^favicon\.ico$', RedirectView.as_view(url='/static/app/favicon.ico')),
url(r'^push_event/hv$', views.push_event_hv),
url(r'^push_event/web$', views.push_event_web)
)
Add URL for monitoring hook
|
from django.conf.urls import patterns, url
from django.views.generic.base import RedirectView
from gitlab import views
urlpatterns = patterns('',
url(r'^favicon\.ico$', RedirectView.as_view(url='/static/app/favicon.ico')),
url(r'^push_event/hv$', views.push_event_hv),
url(r'^push_event/web$', views.push_event_web),
url(r'^push_event/monitoring$', views.push_event_monitoring),
)
|
<commit_before>from django.conf.urls import patterns, url
from django.views.generic.base import RedirectView
from gitlab import views
urlpatterns = patterns('',
url(r'^favicon\.ico$', RedirectView.as_view(url='/static/app/favicon.ico')),
url(r'^push_event/hv$', views.push_event_hv),
url(r'^push_event/web$', views.push_event_web)
)
<commit_msg>Add URL for monitoring hook<commit_after>
|
from django.conf.urls import patterns, url
from django.views.generic.base import RedirectView
from gitlab import views
urlpatterns = patterns('',
url(r'^favicon\.ico$', RedirectView.as_view(url='/static/app/favicon.ico')),
url(r'^push_event/hv$', views.push_event_hv),
url(r'^push_event/web$', views.push_event_web),
url(r'^push_event/monitoring$', views.push_event_monitoring),
)
|
from django.conf.urls import patterns, url
from django.views.generic.base import RedirectView
from gitlab import views
urlpatterns = patterns('',
url(r'^favicon\.ico$', RedirectView.as_view(url='/static/app/favicon.ico')),
url(r'^push_event/hv$', views.push_event_hv),
url(r'^push_event/web$', views.push_event_web)
)
Add URL for monitoring hook
from django.conf.urls import patterns, url
from django.views.generic.base import RedirectView
from gitlab import views
urlpatterns = patterns('',
url(r'^favicon\.ico$', RedirectView.as_view(url='/static/app/favicon.ico')),
url(r'^push_event/hv$', views.push_event_hv),
url(r'^push_event/web$', views.push_event_web),
url(r'^push_event/monitoring$', views.push_event_monitoring),
)
|
<commit_before>from django.conf.urls import patterns, url
from django.views.generic.base import RedirectView
from gitlab import views
urlpatterns = patterns('',
url(r'^favicon\.ico$', RedirectView.as_view(url='/static/app/favicon.ico')),
url(r'^push_event/hv$', views.push_event_hv),
url(r'^push_event/web$', views.push_event_web)
)
<commit_msg>Add URL for monitoring hook<commit_after>from django.conf.urls import patterns, url
from django.views.generic.base import RedirectView
from gitlab import views
urlpatterns = patterns('',
url(r'^favicon\.ico$', RedirectView.as_view(url='/static/app/favicon.ico')),
url(r'^push_event/hv$', views.push_event_hv),
url(r'^push_event/web$', views.push_event_web),
url(r'^push_event/monitoring$', views.push_event_monitoring),
)
|
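Editor's note: the route change only works if gitlab/views.py grows a matching view; that file is not part of the record, so the following stub is purely illustrative. GitLab delivers webhooks without a CSRF token, hence the exemption:
from django.http import HttpResponse
from django.views.decorators.csrf import csrf_exempt

@csrf_exempt
def push_event_monitoring(request):
    # Acknowledge the GitLab push hook; real handling would parse request.body.
    return HttpResponse(status=200)
|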
2f9be950c372beb2f555cbe84a22366e0b95e721
|
hellopy/bot.py
|
hellopy/bot.py
|
from wit import Wit
from . import text_to_speech as tts
from . import config
WIT_AI_KEY = config.WIT_AI_KEY
session_id = config.USER
def say(session_id, context, msg):
tts.talk(msg)
def error(session_id, context, e):
# tts.talk("Algo salió mal.")
print(str(e))
def merge(session_id, context, entities, msg):
return context
def converse(msg):
client = Wit(WIT_AI_KEY, actions)
client.run_actions(session_id, msg)
actions = {
'say': say,
'error': error,
'merge': merge,
}
|
from wit import Wit
import shutil
import subprocess
import time
from . import text_to_speech as tts
from . import config
WIT_AI_KEY = config.WIT_AI_KEY
session_id = config.USER
def say(session_id, context, msg):
print("HelloPy: " + msg)
tts.talk(msg)
def error(session_id, context, e):
# tts.talk("Algo salió mal.")
print(str(e))
def first_entity_value(entities, entity):
if entity not in entities:
return None
val = entities[entity][0]['value']
if not val:
return None
return val['value'] if isinstance(val, dict) else val
def merge(session_id, context, entities, msg):
app = first_entity_value(entities, 'aplication')
if app: context['app'] = app
silence = first_entity_value(entities, 'mute')
if silence: context['mute'] = silence
return context
def converse(msg):
client = Wit(WIT_AI_KEY, actions)
client.run_actions(session_id, msg)
def open_app(session_id, context):
app = context['app']
path_app = shutil.which(app)
if path_app:
tts.talk(app + " encontrado")
subprocess.call([path_app])
else:
tts.talk(app + " no encontrado")
return context
def mute(session_id, context):
tts.talk('silencio')
time.sleep(2)
subprocess.call(["amixer", "-D", "pulse", "sset", "Master", "0%"])
context['state'] = 'shh!'
return context
actions = {
'say': say,
'error': error,
'merge': merge,
'open_app': open_app,
'mute': mute,
}
|
Add functions: mute & open
|
Add functions: mute & open
|
Python
|
mit
|
stsewd/hellopy
|
from wit import Wit
from . import text_to_speech as tts
from . import config
WIT_AI_KEY = config.WIT_AI_KEY
session_id = config.USER
def say(session_id, context, msg):
tts.talk(msg)
def error(session_id, context, e):
# tts.talk("Algo salió mal.")
print(str(e))
def merge(session_id, context, entities, msg):
return context
def converse(msg):
client = Wit(WIT_AI_KEY, actions)
client.run_actions(session_id, msg)
actions = {
'say': say,
'error': error,
'merge': merge,
}
Add functions: mute & open
|
from wit import Wit
import shutil
import subprocess
import time
from . import text_to_speech as tts
from . import config
WIT_AI_KEY = config.WIT_AI_KEY
session_id = config.USER
def say(session_id, context, msg):
print("HelloPy: " + msg)
tts.talk(msg)
def error(session_id, context, e):
# tts.talk("Algo salió mal.")
print(str(e))
def first_entity_value(entities, entity):
if entity not in entities:
return None
val = entities[entity][0]['value']
if not val:
return None
return val['value'] if isinstance(val, dict) else val
def merge(session_id, context, entities, msg):
app = first_entity_value(entities, 'aplication')
if app: context['app'] = app
silence = first_entity_value(entities, 'mute')
if silence: context['mute'] = silence
return context
def converse(msg):
client = Wit(WIT_AI_KEY, actions)
client.run_actions(session_id, msg)
def open_app(session_id, context):
app = context['app']
path_app = shutil.which(app)
if path_app:
tts.talk(app + " encontrado")
subprocess.call([path_app])
else:
tts.talk(app + " no encontrado")
return context
def mute(session_id, context):
tts.talk('silencio')
time.sleep(2)
subprocess.call(["amixer", "-D", "pulse", "sset", "Master", "0%"])
context['state'] = 'shh!'
return context
actions = {
'say': say,
'error': error,
'merge': merge,
'open_app': open_app,
'mute': mute,
}
|
<commit_before>from wit import Wit
from . import text_to_speech as tts
from . import config
WIT_AI_KEY = config.WIT_AI_KEY
session_id = config.USER
def say(session_id, context, msg):
tts.talk(msg)
def error(session_id, context, e):
# tts.talk("Algo salió mal.")
print(str(e))
def merge(session_id, context, entities, msg):
return context
def converse(msg):
client = Wit(WIT_AI_KEY, actions)
client.run_actions(session_id, msg)
actions = {
'say': say,
'error': error,
'merge': merge,
}
<commit_msg>Add functions: mute & open<commit_after>
|
from wit import Wit
import shutil
import subprocess
import time
from . import text_to_speech as tts
from . import config
WIT_AI_KEY = config.WIT_AI_KEY
session_id = config.USER
def say(session_id, context, msg):
print("HelloPy: " + msg)
tts.talk(msg)
def error(session_id, context, e):
# tts.talk("Algo salió mal.")
print(str(e))
def first_entity_value(entities, entity):
if entity not in entities:
return None
val = entities[entity][0]['value']
if not val:
return None
return val['value'] if isinstance(val, dict) else val
def merge(session_id, context, entities, msg):
app = first_entity_value(entities, 'aplication')
if app: context['app'] = app
silence = first_entity_value(entities, 'mute')
if silence: context['mute'] = silence
return context
def converse(msg):
client = Wit(WIT_AI_KEY, actions)
client.run_actions(session_id, msg)
def open_app(session_id, context):
app = context['app']
path_app = shutil.which(app)
if path_app:
tts.talk(app + " encontrado")
subprocess.call([path_app])
else:
tts.talk(app + " no encontrado")
return context
def mute(session_id, context):
tts.talk('silencio')
time.sleep(2)
subprocess.call(["amixer", "-D", "pulse", "sset", "Master", "0%"])
context['state'] = 'shh!'
return context
actions = {
'say': say,
'error': error,
'merge': merge,
'open_app': open_app,
'mute': mute,
}
|
from wit import Wit
from . import text_to_speech as tts
from . import config
WIT_AI_KEY = config.WIT_AI_KEY
session_id = config.USER
def say(session_id, context, msg):
tts.talk(msg)
def error(session_id, context, e):
# tts.talk("Algo salió mal.")
print(str(e))
def merge(session_id, context, entities, msg):
return context
def converse(msg):
client = Wit(WIT_AI_KEY, actions)
client.run_actions(session_id, msg)
actions = {
'say': say,
'error': error,
'merge': merge,
}
Add functions: mute & openfrom wit import Wit
import shutil
import subprocess
import time
from . import text_to_speech as tts
from . import config
WIT_AI_KEY = config.WIT_AI_KEY
session_id = config.USER
def say(session_id, context, msg):
print("HelloPy: " + msg)
tts.talk(msg)
def error(session_id, context, e):
# tts.talk("Algo salió mal.")
print(str(e))
def first_entity_value(entities, entity):
if entity not in entities:
return None
val = entities[entity][0]['value']
if not val:
return None
return val['value'] if isinstance(val, dict) else val
def merge(session_id, context, entities, msg):
app = first_entity_value(entities, 'aplication')
if app: context['app'] = app
silence = first_entity_value(entities, 'mute')
if silence: context['mute'] = silence
return context
def converse(msg):
client = Wit(WIT_AI_KEY, actions)
client.run_actions(session_id, msg)
def open_app(session_id, context):
app = context['app']
path_app = shutil.which(app)
if path_app:
tts.talk(app + " encontrado")
subprocess.call([path_app])
else:
tts.talk(app + " no encontrado")
return context
def mute(session_id, context):
tts.talk('silencio')
time.sleep(2)
subprocess.call(["amixer", "-D", "pulse", "sset", "Master", "0%"])
context['state'] = 'shh!'
return context
actions = {
'say': say,
'error': error,
'merge': merge,
'open_app': open_app,
'mute': mute,
}
|
<commit_before>from wit import Wit
from . import text_to_speech as tts
from . import config
WIT_AI_KEY = config.WIT_AI_KEY
session_id = config.USER
def say(session_id, context, msg):
tts.talk(msg)
def error(session_id, context, e):
# tts.talk("Algo salió mal.")
print(str(e))
def merge(session_id, context, entities, msg):
return context
def converse(msg):
client = Wit(WIT_AI_KEY, actions)
client.run_actions(session_id, msg)
actions = {
'say': say,
'error': error,
'merge': merge,
}
<commit_msg>Add functions: mute & open<commit_after>from wit import Wit
import shutil
import subprocess
import time
from . import text_to_speech as tts
from . import config
WIT_AI_KEY = config.WIT_AI_KEY
session_id = config.USER
def say(session_id, context, msg):
print("HelloPy: " + msg)
tts.talk(msg)
def error(session_id, context, e):
# tts.talk("Algo salió mal.")
print(str(e))
def first_entity_value(entities, entity):
if entity not in entities:
return None
val = entities[entity][0]['value']
if not val:
return None
return val['value'] if isinstance(val, dict) else val
def merge(session_id, context, entities, msg):
app = first_entity_value(entities, 'aplication')
if app: context['app'] = app
silence = first_entity_value(entities, 'mute')
if silence: context['mute'] = silence
return context
def converse(msg):
client = Wit(WIT_AI_KEY, actions)
client.run_actions(session_id, msg)
def open_app(session_id, context):
app = context['app']
path_app = shutil.which(app)
if path_app:
tts.talk(app + " encontrado")
subprocess.call([path_app])
else:
tts.talk(app + " no encontrado")
return context
def mute(session_id, context):
tts.talk('silencio')
time.sleep(2)
subprocess.call(["amixer", "-D", "pulse", "sset", "Master", "0%"])
context['state'] = 'shh!'
return context
actions = {
'say': say,
'error': error,
'merge': merge,
'open_app': open_app,
'mute': mute,
}
|
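open_app and mute both shell out. shutil.which resolves a command name against PATH and returns None when nothing matches, which is what makes the "no encontrado" (not found) branch safe, and the amixer invocation assumes a Linux system with ALSA's PulseAudio bridge. A standalone sketch of the launch pattern, with a non-blocking Popen in place of subprocess.call; the app name is only an example:

import shutil
import subprocess

def launch(app):
    path_app = shutil.which(app)  # absolute path, or None if not on PATH
    if path_app is None:
        return False
    # Popen returns immediately; the original call() blocks until the
    # launched program exits, which stalls the assistant loop.
    subprocess.Popen([path_app])
    return True

if __name__ == '__main__':
    print(launch('xterm'))  # example command; prints False when not installed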
f6bf104cbdcdb909a15c80dafe9ae2e7aebbc2f0
|
examples/snowball_stemmer_example.py
|
examples/snowball_stemmer_example.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from nltk.stem.snowball import HungarianStemmer
from nltk import word_tokenize
stemmer = HungarianStemmer()
test_sentence = "Szeretnék kérni tőled egy óriási szívességet az édesanyám számára."
tokenized_sentence = word_tokenize(test_sentence)
for word in tokenized_sentence:
print "Original word form is '{0}' and the root is '{1}'".format(word, stemmer.stem(word))
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from nltk.stem.snowball import HungarianStemmer
from nltk import word_tokenize
stemmer = HungarianStemmer()
test_sentence = "Péter szereti Enikőt és Marit"
tokenized_sentence = word_tokenize(test_sentence)
print('With SnowballStemmer')
for word in tokenized_sentence:
print "Original word form is '{}' and the root is '{}'".format(word, stemmer.stem(word))
|
Add more information to SnowballStemmer
|
Add more information to SnowballStemmer
|
Python
|
apache-2.0
|
davidpgero/hungarian-nltk
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from nltk.stem.snowball import HungarianStemmer
from nltk import word_tokenize
stemmer = HungarianStemmer()
test_sentence = "Szeretnék kérni tőled egy óriási szívességet az édesanyám számára."
tokenized_sentence = word_tokenize(test_sentence)
for word in tokenized_sentence:
print "Original word form is '{0}' and the root is '{1}'".format(word, stemmer.stem(word))
Add more information to SnowballStemmer
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from nltk.stem.snowball import HungarianStemmer
from nltk import word_tokenize
stemmer = HungarianStemmer()
test_sentence = "Péter szereti Enikőt és Marit"
tokenized_sentence = word_tokenize(test_sentence)
print('With SnowballStemmer')
for word in tokenized_sentence:
print "Original word form is '{}' and the root is '{}'".format(word, stemmer.stem(word))
|
<commit_before>#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from nltk.stem.snowball import HungarianStemmer
from nltk import word_tokenize
stemmer = HungarianStemmer()
test_sentence = "Szeretnék kérni tőled egy óriási szívességet az édesanyám számára."
tokenized_sentence = word_tokenize(test_sentence)
for word in tokenized_sentence:
print "Original word form is '{0}' and the root is '{1}'".format(word, stemmer.stem(word))
<commit_msg>Add more information to SnowballStemmer<commit_after>
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from nltk.stem.snowball import HungarianStemmer
from nltk import word_tokenize
stemmer = HungarianStemmer()
test_sentence = "Péter szereti Enikőt és Marit"
tokenized_sentence = word_tokenize(test_sentence)
print('With SnowballStemmer')
for word in tokenized_sentence:
print "Original word form is '{}' and the root is '{}'".format(word, stemmer.stem(word))
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from nltk.stem.snowball import HungarianStemmer
from nltk import word_tokenize
stemmer = HungarianStemmer()
test_sentence = "Szeretnék kérni tőled egy óriási szívességet az édesanyám számára."
tokenized_sentence = word_tokenize(test_sentence)
for word in tokenized_sentence:
print "Original word form is '{0}' and the root is '{1}'".format(word, stemmer.stem(word))
Add more information to SnowballStemmer#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from nltk.stem.snowball import HungarianStemmer
from nltk import word_tokenize
stemmer = HungarianStemmer()
test_sentence = "Péter szereti Enikőt és Marit"
tokenized_sentence = word_tokenize(test_sentence)
print('With SnowballStemmer')
for word in tokenized_sentence:
print "Original word form is '{}' and the root is '{}'".format(word, stemmer.stem(word))
|
<commit_before>#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from nltk.stem.snowball import HungarianStemmer
from nltk import word_tokenize
stemmer = HungarianStemmer()
test_sentence = "Szeretnék kérni tőled egy óriási szívességet az édesanyám számára."
tokenized_sentence = word_tokenize(test_sentence)
for word in tokenized_sentence:
print "Original word form is '{0}' and the root is '{1}'".format(word, stemmer.stem(word))
<commit_msg>Add more information to SnowballStemmer<commit_after>#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from nltk.stem.snowball import HungarianStemmer
from nltk import word_tokenize
stemmer = HungarianStemmer()
test_sentence = "Péter szereti Enikőt és Marit"
tokenized_sentence = word_tokenize(test_sentence)
print('With SnowballStemmer')
for word in tokenized_sentence:
print "Original word form is '{}' and the root is '{}'".format(word, stemmer.stem(word))
|
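Both the before and after versions of this example keep Python 2 print statements, so the stored script will not run under Python 3 even though it imports unicode_literals. A Python 3 rendering of the same loop, assuming nltk is installed and the punkt tokenizer data has been fetched once with nltk.download('punkt'):

from nltk.stem.snowball import HungarianStemmer
from nltk import word_tokenize

stemmer = HungarianStemmer()
test_sentence = "Péter szereti Enikőt és Marit"

print('With SnowballStemmer')
for word in word_tokenize(test_sentence):
    # Only the print syntax changes; str.format stays the same.
    print("Original word form is '{}' and the root is '{}'".format(
        word, stemmer.stem(word)))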
3e1b2b5e87f2d0ed8ecb06b07805723101c23636
|
km3pipe/tests/test_config.py
|
km3pipe/tests/test_config.py
|
# coding=utf-8
# Filename: test_config.py
"""
Test suite for configuration related functions and classes.
"""
from __future__ import division, absolute_import, print_function
from km3pipe.testing import TestCase, BytesIO
from km3pipe.config import Config
CONFIGURATION = BytesIO("\n".join((
"[DB]",
"username=foo",
"password=narf",
)))
class TestConfig(TestCase):
def setUp(self):
self.config = Config(None)
self.config._read_from_file(CONFIGURATION)
CONFIGURATION.seek(0, 0)
def test_db_credentials(self):
self.assertEqual('foo', self.config.db_credentials[0])
self.assertEqual('narf', self.config.db_credentials[1])
def test_check_for_updates_defaults_to_true(self):
self.assertTrue(self.config.check_for_updates)
def test_time_zone_defaults_to_utc(self):
self.assertEqual('UTC', self.config.time_zone._tzname)
def test_slack_token_raises_error_by_default(self):
with self.assertRaises(ValueError):
self.config.slack_token
|
# coding=utf-8
# Filename: test_config.py
"""
Test suite for configuration related functions and classes.
"""
from __future__ import division, absolute_import, print_function
from km3pipe.testing import TestCase, StringIO
from km3pipe.config import Config
CONFIGURATION = StringIO("\n".join((
"[DB]",
"username=foo",
"password=narf",
)))
class TestConfig(TestCase):
def setUp(self):
self.config = Config(None)
self.config._read_from_file(CONFIGURATION)
CONFIGURATION.seek(0, 0)
def test_db_credentials(self):
self.assertEqual('foo', self.config.db_credentials[0])
self.assertEqual('narf', self.config.db_credentials[1])
def test_check_for_updates_defaults_to_true(self):
self.assertTrue(self.config.check_for_updates)
def test_time_zone_defaults_to_utc(self):
self.assertEqual('UTC', self.config.time_zone._tzname)
def test_slack_token_raises_error_by_default(self):
with self.assertRaises(ValueError):
self.config.slack_token
|
Switch to StringIO for fake config data
|
Switch to StringIO for fake config data
|
Python
|
mit
|
tamasgal/km3pipe,tamasgal/km3pipe
|
# coding=utf-8
# Filename: test_config.py
"""
Test suite for configuration related functions and classes.
"""
from __future__ import division, absolute_import, print_function
from km3pipe.testing import TestCase, BytesIO
from km3pipe.config import Config
CONFIGURATION = BytesIO("\n".join((
"[DB]",
"username=foo",
"password=narf",
)))
class TestConfig(TestCase):
def setUp(self):
self.config = Config(None)
self.config._read_from_file(CONFIGURATION)
CONFIGURATION.seek(0, 0)
def test_db_credentials(self):
self.assertEqual('foo', self.config.db_credentials[0])
self.assertEqual('narf', self.config.db_credentials[1])
def test_check_for_updates_defaults_to_true(self):
self.assertTrue(self.config.check_for_updates)
def test_time_zone_defaults_to_utc(self):
self.assertEqual('UTC', self.config.time_zone._tzname)
def test_slack_token_raises_error_by_default(self):
with self.assertRaises(ValueError):
self.config.slack_token
Switch to StringIO for fake config data
|
# coding=utf-8
# Filename: test_config.py
"""
Test suite for configuration related functions and classes.
"""
from __future__ import division, absolute_import, print_function
from km3pipe.testing import TestCase, StringIO
from km3pipe.config import Config
CONFIGURATION = StringIO("\n".join((
"[DB]",
"username=foo",
"password=narf",
)))
class TestConfig(TestCase):
def setUp(self):
self.config = Config(None)
self.config._read_from_file(CONFIGURATION)
CONFIGURATION.seek(0, 0)
def test_db_credentials(self):
self.assertEqual('foo', self.config.db_credentials[0])
self.assertEqual('narf', self.config.db_credentials[1])
def test_check_for_updates_defaults_to_true(self):
self.assertTrue(self.config.check_for_updates)
def test_time_zone_defaults_to_utc(self):
self.assertEqual('UTC', self.config.time_zone._tzname)
def test_slack_token_raises_error_by_default(self):
with self.assertRaises(ValueError):
self.config.slack_token
|
<commit_before># coding=utf-8
# Filename: test_config.py
"""
Test suite for configuration related functions and classes.
"""
from __future__ import division, absolute_import, print_function
from km3pipe.testing import TestCase, BytesIO
from km3pipe.config import Config
CONFIGURATION = BytesIO("\n".join((
"[DB]",
"username=foo",
"password=narf",
)))
class TestConfig(TestCase):
def setUp(self):
self.config = Config(None)
self.config._read_from_file(CONFIGURATION)
CONFIGURATION.seek(0, 0)
def test_db_credentials(self):
self.assertEqual('foo', self.config.db_credentials[0])
self.assertEqual('narf', self.config.db_credentials[1])
def test_check_for_updates_defaults_to_true(self):
self.assertTrue(self.config.check_for_updates)
def test_time_zone_defaults_to_utc(self):
self.assertEqual('UTC', self.config.time_zone._tzname)
def test_slack_token_raises_error_by_default(self):
with self.assertRaises(ValueError):
self.config.slack_token
<commit_msg>Switch to StringIO for fake config data<commit_after>
|
# coding=utf-8
# Filename: test_config.py
"""
Test suite for configuration related functions and classes.
"""
from __future__ import division, absolute_import, print_function
from km3pipe.testing import TestCase, StringIO
from km3pipe.config import Config
CONFIGURATION = StringIO("\n".join((
"[DB]",
"username=foo",
"password=narf",
)))
class TestConfig(TestCase):
def setUp(self):
self.config = Config(None)
self.config._read_from_file(CONFIGURATION)
CONFIGURATION.seek(0, 0)
def test_db_credentials(self):
self.assertEqual('foo', self.config.db_credentials[0])
self.assertEqual('narf', self.config.db_credentials[1])
def test_check_for_updates_defaults_to_true(self):
self.assertTrue(self.config.check_for_updates)
def test_time_zone_defaults_to_utc(self):
self.assertEqual('UTC', self.config.time_zone._tzname)
def test_slack_token_raises_error_by_default(self):
with self.assertRaises(ValueError):
self.config.slack_token
|
# coding=utf-8
# Filename: test_config.py
"""
Test suite for configuration related functions and classes.
"""
from __future__ import division, absolute_import, print_function
from km3pipe.testing import TestCase, BytesIO
from km3pipe.config import Config
CONFIGURATION = BytesIO("\n".join((
"[DB]",
"username=foo",
"password=narf",
)))
class TestConfig(TestCase):
def setUp(self):
self.config = Config(None)
self.config._read_from_file(CONFIGURATION)
CONFIGURATION.seek(0, 0)
def test_db_credentials(self):
self.assertEqual('foo', self.config.db_credentials[0])
self.assertEqual('narf', self.config.db_credentials[1])
def test_check_for_updates_defaults_to_true(self):
self.assertTrue(self.config.check_for_updates)
def test_time_zone_defaults_to_utc(self):
self.assertEqual('UTC', self.config.time_zone._tzname)
def test_slack_token_raises_error_by_default(self):
with self.assertRaises(ValueError):
self.config.slack_token
Switch to StringIO for fake config data# coding=utf-8
# Filename: test_config.py
"""
Test suite for configuration related functions and classes.
"""
from __future__ import division, absolute_import, print_function
from km3pipe.testing import TestCase, StringIO
from km3pipe.config import Config
CONFIGURATION = StringIO("\n".join((
"[DB]",
"username=foo",
"password=narf",
)))
class TestConfig(TestCase):
def setUp(self):
self.config = Config(None)
self.config._read_from_file(CONFIGURATION)
CONFIGURATION.seek(0, 0)
def test_db_credentials(self):
self.assertEqual('foo', self.config.db_credentials[0])
self.assertEqual('narf', self.config.db_credentials[1])
def test_check_for_updates_defaults_to_true(self):
self.assertTrue(self.config.check_for_updates)
def test_time_zone_defaults_to_utc(self):
self.assertEqual('UTC', self.config.time_zone._tzname)
def test_slack_token_raises_error_by_default(self):
with self.assertRaises(ValueError):
self.config.slack_token
|
<commit_before># coding=utf-8
# Filename: test_config.py
"""
Test suite for configuration related functions and classes.
"""
from __future__ import division, absolute_import, print_function
from km3pipe.testing import TestCase, BytesIO
from km3pipe.config import Config
CONFIGURATION = BytesIO("\n".join((
"[DB]",
"username=foo",
"password=narf",
)))
class TestConfig(TestCase):
def setUp(self):
self.config = Config(None)
self.config._read_from_file(CONFIGURATION)
CONFIGURATION.seek(0, 0)
def test_db_credentials(self):
self.assertEqual('foo', self.config.db_credentials[0])
self.assertEqual('narf', self.config.db_credentials[1])
def test_check_for_updates_defaults_to_true(self):
self.assertTrue(self.config.check_for_updates)
def test_time_zone_defaults_to_utc(self):
self.assertEqual('UTC', self.config.time_zone._tzname)
def test_slack_token_raises_error_by_default(self):
with self.assertRaises(ValueError):
self.config.slack_token
<commit_msg>Switch to StringIO for fake config data<commit_after># coding=utf-8
# Filename: test_config.py
"""
Test suite for configuration related functions and classes.
"""
from __future__ import division, absolute_import, print_function
from km3pipe.testing import TestCase, StringIO
from km3pipe.config import Config
CONFIGURATION = StringIO("\n".join((
"[DB]",
"username=foo",
"password=narf",
)))
class TestConfig(TestCase):
def setUp(self):
self.config = Config(None)
self.config._read_from_file(CONFIGURATION)
CONFIGURATION.seek(0, 0)
def test_db_credentials(self):
self.assertEqual('foo', self.config.db_credentials[0])
self.assertEqual('narf', self.config.db_credentials[1])
def test_check_for_updates_defaults_to_true(self):
self.assertTrue(self.config.check_for_updates)
def test_time_zone_defaults_to_utc(self):
self.assertEqual('UTC', self.config.time_zone._tzname)
def test_slack_token_raises_error_by_default(self):
with self.assertRaises(ValueError):
self.config.slack_token
|
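The motivation behind this switch: on Python 3, configparser consumes text, so feeding it a BytesIO raises a TypeError while a StringIO works, and km3pipe.testing presumably re-exports whichever class is appropriate. A minimal sketch of the same fixture against the standard library directly:

from io import StringIO
from configparser import ConfigParser

CONFIGURATION = StringIO("\n".join(("[DB]", "username=foo", "password=narf")))

parser = ConfigParser()
parser.read_file(CONFIGURATION)  # expects text lines, hence StringIO
CONFIGURATION.seek(0, 0)         # rewind so the buffer can be reused

print(parser.get('DB', 'username'))  # -> foo
print(parser.get('DB', 'password'))  # -> narf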
28c898c39515601b3ed0379cd3721f47e019ef65
|
common/lib/xmodule/xmodule/tests/test_conditional_logic.py
|
common/lib/xmodule/xmodule/tests/test_conditional_logic.py
|
# -*- coding: utf-8 -*-
"""Test for Conditional Xmodule functional logic."""
from xmodule.conditional_module import ConditionalDescriptor
from . import LogicTest
class ConditionalModuleTest(LogicTest):
"""Logic tests for Conditional Xmodule."""
descriptor_class = ConditionalDescriptor
def test_ajax_request(self):
"Make shure that ajax request works correctly"
# Mock is_condition_satisfied
self.xmodule.is_condition_satisfied = lambda: True
self.xmodule.descriptor.get_children = lambda: []
response = self.ajax_request('No', {})
html = response['html']
self.assertEqual(html, [])
|
# -*- coding: utf-8 -*-
"""Test for Conditional Xmodule functional logic."""
from xmodule.conditional_module import ConditionalDescriptor
from . import LogicTest
class ConditionalModuleTest(LogicTest):
"""Logic tests for Conditional Xmodule."""
descriptor_class = ConditionalDescriptor
def test_ajax_request(self):
"Make sure that ajax request works correctly"
# Mock is_condition_satisfied
self.xmodule.is_condition_satisfied = lambda: True
self.xmodule.descriptor.get_children = lambda: []
response = self.ajax_request('No', {})
html = response['html']
self.assertEqual(html, [])
|
Correct spelling in a comment
|
Correct spelling in a comment
|
Python
|
agpl-3.0
|
EDUlib/edx-platform,10clouds/edx-platform,chand3040/cloud_that,DefyVentures/edx-platform,vismartltd/edx-platform,zerobatu/edx-platform,ubc/edx-platform,eduNEXT/edx-platform,cpennington/edx-platform,jazztpt/edx-platform,Lektorium-LLC/edx-platform,Kalyzee/edx-platform,nttks/edx-platform,longmen21/edx-platform,chudaol/edx-platform,Edraak/edx-platform,adoosii/edx-platform,SivilTaram/edx-platform,caesar2164/edx-platform,AkA84/edx-platform,JCBarahona/edX,TeachAtTUM/edx-platform,ak2703/edx-platform,prarthitm/edxplatform,zhenzhai/edx-platform,tiagochiavericosta/edx-platform,shubhdev/edxOnBaadal,xuxiao19910803/edx-platform,atsolakid/edx-platform,mushtaqak/edx-platform,jolyonb/edx-platform,mbareta/edx-platform-ft,marcore/edx-platform,mitocw/edx-platform,jolyonb/edx-platform,pepeportela/edx-platform,ahmedaljazzar/edx-platform,rue89-tech/edx-platform,msegado/edx-platform,doganov/edx-platform,jzoldak/edx-platform,kmoocdev2/edx-platform,synergeticsedx/deployment-wipro,longmen21/edx-platform,devs1991/test_edx_docmode,doganov/edx-platform,appliedx/edx-platform,mbareta/edx-platform-ft,ZLLab-Mooc/edx-platform,arbrandes/edx-platform,tanmaykm/edx-platform,devs1991/test_edx_docmode,antoviaque/edx-platform,antonve/s4-project-mooc,zubair-arbi/edx-platform,defance/edx-platform,Ayub-Khan/edx-platform,benpatterson/edx-platform,bigdatauniversity/edx-platform,ampax/edx-platform,itsjeyd/edx-platform,zadgroup/edx-platform,edx/edx-platform,UOMx/edx-platform,ahmadio/edx-platform,jamesblunt/edx-platform,leansoft/edx-platform,appsembler/edx-platform,jazztpt/edx-platform,synergeticsedx/deployment-wipro,shashank971/edx-platform,xinjiguaike/edx-platform,antoviaque/edx-platform,jamiefolsom/edx-platform,nagyistoce/edx-platform,dkarakats/edx-platform,a-parhom/edx-platform,etzhou/edx-platform,Semi-global/edx-platform,arifsetiawan/edx-platform,hamzehd/edx-platform,marcore/edx-platform,mitocw/edx-platform,jbassen/edx-platform,bigdatauniversity/edx-platform,a-parhom/edx-platform,simbs/edx-platform,jolyonb/edx-platform,pepeportela/edx-platform,ahmedaljazzar/edx-platform,AkA84/edx-platform,atsolakid/edx-platform,proversity-org/edx-platform,inares/edx-platform,chrisndodge/edx-platform,romain-li/edx-platform,teltek/edx-platform,doganov/edx-platform,ahmadio/edx-platform,don-github/edx-platform,antonve/s4-project-mooc,chand3040/cloud_that,antonve/s4-project-mooc,nttks/edx-platform,ESOedX/edx-platform,leansoft/edx-platform,miptliot/edx-platform,benpatterson/edx-platform,mushtaqak/edx-platform,edx/edx-platform,chauhanhardik/populo_2,chauhanhardik/populo_2,mushtaqak/edx-platform,arifsetiawan/edx-platform,pabloborrego93/edx-platform,UOMx/edx-platform,vismartltd/edx-platform,iivic/BoiseStateX,shubhdev/openedx,vasyarv/edx-platform,rismalrv/edx-platform,shashank971/edx-platform,xingyepei/edx-platform,IndonesiaX/edx-platform,Shrhawk/edx-platform,inares/edx-platform,kmoocdev2/edx-platform,Kalyzee/edx-platform,franosincic/edx-platform,lduarte1991/edx-platform,polimediaupv/edx-platform,IONISx/edx-platform,dkarakats/edx-platform,zhenzhai/edx-platform,JCBarahona/edX,teltek/edx-platform,MakeHer/edx-platform,jzoldak/edx-platform,synergeticsedx/deployment-wipro,jolyonb/edx-platform,ovnicraft/edx-platform,xinjiguaike/edx-platform,eemirtekin/edx-platform,naresh21/synergetics-edx-platform,Semi-global/edx-platform,waheedahmed/edx-platform,cecep-edu/edx-platform,leansoft/edx-platform,Shrhawk/edx-platform,bitifirefly/edx-platform,benpatterson/edx-platform,beacloudgenius/edx-platform,RPI-OPENEDX/edx-platform,4eek/edx-platform,jamiefolsom/edx-platform,edry/edx-platform,jbassen/edx-platform,ahmadio/edx-platform,chudaol/edx-platform,jbassen/edx-platform,utecuy/edx-platform,polimediaupv/edx-platform,doismellburning/edx-platform,jzoldak/edx-platform,nikolas/edx-platform,zofuthan/edx-platform,synergeticsedx/deployment-wipro,msegado/edx-platform,Lektorium-LLC/edx-platform,itsjeyd/edx-platform,leansoft/edx-platform,J861449197/edx-platform,nikolas/edx-platform,SravanthiSinha/edx-platform,atsolakid/edx-platform,romain-li/edx-platform,franosincic/edx-platform,deepsrijit1105/edx-platform,RPI-OPENEDX/edx-platform,amir-qayyum-khan/edx-platform,proversity-org/edx-platform,Shrhawk/edx-platform,SravanthiSinha/edx-platform,amir-qayyum-khan/edx-platform,nanolearningllc/edx-platform-cypress-2,shubhdev/edx-platform,fintech-circle/edx-platform,cognitiveclass/edx-platform,vasyarv/edx-platform,a-parhom/edx-platform,zadgroup/edx-platform,edx-solutions/edx-platform,msegado/edx-platform,chrisndodge/edx-platform,kursitet/edx-platform,zofuthan/edx-platform,mcgachey/edx-platform,hastexo/edx-platform,unicri/edx-platform,xuxiao19910803/edx,inares/edx-platform,kmoocdev2/edx-platform,jzoldak/edx-platform,ahmedaljazzar/edx-platform,xuxiao19910803/edx-platform,shubhdev/edxOnBaadal,nttks/edx-platform,fly19890211/edx-platform,dkarakats/edx-platform,Endika/edx-platform,4eek/edx-platform,gsehub/edx-platform,chauhanhardik/populo,philanthropy-u/edx-platform,4eek/edx-platform,cecep-edu/edx-platform,edry/edx-platform,pomegranited/edx-platform,Edraak/edraak-platform,kxliugang/edx-platform,B-MOOC/edx-platform,Edraak/edx-platform,EDUlib/edx-platform,procangroup/edx-platform,shurihell/testasia,knehez/edx-platform,zadgroup/edx-platform,arifsetiawan/edx-platform,procangroup/edx-platform,Edraak/circleci-edx-platform,zofuthan/edx-platform,mjirayu/sit_academy,leansoft/edx-platform,mbareta/edx-platform-ft,teltek/edx-platform,kxliugang/edx-platform,nanolearningllc/edx-platform-cypress,pabloborrego93/edx-platform,UOMx/edx-platform,chauhanhardik/populo,gsehub/edx-platform,SivilTaram/edx-platform,rue89-tech/edx-platform,gymnasium/edx-platform,eduNEXT/edunext-platform,jbzdak/edx-platform,ZLLab-Mooc/edx-platform,ahmadio/edx-platform,nagyistoce/edx-platform,stvstnfrd/edx-platform,arifsetiawan/edx-platform,nanolearningllc/edx-platform-cypress-2,Edraak/edx-platform,shurihell/testasia,chauhanhardik/populo,jjmiranda/edx-platform,xuxiao19910803/edx,louyihua/edx-platform,Edraak/edraak-platform,ampax/edx-platform,pabloborrego93/edx-platform,zubair-arbi/edx-platform,CredoReference/edx-platform,rhndg/openedx,shubhdev/edxOnBaadal,appliedx/edx-platform,eduNEXT/edunext-platform,4eek/edx-platform,edry/edx-platform,playm2mboy/edx-platform,prarthitm/edxplatform,deepsrijit1105/edx-platform,ahmadiga/min_edx,openfun/edx-platform,inares/edx-platform,rhndg/openedx,devs1991/test_edx_docmode,appsembler/edx-platform,nagyistoce/edx-platform,ak2703/edx-platform,analyseuc3m/ANALYSE-v1,mushtaqak/edx-platform,shubhdev/edx-platform,Softmotions/edx-platform,ESOedX/edx-platform,ferabra/edx-platform,IONISx/edx-platform,SravanthiSinha/edx-platform,longmen21/edx-platform,tanmaykm/edx-platform,Endika/edx-platform,pabloborrego93/edx-platform,utecuy/edx-platform,ESOedX/edx-platform,tanmaykm/edx-platform,wwj718/edx-platform,solashirai/edx-platform,analyseuc3m/ANALYSE-v1,4eek/edx-platform,IndonesiaX/edx-platform,Edraak/edraak-platform,franosincic/edx-platform,arbrandes/edx-platform,TeachAtTUM/edx-platform,etzhou/edx-platform,shashank971/edx-platform,motion2015/edx-platform,chudaol/edx-platform,waheedahmed/edx-platform,marcore/edx-platform,nanolearningllc/edx-platform-cypress-2,stvstnfrd/edx-platform,cognitiveclass/edx-platform,vismartltd/edx-platform,chudaol/edx-platform,mahendra-r/edx-platform,bitifirefly/edx-platform,romain-li/edx-platform,waheedahmed/edx-platform,nikolas/edx-platform,Ayub-Khan/edx-platform,SivilTaram/edx-platform,nikolas/edx-platform,B-MOOC/edx-platform,shabab12/edx-platform,kursitet/edx-platform,fly19890211/edx-platform,jbassen/edx-platform,atsolakid/edx-platform,nagyistoce/edx-platform,10clouds/edx-platform,motion2015/edx-platform,Endika/edx-platform,ZLLab-Mooc/edx-platform,defance/edx-platform,kamalx/edx-platform,beacloudgenius/edx-platform,Softmotions/edx-platform,zhenzhai/edx-platform,MakeHer/edx-platform,J861449197/edx-platform,fintech-circle/edx-platform,jazkarta/edx-platform,rue89-tech/edx-platform,Stanford-Online/edx-platform,ZLLab-Mooc/edx-platform,analyseuc3m/ANALYSE-v1,shubhdev/edx-platform,nanolearningllc/edx-platform-cypress-2,inares/edx-platform,solashirai/edx-platform,xuxiao19910803/edx,angelapper/edx-platform,angelapper/edx-platform,mcgachey/edx-platform,Softmotions/edx-platform,Endika/edx-platform,martynovp/edx-platform,gymnasium/edx-platform,mjirayu/sit_academy,Kalyzee/edx-platform,kamalx/edx-platform,pepeportela/edx-platform,edx/edx-platform,jbzdak/edx-platform,ahmadiga/min_edx,raccoongang/edx-platform,fly19890211/edx-platform,jjmiranda/edx-platform,CredoReference/edx-platform,mitocw/edx-platform,ESOedX/edx-platform,JioEducation/edx-platform,prarthitm/edxplatform,iivic/BoiseStateX,chand3040/cloud_that,zubair-arbi/edx-platform,UOMx/edx-platform,Softmotions/edx-platform,wwj718/edx-platform,edx/edx-platform,alu042/edx-platform,cognitiveclass/edx-platform,Semi-global/edx-platform,jolyonb/edx-platform,arbrandes/edx-platform,TeachAtTUM/edx-platform,mitocw/edx-platform,fly19890211/edx-platform,jamiefolsom/edx-platform,halvertoluke/edx-platform,alexthered/kienhoc-platform,shubhdev/edx-platform,simbs/edx-platform,kursitet/edx-platform,utecuy/edx-platform,tanmaykm/edx-platform,Edraak/circleci-edx-platform,kamalx/edx-platform,appsembler/edx-platform,gsehub/edx-platform,IndonesiaX/edx-platform,BehavioralInsightsTeam/edx-platform,jjmiranda/edx-platform,JCBarahona/edX,jjmiranda/edx-platform,iivic/BoiseStateX,zhenzhai/edx-platform,edx-solutions/edx-platform,philanthropy-u/edx-platform,unicri/edx-platform,kmoocdev2/edx-platform,Lektorium-LLC/edx-platform,vasyarv/edx-platform,vikas1885/test1,mjirayu/sit_academy,appliedx/edx-platform,iivic/BoiseStateX,doganov/edx-platform,hastexo/edx-platform,jonathan-beard/edx-platform,JCBarahona/edX,chauhanhardik/populo,unicri/edx-platform,tiagochiavericosta/edx-platform,kamalx/edx-platform,jbzdak/edx-platform,chand3040/cloud_that,analyseuc3m/ANALYSE-v1,marcore/edx-platform,playm2mboy/edx-platform,jonathan-beard/edx-platform,B-MOOC/edx-platform,RPI-OPENEDX/edx-platform,xuxiao19910803/edx-platform,alu042/edx-platform,ferabra/edx-platform,atsolakid/edx-platform,deepsrijit1105/edx-platform,kursitet/edx-platform,raccoongang/edx-platform,IndonesiaX/edx-platform,mahendra-r/edx-platform,ak2703/edx-platform,openfun/edx-platform,polimediaupv/edx-platform,jonathan-beard/edx-platform,pepeportela/edx-platform,msegado/edx-platform,JioEducation/edx-platform,martynovp/edx-platform,cecep-edu/edx-platform,louyihua/edx-platform,lduarte1991/edx-platform,simbs/edx-platform,vikas1885/test1,tiagochiavericosta/edx-platform,simbs/edx-platform,hastexo/edx-platform,philanthropy-u/edx-platform,stvstnfrd/edx-platform,adoosii/edx-platform,xuxiao19910803/edx-platform,vasyarv/edx-platform,adoosii/edx-platform,JioEducation/edx-platform,vismartltd/edx-platform,angelapper/edx-platform,defance/edx-platform,J861449197/edx-platform,bigdatauniversity/edx-platform,AkA84/edx-platform,nanolearningllc/edx-platform-cypress-2,ahmadiga/min_edx,jonathan-beard/edx-platform,tiagochiavericosta/edx-platform,caesar2164/edx-platform,playm2mboy/edx-platform,Semi-global/edx-platform,rhndg/openedx,mcgachey/edx-platform,xingyepei/edx-platform,miptliot/edx-platform,alexthered/kienhoc-platform,tiagochiavericosta/edx-platform,shurihell/testasia,cognitiveclass/edx-platform,naresh21/synergetics-edx-platform,JioEducation/edx-platform,mcgachey/edx-platform,vasyarv/edx-platform,fintech-circle/edx-platform,MakeHer/edx-platform,CredoReference/edx-platform,jazkarta/edx-platform,etzhou/edx-platform,xingyepei/edx-platform,Stanford-Online/edx-platform,Stanford-Online/edx-platform,angelapper/edx-platform,xuxiao19910803/edx-platform,ovnicraft/edx-platform,CourseTalk/edx-platform,defance/edx-platform,shurihell/testasia,zerobatu/edx-platform,knehez/edx-platform,shurihell/testasia,louyihua/edx-platform,amir-qayyum-khan/edx-platform,kmoocdev2/edx-platform
|
# -*- coding: utf-8 -*-
"""Test for Conditional Xmodule functional logic."""
from xmodule.conditional_module import ConditionalDescriptor
from . import LogicTest
class ConditionalModuleTest(LogicTest):
"""Logic tests for Conditional Xmodule."""
descriptor_class = ConditionalDescriptor
def test_ajax_request(self):
"Make shure that ajax request works correctly"
# Mock is_condition_satisfied
self.xmodule.is_condition_satisfied = lambda: True
self.xmodule.descriptor.get_children = lambda: []
response = self.ajax_request('No', {})
html = response['html']
self.assertEqual(html, [])
Correct spelling in a comment
|
# -*- coding: utf-8 -*-
"""Test for Conditional Xmodule functional logic."""
from xmodule.conditional_module import ConditionalDescriptor
from . import LogicTest
class ConditionalModuleTest(LogicTest):
"""Logic tests for Conditional Xmodule."""
descriptor_class = ConditionalDescriptor
def test_ajax_request(self):
"Make sure that ajax request works correctly"
# Mock is_condition_satisfied
self.xmodule.is_condition_satisfied = lambda: True
self.xmodule.descriptor.get_children = lambda: []
response = self.ajax_request('No', {})
html = response['html']
self.assertEqual(html, [])
|
<commit_before># -*- coding: utf-8 -*-
"""Test for Conditional Xmodule functional logic."""
from xmodule.conditional_module import ConditionalDescriptor
from . import LogicTest
class ConditionalModuleTest(LogicTest):
"""Logic tests for Conditional Xmodule."""
descriptor_class = ConditionalDescriptor
def test_ajax_request(self):
"Make shure that ajax request works correctly"
# Mock is_condition_satisfied
self.xmodule.is_condition_satisfied = lambda: True
self.xmodule.descriptor.get_children = lambda: []
response = self.ajax_request('No', {})
html = response['html']
self.assertEqual(html, [])
<commit_msg>Correct spelling in a comment<commit_after>
|
# -*- coding: utf-8 -*-
"""Test for Conditional Xmodule functional logic."""
from xmodule.conditional_module import ConditionalDescriptor
from . import LogicTest
class ConditionalModuleTest(LogicTest):
"""Logic tests for Conditional Xmodule."""
descriptor_class = ConditionalDescriptor
def test_ajax_request(self):
"Make sure that ajax request works correctly"
# Mock is_condition_satisfied
self.xmodule.is_condition_satisfied = lambda: True
self.xmodule.descriptor.get_children = lambda: []
response = self.ajax_request('No', {})
html = response['html']
self.assertEqual(html, [])
|
# -*- coding: utf-8 -*-
"""Test for Conditional Xmodule functional logic."""
from xmodule.conditional_module import ConditionalDescriptor
from . import LogicTest
class ConditionalModuleTest(LogicTest):
"""Logic tests for Conditional Xmodule."""
descriptor_class = ConditionalDescriptor
def test_ajax_request(self):
"Make shure that ajax request works correctly"
# Mock is_condition_satisfied
self.xmodule.is_condition_satisfied = lambda: True
self.xmodule.descriptor.get_children = lambda: []
response = self.ajax_request('No', {})
html = response['html']
self.assertEqual(html, [])
Correct spelling in a comment# -*- coding: utf-8 -*-
"""Test for Conditional Xmodule functional logic."""
from xmodule.conditional_module import ConditionalDescriptor
from . import LogicTest
class ConditionalModuleTest(LogicTest):
"""Logic tests for Conditional Xmodule."""
descriptor_class = ConditionalDescriptor
def test_ajax_request(self):
"Make sure that ajax request works correctly"
# Mock is_condition_satisfied
self.xmodule.is_condition_satisfied = lambda: True
self.xmodule.descriptor.get_children = lambda: []
response = self.ajax_request('No', {})
html = response['html']
self.assertEqual(html, [])
|
<commit_before># -*- coding: utf-8 -*-
"""Test for Conditional Xmodule functional logic."""
from xmodule.conditional_module import ConditionalDescriptor
from . import LogicTest
class ConditionalModuleTest(LogicTest):
"""Logic tests for Conditional Xmodule."""
descriptor_class = ConditionalDescriptor
def test_ajax_request(self):
"Make shure that ajax request works correctly"
# Mock is_condition_satisfied
self.xmodule.is_condition_satisfied = lambda: True
self.xmodule.descriptor.get_children = lambda: []
response = self.ajax_request('No', {})
html = response['html']
self.assertEqual(html, [])
<commit_msg>Correct spelling in a comment<commit_after># -*- coding: utf-8 -*-
"""Test for Conditional Xmodule functional logic."""
from xmodule.conditional_module import ConditionalDescriptor
from . import LogicTest
class ConditionalModuleTest(LogicTest):
"""Logic tests for Conditional Xmodule."""
descriptor_class = ConditionalDescriptor
def test_ajax_request(self):
"Make sure that ajax request works correctly"
# Mock is_condition_satisfied
self.xmodule.is_condition_satisfied = lambda: True
self.xmodule.descriptor.get_children = lambda: []
response = self.ajax_request('No', {})
html = response['html']
self.assertEqual(html, [])
|
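Beyond the one-letter spelling fix, this test is a compact example of stubbing without unittest.mock: assigning a lambda to the instance shadows the class method, so the ajax path runs with no real children or condition check. The same idea in isolation, on a stand-in class invented for illustration:

class Module(object):
    def is_condition_satisfied(self):
        raise RuntimeError("expensive check, unwanted in a logic test")

    def render(self):
        return [] if self.is_condition_satisfied() else None

m = Module()
m.is_condition_satisfied = lambda: True  # instance attribute wins the lookup
print(m.render())  # -> []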
0225a79fa43f03a48a6134fce4122ae243a10df2
|
ptt_preproc_to_target.py
|
ptt_preproc_to_target.py
|
#!/usr/bin/env python
import json
from pathlib import Path
import ptt_core
l = ptt_core.l
_TARGETS_DIR_PATH = Path('targets')
if not _TARGETS_DIR_PATH.exists():
_TARGETS_DIR_PATH.mkdir()
def generate_target_from(json_path):
l.info('Generate target from {} ...'.format(json_path))
txt_path = _TARGETS_DIR_PATH / '{}.txt'.format(json_path.stem)
if txt_path.exists():
l.info('Existed and skip {}'.format(txt_path))
return
with json_path.open() as f:
d = json.load(f)
push_score_sum = sum(push_d['score'] for push_d in d['push_ds'])
with txt_path.open('w') as f:
f.write(str(push_score_sum))
l.info('Wrote into {}'.format(txt_path))
def generate_all(preprocessed_dir_path_str):
for path in Path(preprocessed_dir_path_str).iterdir():
generate_target_from(path)
if __name__ == '__main__':
generate_all('preprocessed')
|
#!/usr/bin/env python
import json
from pathlib import Path
import ptt_core
l = ptt_core.l
_TARGETS_DIR_PATH = Path('targets/')
if not _TARGETS_DIR_PATH.exists():
_TARGETS_DIR_PATH.mkdir()
def generate_target_from(json_path):
l.info('Generate target from {} ...'.format(json_path))
txt_path = _TARGETS_DIR_PATH / '{}.txt'.format(json_path.stem)
if txt_path.exists():
l.info('Existed and skip {}'.format(txt_path))
return
with json_path.open() as f:
d = json.load(f)
push_score_sum = sum(push_d['score'] for push_d in d['push_ds'])
with txt_path.open('w') as f:
f.write(str(push_score_sum))
l.info('Wrote into {}'.format(txt_path))
def generate_all(preprocessed_dir_path_str):
for path in Path(preprocessed_dir_path_str).iterdir():
generate_target_from(path)
if __name__ == '__main__':
generate_all('preprocessed')
|
Put / after dir path
|
Put / after dir path
|
Python
|
mit
|
moskytw/mining-news
|
#!/usr/bin/env python
import json
from pathlib import Path
import ptt_core
l = ptt_core.l
_TARGETS_DIR_PATH = Path('targets')
if not _TARGETS_DIR_PATH.exists():
_TARGETS_DIR_PATH.mkdir()
def generate_target_from(json_path):
l.info('Generate target from {} ...'.format(json_path))
txt_path = _TARGETS_DIR_PATH / '{}.txt'.format(json_path.stem)
if txt_path.exists():
l.info('Existed and skip {}'.format(txt_path))
return
with json_path.open() as f:
d = json.load(f)
push_score_sum = sum(push_d['score'] for push_d in d['push_ds'])
with txt_path.open('w') as f:
f.write(str(push_score_sum))
l.info('Wrote into {}'.format(txt_path))
def generate_all(preprocessed_dir_path_str):
for path in Path(preprocessed_dir_path_str).iterdir():
generate_target_from(path)
if __name__ == '__main__':
generate_all('preprocessed')
Put / after dir path
|
#!/usr/bin/env python
import json
from pathlib import Path
import ptt_core
l = ptt_core.l
_TARGETS_DIR_PATH = Path('targets/')
if not _TARGETS_DIR_PATH.exists():
_TARGETS_DIR_PATH.mkdir()
def generate_target_from(json_path):
l.info('Generate target from {} ...'.format(json_path))
txt_path = _TARGETS_DIR_PATH / '{}.txt'.format(json_path.stem)
if txt_path.exists():
l.info('Existed and skip {}'.format(txt_path))
return
with json_path.open() as f:
d = json.load(f)
push_score_sum = sum(push_d['score'] for push_d in d['push_ds'])
with txt_path.open('w') as f:
f.write(str(push_score_sum))
l.info('Wrote into {}'.format(txt_path))
def generate_all(preprocessed_dir_path_str):
for path in Path(preprocessed_dir_path_str).iterdir():
generate_target_from(path)
if __name__ == '__main__':
generate_all('preprocessed')
|
<commit_before>#!/usr/bin/env python
import json
from pathlib import Path
import ptt_core
l = ptt_core.l
_TARGETS_DIR_PATH = Path('targets')
if not _TARGETS_DIR_PATH.exists():
_TARGETS_DIR_PATH.mkdir()
def generate_target_from(json_path):
l.info('Generate target from {} ...'.format(json_path))
txt_path = _TARGETS_DIR_PATH / '{}.txt'.format(json_path.stem)
if txt_path.exists():
l.info('Existed and skip {}'.format(txt_path))
return
with json_path.open() as f:
d = json.load(f)
push_score_sum = sum(push_d['score'] for push_d in d['push_ds'])
with txt_path.open('w') as f:
f.write(str(push_score_sum))
l.info('Wrote into {}'.format(txt_path))
def generate_all(preprocessed_dir_path_str):
for path in Path(preprocessed_dir_path_str).iterdir():
generate_target_from(path)
if __name__ == '__main__':
generate_all('preprocessed')
<commit_msg>Put / after dir path<commit_after>
|
#!/usr/bin/env python
import json
from pathlib import Path
import ptt_core
l = ptt_core.l
_TARGETS_DIR_PATH = Path('targets/')
if not _TARGETS_DIR_PATH.exists():
_TARGETS_DIR_PATH.mkdir()
def generate_target_from(json_path):
l.info('Generate target from {} ...'.format(json_path))
txt_path = _TARGETS_DIR_PATH / '{}.txt'.format(json_path.stem)
if txt_path.exists():
l.info('Existed and skip {}'.format(txt_path))
return
with json_path.open() as f:
d = json.load(f)
push_score_sum = sum(push_d['score'] for push_d in d['push_ds'])
with txt_path.open('w') as f:
f.write(str(push_score_sum))
l.info('Wrote into {}'.format(txt_path))
def generate_all(preprocessed_dir_path_str):
for path in Path(preprocessed_dir_path_str).iterdir():
generate_target_from(path)
if __name__ == '__main__':
generate_all('preprocessed')
|
#!/usr/bin/env python
import json
from pathlib import Path
import ptt_core
l = ptt_core.l
_TARGETS_DIR_PATH = Path('targets')
if not _TARGETS_DIR_PATH.exists():
_TARGETS_DIR_PATH.mkdir()
def generate_target_from(json_path):
l.info('Generate target from {} ...'.format(json_path))
txt_path = _TARGETS_DIR_PATH / '{}.txt'.format(json_path.stem)
if txt_path.exists():
l.info('Existed and skip {}'.format(txt_path))
return
with json_path.open() as f:
d = json.load(f)
push_score_sum = sum(push_d['score'] for push_d in d['push_ds'])
with txt_path.open('w') as f:
f.write(str(push_score_sum))
l.info('Wrote into {}'.format(txt_path))
def generate_all(preprocessed_dir_path_str):
for path in Path(preprocessed_dir_path_str).iterdir():
generate_target_from(path)
if __name__ == '__main__':
generate_all('preprocessed')
Put / after dir path#!/usr/bin/env python
import json
from pathlib import Path
import ptt_core
l = ptt_core.l
_TARGETS_DIR_PATH = Path('targets/')
if not _TARGETS_DIR_PATH.exists():
_TARGETS_DIR_PATH.mkdir()
def generate_target_from(json_path):
l.info('Generate target from {} ...'.format(json_path))
txt_path = _TARGETS_DIR_PATH / '{}.txt'.format(json_path.stem)
if txt_path.exists():
l.info('Existed and skip {}'.format(txt_path))
return
with json_path.open() as f:
d = json.load(f)
push_score_sum = sum(push_d['score'] for push_d in d['push_ds'])
with txt_path.open('w') as f:
f.write(str(push_score_sum))
l.info('Wrote into {}'.format(txt_path))
def generate_all(preprocessed_dir_path_str):
for path in Path(preprocessed_dir_path_str).iterdir():
generate_target_from(path)
if __name__ == '__main__':
generate_all('preprocessed')
|
<commit_before>#!/usr/bin/env python
import json
from pathlib import Path
import ptt_core
l = ptt_core.l
_TARGETS_DIR_PATH = Path('targets')
if not _TARGETS_DIR_PATH.exists():
_TARGETS_DIR_PATH.mkdir()
def generate_target_from(json_path):
l.info('Generate target from {} ...'.format(json_path))
txt_path = _TARGETS_DIR_PATH / '{}.txt'.format(json_path.stem)
if txt_path.exists():
l.info('Existed and skip {}'.format(txt_path))
return
with json_path.open() as f:
d = json.load(f)
push_score_sum = sum(push_d['score'] for push_d in d['push_ds'])
with txt_path.open('w') as f:
f.write(str(push_score_sum))
l.info('Wrote into {}'.format(txt_path))
def generate_all(preprocessed_dir_path_str):
for path in Path(preprocessed_dir_path_str).iterdir():
generate_target_from(path)
if __name__ == '__main__':
generate_all('preprocessed')
<commit_msg>Put / after dir path<commit_after>#!/usr/bin/env python
import json
from pathlib import Path
import ptt_core
l = ptt_core.l
_TARGETS_DIR_PATH = Path('targets/')
if not _TARGETS_DIR_PATH.exists():
_TARGETS_DIR_PATH.mkdir()
def generate_target_from(json_path):
l.info('Generate target from {} ...'.format(json_path))
txt_path = _TARGETS_DIR_PATH / '{}.txt'.format(json_path.stem)
if txt_path.exists():
l.info('Existed and skip {}'.format(txt_path))
return
with json_path.open() as f:
d = json.load(f)
push_score_sum = sum(push_d['score'] for push_d in d['push_ds'])
with txt_path.open('w') as f:
f.write(str(push_score_sum))
l.info('Wrote into {}'.format(txt_path))
def generate_all(preprocessed_dir_path_str):
for path in Path(preprocessed_dir_path_str).iterdir():
generate_target_from(path)
if __name__ == '__main__':
generate_all('preprocessed')
|
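The change itself is cosmetic: pathlib normalizes a trailing slash away, so Path('targets') and Path('targets/') compare equal and the / join operator behaves identically on both. A quick check:

from pathlib import Path

a = Path('targets')
b = Path('targets/')

print(a == b)       # -> True: the trailing slash is dropped on parsing
print(b / 'x.txt')  # -> targets/x.txt on POSIX, exactly as with `a`
print(str(b))       # -> 'targets', no slash survives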
2a8dd80c9769731963fcd75cb24cd8918e48b269
|
mythril/analysis/security.py
|
mythril/analysis/security.py
|
from mythril.analysis.report import Report
from .modules import delegatecall_forward, unchecked_suicide, ether_send, unchecked_retval, delegatecall_to_dynamic, integer_underflow, call_to_dynamic_with_gas
def fire_lasers(statespace):
issues = []
issues += delegatecall_forward.execute(statespace)
issues += delegatecall_to_dynamic.execute(statespace)
issues += call_to_dynamic_with_gas.execute(statespace)
issues += unchecked_suicide.execute(statespace)
issues += unchecked_retval.execute(statespace)
issues += ether_send.execute(statespace)
issues += integer_underflow.execute(statespace)
if (len(issues)):
report = Report(issues)
print(report.as_text())
|
from mythril.analysis.report import Report
from mythril.analysis import modules
import pkgutil
def fire_lasers(statespace):
issues = []
_modules = []
for loader, name, is_pkg in pkgutil.walk_packages(modules.__path__):
_modules.append(loader.find_module(name).load_module(name))
for module in _modules:
issues += module.execute(statespace)
if (len(issues)):
report = Report(issues)
print(report.as_text())
|
Implement a nicer way of execution modules
|
Implement a nicer way of execution modules
|
Python
|
mit
|
b-mueller/mythril,b-mueller/mythril,b-mueller/mythril,b-mueller/mythril
|
from mythril.analysis.report import Report
from .modules import delegatecall_forward, unchecked_suicide, ether_send, unchecked_retval, delegatecall_to_dynamic, integer_underflow, call_to_dynamic_with_gas
def fire_lasers(statespace):
issues = []
issues += delegatecall_forward.execute(statespace)
issues += delegatecall_to_dynamic.execute(statespace)
issues += call_to_dynamic_with_gas.execute(statespace)
issues += unchecked_suicide.execute(statespace)
issues += unchecked_retval.execute(statespace)
issues += ether_send.execute(statespace)
issues += integer_underflow.execute(statespace)
if (len(issues)):
report = Report(issues)
print(report.as_text())
Implement a nicer way of execution modules
|
from mythril.analysis.report import Report
from mythril.analysis import modules
import pkgutil
def fire_lasers(statespace):
issues = []
_modules = []
for loader, name, is_pkg in pkgutil.walk_packages(modules.__path__):
_modules.append(loader.find_module(name).load_module(name))
for module in _modules:
issues += module.execute(statespace)
if (len(issues)):
report = Report(issues)
print(report.as_text())
|
<commit_before>from mythril.analysis.report import Report
from .modules import delegatecall_forward, unchecked_suicide, ether_send, unchecked_retval, delegatecall_to_dynamic, integer_underflow, call_to_dynamic_with_gas
def fire_lasers(statespace):
issues = []
issues += delegatecall_forward.execute(statespace)
issues += delegatecall_to_dynamic.execute(statespace)
issues += call_to_dynamic_with_gas.execute(statespace)
issues += unchecked_suicide.execute(statespace)
issues += unchecked_retval.execute(statespace)
issues += ether_send.execute(statespace)
issues += integer_underflow.execute(statespace)
if (len(issues)):
report = Report(issues)
print(report.as_text())
<commit_msg>Implement a nicer way of execution modules<commit_after>
|
from mythril.analysis.report import Report
from mythril.analysis import modules
import pkgutil
def fire_lasers(statespace):
issues = []
_modules = []
for loader, name, is_pkg in pkgutil.walk_packages(modules.__path__):
_modules.append(loader.find_module(name).load_module(name))
for module in _modules:
issues += module.execute(statespace)
if (len(issues)):
report = Report(issues)
print(report.as_text())
|
from mythril.analysis.report import Report
from .modules import delegatecall_forward, unchecked_suicide, ether_send, unchecked_retval, delegatecall_to_dynamic, integer_underflow, call_to_dynamic_with_gas
def fire_lasers(statespace):
issues = []
issues += delegatecall_forward.execute(statespace)
issues += delegatecall_to_dynamic.execute(statespace)
issues += call_to_dynamic_with_gas.execute(statespace)
issues += unchecked_suicide.execute(statespace)
issues += unchecked_retval.execute(statespace)
issues += ether_send.execute(statespace)
issues += integer_underflow.execute(statespace)
if (len(issues)):
report = Report(issues)
print(report.as_text())
Implement a nicer way of execution modulesfrom mythril.analysis.report import Report
from mythril.analysis import modules
import pkgutil
def fire_lasers(statespace):
issues = []
_modules = []
for loader, name, is_pkg in pkgutil.walk_packages(modules.__path__):
_modules.append(loader.find_module(name).load_module(name))
for module in _modules:
issues += module.execute(statespace)
if (len(issues)):
report = Report(issues)
print(report.as_text())
|
<commit_before>from mythril.analysis.report import Report
from .modules import delegatecall_forward, unchecked_suicide, ether_send, unchecked_retval, delegatecall_to_dynamic, integer_underflow, call_to_dynamic_with_gas
def fire_lasers(statespace):
issues = []
issues += delegatecall_forward.execute(statespace)
issues += delegatecall_to_dynamic.execute(statespace)
issues += call_to_dynamic_with_gas.execute(statespace)
issues += unchecked_suicide.execute(statespace)
issues += unchecked_retval.execute(statespace)
issues += ether_send.execute(statespace)
issues += integer_underflow.execute(statespace)
if (len(issues)):
report = Report(issues)
print(report.as_text())
<commit_msg>Implement a nicer way of executing modules<commit_after>from mythril.analysis.report import Report
from mythril.analysis import modules
import pkgutil
def fire_lasers(statespace):
issues = []
_modules = []
for loader, name, is_pkg in pkgutil.walk_packages(modules.__path__):
_modules.append(loader.find_module(name).load_module(name))
for module in _modules:
issues += module.execute(statespace)
if (len(issues)):
report = Report(issues)
print(report.as_text())
|
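A note on the plugin-discovery pattern in the record above: loader.find_module(name).load_module(name) is deprecated in modern Python. A minimal sketch of an equivalent using importlib, assuming the same mythril.analysis.modules package layout (an illustration, not the project's actual code):

import importlib
import pkgutil
from mythril.analysis import modules

def load_analysis_modules():
    # Import every submodule of mythril.analysis.modules by its dotted name.
    loaded = []
    for _, name, _ in pkgutil.walk_packages(modules.__path__):
        loaded.append(importlib.import_module("mythril.analysis.modules." + name))
    return loaded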
dbb27799a92e58cf03a9b60c17e0f3116fbcf687
|
modish/__init__.py
|
modish/__init__.py
|
# -*- coding: utf-8 -*-
from .model import ModalityModel
from .estimator import ModalityEstimator
from .visualize import MODALITY_TO_COLOR, MODALITY_ORDER, MODALITY_PALETTE,\
MODALITY_TO_CMAP, ModalitiesViz, violinplot
__author__ = 'Olga Botvinnik'
__email__ = 'olga.botvinnik@gmail.com'
__version__ = '0.1.0'
__all__ = ['ModalityModel', 'ModalityEstimator', 'MODALITY_ORDER',
'MODALITY_PALETTE', 'MODALITY_TO_COLOR', 'ModalitiesViz',
'violinplot', 'MODALITY_TO_CMAP']
|
# -*- coding: utf-8 -*-
from .model import ModalityModel
from .estimator import ModalityEstimator
from .visualize import MODALITY_TO_COLOR, MODALITY_ORDER, MODALITY_PALETTE,\
MODALITY_TO_CMAP, ModalitiesViz, violinplot, barplot
__author__ = 'Olga Botvinnik'
__email__ = 'olga.botvinnik@gmail.com'
__version__ = '0.1.0'
__all__ = ['ModalityModel', 'ModalityEstimator', 'MODALITY_ORDER',
'MODALITY_PALETTE', 'MODALITY_TO_COLOR', 'ModalitiesViz',
'violinplot', 'MODALITY_TO_CMAP']
class ModishTestResult(object):
def __init__(self, original_data, estimator, modality_assignments,
bayesian_fit, data_with_noise, waypoint_transformer,
waypoints):
self.original_data = original_data
self.estimator = estimator
self.modality_assignments = modality_assignments
self.bayesian_fit = bayesian_fit
self.data_with_noise = data_with_noise
self.waypoint_transformer = waypoint_transformer
self.waypoints = waypoints
|
Add object for saving modish test results
|
Add object for saving modish test results
|
Python
|
bsd-3-clause
|
olgabot/anchor,olgabot/modish,YeoLab/anchor
|
# -*- coding: utf-8 -*-
from .model import ModalityModel
from .estimator import ModalityEstimator
from .visualize import MODALITY_TO_COLOR, MODALITY_ORDER, MODALITY_PALETTE,\
MODALITY_TO_CMAP, ModalitiesViz, violinplot
__author__ = 'Olga Botvinnik'
__email__ = 'olga.botvinnik@gmail.com'
__version__ = '0.1.0'
__all__ = ['ModalityModel', 'ModalityEstimator', 'MODALITY_ORDER',
'MODALITY_PALETTE', 'MODALITY_TO_COLOR', 'ModalitiesViz',
'violinplot', 'MODALITY_TO_CMAP']
Add object for saving modish test results
|
# -*- coding: utf-8 -*-
from .model import ModalityModel
from .estimator import ModalityEstimator
from .visualize import MODALITY_TO_COLOR, MODALITY_ORDER, MODALITY_PALETTE,\
MODALITY_TO_CMAP, ModalitiesViz, violinplot, barplot
__author__ = 'Olga Botvinnik'
__email__ = 'olga.botvinnik@gmail.com'
__version__ = '0.1.0'
__all__ = ['ModalityModel', 'ModalityEstimator', 'MODALITY_ORDER',
'MODALITY_PALETTE', 'MODALITY_TO_COLOR', 'ModalitiesViz',
'violinplot', 'MODALITY_TO_CMAP']
class ModishTestResult(object):
def __init__(self, original_data, estimator, modality_assignments,
bayesian_fit, data_with_noise, waypoint_transformer,
waypoints):
self.original_data = original_data
self.estimator = estimator
self.modality_assignments = modality_assignments
self.bayesian_fit = bayesian_fit
self.data_with_noise = data_with_noise
self.waypoint_transformer = waypoint_transformer
self.waypoints = waypoints
|
<commit_before># -*- coding: utf-8 -*-
from .model import ModalityModel
from .estimator import ModalityEstimator
from .visualize import MODALITY_TO_COLOR, MODALITY_ORDER, MODALITY_PALETTE,\
MODALITY_TO_CMAP, ModalitiesViz, violinplot
__author__ = 'Olga Botvinnik'
__email__ = 'olga.botvinnik@gmail.com'
__version__ = '0.1.0'
__all__ = ['ModalityModel', 'ModalityEstimator', 'MODALITY_ORDER',
'MODALITY_PALETTE', 'MODALITY_TO_COLOR', 'ModalitiesViz',
'violinplot', 'MODALITY_TO_CMAP']
<commit_msg>Add object for saving modish test results<commit_after>
|
# -*- coding: utf-8 -*-
from .model import ModalityModel
from .estimator import ModalityEstimator
from .visualize import MODALITY_TO_COLOR, MODALITY_ORDER, MODALITY_PALETTE,\
MODALITY_TO_CMAP, ModalitiesViz, violinplot, barplot
__author__ = 'Olga Botvinnik'
__email__ = 'olga.botvinnik@gmail.com'
__version__ = '0.1.0'
__all__ = ['ModalityModel', 'ModalityEstimator', 'MODALITY_ORDER',
'MODALITY_PALETTE', 'MODALITY_TO_COLOR', 'ModalitiesViz',
'violinplot', 'MODALITY_TO_CMAP']
class ModishTestResult(object):
def __init__(self, original_data, estimator, modality_assignments,
bayesian_fit, data_with_noise, waypoint_transformer,
waypoints):
self.original_data = original_data
self.estimator = estimator
self.modality_assignments = modality_assignments
self.bayesian_fit = bayesian_fit
self.data_with_noise = data_with_noise
self.waypoint_transformer = waypoint_transformer
self.waypoints = waypoints
|
# -*- coding: utf-8 -*-
from .model import ModalityModel
from .estimator import ModalityEstimator
from .visualize import MODALITY_TO_COLOR, MODALITY_ORDER, MODALITY_PALETTE,\
MODALITY_TO_CMAP, ModalitiesViz, violinplot
__author__ = 'Olga Botvinnik'
__email__ = 'olga.botvinnik@gmail.com'
__version__ = '0.1.0'
__all__ = ['ModalityModel', 'ModalityEstimator', 'MODALITY_ORDER',
'MODALITY_PALETTE', 'MODALITY_TO_COLOR', 'ModalitiesViz',
'violinplot', 'MODALITY_TO_CMAP']
Add object for saving modish test results# -*- coding: utf-8 -*-
from .model import ModalityModel
from .estimator import ModalityEstimator
from .visualize import MODALITY_TO_COLOR, MODALITY_ORDER, MODALITY_PALETTE,\
MODALITY_TO_CMAP, ModalitiesViz, violinplot, barplot
__author__ = 'Olga Botvinnik'
__email__ = 'olga.botvinnik@gmail.com'
__version__ = '0.1.0'
__all__ = ['ModalityModel', 'ModalityEstimator', 'MODALITY_ORDER',
'MODALITY_PALETTE', 'MODALITY_TO_COLOR', 'ModalitiesViz',
'violinplot', 'MODALITY_TO_CMAP']
class ModishTestResult(object):
def __init__(self, original_data, estimator, modality_assignments,
bayesian_fit, data_with_noise, waypoint_transformer,
waypoints):
self.original_data = original_data
self.estimator = estimator
self.modality_assignments = modality_assignments
self.bayesian_fit = bayesian_fit
self.data_with_noise = data_with_noise
self.waypoint_transformer = waypoint_transformer
self.waypoints = waypoints
|
<commit_before># -*- coding: utf-8 -*-
from .model import ModalityModel
from .estimator import ModalityEstimator
from .visualize import MODALITY_TO_COLOR, MODALITY_ORDER, MODALITY_PALETTE,\
MODALITY_TO_CMAP, ModalitiesViz, violinplot
__author__ = 'Olga Botvinnik'
__email__ = 'olga.botvinnik@gmail.com'
__version__ = '0.1.0'
__all__ = ['ModalityModel', 'ModalityEstimator', 'MODALITY_ORDER',
'MODALITY_PALETTE', 'MODALITY_TO_COLOR', 'ModalitiesViz',
'violinplot', 'MODALITY_TO_CMAP']
<commit_msg>Add object for saving modish test results<commit_after># -*- coding: utf-8 -*-
from .model import ModalityModel
from .estimator import ModalityEstimator
from .visualize import MODALITY_TO_COLOR, MODALITY_ORDER, MODALITY_PALETTE,\
MODALITY_TO_CMAP, ModalitiesViz, violinplot, barplot
__author__ = 'Olga Botvinnik'
__email__ = 'olga.botvinnik@gmail.com'
__version__ = '0.1.0'
__all__ = ['ModalityModel', 'ModalityEstimator', 'MODALITY_ORDER',
'MODALITY_PALETTE', 'MODALITY_TO_COLOR', 'ModalitiesViz',
'violinplot', 'MODALITY_TO_CMAP']
class ModishTestResult(object):
def __init__(self, original_data, estimator, modality_assignments,
bayesian_fit, data_with_noise, waypoint_transformer,
waypoints):
self.original_data = original_data
self.estimator = estimator
self.modality_assignments = modality_assignments
self.bayesian_fit = bayesian_fit
self.data_with_noise = data_with_noise
self.waypoint_transformer = waypoint_transformer
self.waypoints = waypoints
|
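ModishTestResult in the record above is a plain value container; on Python 3.7+ the same shape can be expressed with a dataclass. A hypothetical sketch (the Any annotations are placeholders, not taken from the project):

from dataclasses import dataclass
from typing import Any

@dataclass
class ModishTestResult:
    # Same seven fields as the hand-written __init__ in the record above.
    original_data: Any
    estimator: Any
    modality_assignments: Any
    bayesian_fit: Any
    data_with_noise: Any
    waypoint_transformer: Any
    waypoints: Any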
ab4333ad10713b0df25e0ff9bb46da3a0749326f
|
analyser/tasks.py
|
analyser/tasks.py
|
import os
import time
import requests
from krunchr.vendors.celery import celery
@celery.task
def get_file(url, path):
name, ext = os.path.splitext(url)
name = str(int(time.time()))
path = "%s/%s%s" % (path, name, ext)
response = requests.get(url)
print path
with open(path, 'w') as f:
f.write(response.content)
|
import os
import time
import rethinkdb as r
import requests
from krunchr.vendors.celery import celery, db
@celery.task(bind=True)
def get_file(self, url, path):
name, ext = os.path.splitext(url)
name = str(int(time.time()))
path = "%s/%s%s" % (path, name, ext)
response = requests.get(url)
with open(path, 'w') as f:
f.write(response.content)
r.table('jobs').filter({
'task_id': self.request.id
}).update({'state': 'done'}).run(db)
|
Update job state when we finish the task
|
Update job state when we finish the task
|
Python
|
apache-2.0
|
vtemian/kruncher
|
import os
import time
import requests
from krunchr.vendors.celery import celery
@celery.task
def get_file(url, path):
name, ext = os.path.splitext(url)
name = str(int(time.time()))
path = "%s/%s%s" % (path, name, ext)
response = requests.get(url)
print path
with open(path, 'w') as f:
f.write(response.content)
Update job state when we finish the task
|
import os
import time
import rethinkdb as r
import requests
from krunchr.vendors.celery import celery, db
@celery.task(bind=True)
def get_file(self, url, path):
name, ext = os.path.splitext(url)
name = str(int(time.time()))
path = "%s/%s%s" % (path, name, ext)
response = requests.get(url)
with open(path, 'w') as f:
f.write(response.content)
r.table('jobs').filter({
'task_id': self.request.id
}).update({'state': 'done'}).run(db)
|
<commit_before>import os
import time
import requests
from krunchr.vendors.celery import celery
@celery.task
def get_file(url, path):
name, ext = os.path.splitext(url)
name = str(int(time.time()))
path = "%s/%s%s" % (path, name, ext)
response = requests.get(url)
print path
with open(path, 'w') as f:
f.write(response.content)
<commit_msg>Update job state when we finish the task<commit_after>
|
import os
import time
import rethinkdb as r
import requests
from krunchr.vendors.celery import celery, db
@celery.task(bind=True)
def get_file(self, url, path):
name, ext = os.path.splitext(url)
name = str(int(time.time()))
path = "%s/%s%s" % (path, name, ext)
response = requests.get(url)
with open(path, 'w') as f:
f.write(response.content)
r.table('jobs').filter({
'task_id': self.request.id
}).update({'state': 'done'}).run(db)
|
import os
import time
import requests
from krunchr.vendors.celery import celery
@celery.task
def get_file(url, path):
name, ext = os.path.splitext(url)
name = str(int(time.time()))
path = "%s/%s%s" % (path, name, ext)
response = requests.get(url)
print path
with open(path, 'w') as f:
f.write(response.content)
Update job state when we finish the taskimport os
import time
import rethinkdb as r
import requests
from krunchr.vendors.celery import celery, db
@celery.task(bind=True)
def get_file(self, url, path):
name, ext = os.path.splitext(url)
name = str(int(time.time()))
path = "%s/%s%s" % (path, name, ext)
response = requests.get(url)
with open(path, 'w') as f:
f.write(response.content)
r.table('jobs').filter({
'task_id': self.request.id
}).update({'state': 'done'}).run(db)
|
<commit_before>import os
import time
import requests
from krunchr.vendors.celery import celery
@celery.task
def get_file(url, path):
name, ext = os.path.splitext(url)
name = str(int(time.time()))
path = "%s/%s%s" % (path, name, ext)
response = requests.get(url)
print path
with open(path, 'w') as f:
f.write(response.content)
<commit_msg>Update job state when we finish the task<commit_after>import os
import time
import rethinkdb as r
import requests
from krunchr.vendors.celery import celery, db
@celery.task(bind=True)
def get_file(self, url, path):
name, ext = os.path.splitext(url)
name = str(int(time.time()))
path = "%s/%s%s" % (path, name, ext)
response = requests.get(url)
with open(path, 'w') as f:
f.write(response.content)
r.table('jobs').filter({
'task_id': self.request.id
}).update({'state': 'done'}).run(db)
|
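The task above buffers the whole download in response.content before writing it out. For large files, requests can stream to disk instead; a minimal sketch of just the download step (illustrative, not the project's code):

import requests

def download(url, path):
    # Stream the body in chunks rather than holding it all in memory.
    response = requests.get(url, stream=True)
    with open(path, 'wb') as f:
        for chunk in response.iter_content(chunk_size=65536):
            f.write(chunk)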
28c26bf5d220a5a60b807470bbaf339bdf9206a9
|
pylearn2/testing/skip.py
|
pylearn2/testing/skip.py
|
"""
Helper functions for determining which tests to skip.
"""
__authors__ = "Ian Goodfellow"
__copyright__ = "Copyright 2010-2012, Universite de Montreal"
__credits__ = ["Ian Goodfellow"]
__license__ = "3-clause BSD"
__maintainer__ = "LISA Lab"
__email__ = "pylearn-dev@googlegroups"
from nose.plugins.skip import SkipTest
import os
from theano.sandbox import cuda
scipy_works = True
try:
import scipy
except ImportError:
# pyflakes gets mad if you set scipy to None here
scipy_works = False
sklearn_works = True
try:
import sklearn
except ImportError:
sklearn_works = False
h5py_works = True
try:
import h5py
except ImportError:
h5py_works = False
def skip_if_no_data():
if 'PYLEARN2_DATA_PATH' not in os.environ:
raise SkipTest()
def skip_if_no_scipy():
if not scipy_works:
raise SkipTest()
def skip_if_no_sklearn():
if not sklearn_works:
raise SkipTest()
def skip_if_no_gpu():
if cuda.cuda_available == False:
raise SkipTest('Optional package cuda disabled.')
def skip_if_no_h5py():
if not h5py_works:
raise SkipTest()
|
"""
Helper functions for determining which tests to skip.
"""
__authors__ = "Ian Goodfellow"
__copyright__ = "Copyright 2010-2012, Universite de Montreal"
__credits__ = ["Ian Goodfellow"]
__license__ = "3-clause BSD"
__maintainer__ = "LISA Lab"
__email__ = "pylearn-dev@googlegroups"
from nose.plugins.skip import SkipTest
import os
from theano.sandbox import cuda
scipy_works = True
try:
import scipy
except ImportError:
# pyflakes gets mad if you set scipy to None here
scipy_works = False
sklearn_works = True
try:
import sklearn
except ImportError:
sklearn_works = False
h5py_works = True
try:
import h5py
except ImportError:
h5py_works = False
matplotlib_works = True
try:
from matplotlib import pyplot
except ImportError:
matplotlib_works = False
def skip_if_no_data():
if 'PYLEARN2_DATA_PATH' not in os.environ:
raise SkipTest()
def skip_if_no_scipy():
if not scipy_works:
raise SkipTest()
def skip_if_no_sklearn():
if not sklearn_works:
raise SkipTest()
def skip_if_no_gpu():
if cuda.cuda_available == False:
raise SkipTest('Optional package cuda disabled.')
def skip_if_no_h5py():
if not h5py_works:
raise SkipTest()
def skip_if_no_matplotlib():
if not matplotlib_works:
raise SkipTest("matplotlib and pyplot are not available")
|
Add SkipTest for when matplotlib and/or pyplot are not present
|
Add SkipTest for when matplotlib and/or pyplot are not present
|
Python
|
bsd-3-clause
|
CIFASIS/pylearn2,lisa-lab/pylearn2,msingh172/pylearn2,w1kke/pylearn2,lamblin/pylearn2,bartvm/pylearn2,sandeepkbhat/pylearn2,cosmoharrigan/pylearn2,jeremyfix/pylearn2,mkraemer67/pylearn2,jamessergeant/pylearn2,junbochen/pylearn2,TNick/pylearn2,hantek/pylearn2,nouiz/pylearn2,aalmah/pylearn2,skearnes/pylearn2,kose-y/pylearn2,mclaughlin6464/pylearn2,mkraemer67/pylearn2,KennethPierce/pylearnk,lancezlin/pylearn2,jamessergeant/pylearn2,hyqneuron/pylearn2-maxsom,kose-y/pylearn2,bartvm/pylearn2,lancezlin/pylearn2,JesseLivezey/pylearn2,matrogers/pylearn2,hantek/pylearn2,shiquanwang/pylearn2,jeremyfix/pylearn2,junbochen/pylearn2,aalmah/pylearn2,kastnerkyle/pylearn2,ddboline/pylearn2,w1kke/pylearn2,alexjc/pylearn2,daemonmaker/pylearn2,nouiz/pylearn2,theoryno3/pylearn2,lunyang/pylearn2,lunyang/pylearn2,TNick/pylearn2,fishcorn/pylearn2,hyqneuron/pylearn2-maxsom,hyqneuron/pylearn2-maxsom,daemonmaker/pylearn2,se4u/pylearn2,matrogers/pylearn2,bartvm/pylearn2,pkainz/pylearn2,JesseLivezey/plankton,fulmicoton/pylearn2,CIFASIS/pylearn2,matrogers/pylearn2,ashhher3/pylearn2,cosmoharrigan/pylearn2,lamblin/pylearn2,bartvm/pylearn2,lisa-lab/pylearn2,ashhher3/pylearn2,caidongyun/pylearn2,abergeron/pylearn2,ddboline/pylearn2,goodfeli/pylearn2,woozzu/pylearn2,daemonmaker/pylearn2,chrish42/pylearn,JesseLivezey/pylearn2,Refefer/pylearn2,kose-y/pylearn2,chrish42/pylearn,fishcorn/pylearn2,goodfeli/pylearn2,alexjc/pylearn2,caidongyun/pylearn2,caidongyun/pylearn2,nouiz/pylearn2,abergeron/pylearn2,KennethPierce/pylearnk,theoryno3/pylearn2,ashhher3/pylearn2,JesseLivezey/pylearn2,pombredanne/pylearn2,Refefer/pylearn2,fyffyt/pylearn2,skearnes/pylearn2,kastnerkyle/pylearn2,junbochen/pylearn2,nouiz/pylearn2,ddboline/pylearn2,pombredanne/pylearn2,mkraemer67/pylearn2,theoryno3/pylearn2,lamblin/pylearn2,abergeron/pylearn2,jeremyfix/pylearn2,theoryno3/pylearn2,msingh172/pylearn2,hantek/pylearn2,lamblin/pylearn2,fyffyt/pylearn2,lancezlin/pylearn2,KennethPierce/pylearnk,daemonmaker/pylearn2,w1kke/pylearn2,abergeron/pylearn2,skearnes/pylearn2,lancezlin/pylearn2,pombredanne/pylearn2,alexjc/pylearn2,pkainz/pylearn2,aalmah/pylearn2,woozzu/pylearn2,aalmah/pylearn2,mclaughlin6464/pylearn2,shiquanwang/pylearn2,kastnerkyle/pylearn2,fyffyt/pylearn2,jamessergeant/pylearn2,ddboline/pylearn2,w1kke/pylearn2,woozzu/pylearn2,chrish42/pylearn,fyffyt/pylearn2,cosmoharrigan/pylearn2,kastnerkyle/pylearn2,se4u/pylearn2,fulmicoton/pylearn2,matrogers/pylearn2,goodfeli/pylearn2,sandeepkbhat/pylearn2,msingh172/pylearn2,Refefer/pylearn2,woozzu/pylearn2,fishcorn/pylearn2,pombredanne/pylearn2,fishcorn/pylearn2,pkainz/pylearn2,KennethPierce/pylearnk,JesseLivezey/pylearn2,lunyang/pylearn2,mkraemer67/pylearn2,lisa-lab/pylearn2,shiquanwang/pylearn2,junbochen/pylearn2,sandeepkbhat/pylearn2,lisa-lab/pylearn2,se4u/pylearn2,alexjc/pylearn2,ashhher3/pylearn2,TNick/pylearn2,JesseLivezey/plankton,jeremyfix/pylearn2,jamessergeant/pylearn2,chrish42/pylearn,JesseLivezey/plankton,cosmoharrigan/pylearn2,mclaughlin6464/pylearn2,hyqneuron/pylearn2-maxsom,kose-y/pylearn2,fulmicoton/pylearn2,se4u/pylearn2,lunyang/pylearn2,pkainz/pylearn2,skearnes/pylearn2,msingh172/pylearn2,shiquanwang/pylearn2,TNick/pylearn2,sandeepkbhat/pylearn2,JesseLivezey/plankton,CIFASIS/pylearn2,hantek/pylearn2,Refefer/pylearn2,goodfeli/pylearn2,caidongyun/pylearn2,CIFASIS/pylearn2,fulmicoton/pylearn2,mclaughlin6464/pylearn2
|
"""
Helper functions for determining which tests to skip.
"""
__authors__ = "Ian Goodfellow"
__copyright__ = "Copyright 2010-2012, Universite de Montreal"
__credits__ = ["Ian Goodfellow"]
__license__ = "3-clause BSD"
__maintainer__ = "LISA Lab"
__email__ = "pylearn-dev@googlegroups"
from nose.plugins.skip import SkipTest
import os
from theano.sandbox import cuda
scipy_works = True
try:
import scipy
except ImportError:
# pyflakes gets mad if you set scipy to None here
scipy_works = False
sklearn_works = True
try:
import sklearn
except ImportError:
sklearn_works = False
h5py_works = True
try:
import h5py
except ImportError:
h5py_works = False
def skip_if_no_data():
if 'PYLEARN2_DATA_PATH' not in os.environ:
raise SkipTest()
def skip_if_no_scipy():
if not scipy_works:
raise SkipTest()
def skip_if_no_sklearn():
if not sklearn_works:
raise SkipTest()
def skip_if_no_gpu():
if cuda.cuda_available == False:
raise SkipTest('Optional package cuda disabled.')
def skip_if_no_h5py():
if not h5py_works:
raise SkipTest()
Add SkipTest for when matplotlib and/or pyplot are not present
|
"""
Helper functions for determining which tests to skip.
"""
__authors__ = "Ian Goodfellow"
__copyright__ = "Copyright 2010-2012, Universite de Montreal"
__credits__ = ["Ian Goodfellow"]
__license__ = "3-clause BSD"
__maintainer__ = "LISA Lab"
__email__ = "pylearn-dev@googlegroups"
from nose.plugins.skip import SkipTest
import os
from theano.sandbox import cuda
scipy_works = True
try:
import scipy
except ImportError:
# pyflakes gets mad if you set scipy to None here
scipy_works = False
sklearn_works = True
try:
import sklearn
except ImportError:
sklearn_works = False
h5py_works = True
try:
import h5py
except ImportError:
h5py_works = False
matplotlib_works = True
try:
from matplotlib import pyplot
except ImportError:
matplotlib_works = False
def skip_if_no_data():
if 'PYLEARN2_DATA_PATH' not in os.environ:
raise SkipTest()
def skip_if_no_scipy():
if not scipy_works:
raise SkipTest()
def skip_if_no_sklearn():
if not sklearn_works:
raise SkipTest()
def skip_if_no_gpu():
if cuda.cuda_available == False:
raise SkipTest('Optional package cuda disabled.')
def skip_if_no_h5py():
if not h5py_works:
raise SkipTest()
def skip_if_no_matplotlib():
if not matplotlib_works:
raise SkipTest("matplotlib and pyplot are not available")
|
<commit_before>"""
Helper functions for determining which tests to skip.
"""
__authors__ = "Ian Goodfellow"
__copyright__ = "Copyright 2010-2012, Universite de Montreal"
__credits__ = ["Ian Goodfellow"]
__license__ = "3-clause BSD"
__maintainer__ = "LISA Lab"
__email__ = "pylearn-dev@googlegroups"
from nose.plugins.skip import SkipTest
import os
from theano.sandbox import cuda
scipy_works = True
try:
import scipy
except ImportError:
# pyflakes gets mad if you set scipy to None here
scipy_works = False
sklearn_works = True
try:
import sklearn
except ImportError:
sklearn_works = False
h5py_works = True
try:
import h5py
except ImportError:
h5py_works = False
def skip_if_no_data():
if 'PYLEARN2_DATA_PATH' not in os.environ:
raise SkipTest()
def skip_if_no_scipy():
if not scipy_works:
raise SkipTest()
def skip_if_no_sklearn():
if not sklearn_works:
raise SkipTest()
def skip_if_no_gpu():
if cuda.cuda_available == False:
raise SkipTest('Optional package cuda disabled.')
def skip_if_no_h5py():
if not h5py_works:
raise SkipTest()
<commit_msg>Add SkipTest for when matplotlib and/or pyplot are not present<commit_after>
|
"""
Helper functions for determining which tests to skip.
"""
__authors__ = "Ian Goodfellow"
__copyright__ = "Copyright 2010-2012, Universite de Montreal"
__credits__ = ["Ian Goodfellow"]
__license__ = "3-clause BSD"
__maintainer__ = "LISA Lab"
__email__ = "pylearn-dev@googlegroups"
from nose.plugins.skip import SkipTest
import os
from theano.sandbox import cuda
scipy_works = True
try:
import scipy
except ImportError:
# pyflakes gets mad if you set scipy to None here
scipy_works = False
sklearn_works = True
try:
import sklearn
except ImportError:
sklearn_works = False
h5py_works = True
try:
import h5py
except ImportError:
h5py_works = False
matplotlib_works = True
try:
from matplotlib import pyplot
except ImportError:
matplotlib_works = False
def skip_if_no_data():
if 'PYLEARN2_DATA_PATH' not in os.environ:
raise SkipTest()
def skip_if_no_scipy():
if not scipy_works:
raise SkipTest()
def skip_if_no_sklearn():
if not sklearn_works:
raise SkipTest()
def skip_if_no_gpu():
if cuda.cuda_available == False:
raise SkipTest('Optional package cuda disabled.')
def skip_if_no_h5py():
if not h5py_works:
raise SkipTest()
def skip_if_no_matplotlib():
if not matplotlib_works:
raise SkipTest("matplotlib and pyplot are not available")
|
"""
Helper functions for determining which tests to skip.
"""
__authors__ = "Ian Goodfellow"
__copyright__ = "Copyright 2010-2012, Universite de Montreal"
__credits__ = ["Ian Goodfellow"]
__license__ = "3-clause BSD"
__maintainer__ = "LISA Lab"
__email__ = "pylearn-dev@googlegroups"
from nose.plugins.skip import SkipTest
import os
from theano.sandbox import cuda
scipy_works = True
try:
import scipy
except ImportError:
# pyflakes gets mad if you set scipy to None here
scipy_works = False
sklearn_works = True
try:
import sklearn
except ImportError:
sklearn_works = False
h5py_works = True
try:
import h5py
except ImportError:
h5py_works = False
def skip_if_no_data():
if 'PYLEARN2_DATA_PATH' not in os.environ:
raise SkipTest()
def skip_if_no_scipy():
if not scipy_works:
raise SkipTest()
def skip_if_no_sklearn():
if not sklearn_works:
raise SkipTest()
def skip_if_no_gpu():
if cuda.cuda_available == False:
raise SkipTest('Optional package cuda disabled.')
def skip_if_no_h5py():
if not h5py_works:
raise SkipTest()
Add SkipTest for when matplotlib and/or pyplot are not present"""
Helper functions for determining which tests to skip.
"""
__authors__ = "Ian Goodfellow"
__copyright__ = "Copyright 2010-2012, Universite de Montreal"
__credits__ = ["Ian Goodfellow"]
__license__ = "3-clause BSD"
__maintainer__ = "LISA Lab"
__email__ = "pylearn-dev@googlegroups"
from nose.plugins.skip import SkipTest
import os
from theano.sandbox import cuda
scipy_works = True
try:
import scipy
except ImportError:
# pyflakes gets mad if you set scipy to None here
scipy_works = False
sklearn_works = True
try:
import sklearn
except ImportError:
sklearn_works = False
h5py_works = True
try:
import h5py
except ImportError:
h5py_works = False
matplotlib_works = True
try:
from matplotlib import pyplot
except ImportError:
matplotlib_works = False
def skip_if_no_data():
if 'PYLEARN2_DATA_PATH' not in os.environ:
raise SkipTest()
def skip_if_no_scipy():
if not scipy_works:
raise SkipTest()
def skip_if_no_sklearn():
if not sklearn_works:
raise SkipTest()
def skip_if_no_gpu():
if cuda.cuda_available == False:
raise SkipTest('Optional package cuda disabled.')
def skip_if_no_h5py():
if not h5py_works:
raise SkipTest()
def skip_if_no_matplotlib():
if not matplotlib_works:
raise SkipTest("matplotlib and pyplot are not available")
|
<commit_before>"""
Helper functions for determining which tests to skip.
"""
__authors__ = "Ian Goodfellow"
__copyright__ = "Copyright 2010-2012, Universite de Montreal"
__credits__ = ["Ian Goodfellow"]
__license__ = "3-clause BSD"
__maintainer__ = "LISA Lab"
__email__ = "pylearn-dev@googlegroups"
from nose.plugins.skip import SkipTest
import os
from theano.sandbox import cuda
scipy_works = True
try:
import scipy
except ImportError:
# pyflakes gets mad if you set scipy to None here
scipy_works = False
sklearn_works = True
try:
import sklearn
except ImportError:
sklearn_works = False
h5py_works = True
try:
import h5py
except ImportError:
h5py_works = False
def skip_if_no_data():
if 'PYLEARN2_DATA_PATH' not in os.environ:
raise SkipTest()
def skip_if_no_scipy():
if not scipy_works:
raise SkipTest()
def skip_if_no_sklearn():
if not sklearn_works:
raise SkipTest()
def skip_if_no_gpu():
if cuda.cuda_available == False:
raise SkipTest('Optional package cuda disabled.')
def skip_if_no_h5py():
if not h5py_works:
raise SkipTest()
<commit_msg>Add SkipTest for when matplotlib and/or pyplot are not present<commit_after>"""
Helper functions for determining which tests to skip.
"""
__authors__ = "Ian Goodfellow"
__copyright__ = "Copyright 2010-2012, Universite de Montreal"
__credits__ = ["Ian Goodfellow"]
__license__ = "3-clause BSD"
__maintainer__ = "LISA Lab"
__email__ = "pylearn-dev@googlegroups"
from nose.plugins.skip import SkipTest
import os
from theano.sandbox import cuda
scipy_works = True
try:
import scipy
except ImportError:
# pyflakes gets mad if you set scipy to None here
scipy_works = False
sklearn_works = True
try:
import sklearn
except ImportError:
sklearn_works = False
h5py_works = True
try:
import h5py
except ImportError:
h5py_works = False
matplotlib_works = True
try:
from matplotlib import pyplot
except ImportError:
matplotlib_works = False
def skip_if_no_data():
if 'PYLEARN2_DATA_PATH' not in os.environ:
raise SkipTest()
def skip_if_no_scipy():
if not scipy_works:
raise SkipTest()
def skip_if_no_sklearn():
if not sklearn_works:
raise SkipTest()
def skip_if_no_gpu():
if cuda.cuda_available == False:
raise SkipTest('Optional package cuda disabled.')
def skip_if_no_h5py():
if not h5py_works:
raise SkipTest()
def skip_if_no_matplotlib():
if not matplotlib_works:
raise SkipTest("matplotlib and pyplot are not available")
|
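The try/except ImportError flag pattern in the record above is repeated once per optional dependency. A hedged sketch of a generic helper that checks availability without importing (importlib.util.find_spec is standard library; the helper name here is made up):

import importlib.util
from nose.plugins.skip import SkipTest

def skip_if_missing(module_name):
    # Raise SkipTest when the named optional dependency cannot be found.
    if importlib.util.find_spec(module_name) is None:
        raise SkipTest("%s is not available" % module_name)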
817aa49dc7abc73863560c510cd81a2fad8f854b
|
python/servo/packages.py
|
python/servo/packages.py
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at https://mozilla.org/MPL/2.0/.
WINDOWS_MSVC = {
"cmake": "3.7.2",
"llvm": "7.0.0",
"moztools": "3.2",
"ninja": "1.7.1",
"openssl": "1.1.0e-vs2015",
}
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at https://mozilla.org/MPL/2.0/.
WINDOWS_MSVC = {
"cmake": "3.14.3",
"llvm": "7.0.0",
"moztools": "3.2",
"ninja": "1.7.1",
"openssl": "1.1.0e-vs2015",
}
|
Update cmake for MSVC builds.
|
Update cmake for MSVC builds.
|
Python
|
mpl-2.0
|
emilio/servo,saneyuki/servo,KiChjang/servo,DominoTree/servo,pyfisch/servo,splav/servo,KiChjang/servo,emilio/servo,splav/servo,emilio/servo,paulrouget/servo,larsbergstrom/servo,notriddle/servo,DominoTree/servo,nnethercote/servo,saneyuki/servo,paulrouget/servo,KiChjang/servo,saneyuki/servo,nnethercote/servo,DominoTree/servo,DominoTree/servo,paulrouget/servo,notriddle/servo,notriddle/servo,larsbergstrom/servo,DominoTree/servo,larsbergstrom/servo,KiChjang/servo,saneyuki/servo,nnethercote/servo,saneyuki/servo,pyfisch/servo,splav/servo,nnethercote/servo,notriddle/servo,KiChjang/servo,splav/servo,notriddle/servo,paulrouget/servo,saneyuki/servo,larsbergstrom/servo,DominoTree/servo,paulrouget/servo,emilio/servo,notriddle/servo,saneyuki/servo,splav/servo,emilio/servo,nnethercote/servo,DominoTree/servo,paulrouget/servo,saneyuki/servo,notriddle/servo,splav/servo,nnethercote/servo,emilio/servo,saneyuki/servo,larsbergstrom/servo,larsbergstrom/servo,saneyuki/servo,notriddle/servo,splav/servo,emilio/servo,paulrouget/servo,KiChjang/servo,DominoTree/servo,KiChjang/servo,emilio/servo,paulrouget/servo,paulrouget/servo,pyfisch/servo,nnethercote/servo,KiChjang/servo,pyfisch/servo,KiChjang/servo,splav/servo,emilio/servo,DominoTree/servo,notriddle/servo,larsbergstrom/servo,pyfisch/servo,paulrouget/servo,pyfisch/servo,pyfisch/servo,notriddle/servo,DominoTree/servo,larsbergstrom/servo,splav/servo,KiChjang/servo,emilio/servo,pyfisch/servo,larsbergstrom/servo,splav/servo,pyfisch/servo,nnethercote/servo,larsbergstrom/servo,pyfisch/servo,nnethercote/servo,nnethercote/servo
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at https://mozilla.org/MPL/2.0/.
WINDOWS_MSVC = {
"cmake": "3.7.2",
"llvm": "7.0.0",
"moztools": "3.2",
"ninja": "1.7.1",
"openssl": "1.1.0e-vs2015",
}
Update cmake for MSVC builds.
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at https://mozilla.org/MPL/2.0/.
WINDOWS_MSVC = {
"cmake": "3.14.3",
"llvm": "7.0.0",
"moztools": "3.2",
"ninja": "1.7.1",
"openssl": "1.1.0e-vs2015",
}
|
<commit_before># This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at https://mozilla.org/MPL/2.0/.
WINDOWS_MSVC = {
"cmake": "3.7.2",
"llvm": "7.0.0",
"moztools": "3.2",
"ninja": "1.7.1",
"openssl": "1.1.0e-vs2015",
}
<commit_msg>Update cmake for MSVC builds.<commit_after>
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at https://mozilla.org/MPL/2.0/.
WINDOWS_MSVC = {
"cmake": "3.14.3",
"llvm": "7.0.0",
"moztools": "3.2",
"ninja": "1.7.1",
"openssl": "1.1.0e-vs2015",
}
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at https://mozilla.org/MPL/2.0/.
WINDOWS_MSVC = {
"cmake": "3.7.2",
"llvm": "7.0.0",
"moztools": "3.2",
"ninja": "1.7.1",
"openssl": "1.1.0e-vs2015",
}
Update cmake for MSVC builds.# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at https://mozilla.org/MPL/2.0/.
WINDOWS_MSVC = {
"cmake": "3.14.3",
"llvm": "7.0.0",
"moztools": "3.2",
"ninja": "1.7.1",
"openssl": "1.1.0e-vs2015",
}
|
<commit_before># This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at https://mozilla.org/MPL/2.0/.
WINDOWS_MSVC = {
"cmake": "3.7.2",
"llvm": "7.0.0",
"moztools": "3.2",
"ninja": "1.7.1",
"openssl": "1.1.0e-vs2015",
}
<commit_msg>Update cmake for MSVC builds.<commit_after># This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at https://mozilla.org/MPL/2.0/.
WINDOWS_MSVC = {
"cmake": "3.14.3",
"llvm": "7.0.0",
"moztools": "3.2",
"ninja": "1.7.1",
"openssl": "1.1.0e-vs2015",
}
|
1212d33d849155f8c1cdc6a610e893318937e7c5
|
silk/webdoc/html/v5.py
|
silk/webdoc/html/v5.py
|
"""Module containing only html v5 tags. All deprecated tags have been removed.
"""
from .common import *
del ACRONYM
del APPLET
del BASEFONT
del BIG
del CENTER
del DIR
del FONT
del FRAME
del FRAMESET
del NOFRAMES
del STRIKE
del TT
del U
|
"""
Module containing only html v5 tags. All deprecated tags have been removed.
"""
from .common import ( # flake8: noqa
A,
ABBR,
# ACRONYM,
ADDRESS,
# APPLET,
AREA,
B,
BASE,
# BASEFONT,
BDO,
# BIG,
BLOCKQUOTE,
BODY,
BR,
BUTTON,
Body,
CAPTION,
CAT,
# CENTER,
CITE,
CODE,
COL,
COLGROUP,
COMMENT,
CONDITIONAL_COMMENT,
DD,
DEL,
DFN,
# DIR,
DIV,
DL,
DT,
EM,
FIELDSET,
# FONT,
FORM,
# FRAME,
# FRAMESET,
Form,
H1,
H2,
H3,
H4,
H5,
H6,
HEAD,
HR,
HTML,
HTMLDoc,
Hyper,
I,
IFRAME,
IMG,
INPUT,
INS,
Image,
Javascript,
KBD,
LABEL,
LEGEND,
LI,
LINK,
MAP,
MENU,
META,
NBSP,
# NOFRAMES,
NOSCRIPT,
OBJECT,
OL,
OPTGROUP,
OPTION,
P,
PARAM,
PRE,
Q,
S,
SAMP,
SCRIPT,
SELECT,
SMALL,
SPAN,
# STRIKE,
STRONG,
STYLE,
SUB,
SUP,
TABLE,
TBODY,
TD,
TEXTAREA,
TFOOT,
TH,
THEAD,
TITLE,
TR,
# TT,
# U,
UL,
VAR,
XML,
XMLEntity,
XMLNode,
XMP,
xmlescape,
xmlunescape
)
|
Replace import * with explicit names
|
Replace import * with explicit names
|
Python
|
bsd-3-clause
|
orbnauticus/silk
|
"""Module containing only html v5 tags. All deprecated tags have been removed.
"""
from .common import *
del ACRONYM
del APPLET
del BASEFONT
del BIG
del CENTER
del DIR
del FONT
del FRAME
del FRAMESET
del NOFRAMES
del STRIKE
del TT
del U
Replace import * with explicit names
|
"""
Module containing only html v5 tags. All deprecated tags have been removed.
"""
from .common import ( # flake8: noqa
A,
ABBR,
# ACRONYM,
ADDRESS,
# APPLET,
AREA,
B,
BASE,
# BASEFONT,
BDO,
# BIG,
BLOCKQUOTE,
BODY,
BR,
BUTTON,
Body,
CAPTION,
CAT,
# CENTER,
CITE,
CODE,
COL,
COLGROUP,
COMMENT,
CONDITIONAL_COMMENT,
DD,
DEL,
DFN,
# DIR,
DIV,
DL,
DT,
EM,
FIELDSET,
# FONT,
FORM,
# FRAME,
# FRAMESET,
Form,
H1,
H2,
H3,
H4,
H5,
H6,
HEAD,
HR,
HTML,
HTMLDoc,
Hyper,
I,
IFRAME,
IMG,
INPUT,
INS,
Image,
Javascript,
KBD,
LABEL,
LEGEND,
LI,
LINK,
MAP,
MENU,
META,
NBSP,
# NOFRAMES,
NOSCRIPT,
OBJECT,
OL,
OPTGROUP,
OPTION,
P,
PARAM,
PRE,
Q,
S,
SAMP,
SCRIPT,
SELECT,
SMALL,
SPAN,
# STRIKE,
STRONG,
STYLE,
SUB,
SUP,
TABLE,
TBODY,
TD,
TEXTAREA,
TFOOT,
TH,
THEAD,
TITLE,
TR,
# TT,
# U,
UL,
VAR,
XML,
XMLEntity,
XMLNode,
XMP,
xmlescape,
xmlunescape
)
|
<commit_before>
"""Module containing only html v5 tags. All deprecated tags have been removed.
"""
from .common import *
del ACRONYM
del APPLET
del BASEFONT
del BIG
del CENTER
del DIR
del FONT
del FRAME
del FRAMESET
del NOFRAMES
del STRIKE
del TT
del U
<commit_msg>Replace import * with explicit names<commit_after>
|
"""
Module containing only html v5 tags. All deprecated tags have been removed.
"""
from .common import ( # flake8: noqa
A,
ABBR,
# ACRONYM,
ADDRESS,
# APPLET,
AREA,
B,
BASE,
# BASEFONT,
BDO,
# BIG,
BLOCKQUOTE,
BODY,
BR,
BUTTON,
Body,
CAPTION,
CAT,
# CENTER,
CITE,
CODE,
COL,
COLGROUP,
COMMENT,
CONDITIONAL_COMMENT,
DD,
DEL,
DFN,
# DIR,
DIV,
DL,
DT,
EM,
FIELDSET,
# FONT,
FORM,
# FRAME,
# FRAMESET,
Form,
H1,
H2,
H3,
H4,
H5,
H6,
HEAD,
HR,
HTML,
HTMLDoc,
Hyper,
I,
IFRAME,
IMG,
INPUT,
INS,
Image,
Javascript,
KBD,
LABEL,
LEGEND,
LI,
LINK,
MAP,
MENU,
META,
NBSP,
# NOFRAMES,
NOSCRIPT,
OBJECT,
OL,
OPTGROUP,
OPTION,
P,
PARAM,
PRE,
Q,
S,
SAMP,
SCRIPT,
SELECT,
SMALL,
SPAN,
# STRIKE,
STRONG,
STYLE,
SUB,
SUP,
TABLE,
TBODY,
TD,
TEXTAREA,
TFOOT,
TH,
THEAD,
TITLE,
TR,
# TT,
# U,
UL,
VAR,
XML,
XMLEntity,
XMLNode,
XMP,
xmlescape,
xmlunescape
)
|
"""Module containing only html v5 tags. All deprecated tags have been removed.
"""
from .common import *
del ACRONYM
del APPLET
del BASEFONT
del BIG
del CENTER
del DIR
del FONT
del FRAME
del FRAMESET
del NOFRAMES
del STRIKE
del TT
del U
Replace import * with explicit names
"""
Module containing only html v5 tags. All deprecated tags have been removed.
"""
from .common import ( # flake8: noqa
A,
ABBR,
# ACRONYM,
ADDRESS,
# APPLET,
AREA,
B,
BASE,
# BASEFONT,
BDO,
# BIG,
BLOCKQUOTE,
BODY,
BR,
BUTTON,
Body,
CAPTION,
CAT,
# CENTER,
CITE,
CODE,
COL,
COLGROUP,
COMMENT,
CONDITIONAL_COMMENT,
DD,
DEL,
DFN,
# DIR,
DIV,
DL,
DT,
EM,
FIELDSET,
# FONT,
FORM,
# FRAME,
# FRAMESET,
Form,
H1,
H2,
H3,
H4,
H5,
H6,
HEAD,
HR,
HTML,
HTMLDoc,
Hyper,
I,
IFRAME,
IMG,
INPUT,
INS,
Image,
Javascript,
KBD,
LABEL,
LEGEND,
LI,
LINK,
MAP,
MENU,
META,
NBSP,
# NOFRAMES,
NOSCRIPT,
OBJECT,
OL,
OPTGROUP,
OPTION,
P,
PARAM,
PRE,
Q,
S,
SAMP,
SCRIPT,
SELECT,
SMALL,
SPAN,
# STRIKE,
STRONG,
STYLE,
SUB,
SUP,
TABLE,
TBODY,
TD,
TEXTAREA,
TFOOT,
TH,
THEAD,
TITLE,
TR,
# TT,
# U,
UL,
VAR,
XML,
XMLEntity,
XMLNode,
XMP,
xmlescape,
xmlunescape
)
|
<commit_before>
"""Module containing only html v5 tags. All deprecated tags have been removed.
"""
from .common import *
del ACRONYM
del APPLET
del BASEFONT
del BIG
del CENTER
del DIR
del FONT
del FRAME
del FRAMESET
del NOFRAMES
del STRIKE
del TT
del U
<commit_msg>Replace import * with explicit names<commit_after>
"""
Module containing only html v5 tags. All deprecated tags have been removed.
"""
from .common import ( # flake8: noqa
A,
ABBR,
# ACRONYM,
ADDRESS,
# APPLET,
AREA,
B,
BASE,
# BASEFONT,
BDO,
# BIG,
BLOCKQUOTE,
BODY,
BR,
BUTTON,
Body,
CAPTION,
CAT,
# CENTER,
CITE,
CODE,
COL,
COLGROUP,
COMMENT,
CONDITIONAL_COMMENT,
DD,
DEL,
DFN,
# DIR,
DIV,
DL,
DT,
EM,
FIELDSET,
# FONT,
FORM,
# FRAME,
# FRAMESET,
Form,
H1,
H2,
H3,
H4,
H5,
H6,
HEAD,
HR,
HTML,
HTMLDoc,
Hyper,
I,
IFRAME,
IMG,
INPUT,
INS,
Image,
Javascript,
KBD,
LABEL,
LEGEND,
LI,
LINK,
MAP,
MENU,
META,
NBSP,
# NOFRAMES,
NOSCRIPT,
OBJECT,
OL,
OPTGROUP,
OPTION,
P,
PARAM,
PRE,
Q,
S,
SAMP,
SCRIPT,
SELECT,
SMALL,
SPAN,
# STRIKE,
STRONG,
STYLE,
SUB,
SUP,
TABLE,
TBODY,
TD,
TEXTAREA,
TFOOT,
TH,
THEAD,
TITLE,
TR,
# TT,
# U,
UL,
VAR,
XML,
XMLEntity,
XMLNode,
XMP,
xmlescape,
xmlunescape
)
|
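The record above trades import * plus del for a long explicit list. One alternative, sketched here purely as an illustration (the tag names mirror the record; the filtering loop is not the project's code), is to re-export everything from common except the deprecated tags:

from . import common

_DEPRECATED = {'ACRONYM', 'APPLET', 'BASEFONT', 'BIG', 'CENTER', 'DIR',
               'FONT', 'FRAME', 'FRAMESET', 'NOFRAMES', 'STRIKE', 'TT', 'U'}

# Copy every public name from common into this module, minus the HTML5 removals.
for _name in dir(common):
    if not _name.startswith('_') and _name not in _DEPRECATED:
        globals()[_name] = getattr(common, _name)

The explicit list has the advantage that linters see each name, which is presumably why the commit went that way.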
0c8835bb4ab1715ee0de948d9b15a813752b60b5
|
dthm4kaiako/gunicorn.conf.py
|
dthm4kaiako/gunicorn.conf.py
|
"""Configuration file for gunicorn."""
# Details from https://cloud.google.com/appengine/docs/flexible/python/runtime
worker_class = "gevent"
forwarded_allow_ips = "*"
secure_scheme_headers = {"X-APPENGINE-HTTPS": "on"}
|
"""Configuration file for gunicorn."""
from multiprocessing import cpu_count
# Worker count from http://docs.gunicorn.org/en/stable/design.html#how-many-workers
workers = cpu_count() * 2 + 1
# Details from https://cloud.google.com/appengine/docs/flexible/python/runtime
worker_class = "gevent"
forwarded_allow_ips = "*"
secure_scheme_headers = {"X-APPENGINE-HTTPS": "on"}
|
Increase number of server workers
|
Increase number of server workers
|
Python
|
mit
|
uccser/cs4teachers,uccser/cs4teachers,uccser/cs4teachers,uccser/cs4teachers
|
"""Configuration file for gunicorn."""
# Details from https://cloud.google.com/appengine/docs/flexible/python/runtime
worker_class = "gevent"
forwarded_allow_ips = "*"
secure_scheme_headers = {"X-APPENGINE-HTTPS": "on"}
Increase number of server workers
|
"""Configuration file for gunicorn."""
from multiprocessing import cpu_count
# Worker count from http://docs.gunicorn.org/en/stable/design.html#how-many-workers
workers = cpu_count() * 2 + 1
# Details from https://cloud.google.com/appengine/docs/flexible/python/runtime
worker_class = "gevent"
forwarded_allow_ips = "*"
secure_scheme_headers = {"X-APPENGINE-HTTPS": "on"}
|
<commit_before>"""Configuration file for gunicorn."""
# Details from https://cloud.google.com/appengine/docs/flexible/python/runtime
worker_class = "gevent"
forwarded_allow_ips = "*"
secure_scheme_headers = {"X-APPENGINE-HTTPS": "on"}
<commit_msg>Increase number of server workers<commit_after>
|
"""Configuration file for gunicorn."""
from multiprocessing import cpu_count
# Worker count from http://docs.gunicorn.org/en/stable/design.html#how-many-workers
workers = cpu_count() * 2 + 1
# Details from https://cloud.google.com/appengine/docs/flexible/python/runtime
worker_class = "gevent"
forwarded_allow_ips = "*"
secure_scheme_headers = {"X-APPENGINE-HTTPS": "on"}
|
"""Configuration file for gunicorn."""
# Details from https://cloud.google.com/appengine/docs/flexible/python/runtime
worker_class = "gevent"
forwarded_allow_ips = "*"
secure_scheme_headers = {"X-APPENGINE-HTTPS": "on"}
Increase number of server workers"""Configuration file for gunicorn."""
from multiprocessing import cpu_count
# Worker count from http://docs.gunicorn.org/en/stable/design.html#how-many-workers
workers = cpu_count() * 2 + 1
# Details from https://cloud.google.com/appengine/docs/flexible/python/runtime
worker_class = "gevent"
forwarded_allow_ips = "*"
secure_scheme_headers = {"X-APPENGINE-HTTPS": "on"}
|
<commit_before>"""Configuration file for gunicorn."""
# Details from https://cloud.google.com/appengine/docs/flexible/python/runtime
worker_class = "gevent"
forwarded_allow_ips = "*"
secure_scheme_headers = {"X-APPENGINE-HTTPS": "on"}
<commit_msg>Increase number of server workers<commit_after>"""Configuration file for gunicorn."""
from multiprocessing import cpu_count
# Worker count from http://docs.gunicorn.org/en/stable/design.html#how-many-workers
workers = cpu_count() * 2 + 1
# Details from https://cloud.google.com/appengine/docs/flexible/python/runtime
worker_class = "gevent"
forwarded_allow_ips = "*"
secure_scheme_headers = {"X-APPENGINE-HTTPS": "on"}
|
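The worker count in the record above follows the rule from the gunicorn design docs: workers = cpu_count() * 2 + 1, so a 4-core machine gets 9 workers and an 8-core machine gets 17. A tiny sketch showing just the arithmetic (the print is illustrative):

from multiprocessing import cpu_count

workers = cpu_count() * 2 + 1  # e.g. 4 cores -> 9 workers
print("computed %d gunicorn workers" % workers)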
40e9375f6b35b4a05ad311822705b7a7efe46b56
|
site_scons/get_libs.py
|
site_scons/get_libs.py
|
import os
import sys
from SCons.Script import File
from path_helpers import path
def get_lib_paths():
if sys.platform == 'win32':
lib_paths = set(os.environ['PATH'].split(';'))
else:
lib_paths = set()
if os.environ.has_key('LIBRARY_PATH'):
lib_paths.update(os.environ['LIBRARY_PATH'].split(':'))
if os.environ.has_key('LD_LIBRARY_PATH'):
lib_paths.update(os.environ['LD_LIBRARY_PATH'].split(':'))
lib_paths = (['/usr/lib', '/usr/lib/x86_64-linux-gnu',
'/usr/local/lib'] + list(lib_paths))
return lib_paths
def get_lib(lib_name, LIBPATH=None):
if not LIBPATH:
LIBPATH = []
else:
LIBPATH = LIBPATH[:]
LIBPATH += get_lib_paths()
for lp in [path(p) for p in LIBPATH]:
try:
files = lp.files(lib_name)
except OSError:
continue
if files:
return File(sorted(files, key=len)[0])
return None
|
import os
import sys
from SCons.Script import File
from path_helpers import path
def get_lib_paths():
if sys.platform == 'win32':
lib_paths = set(os.environ['PATH'].split(';'))
else:
lib_paths = set()
if os.environ.has_key('LIBRARY_PATH'):
lib_paths.update(os.environ['LIBRARY_PATH'].split(':'))
if os.environ.has_key('LD_LIBRARY_PATH'):
lib_paths.update(os.environ['LD_LIBRARY_PATH'].split(':'))
lib_paths = (['/usr/lib', '/usr/lib/i386-linux-gnu',
'/usr/lib/x86_64-linux-gnu', '/usr/local/lib'] +
list(lib_paths))
return lib_paths
def get_lib(lib_name, LIBPATH=None):
if not LIBPATH:
LIBPATH = []
else:
LIBPATH = LIBPATH[:]
LIBPATH += get_lib_paths()
for lp in [path(p) for p in LIBPATH]:
try:
files = lp.files(lib_name)
except OSError:
continue
if files:
return File(sorted(files, key=len)[0])
return None
|
Add Linux 32-bit search path for Boost libraries
|
Add Linux 32-bit search path for Boost libraries
|
Python
|
bsd-3-clause
|
wheeler-microfluidics/dmf-control-board-firmware,wheeler-microfluidics/dmf-control-board-firmware,wheeler-microfluidics/dmf-control-board-firmware,wheeler-microfluidics/dmf-control-board-firmware
|
import os
import sys
from SCons.Script import File
from path_helpers import path
def get_lib_paths():
if sys.platform == 'win32':
lib_paths = set(os.environ['PATH'].split(';'))
else:
lib_paths = set()
if os.environ.has_key('LIBRARY_PATH'):
lib_paths.update(os.environ['LIBRARY_PATH'].split(':'))
if os.environ.has_key('LD_LIBRARY_PATH'):
lib_paths.update(os.environ['LD_LIBRARY_PATH'].split(':'))
lib_paths = (['/usr/lib', '/usr/lib/x86_64-linux-gnu',
'/usr/local/lib'] + list(lib_paths))
return lib_paths
def get_lib(lib_name, LIBPATH=None):
if not LIBPATH:
LIBPATH = []
else:
LIBPATH = LIBPATH[:]
LIBPATH += get_lib_paths()
for lp in [path(p) for p in LIBPATH]:
try:
files = lp.files(lib_name)
except OSError:
continue
if files:
return File(sorted(files, key=len)[0])
return None
Add Linux 32-bit search path for Boost libraries
|
import os
import sys
from SCons.Script import File
from path_helpers import path
def get_lib_paths():
if sys.platform == 'win32':
lib_paths = set(os.environ['PATH'].split(';'))
else:
lib_paths = set()
if os.environ.has_key('LIBRARY_PATH'):
lib_paths.update(os.environ['LIBRARY_PATH'].split(':'))
if os.environ.has_key('LD_LIBRARY_PATH'):
lib_paths.update(os.environ['LD_LIBRARY_PATH'].split(':'))
lib_paths = (['/usr/lib', '/usr/lib/i386-linux-gnu',
'/usr/lib/x86_64-linux-gnu', '/usr/local/lib'] +
list(lib_paths))
return lib_paths
def get_lib(lib_name, LIBPATH=None):
if not LIBPATH:
LIBPATH = []
else:
LIBPATH = LIBPATH[:]
LIBPATH += get_lib_paths()
for lp in [path(p) for p in LIBPATH]:
try:
files = lp.files(lib_name)
except OSError:
continue
if files:
return File(sorted(files, key=len)[0])
return None
|
<commit_before>import os
import sys
from SCons.Script import File
from path_helpers import path
def get_lib_paths():
if sys.platform == 'win32':
lib_paths = set(os.environ['PATH'].split(';'))
else:
lib_paths = set()
if os.environ.has_key('LIBRARY_PATH'):
lib_paths.update(os.environ['LIBRARY_PATH'].split(':'))
if os.environ.has_key('LD_LIBRARY_PATH'):
lib_paths.update(os.environ['LD_LIBRARY_PATH'].split(':'))
lib_paths = (['/usr/lib', '/usr/lib/x86_64-linux-gnu',
'/usr/local/lib'] + list(lib_paths))
return lib_paths
def get_lib(lib_name, LIBPATH=None):
if not LIBPATH:
LIBPATH = []
else:
LIBPATH = LIBPATH[:]
LIBPATH += get_lib_paths()
for lp in [path(p) for p in LIBPATH]:
try:
files = lp.files(lib_name)
except OSError:
continue
if files:
return File(sorted(files, key=len)[0])
return None
<commit_msg>Add Linux 32-bit search path for Boost libraries<commit_after>
|
import os
import sys
from SCons.Script import File
from path_helpers import path
def get_lib_paths():
if sys.platform == 'win32':
lib_paths = set(os.environ['PATH'].split(';'))
else:
lib_paths = set()
if os.environ.has_key('LIBRARY_PATH'):
lib_paths.update(os.environ['LIBRARY_PATH'].split(':'))
if os.environ.has_key('LD_LIBRARY_PATH'):
lib_paths.update(os.environ['LD_LIBRARY_PATH'].split(':'))
lib_paths = (['/usr/lib', '/usr/lib/i386-linux-gnu',
'/usr/lib/x86_64-linux-gnu', '/usr/local/lib'] +
list(lib_paths))
return lib_paths
def get_lib(lib_name, LIBPATH=None):
if not LIBPATH:
LIBPATH = []
else:
LIBPATH = LIBPATH[:]
LIBPATH += get_lib_paths()
for lp in [path(p) for p in LIBPATH]:
try:
files = lp.files(lib_name)
except OSError:
continue
if files:
return File(sorted(files, key=len)[0])
return None
|
import os
import sys
from SCons.Script import File
from path_helpers import path
def get_lib_paths():
if sys.platform == 'win32':
lib_paths = set(os.environ['PATH'].split(';'))
else:
lib_paths = set()
if os.environ.has_key('LIBRARY_PATH'):
lib_paths.update(os.environ['LIBRARY_PATH'].split(':'))
if os.environ.has_key('LD_LIBRARY_PATH'):
lib_paths.update(os.environ['LD_LIBRARY_PATH'].split(':'))
lib_paths = (['/usr/lib', '/usr/lib/x86_64-linux-gnu',
'/usr/local/lib'] + list(lib_paths))
return lib_paths
def get_lib(lib_name, LIBPATH=None):
if not LIBPATH:
LIBPATH = []
else:
LIBPATH = LIBPATH[:]
LIBPATH += get_lib_paths()
for lp in [path(p) for p in LIBPATH]:
try:
files = lp.files(lib_name)
except OSError:
continue
if files:
return File(sorted(files, key=len)[0])
return None
Add Linux 32-bit search path for Boost librariesimport os
import sys
from SCons.Script import File
from path_helpers import path
def get_lib_paths():
if sys.platform == 'win32':
lib_paths = set(os.environ['PATH'].split(';'))
else:
lib_paths = set()
if os.environ.has_key('LIBRARY_PATH'):
lib_paths.update(os.environ['LIBRARY_PATH'].split(':'))
if os.environ.has_key('LD_LIBRARY_PATH'):
lib_paths.update(os.environ['LD_LIBRARY_PATH'].split(':'))
lib_paths = (['/usr/lib', '/usr/lib/i386-linux-gnu',
'/usr/lib/x86_64-linux-gnu', '/usr/local/lib'] +
list(lib_paths))
return lib_paths
def get_lib(lib_name, LIBPATH=None):
if not LIBPATH:
LIBPATH = []
else:
LIBPATH = LIBPATH[:]
LIBPATH += get_lib_paths()
for lp in [path(p) for p in LIBPATH]:
try:
files = lp.files(lib_name)
except OSError:
continue
if files:
return File(sorted(files, key=len)[0])
return None
|
<commit_before>import os
import sys
from SCons.Script import File
from path_helpers import path
def get_lib_paths():
if sys.platform == 'win32':
lib_paths = set(os.environ['PATH'].split(';'))
else:
lib_paths = set()
if os.environ.has_key('LIBRARY_PATH'):
lib_paths.update(os.environ['LIBRARY_PATH'].split(':'))
if os.environ.has_key('LD_LIBRARY_PATH'):
lib_paths.update(os.environ['LD_LIBRARY_PATH'].split(':'))
lib_paths = (['/usr/lib', '/usr/lib/x86_64-linux-gnu',
'/usr/local/lib'] + list(lib_paths))
return lib_paths
def get_lib(lib_name, LIBPATH=None):
if not LIBPATH:
LIBPATH = []
else:
LIBPATH = LIBPATH[:]
LIBPATH += get_lib_paths()
for lp in [path(p) for p in LIBPATH]:
try:
files = lp.files(lib_name)
except OSError:
continue
if files:
return File(sorted(files, key=len)[0])
return None
<commit_msg>Add Linux 32-bit search path for Boost libraries<commit_after>import os
import sys
from SCons.Script import File
from path_helpers import path
def get_lib_paths():
if sys.platform == 'win32':
lib_paths = set(os.environ['PATH'].split(';'))
else:
lib_paths = set()
if os.environ.has_key('LIBRARY_PATH'):
lib_paths.update(os.environ['LIBRARY_PATH'].split(':'))
if os.environ.has_key('LD_LIBRARY_PATH'):
lib_paths.update(os.environ['LD_LIBRARY_PATH'].split(':'))
lib_paths = (['/usr/lib', '/usr/lib/i386-linux-gnu',
'/usr/lib/x86_64-linux-gnu', '/usr/local/lib'] +
list(lib_paths))
return lib_paths
def get_lib(lib_name, LIBPATH=None):
if not LIBPATH:
LIBPATH = []
else:
LIBPATH = LIBPATH[:]
LIBPATH += get_lib_paths()
for lp in [path(p) for p in LIBPATH]:
try:
files = lp.files(lib_name)
except OSError:
continue
if files:
return File(sorted(files, key=len)[0])
return None
|
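os.environ.has_key in the record above is Python 2 only; dict.has_key was removed in Python 3, where the in operator does the same job. A sketch of the environment handling alone, ported (illustrative):

import os

lib_paths = set()
if 'LIBRARY_PATH' in os.environ:
    lib_paths.update(os.environ['LIBRARY_PATH'].split(':'))
if 'LD_LIBRARY_PATH' in os.environ:
    lib_paths.update(os.environ['LD_LIBRARY_PATH'].split(':'))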
90d8411412b79513338b014da63b18d0d29396d9
|
snmpy/log_processor.py
|
snmpy/log_processor.py
|
import re, snmpy_plugins
class log_processor:
def __init__(self, conf):
self.data = [{'value':0, 'label': conf['objects'][item]['label'], 'regex': re.compile(conf['objects'][item]['regex'])} for item in sorted(conf['objects'])]
self.proc(conf['logfile'])
def len(self):
return len(self.data)
def key(self, idx):
return 'string', self.data[idx - 1]['label']
def val(self, idx):
return 'integer', self.data[idx - 1]['value']
@snmpy_plugins.task
def proc(self, file):
for line in snmpy_plugins.tail(file):
for item in xrange(len(self.data)):
find = self.data[item]['regex'].search(line)
if find:
self.data[item]['value'] += 1
break
|
import re
import snmpy
class log_processor(snmpy.plugin):
def __init__(self, conf, script=False):
snmpy.plugin.__init__(self, conf, script)
def key(self, idx):
return 'string', self.data[idx - 1]['label']
def val(self, idx):
return 'integer', self.data[idx - 1]['value']
def worker(self):
self.data = [{'value':0, 'label': self.conf['objects'][item]['label'], 'regex': re.compile(self.conf['objects'][item]['regex'])} for item in sorted(self.conf['objects'])]
self.tail()
@snmpy.task
def tail(self):
for line in snmpy.tail(self.conf['logfile']):
for item in xrange(len(self.data)):
find = self.data[item]['regex'].search(line)
if find:
self.data[item]['value'] += 1
break
|
Convert to use the base class and update for new plugin path.
|
Convert to use the base class and update for new plugin path.
|
Python
|
mit
|
mk23/snmpy,mk23/snmpy
|
import re, snmpy_plugins
class log_processor:
def __init__(self, conf):
self.data = [{'value':0, 'label': conf['objects'][item]['label'], 'regex': re.compile(conf['objects'][item]['regex'])} for item in sorted(conf['objects'])]
self.proc(conf['logfile'])
def len(self):
return len(self.data)
def key(self, idx):
return 'string', self.data[idx - 1]['label']
def val(self, idx):
return 'integer', self.data[idx - 1]['value']
@snmpy_plugins.task
def proc(self, file):
for line in snmpy_plugins.tail(file):
for item in xrange(len(self.data)):
find = self.data[item]['regex'].search(line)
if find:
self.data[item]['value'] += 1
break
Convert to use the base class and update for new plugin path.
|
import re
import snmpy
class log_processor(snmpy.plugin):
def __init__(self, conf, script=False):
snmpy.plugin.__init__(self, conf, script)
def key(self, idx):
return 'string', self.data[idx - 1]['label']
def val(self, idx):
return 'integer', self.data[idx - 1]['value']
def worker(self):
self.data = [{'value':0, 'label': self.conf['objects'][item]['label'], 'regex': re.compile(self.conf['objects'][item]['regex'])} for item in sorted(self.conf['objects'])]
self.tail()
@snmpy.task
def tail(self):
for line in snmpy.tail(self.conf['logfile']):
for item in xrange(len(self.data)):
find = self.data[item]['regex'].search(line)
if find:
self.data[item]['value'] += 1
break
|
<commit_before>import re, snmpy_plugins
class log_processor:
def __init__(self, conf):
self.data = [{'value':0, 'label': conf['objects'][item]['label'], 'regex': re.compile(conf['objects'][item]['regex'])} for item in sorted(conf['objects'])]
self.proc(conf['logfile'])
def len(self):
return len(self.data)
def key(self, idx):
return 'string', self.data[idx - 1]['label']
def val(self, idx):
return 'integer', self.data[idx - 1]['value']
@snmpy_plugins.task
def proc(self, file):
for line in snmpy_plugins.tail(file):
for item in xrange(len(self.data)):
find = self.data[item]['regex'].search(line)
if find:
self.data[item]['value'] += 1
break
<commit_msg>Convert to use the base class and update for new plugin path.<commit_after>
|
import re
import snmpy
class log_processor(snmpy.plugin):
def __init__(self, conf, script=False):
snmpy.plugin.__init__(self, conf, script)
def key(self, idx):
return 'string', self.data[idx - 1]['label']
def val(self, idx):
return 'integer', self.data[idx - 1]['value']
def worker(self):
self.data = [{'value':0, 'label': self.conf['objects'][item]['label'], 'regex': re.compile(self.conf['objects'][item]['regex'])} for item in sorted(self.conf['objects'])]
self.tail()
@snmpy.task
def tail(self):
for line in snmpy.tail(self.conf['logfile']):
for item in xrange(len(self.data)):
find = self.data[item]['regex'].search(line)
if find:
self.data[item]['value'] += 1
break
|
import re, snmpy_plugins
class log_processor:
def __init__(self, conf):
self.data = [{'value':0, 'label': conf['objects'][item]['label'], 'regex': re.compile(conf['objects'][item]['regex'])} for item in sorted(conf['objects'])]
self.proc(conf['logfile'])
def len(self):
return len(self.data)
def key(self, idx):
return 'string', self.data[idx - 1]['label']
def val(self, idx):
return 'integer', self.data[idx - 1]['value']
@snmpy_plugins.task
def proc(self, file):
for line in snmpy_plugins.tail(file):
for item in xrange(len(self.data)):
find = self.data[item]['regex'].search(line)
if find:
self.data[item]['value'] += 1
break
Convert to use the base class and update for new plugin path.import re
import snmpy
class log_processor(snmpy.plugin):
def __init__(self, conf, script=False):
snmpy.plugin.__init__(self, conf, script)
def key(self, idx):
return 'string', self.data[idx - 1]['label']
def val(self, idx):
return 'integer', self.data[idx - 1]['value']
def worker(self):
self.data = [{'value':0, 'label': self.conf['objects'][item]['label'], 'regex': re.compile(self.conf['objects'][item]['regex'])} for item in sorted(self.conf['objects'])]
self.tail()
@snmpy.task
def tail(self):
for line in snmpy.tail(self.conf['logfile']):
for item in xrange(len(self.data)):
find = self.data[item]['regex'].search(line)
if find:
self.data[item]['value'] += 1
break
|
<commit_before>import re, snmpy_plugins
class log_processor:
def __init__(self, conf):
self.data = [{'value':0, 'label': conf['objects'][item]['label'], 'regex': re.compile(conf['objects'][item]['regex'])} for item in sorted(conf['objects'])]
self.proc(conf['logfile'])
def len(self):
return len(self.data)
def key(self, idx):
return 'string', self.data[idx - 1]['label']
def val(self, idx):
return 'integer', self.data[idx - 1]['value']
@snmpy_plugins.task
def proc(self, file):
for line in snmpy_plugins.tail(file):
for item in xrange(len(self.data)):
find = self.data[item]['regex'].search(line)
if find:
self.data[item]['value'] += 1
break
<commit_msg>Convert to use the base class and update for new plugin path.<commit_after>import re
import snmpy
class log_processor(snmpy.plugin):
def __init__(self, conf, script=False):
snmpy.plugin.__init__(self, conf, script)
def key(self, idx):
return 'string', self.data[idx - 1]['label']
def val(self, idx):
return 'integer', self.data[idx - 1]['value']
def worker(self):
self.data = [{'value':0, 'label': self.conf['objects'][item]['label'], 'regex': re.compile(self.conf['objects'][item]['regex'])} for item in sorted(self.conf['objects'])]
self.tail()
@snmpy.task
def tail(self):
for line in snmpy.tail(self.conf['logfile']):
for item in xrange(len(self.data)):
find = self.data[item]['regex'].search(line)
if find:
self.data[item]['value'] += 1
break
|
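A minimal sketch of driving the refactored plugin above. The conf shape is inferred from the plugin code itself, not from snmpy's documentation; the logfile path and labels are placeholders, and it assumes snmpy.plugin.__init__ eventually invokes worker() to compile the regexes and start the tail() task.

# Sketch only: conf keys taken from the code above; paths and labels are
# placeholders, and we assume the snmpy.plugin base class calls worker().
conf = {
    'logfile': '/var/log/syslog',
    'objects': {
        '1': {'label': 'oom_kills', 'regex': 'Out of memory'},
        '2': {'label': 'segfaults', 'regex': 'segfault'},
    },
}

plugin = log_processor(conf)
print(plugin.key(1))  # -> ('string', 'oom_kills')
print(plugin.val(1))  # -> ('integer', <matches counted so far>)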
34330aec6cf0c038d47c43ef926fa615bd568ea3
|
sqlservice/__init__.py
|
sqlservice/__init__.py
|
# -*- coding: utf-8 -*-
"""The sqlservice package.
"""
from .__pkg__ import (
__description__,
__url__,
__version__,
__author__,
__email__,
__license__
)
from .client import SQLClient
from .model import ModelBase, declarative_base
from .query import Query
from .service import SQLService
|
# -*- coding: utf-8 -*-
"""The sqlservice package.
"""
from .__pkg__ import (
__description__,
__url__,
__version__,
__author__,
__email__,
__license__
)
from .client import SQLClient
from .model import ModelBase, declarative_base
from .service import SQLService
from . import event
|
Remove Query from import and add explicit event module import.
|
Remove Query from import and add explicit event module import.
|
Python
|
mit
|
dgilland/sqlservice
|
# -*- coding: utf-8 -*-
"""The sqlservice package.
"""
from .__pkg__ import (
__description__,
__url__,
__version__,
__author__,
__email__,
__license__
)
from .client import SQLClient
from .model import ModelBase, declarative_base
from .query import Query
from .service import SQLService
Remove Query from import and add explicit event module import.
|
# -*- coding: utf-8 -*-
"""The sqlservice package.
"""
from .__pkg__ import (
__description__,
__url__,
__version__,
__author__,
__email__,
__license__
)
from .client import SQLClient
from .model import ModelBase, declarative_base
from .service import SQLService
from . import event
|
<commit_before># -*- coding: utf-8 -*-
"""The sqlservice package.
"""
from .__pkg__ import (
__description__,
__url__,
__version__,
__author__,
__email__,
__license__
)
from .client import SQLClient
from .model import ModelBase, declarative_base
from .query import Query
from .service import SQLService
<commit_msg>Remove Query from import and add explicit event module import.<commit_after>
|
# -*- coding: utf-8 -*-
"""The sqlservice package.
"""
from .__pkg__ import (
__description__,
__url__,
__version__,
__author__,
__email__,
__license__
)
from .client import SQLClient
from .model import ModelBase, declarative_base
from .service import SQLService
from . import event
|
# -*- coding: utf-8 -*-
"""The sqlservice package.
"""
from .__pkg__ import (
__description__,
__url__,
__version__,
__author__,
__email__,
__license__
)
from .client import SQLClient
from .model import ModelBase, declarative_base
from .query import Query
from .service import SQLService
Remove Query from import and add explicit event module import.# -*- coding: utf-8 -*-
"""The sqlservice package.
"""
from .__pkg__ import (
__description__,
__url__,
__version__,
__author__,
__email__,
__license__
)
from .client import SQLClient
from .model import ModelBase, declarative_base
from .service import SQLService
from . import event
|
<commit_before># -*- coding: utf-8 -*-
"""The sqlservice package.
"""
from .__pkg__ import (
__description__,
__url__,
__version__,
__author__,
__email__,
__license__
)
from .client import SQLClient
from .model import ModelBase, declarative_base
from .query import Query
from .service import SQLService
<commit_msg>Remove Query from import and add explicit event module import.<commit_after># -*- coding: utf-8 -*-
"""The sqlservice package.
"""
from .__pkg__ import (
__description__,
__url__,
__version__,
__author__,
__email__,
__license__
)
from .client import SQLClient
from .model import ModelBase, declarative_base
from .service import SQLService
from . import event
|
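The practical effect of this change on the package surface, sketched below; only the import lines follow directly from the diff.

# After this commit the package re-exports everything except Query, and
# the event module is importable directly from the package:
from sqlservice import SQLClient, SQLService, ModelBase, declarative_base
from sqlservice import event

# Whereas this now raises ImportError (Query would have to come from
# sqlservice.query, assuming that module still exists upstream):
# from sqlservice import Query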
b61ded5a1f59eca7838219d9e904941bd04aa064
|
lib/euedb.py
|
lib/euedb.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import os
import sys
import time
import re
import MySQLdb
from MySQLdb import cursors
class mysql:
cn = None
host = None
port = None
user = None
passw = None
def __init__(self, host, user, passw, db, port=3306):
self.port = port
self.host = host
self.user = user
self.passw = passw
self.db = db
if not self.connect():
return False
def connect(self):
"""
connect to the mysql db
"""
try:
self.cn = MySQLdb.connect(
host=self.host, user=self.user, passwd=self.passw,
db=self.db, port=self.port)
return True
except:
return False
def querySelect(self, query):
"""
do the select query and return the result as an array of dictionaries
"""
data = []
cur = self.cn.cursor(cursors.DictCursor)
try:
cur.execute(query)
except:
return False
data = cur.fetchall()
cur.close()
return data
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import os
import sys
import time
import re
import MySQLdb
from MySQLdb import cursors
class mysql:
cn = None
host = None
port = None
user = None
passw = None
connected = False
def __init__(self, host, user, passw, db, port=3306):
self.port = port
self.host = host
self.user = user
self.passw = passw
self.db = db
if not self.connect():
self.connected = False
def connect(self):
"""
connect to the mysql db
"""
try:
if self.connected:
return True
else:
self.cn = MySQLdb.connect(
host=self.host, user=self.user, passwd=self.passw,
db=self.db, port=self.port)
self.connected = True
return True
except:
self.connected = False
return False
def querySelect(self, query):
"""
do the select query and return the result as an array of dictionaries
"""
if not self.connected:
self.connect()
data = []
cur = self.cn.cursor(cursors.DictCursor)
try:
cur.execute(query)
except:
return False
data = cur.fetchall()
cur.close()
return data
|
Add better connection management ...
|
Add better connection management ...
|
Python
|
agpl-3.0
|
david-guenault/eue-ng,david-guenault/eue-ng,david-guenault/eue-ng,david-guenault/eue-ng,david-guenault/eue-ng,david-guenault/eue-ng
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import os
import sys
import time
import re
import MySQLdb
from MySQLdb import cursors
class mysql:
cn = None
host = None
port = None
user = None
passw = None
def __init__(self, host, user, passw, db, port=3306):
self.port = port
self.host = host
self.user = user
self.passw = passw
self.db = db
if not self.connect():
return False
def connect(self):
"""
connect to the mysql db
"""
try:
self.cn = MySQLdb.connect(
host=self.host, user=self.user, passwd=self.passw,
db=self.db, port=self.port)
return True
except:
return False
def querySelect(self, query):
"""
do the select query and return the result as an array of dictionaries
"""
data = []
cur = self.cn.cursor(cursors.DictCursor)
try:
cur.execute(query)
except:
return False
data = cur.fetchall()
cur.close()
return data
Add better connection management ...
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import os
import sys
import time
import re
import MySQLdb
from MySQLdb import cursors
class mysql:
cn = None
host = None
port = None
user = None
passw = None
connected = False
def __init__(self, host, user, passw, db, port=3306):
self.port = port
self.host = host
self.user = user
self.passw = passw
self.db = db
if not self.connect():
self.connected = False
def connect(self):
"""
connect to the mysql db
"""
try:
if self.connected:
return True
else:
self.cn = MySQLdb.connect(
host=self.host, user=self.user, passwd=self.passw,
db=self.db, port=self.port)
self.connected = True
return True
except:
self.connected = False
return False
def querySelect(self, query):
"""
do the select query and return the result as an array of dictionaries
"""
if not self.connected:
self.connect()
data = []
cur = self.cn.cursor(cursors.DictCursor)
try:
cur.execute(query)
except:
return False
data = cur.fetchall()
cur.close()
return data
|
<commit_before>#!/usr/bin/python
# -*- coding: utf-8 -*-
import os
import sys
import time
import re
import MySQLdb
from MySQLdb import cursors
class mysql:
cn = None
host = None
port = None
user = None
passw = None
def __init__(self, host, user, passw, db, port=3306):
self.port = port
self.host = host
self.user = user
self.passw = passw
self.db = db
if not self.connect():
return False
def connect(self):
"""
connect to the mysql db
"""
try:
self.cn = MySQLdb.connect(
host=self.host, user=self.user, passwd=self.passw,
db=self.db, port=self.port)
return True
except:
return False
def querySelect(self, query):
"""
do the select query and return the result as an array of dictionaries
"""
data = []
cur = self.cn.cursor(cursors.DictCursor)
try:
cur.execute(query)
except:
return False
data = cur.fetchall()
cur.close()
return data
<commit_msg>Add better connection management ...<commit_after>
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import os
import sys
import time
import re
import MySQLdb
from MySQLdb import cursors
class mysql:
cn = None
host = None
port = None
user = None
passw = None
connected = False
def __init__(self, host, user, passw, db, port=3306):
self.port = port
self.host = host
self.user = user
self.passw = passw
self.db = db
if not self.connect():
self.connected = False
def connect(self):
"""
connect to the mysql db
"""
try:
if self.connected:
return True
else:
self.cn = MySQLdb.connect(
host=self.host, user=self.user, passwd=self.passw,
db=self.db, port=self.port)
self.connected = True
return True
except:
self.connected = False
return False
def querySelect(self, query):
"""
do the select query and return the result as an array of dictionaries
"""
if not self.connected:
self.connect()
data = []
cur = self.cn.cursor(cursors.DictCursor)
try:
cur.execute(query)
except:
return False
data = cur.fetchall()
cur.close()
return data
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import os
import sys
import time
import re
import MySQLdb
from MySQLdb import cursors
class mysql:
cn = None
host = None
port = None
user = None
passw = None
def __init__(self, host, user, passw, db, port=3306):
self.port = port
self.host = host
self.user = user
self.passw = passw
self.db = db
if not self.connect():
return False
def connect(self):
"""
connect to the mysql db
"""
try:
self.cn = MySQLdb.connect(
host=self.host, user=self.user, passwd=self.passw,
db=self.db, port=self.port)
return True
except:
return False
def querySelect(self, query):
"""
do the select query and return the result as an array of dictionaries
"""
data = []
cur = self.cn.cursor(cursors.DictCursor)
try:
cur.execute(query)
except:
return False
data = cur.fetchall()
cur.close()
return data
Add better connection management ...#!/usr/bin/python
# -*- coding: utf-8 -*-
import os
import sys
import time
import re
import MySQLdb
from MySQLdb import cursors
class mysql:
cn = None
host = None
port = None
user = None
passw = None
connected = False
def __init__(self, host, user, passw, db, port=3306):
self.port = port
self.host = host
self.user = user
self.passw = passw
self.db = db
if not self.connect():
self.connected = False
def connect(self):
"""
connect to the mysql db
"""
try:
if self.connected:
return True
else:
self.cn = MySQLdb.connect(
host=self.host, user=self.user, passwd=self.passw,
db=self.db, port=self.port)
self.connected = True
return True
except:
self.connected = False
return False
def querySelect(self, query):
"""
do the select query and return the result as an array of dictionaries
"""
if not self.connected:
self.connect()
data = []
cur = self.cn.cursor(cursors.DictCursor)
try:
cur.execute(query)
except:
return False
data = cur.fetchall()
cur.close()
return data
|
<commit_before>#!/usr/bin/python
# -*- coding: utf-8 -*-
import os
import sys
import time
import re
import MySQLdb
from MySQLdb import cursors
class mysql:
cn = None
host = None
port = None
user = None
passw = None
def __init__(self, host, user, passw, db, port=3306):
self.port = port
self.host = host
self.user = user
self.passw = passw
self.db = db
if not self.connect():
return False
def connect(self):
"""
connect to the mysql db
"""
try:
self.cn = MySQLdb.connect(
host=self.host, user=self.user, passwd=self.passw,
db=self.db, port=self.port)
return True
except:
return False
def querySelect(self, query):
"""
do the select query and return the result as an array of dictionaries
"""
data = []
cur = self.cn.cursor(cursors.DictCursor)
try:
cur.execute(query)
except:
return False
data = cur.fetchall()
cur.close()
return data
<commit_msg>Add better connection management ...<commit_after>#!/usr/bin/python
# -*- coding: utf-8 -*-
import os
import sys
import time
import re
import MySQLdb
from MySQLdb import cursors
class mysql:
cn = None
host = None
port = None
user = None
passw = None
connected = False
def __init__(self, host, user, passw, db, port=3306):
self.port = port
self.host = host
self.user = user
self.passw = passw
self.db = db
if not self.connect():
self.connected = False
def connect(self):
"""
connect to the mysql db
"""
try:
if self.connected:
return True
else:
self.cn = MySQLdb.connect(
host=self.host, user=self.user, passwd=self.passw,
db=self.db, port=self.port)
self.connected = True
return True
except:
self.connected = False
return False
def querySelect(self, query):
"""
do the select query and return the result as an array of dictionaries
"""
if not self.connected:
self.connect()
data = []
cur = self.cn.cursor(cursors.DictCursor)
try:
cur.execute(query)
except:
return False
data = cur.fetchall()
cur.close()
return data
|
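A hypothetical caller for the reworked wrapper; host, credentials and the query are placeholders. Note that querySelect() now reconnects lazily via the connected flag, but it still assumes connect() succeeded: a failed connection leaves cn as None, so the cursor call would raise.

# Hypothetical usage; credentials and query are placeholders.
db = mysql('localhost', 'monitor', 'secret', 'eue')
if db.connected:
    rows = db.querySelect('SELECT id, name FROM probes')
    if rows is not False:          # querySelect returns False on query errors
        for row in rows:           # DictCursor yields one dict per row
            print('{0} {1}'.format(row['id'], row['name']))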
9c24dfc6a6207c9688332a16ee1600b73aec44d8
|
narcis/urls.py
|
narcis/urls.py
|
from django.conf.urls import patterns, include, url
from django.views.generic.base import RedirectView
from django.conf import settings
from django.contrib import admin
admin.autodiscover()
from . import views
from projects.views import screenshot
urlpatterns = patterns('',
# Examples:
# url(r'^blog/', include('blog.urls')),
url(r'^$', 'narcis.views.home', name='home'),
url(r'^admin/', include(admin.site.urls)),
url(r'^favicon\.ico$', RedirectView.as_view(url='/static/favicon.ico')),
url(r'^projects/', include('projects.urls')),
# Access controlled screenshot images
url(r'^{0}(?P<id>[0-9a-f\-]+)'.format(settings.PRIVATE_SCREENSHOT_URL.lstrip('/')), screenshot),
)
|
from django.conf.urls import patterns, include, url
from django.views.generic.base import RedirectView
from django.conf import settings
from django.contrib import admin
admin.autodiscover()
from . import views
from projects.views import screenshot
urlpatterns = patterns('',
# Examples:
# url(r'^blog/', include('blog.urls')),
url(r'^$', 'narcis.views.home', name='home'),
url(r'^admin/', include(admin.site.urls)),
url(r'^favicon\.ico$', RedirectView.as_view(url='/static/favicon.ico')),
url(r'^projects/', include('projects.urls')),
# Access controlled screenshot images
url(r'^{0}[0-9]+/[0-9]+/[0-9]+/(?P<id>[0-9a-f\-]+)'.format(settings.PRIVATE_SCREENSHOT_URL.lstrip('/')), screenshot),
url(r'^{0}(?P<id>[0-9a-f\-]+)'.format(settings.PRIVATE_SCREENSHOT_URL.lstrip('/')), screenshot),
)
|
Add URL for django admin access to screenshots
|
Add URL for django admin access to screenshots
|
Python
|
mit
|
deckar01/narcis,deckar01/narcis,deckar01/narcis
|
from django.conf.urls import patterns, include, url
from django.views.generic.base import RedirectView
from django.conf import settings
from django.contrib import admin
admin.autodiscover()
from . import views
from projects.views import screenshot
urlpatterns = patterns('',
# Examples:
# url(r'^blog/', include('blog.urls')),
url(r'^$', 'narcis.views.home', name='home'),
url(r'^admin/', include(admin.site.urls)),
url(r'^favicon\.ico$', RedirectView.as_view(url='/static/favicon.ico')),
url(r'^projects/', include('projects.urls')),
# Access controlled screenshot images
url(r'^{0}(?P<id>[0-9a-f\-]+)'.format(settings.PRIVATE_SCREENSHOT_URL.lstrip('/')), screenshot),
)
Add URL for django admin access to screenshots
|
from django.conf.urls import patterns, include, url
from django.views.generic.base import RedirectView
from django.conf import settings
from django.contrib import admin
admin.autodiscover()
from . import views
from projects.views import screenshot
urlpatterns = patterns('',
# Examples:
# url(r'^blog/', include('blog.urls')),
url(r'^$', 'narcis.views.home', name='home'),
url(r'^admin/', include(admin.site.urls)),
url(r'^favicon\.ico$', RedirectView.as_view(url='/static/favicon.ico')),
url(r'^projects/', include('projects.urls')),
# Access controlled screenshot images
url(r'^{0}[0-9]+/[0-9]+/[0-9]+/(?P<id>[0-9a-f\-]+)'.format(settings.PRIVATE_SCREENSHOT_URL.lstrip('/')), screenshot),
url(r'^{0}(?P<id>[0-9a-f\-]+)'.format(settings.PRIVATE_SCREENSHOT_URL.lstrip('/')), screenshot),
)
|
<commit_before>from django.conf.urls import patterns, include, url
from django.views.generic.base import RedirectView
from django.conf import settings
from django.contrib import admin
admin.autodiscover()
from . import views
from projects.views import screenshot
urlpatterns = patterns('',
# Examples:
# url(r'^blog/', include('blog.urls')),
url(r'^$', 'narcis.views.home', name='home'),
url(r'^admin/', include(admin.site.urls)),
url(r'^favicon\.ico$', RedirectView.as_view(url='/static/favicon.ico')),
url(r'^projects/', include('projects.urls')),
# Access controlled screenshot images
url(r'^{0}(?P<id>[0-9a-f\-]+)'.format(settings.PRIVATE_SCREENSHOT_URL.lstrip('/')), screenshot),
)
<commit_msg>Add URL for django admin access to screenshots<commit_after>
|
from django.conf.urls import patterns, include, url
from django.views.generic.base import RedirectView
from django.conf import settings
from django.contrib import admin
admin.autodiscover()
from . import views
from projects.views import screenshot
urlpatterns = patterns('',
# Examples:
# url(r'^blog/', include('blog.urls')),
url(r'^$', 'narcis.views.home', name='home'),
url(r'^admin/', include(admin.site.urls)),
url(r'^favicon\.ico$', RedirectView.as_view(url='/static/favicon.ico')),
url(r'^projects/', include('projects.urls')),
# Access controlled screenshot images
url(r'^{0}[0-9]+/[0-9]+/[0-9]+/(?P<id>[0-9a-f\-]+)'.format(settings.PRIVATE_SCREENSHOT_URL.lstrip('/')), screenshot),
url(r'^{0}(?P<id>[0-9a-f\-]+)'.format(settings.PRIVATE_SCREENSHOT_URL.lstrip('/')), screenshot),
)
|
from django.conf.urls import patterns, include, url
from django.views.generic.base import RedirectView
from django.conf import settings
from django.contrib import admin
admin.autodiscover()
from . import views
from projects.views import screenshot
urlpatterns = patterns('',
# Examples:
# url(r'^blog/', include('blog.urls')),
url(r'^$', 'narcis.views.home', name='home'),
url(r'^admin/', include(admin.site.urls)),
url(r'^favicon\.ico$', RedirectView.as_view(url='/static/favicon.ico')),
url(r'^projects/', include('projects.urls')),
# Access controlled screenshot images
url(r'^{0}(?P<id>[0-9a-f\-]+)'.format(settings.PRIVATE_SCREENSHOT_URL.lstrip('/')), screenshot),
)
Add URL for django admin access to screenshotsfrom django.conf.urls import patterns, include, url
from django.views.generic.base import RedirectView
from django.conf import settings
from django.contrib import admin
admin.autodiscover()
from . import views
from projects.views import screenshot
urlpatterns = patterns('',
# Examples:
# url(r'^blog/', include('blog.urls')),
url(r'^$', 'narcis.views.home', name='home'),
url(r'^admin/', include(admin.site.urls)),
url(r'^favicon\.ico$', RedirectView.as_view(url='/static/favicon.ico')),
url(r'^projects/', include('projects.urls')),
# Access controlled screenshot images
url(r'^{0}[0-9]+/[0-9]+/[0-9]+/(?P<id>[0-9a-f\-]+)'.format(settings.PRIVATE_SCREENSHOT_URL.lstrip('/')), screenshot),
url(r'^{0}(?P<id>[0-9a-f\-]+)'.format(settings.PRIVATE_SCREENSHOT_URL.lstrip('/')), screenshot),
)
|
<commit_before>from django.conf.urls import patterns, include, url
from django.views.generic.base import RedirectView
from django.conf import settings
from django.contrib import admin
admin.autodiscover()
from . import views
from projects.views import screenshot
urlpatterns = patterns('',
# Examples:
# url(r'^blog/', include('blog.urls')),
url(r'^$', 'narcis.views.home', name='home'),
url(r'^admin/', include(admin.site.urls)),
url(r'^favicon\.ico$', RedirectView.as_view(url='/static/favicon.ico')),
url(r'^projects/', include('projects.urls')),
# Access controlled screenshot images
url(r'^{0}(?P<id>[0-9a-f\-]+)'.format(settings.PRIVATE_SCREENSHOT_URL.lstrip('/')), screenshot),
)
<commit_msg>Add URL for django admin access to screenshots<commit_after>from django.conf.urls import patterns, include, url
from django.views.generic.base import RedirectView
from django.conf import settings
from django.contrib import admin
admin.autodiscover()
from . import views
from projects.views import screenshot
urlpatterns = patterns('',
# Examples:
# url(r'^blog/', include('blog.urls')),
url(r'^$', 'narcis.views.home', name='home'),
url(r'^admin/', include(admin.site.urls)),
url(r'^favicon\.ico$', RedirectView.as_view(url='/static/favicon.ico')),
url(r'^projects/', include('projects.urls')),
# Access controlled screenshot images
url(r'^{0}[0-9]+/[0-9]+/[0-9]+/(?P<id>[0-9a-f\-]+)'.format(settings.PRIVATE_SCREENSHOT_URL.lstrip('/')), screenshot),
url(r'^{0}(?P<id>[0-9a-f\-]+)'.format(settings.PRIVATE_SCREENSHOT_URL.lstrip('/')), screenshot),
)
|
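The added pattern presumably exists because the admin's file links include the upload-date path segments, which the original pattern never matched. A standalone check of the new regex, with PRIVATE_SCREENSHOT_URL assumed to be 'screenshots/' purely for illustration:

import re

prefix = 'screenshots/'  # stand-in for settings.PRIVATE_SCREENSHOT_URL.lstrip('/')
admin_pattern = re.compile(
    r'^{0}[0-9]+/[0-9]+/[0-9]+/(?P<id>[0-9a-f\-]+)'.format(prefix))

m = admin_pattern.match(
    'screenshots/2014/03/07/3f2a9b1c-0d4e-4f6a-9b2d-1c3e5a7f9b2d')
print(m.group('id'))  # -> 3f2a9b1c-0d4e-4f6a-9b2d-1c3e5a7f9b2d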
94c0c60172c1114d6f0938de88af67ae7203ae95
|
pi_setup/system.py
|
pi_setup/system.py
|
#!/usr/bin/env python
import subprocess
def main():
subprocess.call(["apt-get", "update"])
subprocess.call(["apt-get", "-y", "upgrade"])
subprocess.call(["apt-get", "-y", "install", "python-dev"])
subprocess.call(["apt-get", "-y", "install", "python-pip"])
subprocess.call(["apt-get", "-y", "install", "avahi-daemon"])
subprocess.call(["apt-get", "-y", "install", "rpi-update"])
subprocess.call(["pip", "install", "virtualenv"])
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
import subprocess
def main():
subprocess.call(["apt-get", "update"])
subprocess.call(["apt-get", "-y", "upgrade"])
subprocess.call(["apt-get", "-y", "install", "python-dev"])
subprocess.call(["apt-get", "-y", "install", "python-pip"])
subprocess.call(["apt-get", "-y", "install", "ipython-notebook"])
subprocess.call(["apt-get", "-y", "install", "avahi-daemon"])
subprocess.call(["apt-get", "-y", "install", "rpi-update"])
subprocess.call(["pip", "install", "virtualenv"])
if __name__ == '__main__':
main()
|
Add python to install script
|
Add python to install script
|
Python
|
mit
|
projectweekend/Pi-Setup,projectweekend/Pi-Setup
|
#!/usr/bin/env python
import subprocess
def main():
subprocess.call(["apt-get", "update"])
subprocess.call(["apt-get", "-y", "upgrade"])
subprocess.call(["apt-get", "-y", "install", "python-dev"])
subprocess.call(["apt-get", "-y", "install", "python-pip"])
subprocess.call(["apt-get", "-y", "install", "avahi-daemon"])
subprocess.call(["apt-get", "-y", "install", "rpi-update"])
subprocess.call(["pip", "install", "virtualenv"])
if __name__ == '__main__':
main()
Add python to install script
|
#!/usr/bin/env python
import subprocess
def main():
subprocess.call(["apt-get", "update"])
subprocess.call(["apt-get", "-y", "upgrade"])
subprocess.call(["apt-get", "-y", "install", "python-dev"])
subprocess.call(["apt-get", "-y", "install", "python-pip"])
subprocess.call(["apt-get", "-y", "install", "ipython-notebook"])
subprocess.call(["apt-get", "-y", "install", "avahi-daemon"])
subprocess.call(["apt-get", "-y", "install", "rpi-update"])
subprocess.call(["pip", "install", "virtualenv"])
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python
import subprocess
def main():
subprocess.call(["apt-get", "update"])
subprocess.call(["apt-get", "-y", "upgrade"])
subprocess.call(["apt-get", "-y", "install", "python-dev"])
subprocess.call(["apt-get", "-y", "install", "python-pip"])
subprocess.call(["apt-get", "-y", "install", "avahi-daemon"])
subprocess.call(["apt-get", "-y", "install", "rpi-update"])
subprocess.call(["pip", "install", "virtualenv"])
if __name__ == '__main__':
main()
<commit_msg>Add python to install script<commit_after>
|
#!/usr/bin/env python
import subprocess
def main():
subprocess.call(["apt-get", "update"])
subprocess.call(["apt-get", "-y", "upgrade"])
subprocess.call(["apt-get", "-y", "install", "python-dev"])
subprocess.call(["apt-get", "-y", "install", "python-pip"])
subprocess.call(["apt-get", "-y", "install", "ipython-notebook"])
subprocess.call(["apt-get", "-y", "install", "avahi-daemon"])
subprocess.call(["apt-get", "-y", "install", "rpi-update"])
subprocess.call(["pip", "install", "virtualenv"])
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
import subprocess
def main():
subprocess.call(["apt-get", "update"])
subprocess.call(["apt-get", "-y", "upgrade"])
subprocess.call(["apt-get", "-y", "install", "python-dev"])
subprocess.call(["apt-get", "-y", "install", "python-pip"])
subprocess.call(["apt-get", "-y", "install", "avahi-daemon"])
subprocess.call(["apt-get", "-y", "install", "rpi-update"])
subprocess.call(["pip", "install", "virtualenv"])
if __name__ == '__main__':
main()
Add python to install script#!/usr/bin/env python
import subprocess
def main():
subprocess.call(["apt-get", "update"])
subprocess.call(["apt-get", "-y", "upgrade"])
subprocess.call(["apt-get", "-y", "install", "python-dev"])
subprocess.call(["apt-get", "-y", "install", "python-pip"])
subprocess.call(["apt-get", "-y", "install", "ipython-notebook"])
subprocess.call(["apt-get", "-y", "install", "avahi-daemon"])
subprocess.call(["apt-get", "-y", "install", "rpi-update"])
subprocess.call(["pip", "install", "virtualenv"])
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python
import subprocess
def main():
subprocess.call(["apt-get", "update"])
subprocess.call(["apt-get", "-y", "upgrade"])
subprocess.call(["apt-get", "-y", "install", "python-dev"])
subprocess.call(["apt-get", "-y", "install", "python-pip"])
subprocess.call(["apt-get", "-y", "install", "avahi-daemon"])
subprocess.call(["apt-get", "-y", "install", "rpi-update"])
subprocess.call(["pip", "install", "virtualenv"])
if __name__ == '__main__':
main()
<commit_msg>Add python to install script<commit_after>#!/usr/bin/env python
import subprocess
def main():
subprocess.call(["apt-get", "update"])
subprocess.call(["apt-get", "-y", "upgrade"])
subprocess.call(["apt-get", "-y", "install", "python-dev"])
subprocess.call(["apt-get", "-y", "install", "python-pip"])
subprocess.call(["apt-get", "-y", "install", "ipython-notebook"])
subprocess.call(["apt-get", "-y", "install", "avahi-daemon"])
subprocess.call(["apt-get", "-y", "install", "rpi-update"])
subprocess.call(["pip", "install", "virtualenv"])
if __name__ == '__main__':
main()
|
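Nothing in the script depends on the packages being installed one call site at a time, so a behaviour-preserving refactor could keep the apt list in one place. A sketch, not the project's code:

#!/usr/bin/env python
import subprocess

APT_PACKAGES = [
    'python-dev',
    'python-pip',
    'ipython-notebook',
    'avahi-daemon',
    'rpi-update',
]

def main():
    subprocess.call(['apt-get', 'update'])
    subprocess.call(['apt-get', '-y', 'upgrade'])
    for package in APT_PACKAGES:  # one install call per package, as before
        subprocess.call(['apt-get', '-y', 'install', package])
    subprocess.call(['pip', 'install', 'virtualenv'])

if __name__ == '__main__':
    main()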
9d3750881eaa215f6d06087e6d0f7b6d223c3cd1
|
feincms3/plugins/richtext.py
|
feincms3/plugins/richtext.py
|
from __future__ import unicode_literals
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.html import strip_tags
from django.utils.text import Truncator
from django.utils.translation import ugettext_lazy as _
from content_editor.admin import ContentEditorInline
from feincms3.cleanse import CleansedRichTextField
__all__ = ('CleansedRichTextField', 'RichText', 'RichTextInline')
@python_2_unicode_compatible
class RichText(models.Model):
text = CleansedRichTextField(_('text'), config_name='richtext-plugin')
class Meta:
abstract = True
verbose_name = _('rich text')
verbose_name_plural = _('rich texts')
def __str__(self):
# Return the first few words of the content (with tags stripped)
return Truncator(strip_tags(self.text)).words(10, truncate=' ...')
class RichTextInline(ContentEditorInline):
class Media:
js = ('feincms3/plugin_ckeditor.js',)
|
from __future__ import unicode_literals
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.html import strip_tags
from django.utils.text import Truncator
from django.utils.translation import ugettext_lazy as _
from content_editor.admin import ContentEditorInline
from feincms3.cleanse import CleansedRichTextField
__all__ = ('RichText', 'RichTextInline')
@python_2_unicode_compatible
class RichText(models.Model):
"""
Rich text plugin
Usage::
class Page(...):
# ...
PagePlugin = create_plugin_base(Page)
class RichText(plugins.RichText, PagePlugin):
pass
To use this, a django-ckeditor_ configuration named ``richtext-plugin`` is
required. See the section :mod:`HTML cleansing <feincms3.cleanse>` for the
recommended configuration.
"""
text = CleansedRichTextField(_('text'), config_name='richtext-plugin')
class Meta:
abstract = True
verbose_name = _('rich text')
verbose_name_plural = _('rich texts')
def __str__(self):
# Return the first few words of the content (with tags stripped)
return Truncator(strip_tags(self.text)).words(10, truncate=' ...')
class RichTextInline(ContentEditorInline):
"""
The only difference with the standard ``ContentEditorInline`` is that this
inline adds the ``feincms3/plugin_ckeditor.js`` file which handles the
CKEditor widget activation and deactivation inside the content editor.
"""
class Media:
js = ('feincms3/plugin_ckeditor.js',)
|
Document the rich text plugin
|
Document the rich text plugin
|
Python
|
bsd-3-clause
|
matthiask/feincms3,matthiask/feincms3,matthiask/feincms3
|
from __future__ import unicode_literals
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.html import strip_tags
from django.utils.text import Truncator
from django.utils.translation import ugettext_lazy as _
from content_editor.admin import ContentEditorInline
from feincms3.cleanse import CleansedRichTextField
__all__ = ('CleansedRichTextField', 'RichText', 'RichTextInline')
@python_2_unicode_compatible
class RichText(models.Model):
text = CleansedRichTextField(_('text'), config_name='richtext-plugin')
class Meta:
abstract = True
verbose_name = _('rich text')
verbose_name_plural = _('rich texts')
def __str__(self):
# Return the first few words of the content (with tags stripped)
return Truncator(strip_tags(self.text)).words(10, truncate=' ...')
class RichTextInline(ContentEditorInline):
class Media:
js = ('feincms3/plugin_ckeditor.js',)
Document the rich text plugin
|
from __future__ import unicode_literals
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.html import strip_tags
from django.utils.text import Truncator
from django.utils.translation import ugettext_lazy as _
from content_editor.admin import ContentEditorInline
from feincms3.cleanse import CleansedRichTextField
__all__ = ('RichText', 'RichTextInline')
@python_2_unicode_compatible
class RichText(models.Model):
"""
Rich text plugin
Usage::
class Page(...):
# ...
PagePlugin = create_plugin_base(Page)
class RichText(plugins.RichText, PagePlugin):
pass
To use this, a django-ckeditor_ configuration named ``richtext-plugin`` is
required. See the section :mod:`HTML cleansing <feincms3.cleanse>` for the
recommended configuration.
"""
text = CleansedRichTextField(_('text'), config_name='richtext-plugin')
class Meta:
abstract = True
verbose_name = _('rich text')
verbose_name_plural = _('rich texts')
def __str__(self):
# Return the first few words of the content (with tags stripped)
return Truncator(strip_tags(self.text)).words(10, truncate=' ...')
class RichTextInline(ContentEditorInline):
"""
The only difference with the standard ``ContentEditorInline`` is that this
inline adds the ``feincms3/plugin_ckeditor.js`` file which handles the
CKEditor widget activation and deactivation inside the content editor.
"""
class Media:
js = ('feincms3/plugin_ckeditor.js',)
|
<commit_before>from __future__ import unicode_literals
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.html import strip_tags
from django.utils.text import Truncator
from django.utils.translation import ugettext_lazy as _
from content_editor.admin import ContentEditorInline
from feincms3.cleanse import CleansedRichTextField
__all__ = ('CleansedRichTextField', 'RichText', 'RichTextInline')
@python_2_unicode_compatible
class RichText(models.Model):
text = CleansedRichTextField(_('text'), config_name='richtext-plugin')
class Meta:
abstract = True
verbose_name = _('rich text')
verbose_name_plural = _('rich texts')
def __str__(self):
# Return the first few words of the content (with tags stripped)
return Truncator(strip_tags(self.text)).words(10, truncate=' ...')
class RichTextInline(ContentEditorInline):
class Media:
js = ('feincms3/plugin_ckeditor.js',)
<commit_msg>Document the rich text plugin<commit_after>
|
from __future__ import unicode_literals
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.html import strip_tags
from django.utils.text import Truncator
from django.utils.translation import ugettext_lazy as _
from content_editor.admin import ContentEditorInline
from feincms3.cleanse import CleansedRichTextField
__all__ = ('RichText', 'RichTextInline')
@python_2_unicode_compatible
class RichText(models.Model):
"""
Rich text plugin
Usage::
class Page(...):
# ...
PagePlugin = create_plugin_base(Page)
class RichText(plugins.RichText, PagePlugin):
pass
To use this, a django-ckeditor_ configuration named ``richtext-plugin`` is
required. See the section :mod:`HTML cleansing <feincms3.cleanse>` for the
recommended configuration.
"""
text = CleansedRichTextField(_('text'), config_name='richtext-plugin')
class Meta:
abstract = True
verbose_name = _('rich text')
verbose_name_plural = _('rich texts')
def __str__(self):
# Return the first few words of the content (with tags stripped)
return Truncator(strip_tags(self.text)).words(10, truncate=' ...')
class RichTextInline(ContentEditorInline):
"""
The only difference with the standard ``ContentEditorInline`` is that this
inline adds the ``feincms3/plugin_ckeditor.js`` file which handles the
CKEditor widget activation and deactivation inside the content editor.
"""
class Media:
js = ('feincms3/plugin_ckeditor.js',)
|
from __future__ import unicode_literals
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.html import strip_tags
from django.utils.text import Truncator
from django.utils.translation import ugettext_lazy as _
from content_editor.admin import ContentEditorInline
from feincms3.cleanse import CleansedRichTextField
__all__ = ('CleansedRichTextField', 'RichText', 'RichTextInline')
@python_2_unicode_compatible
class RichText(models.Model):
text = CleansedRichTextField(_('text'), config_name='richtext-plugin')
class Meta:
abstract = True
verbose_name = _('rich text')
verbose_name_plural = _('rich texts')
def __str__(self):
# Return the first few words of the content (with tags stripped)
return Truncator(strip_tags(self.text)).words(10, truncate=' ...')
class RichTextInline(ContentEditorInline):
class Media:
js = ('feincms3/plugin_ckeditor.js',)
Document the rich text pluginfrom __future__ import unicode_literals
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.html import strip_tags
from django.utils.text import Truncator
from django.utils.translation import ugettext_lazy as _
from content_editor.admin import ContentEditorInline
from feincms3.cleanse import CleansedRichTextField
__all__ = ('RichText', 'RichTextInline')
@python_2_unicode_compatible
class RichText(models.Model):
"""
Rich text plugin
Usage::
class Page(...):
# ...
PagePlugin = create_plugin_base(Page)
class RichText(plugins.RichText, PagePlugin):
pass
To use this, a django-ckeditor_ configuration named ``richtext-plugin`` is
required. See the section :mod:`HTML cleansing <feincms3.cleanse>` for the
recommended configuration.
"""
text = CleansedRichTextField(_('text'), config_name='richtext-plugin')
class Meta:
abstract = True
verbose_name = _('rich text')
verbose_name_plural = _('rich texts')
def __str__(self):
# Return the first few words of the content (with tags stripped)
return Truncator(strip_tags(self.text)).words(10, truncate=' ...')
class RichTextInline(ContentEditorInline):
"""
The only difference with the standard ``ContentEditorInline`` is that this
inline adds the ``feincms3/plugin_ckeditor.js`` file which handles the
CKEditor widget activation and deactivation inside the content editor.
"""
class Media:
js = ('feincms3/plugin_ckeditor.js',)
|
<commit_before>from __future__ import unicode_literals
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.html import strip_tags
from django.utils.text import Truncator
from django.utils.translation import ugettext_lazy as _
from content_editor.admin import ContentEditorInline
from feincms3.cleanse import CleansedRichTextField
__all__ = ('CleansedRichTextField', 'RichText', 'RichTextInline')
@python_2_unicode_compatible
class RichText(models.Model):
text = CleansedRichTextField(_('text'), config_name='richtext-plugin')
class Meta:
abstract = True
verbose_name = _('rich text')
verbose_name_plural = _('rich texts')
def __str__(self):
# Return the first few words of the content (with tags stripped)
return Truncator(strip_tags(self.text)).words(10, truncate=' ...')
class RichTextInline(ContentEditorInline):
class Media:
js = ('feincms3/plugin_ckeditor.js',)
<commit_msg>Document the rich text plugin<commit_after>from __future__ import unicode_literals
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.html import strip_tags
from django.utils.text import Truncator
from django.utils.translation import ugettext_lazy as _
from content_editor.admin import ContentEditorInline
from feincms3.cleanse import CleansedRichTextField
__all__ = ('RichText', 'RichTextInline')
@python_2_unicode_compatible
class RichText(models.Model):
"""
Rich text plugin
Usage::
class Page(...):
# ...
PagePlugin = create_plugin_base(Page)
class RichText(plugins.RichText, PagePlugin):
pass
To use this, a django-ckeditor_ configuration named ``richtext-plugin`` is
required. See the section :mod:`HTML cleansing <feincms3.cleanse>` for the
recommended configuration.
"""
text = CleansedRichTextField(_('text'), config_name='richtext-plugin')
class Meta:
abstract = True
verbose_name = _('rich text')
verbose_name_plural = _('rich texts')
def __str__(self):
# Return the first few words of the content (with tags stripped)
return Truncator(strip_tags(self.text)).words(10, truncate=' ...')
class RichTextInline(ContentEditorInline):
"""
The only difference with the standard ``ContentEditorInline`` is that this
inline adds the ``feincms3/plugin_ckeditor.js`` file which handles the
CKEditor widget activation and deactivation inside the content editor.
"""
class Media:
js = ('feincms3/plugin_ckeditor.js',)
|
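The new docstring requires a django-ckeditor configuration named richtext-plugin, but the record itself contains none. A minimal settings sketch follows; only the 'richtext-plugin' key name comes from the docstring, while the toolbar layout is an assumption (the feincms3 cleanse docs it points to are authoritative).

# settings.py -- minimal sketch; the toolbar contents are an assumption.
CKEDITOR_CONFIGS = {
    'richtext-plugin': {
        'format_tags': 'h2;h3;p',
        'toolbar': 'Custom',
        'toolbar_Custom': [
            ['Format', 'Bold', 'Italic'],
            ['Link', 'Unlink'],
            ['RemoveFormat', 'Source'],
        ],
    },
}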
c6d589859d621ac0eb2b4843a22cfe8e011bbeaf
|
braid/postgres.py
|
braid/postgres.py
|
from fabric.api import sudo, hide
from braid import package
from pipes import quote
def install():
package.install(['postgresql-9.1', 'postgresql-server-dev-9.1'])
def _runQuery(query):
with hide('running', 'output'):
return sudo('psql --no-align --no-readline --no-password --quiet '
'--tuples-only -c {}'.format(quote(query)),
user='postgres', pty=False, combine_stderr=False)
def _dbExists(name):
res = _runQuery("select count(*) from pg_database "
"where datname = '{}';".format(name))
return res == '1'
def _userExists(name):
res = _runQuery("select count(*) from pg_user "
"where usename = '{}';".format(name))
return res == '1'
def createUser(name):
if not _userExists(name):
sudo('createuser -D -R -S {}'.format(name), user='postgres', pty=False)
def createDb(name, owner):
if not _dbExists(name):
sudo('createdb -O {} {}'.format(owner, name), user='postgres',
pty=False)
def grantRead(user, database):
"""
Grant read permissions to C{user} to all tables in C{database}.
"""
def grantReadWrite(user, database):
"""
Grant read and write permissions to C{user} to all tables in C{database}.
"""
|
from fabric.api import sudo, hide
from braid import package
from pipes import quote
def install():
package.install(['postgresql-9.1', 'postgresql-server-dev-9.1'])
def _runQuery(query, database=None):
with hide('running', 'output'):
database = '--dbname={}'.format(database) if database else ''
return sudo('psql --no-align --no-readline --no-password --quiet '
'--tuples-only {} -c {}'.format(database, quote(query)),
user='postgres', pty=False, combine_stderr=False)
def _dbExists(name):
res = _runQuery("select count(*) from pg_database "
"where datname = '{}';".format(name))
return res == '1'
def _userExists(name):
res = _runQuery("select count(*) from pg_user "
"where usename = '{}';".format(name))
return res == '1'
def createUser(name):
if not _userExists(name):
sudo('createuser -D -R -S {}'.format(name), user='postgres', pty=False)
def createDb(name, owner):
if not _dbExists(name):
sudo('createdb -O {} {}'.format(owner, name), user='postgres',
pty=False)
def grantRead(user, database):
"""
Grant read permissions to C{user} to all tables in C{database}.
"""
def grantReadWrite(user, database):
"""
Grant read and write permissions to C{user} to all tables in C{database}.
"""
|
Allow specifying a database when running a query
|
Allow specifying a database when running a query
|
Python
|
mit
|
alex/braid,alex/braid
|
from fabric.api import sudo, hide
from braid import package
from pipes import quote
def install():
package.install(['postgresql-9.1', 'postgresql-server-dev-9.1'])
def _runQuery(query):
with hide('running', 'output'):
return sudo('psql --no-align --no-readline --no-password --quiet '
'--tuples-only -c {}'.format(quote(query)),
user='postgres', pty=False, combine_stderr=False)
def _dbExists(name):
res = _runQuery("select count(*) from pg_database "
"where datname = '{}';".format(name))
return res == '1'
def _userExists(name):
res = _runQuery("select count(*) from pg_user "
"where usename = '{}';".format(name))
return res == '1'
def createUser(name):
if not _userExists(name):
sudo('createuser -D -R -S {}'.format(name), user='postgres', pty=False)
def createDb(name, owner):
if not _dbExists(name):
sudo('createdb -O {} {}'.format(owner, name), user='postgres',
pty=False)
def grantRead(user, database):
"""
Grant read permissions to C{user} to all tables in C{database}.
"""
def grantReadWrite(user, database):
"""
Grant read and write permissions to C{user} to all tables in C{database}.
"""
Allow specifying a database when running a query
|
from fabric.api import sudo, hide
from braid import package
from pipes import quote
def install():
package.install(['postgresql-9.1', 'postgresql-server-dev-9.1'])
def _runQuery(query, database=None):
with hide('running', 'output'):
database = '--dbname={}'.format(database) if database else ''
return sudo('psql --no-align --no-readline --no-password --quiet '
'--tuples-only {} -c {}'.format(database, quote(query)),
user='postgres', pty=False, combine_stderr=False)
def _dbExists(name):
res = _runQuery("select count(*) from pg_database "
"where datname = '{}';".format(name))
return res == '1'
def _userExists(name):
res = _runQuery("select count(*) from pg_user "
"where usename = '{}';".format(name))
return res == '1'
def createUser(name):
if not _userExists(name):
sudo('createuser -D -R -S {}'.format(name), user='postgres', pty=False)
def createDb(name, owner):
if not _dbExists(name):
sudo('createdb -O {} {}'.format(owner, name), user='postgres',
pty=False)
def grantRead(user, database):
"""
Grant read permissions to C{user} to all tables in C{database}.
"""
def grantReadWrite(user, database):
"""
Grant read and write permissions to C{user} to all tables in C{database}.
"""
|
<commit_before>from fabric.api import sudo, hide
from braid import package
from pipes import quote
def install():
package.install(['postgresql-9.1', 'postgresql-server-dev-9.1'])
def _runQuery(query):
with hide('running', 'output'):
return sudo('psql --no-align --no-readline --no-password --quiet '
'--tuples-only -c {}'.format(quote(query)),
user='postgres', pty=False, combine_stderr=False)
def _dbExists(name):
res = _runQuery("select count(*) from pg_database "
"where datname = '{}';".format(name))
return res == '1'
def _userExists(name):
res = _runQuery("select count(*) from pg_user "
"where usename = '{}';".format(name))
return res == '1'
def createUser(name):
if not _userExists(name):
sudo('createuser -D -R -S {}'.format(name), user='postgres', pty=False)
def createDb(name, owner):
if not _dbExists(name):
sudo('createdb -O {} {}'.format(owner, name), user='postgres',
pty=False)
def grantRead(user, database):
"""
Grant read permissions to C{user} to all tables in C{database}.
"""
def grantReadWrite(user, database):
"""
Grant read and write permissions to C{user} to all tables in C{database}.
"""
<commit_msg>Allow specifying a database when running a query<commit_after>
|
from fabric.api import sudo, hide
from braid import package
from pipes import quote
def install():
package.install(['postgresql-9.1', 'postgresql-server-dev-9.1'])
def _runQuery(query, database=None):
with hide('running', 'output'):
database = '--dbname={}'.format(database) if database else ''
return sudo('psql --no-align --no-readline --no-password --quiet '
'--tuples-only {} -c {}'.format(database, quote(query)),
user='postgres', pty=False, combine_stderr=False)
def _dbExists(name):
res = _runQuery("select count(*) from pg_database "
"where datname = '{}';".format(name))
return res == '1'
def _userExists(name):
res = _runQuery("select count(*) from pg_user "
"where usename = '{}';".format(name))
return res == '1'
def createUser(name):
if not _userExists(name):
sudo('createuser -D -R -S {}'.format(name), user='postgres', pty=False)
def createDb(name, owner):
if not _dbExists(name):
sudo('createdb -O {} {}'.format(owner, name), user='postgres',
pty=False)
def grantRead(user, database):
"""
Grant read permissions to C{user} to all tables in C{database}.
"""
def grantReadWrite(user, database):
"""
Grant read and write permissions to C{user} to all tables in C{database}.
"""
|
from fabric.api import sudo, hide
from braid import package
from pipes import quote
def install():
package.install(['postgresql-9.1', 'postgresql-server-dev-9.1'])
def _runQuery(query):
with hide('running', 'output'):
return sudo('psql --no-align --no-readline --no-password --quiet '
'--tuples-only -c {}'.format(quote(query)),
user='postgres', pty=False, combine_stderr=False)
def _dbExists(name):
res = _runQuery("select count(*) from pg_database "
"where datname = '{}';".format(name))
return res == '1'
def _userExists(name):
res = _runQuery("select count(*) from pg_user "
"where usename = '{}';".format(name))
return res == '1'
def createUser(name):
if not _userExists(name):
sudo('createuser -D -R -S {}'.format(name), user='postgres', pty=False)
def createDb(name, owner):
if not _dbExists(name):
sudo('createdb -O {} {}'.format(owner, name), user='postgres',
pty=False)
def grantRead(user, database):
"""
Grant read permissions to C{user} to all tables in C{database}.
"""
def grantReadWrite(user, database):
"""
Grant read and write permissions to C{user} to all tables in C{database}.
"""
Allow specifying a database when running a queryfrom fabric.api import sudo, hide
from braid import package
from pipes import quote
def install():
package.install(['postgresql-9.1', 'postgresql-server-dev-9.1'])
def _runQuery(query, database=None):
with hide('running', 'output'):
database = '--dbname={}'.format(database) if database else ''
return sudo('psql --no-align --no-readline --no-password --quiet '
'--tuples-only {} -c {}'.format(database, quote(query)),
user='postgres', pty=False, combine_stderr=False)
def _dbExists(name):
res = _runQuery("select count(*) from pg_database "
"where datname = '{}';".format(name))
return res == '1'
def _userExists(name):
res = _runQuery("select count(*) from pg_user "
"where usename = '{}';".format(name))
return res == '1'
def createUser(name):
if not _userExists(name):
sudo('createuser -D -R -S {}'.format(name), user='postgres', pty=False)
def createDb(name, owner):
if not _dbExists(name):
sudo('createdb -O {} {}'.format(owner, name), user='postgres',
pty=False)
def grantRead(user, database):
"""
Grant read permissions to C{user} to all tables in C{database}.
"""
def grantReadWrite(user, database):
"""
Grant read and write permissions to C{user} to all tables in C{database}.
"""
|
<commit_before>from fabric.api import sudo, hide
from braid import package
from pipes import quote
def install():
package.install(['postgresql-9.1', 'postgresql-server-dev-9.1'])
def _runQuery(query):
with hide('running', 'output'):
return sudo('psql --no-align --no-readline --no-password --quiet '
'--tuples-only -c {}'.format(quote(query)),
user='postgres', pty=False, combine_stderr=False)
def _dbExists(name):
res = _runQuery("select count(*) from pg_database "
"where datname = '{}';".format(name))
return res == '1'
def _userExists(name):
res = _runQuery("select count(*) from pg_user "
"where usename = '{}';".format(name))
return res == '1'
def createUser(name):
if not _userExists(name):
sudo('createuser -D -R -S {}'.format(name), user='postgres', pty=False)
def createDb(name, owner):
if not _dbExists(name):
sudo('createdb -O {} {}'.format(owner, name), user='postgres',
pty=False)
def grantRead(user, database):
"""
Grant read permissions to C{user} to all tables in C{database}.
"""
def grantReadWrite(user, database):
"""
Grant read and write permissions to C{user} to all tables in C{database}.
"""
<commit_msg>Allow specifying a database when running a query<commit_after>from fabric.api import sudo, hide
from braid import package
from pipes import quote
def install():
package.install(['postgresql-9.1', 'postgresql-server-dev-9.1'])
def _runQuery(query, database=None):
with hide('running', 'output'):
database = '--dbname={}'.format(database) if database else ''
return sudo('psql --no-align --no-readline --no-password --quiet '
'--tuples-only {} -c {}'.format(database, quote(query)),
user='postgres', pty=False, combine_stderr=False)
def _dbExists(name):
res = _runQuery("select count(*) from pg_database "
"where datname = '{}';".format(name))
return res == '1'
def _userExists(name):
res = _runQuery("select count(*) from pg_user "
"where usename = '{}';".format(name))
return res == '1'
def createUser(name):
if not _userExists(name):
sudo('createuser -D -R -S {}'.format(name), user='postgres', pty=False)
def createDb(name, owner):
if not _dbExists(name):
sudo('createdb -O {} {}'.format(owner, name), user='postgres',
pty=False)
def grantRead(user, database):
"""
Grant read permissions to C{user} to all tables in C{database}.
"""
def grantReadWrite(user, database):
"""
Grant read and write permissions to C{user} to all tables in C{database}.
"""
|
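The new database argument is plausibly groundwork for the two stubbed grant helpers, since GRANT has to run inside the target database rather than the default one. A sketch of how grantRead could use it; the GRANT statement below is an assumption, not braid code:

def grantRead(user, database):
    """
    Sketch only: run the grant inside the target database via the new
    keyword argument. The GRANT statement is an assumption, not braid's.
    """
    _runQuery(
        'GRANT SELECT ON ALL TABLES IN SCHEMA public TO {};'.format(user),
        database=database)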
e2f2541d909861e140030d50cc1981697118bf2e
|
webvtt/parser.py
|
webvtt/parser.py
|
from .exceptions import MalformedFileError
class WebVTTParser:
def _parse(self, content):
self.content = content
def read(self, file):
with open(file, encoding='utf-8') as f:
self._parse(f.readlines())
if not self.is_valid():
raise MalformedFileError
return self
def is_valid(self):
return self.content and 'WEBVTT' in self.content[0]
|
from .exceptions import MalformedFileError
class WebVTTParser:
def _parse(self, content):
self.content = content
def read(self, file):
with open(file, encoding='utf-8') as f:
self._parse(f.readlines())
if not self.is_valid():
raise MalformedFileError('The file does not have a valid format')
return self
def is_valid(self):
return self.content and 'WEBVTT' in self.content[0]
|
Add message to malformed exception
|
Add message to malformed exception
|
Python
|
mit
|
glut23/webvtt-py,sampattuzzi/webvtt-py
|
from .exceptions import MalformedFileError
class WebVTTParser:
def _parse(self, content):
self.content = content
def read(self, file):
with open(file, encoding='utf-8') as f:
self._parse(f.readlines())
if not self.is_valid():
raise MalformedFileError
return self
def is_valid(self):
return self.content and 'WEBVTT' in self.content[0]
Add message to malformed exception
|
from .exceptions import MalformedFileError
class WebVTTParser:
def _parse(self, content):
self.content = content
def read(self, file):
with open(file, encoding='utf-8') as f:
self._parse(f.readlines())
if not self.is_valid():
raise MalformedFileError('The file does not have a valid format')
return self
def is_valid(self):
return self.content and 'WEBVTT' in self.content[0]
|
<commit_before>from .exceptions import MalformedFileError
class WebVTTParser:
def _parse(self, content):
self.content = content
def read(self, file):
with open(file, encoding='utf-8') as f:
self._parse(f.readlines())
if not self.is_valid():
raise MalformedFileError
return self
def is_valid(self):
return self.content and 'WEBVTT' in self.content[0]
<commit_msg>Add message to malformed exception<commit_after>
|
from .exceptions import MalformedFileError
class WebVTTParser:
def _parse(self, content):
self.content = content
def read(self, file):
with open(file, encoding='utf-8') as f:
self._parse(f.readlines())
if not self.is_valid():
raise MalformedFileError('The file does not have a valid format')
return self
def is_valid(self):
return self.content and 'WEBVTT' in self.content[0]
|
from .exceptions import MalformedFileError
class WebVTTParser:
def _parse(self, content):
self.content = content
def read(self, file):
with open(file, encoding='utf-8') as f:
self._parse(f.readlines())
if not self.is_valid():
raise MalformedFileError
return self
def is_valid(self):
return self.content and 'WEBVTT' in self.content[0]
Add message to malformed exceptionfrom .exceptions import MalformedFileError
class WebVTTParser:
def _parse(self, content):
self.content = content
def read(self, file):
with open(file, encoding='utf-8') as f:
self._parse(f.readlines())
if not self.is_valid():
raise MalformedFileError('The file does not have a valid format')
return self
def is_valid(self):
return self.content and 'WEBVTT' in self.content[0]
|
<commit_before>from .exceptions import MalformedFileError
class WebVTTParser:
def _parse(self, content):
self.content = content
def read(self, file):
with open(file, encoding='utf-8') as f:
self._parse(f.readlines())
if not self.is_valid():
raise MalformedFileError
return self
def is_valid(self):
return self.content and 'WEBVTT' in self.content[0]
<commit_msg>Add message to malformed exception<commit_after>from .exceptions import MalformedFileError
class WebVTTParser:
def _parse(self, content):
self.content = content
def read(self, file):
with open(file, encoding='utf-8') as f:
self._parse(f.readlines())
if not self.is_valid():
raise MalformedFileError('The file does not have a valid format')
return self
def is_valid(self):
return self.content and 'WEBVTT' in self.content[0]
|
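Caller-side view of the change, as a sketch; the file name is a placeholder. The module paths follow from the record (webvtt/parser.py importing from .exceptions).

from webvtt.parser import WebVTTParser
from webvtt.exceptions import MalformedFileError

try:
    parser = WebVTTParser().read('captions.vtt')  # placeholder path
except MalformedFileError as exc:
    print(exc)  # now prints: The file does not have a valid format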
bc8aa0f8aab15dd704fd34f836464d5e7397c08e
|
SessionTools/features_JSON.py
|
SessionTools/features_JSON.py
|
import json
def getVersion(b64decodedData):
json_map = json.loads(b64decodedData)
if not json_map.has_key("View"):
return None
return json.loads(b64decodedData)["View"]["Dynamo"]["Version"]
def usesListAtLevel(data):
usesList = data.find('"UseLevels": true') > -1
return usesList
|
# Feature extractors for JSON files (Dynamo 2+)
import json
def getVersion(b64decodedData):
json_map = json.loads(b64decodedData)
if not json_map.has_key("View"):
return None
return json.loads(b64decodedData)["View"]["Dynamo"]["Version"]
def usesListAtLevel(data):
usesList = data.find('"UseLevels": true') > -1
return usesList
def hasHiddenNodes(data):
return data.find('"ShowGeometry": false,') > -1 # TO CONFIRM
def hasUpstreamHiddenNodes(data):
return False # TODO
def hasShortestLacing(data):
return False # TODO
def hasLongestLacing(data):
return False # TODO
def hasDisabledLacing(data):
return data.find('"Replication": "Disabled"') > -1 # TO CONFIRM
def hasCrossProductLacing(data):
return False # TODO
def hasPinned(data):
return False # TODO
def hasFrozen(data):
return False # TODO
|
Add structure for feature extractors for JSON files
|
Add structure for feature extractors for JSON files
|
Python
|
mit
|
DynamoDS/Coulomb,DynamoDS/Coulomb,DynamoDS/Coulomb
|
import json
def getVersion(b64decodedData):
json_map = json.loads(b64decodedData)
if not json_map.has_key("View"):
return None
return json.loads(b64decodedData)["View"]["Dynamo"]["Version"]
def usesListAtLevel(data):
usesList = data.find('"UseLevels": true') > -1
return usesListAdd structure for feature extractors for JSON files
|
# Feature extractors for JSON files (Dynamo 2+)
import json
def getVersion(b64decodedData):
json_map = json.loads(b64decodedData)
if not json_map.has_key("View"):
return None
return json.loads(b64decodedData)["View"]["Dynamo"]["Version"]
def usesListAtLevel(data):
usesList = data.find('"UseLevels": true') > -1
return usesList
def hasHiddenNodes(data):
return data.find('"ShowGeometry": false,') > -1 # TO CONFIRM
def hasUpstreamHiddenNodes(data):
return False # TODO
def hasShortestLacing(data):
return False # TODO
def hasLongestLacing(data):
return False # TODO
def hasDisabledLacing(data):
return data.find('"Replication": "Disabled"') > -1 # TO CONFIRM
def hasCrossProductLacing(data):
return False # TODO
def hasPinned(data):
return False # TODO
def hasFrozen(data):
return False # TODO
|
<commit_before>import json
def getVersion(b64decodedData):
json_map = json.loads(b64decodedData)
if not json_map.has_key("View"):
return None
return json.loads(b64decodedData)["View"]["Dynamo"]["Version"]
def usesListAtLevel(data):
usesList = data.find('"UseLevels": true') > -1
return usesList<commit_msg>Add structure for feature extractors for JSON files<commit_after>
|
# Feature extractors for JSON files (Dynamo 2+)
import json
def getVersion(b64decodedData):
json_map = json.loads(b64decodedData)
if not json_map.has_key("View"):
return None
return json.loads(b64decodedData)["View"]["Dynamo"]["Version"]
def usesListAtLevel(data):
usesList = data.find('"UseLevels": true') > -1
return usesList
def hasHiddenNodes(data):
return data.find('"ShowGeometry": false,') > -1 # TO CONFIRM
def hasUpstreamHiddenNodes(data):
return False # TODO
def hasShortestLacing(data):
return False # TODO
def hasLongestLacing(data):
return False # TODO
def hasDisabledLacing(data):
return data.find('"Replication": "Disabled"') > -1 # TO CONFIRM
def hasCrossProductLacing(data):
return False # TODO
def hasPinned(data):
return False # TODO
def hasFrozen(data):
return False # TODO
|
import json
def getVersion(b64decodedData):
json_map = json.loads(b64decodedData)
if not json_map.has_key("View"):
return None
return json.loads(b64decodedData)["View"]["Dynamo"]["Version"]
def usesListAtLevel(data):
usesList = data.find('"UseLevels": true') > -1
return usesListAdd structure for feature extractors for JSON files# Feature extractors for JSON files (Dynamo 2+)
import json
def getVersion(b64decodedData):
json_map = json.loads(b64decodedData)
if not json_map.has_key("View"):
return None
return json.loads(b64decodedData)["View"]["Dynamo"]["Version"]
def usesListAtLevel(data):
usesList = data.find('"UseLevels": true') > -1
return usesList
def hasHiddenNodes(data):
return data.find('"ShowGeometry": false,') > -1 # TO CONFIRM
def hasUpstreamHiddenNodes(data):
return False # TODO
def hasShortestLacing(data):
return False # TODO
def hasLongestLacing(data):
return False # TODO
def hasDisabledLacing(data):
return data.find('"Replication": "Disabled"') > -1 # TO CONFIRM
def hasCrossProductLacing(data):
return False # TODO
def hasPinned(data):
return False # TODO
def hasFrozen(data):
return False # TODO
|
<commit_before>import json
def getVersion(b64decodedData):
json_map = json.loads(b64decodedData)
if not json_map.has_key("View"):
return None
return json.loads(b64decodedData)["View"]["Dynamo"]["Version"]
def usesListAtLevel(data):
usesList = data.find('"UseLevels": true') > -1
return usesList<commit_msg>Add structure for feature extractors for JSON files<commit_after># Feature extractors for JSON files (Dynamo 2+)
import json
def getVersion(b64decodedData):
json_map = json.loads(b64decodedData)
if not json_map.has_key("View"):
return None
return json.loads(b64decodedData)["View"]["Dynamo"]["Version"]
def usesListAtLevel(data):
usesList = data.find('"UseLevels": true') > -1
return usesList
def hasHiddenNodes(data):
return data.find('"ShowGeometry": false,') > -1 # TO CONFIRM
def hasUpstreamHiddenNodes(data):
return False # TODO
def hasShortestLacing(data):
return False # TODO
def hasLongestLacing(data):
return False # TODO
def hasDisabledLacing(data):
return data.find('"Replication": "Disabled"') > -1 # TO CONFIRM
def hasCrossProductLacing(data):
return False # TODO
def hasPinned(data):
return False # TODO
def hasFrozen(data):
return False # TODO
|
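A caveat on the record above: dict.has_key() was removed in Python 3, and the base64-decoded payload is parsed twice. A minimal Python 3 sketch of the same lookup, parsing once (equivalence with real Dynamo files is assumed, not verified):

import json

def get_version(b64_decoded_data):
    # 'in' replaces the Python 2-only has_key(); the payload is parsed once.
    json_map = json.loads(b64_decoded_data)
    if "View" not in json_map:
        return None
    return json_map["View"]["Dynamo"]["Version"]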
dd6a446a9a1ce2624769e276ddb0700da909334b
|
pep438/core.py
|
pep438/core.py
|
"""Core pep438 utility functions"""
from __future__ import unicode_literals
import requests
try:
import xmlrpclib
except:
import xmlrpc.client as xmlrpclib # noqa
from xml.etree import ElementTree
from requirements import parse
def valid_package(package_name):
"""Return bool if package_name is a valid package on PyPI"""
response = requests.head('https://pypi.python.org/pypi/%s' % package_name)
if response.status_code != 404:
response.raise_for_status()
return response.status_code != 404
def get_urls(package_name):
"""Return list of URLs on package's PyPI page that would be crawled"""
response = requests.get('https://pypi.python.org/simple/%s' % package_name)
response.raise_for_status()
page = ElementTree.fromstring(response.content)
crawled_urls = {link.get('href') for link in page.findall('.//a')
if link.get('rel') in ("homepage", "download")}
return crawled_urls
def get_pypi_packages(fileobj):
"""Return all PyPI-hosted packages from file-like object"""
return [p['name'] for p in parse(fileobj) if not p.get('uri')]
def get_pypi_user_packages(user):
client = xmlrpclib.ServerProxy('https://pypi.python.org/pypi')
return [x[1] for x in client.user_packages(user)]
|
"""Core pep438 utility functions"""
from __future__ import unicode_literals
import requests
try:
import xmlrpclib
except:
import xmlrpc.client as xmlrpclib # noqa
from xml.etree import ElementTree
from requirements import parse
def valid_package(package_name):
"""Return bool if package_name is a valid package on PyPI"""
response = requests.head('https://pypi.python.org/pypi/%s' % package_name)
if response.status_code != 404:
response.raise_for_status()
return response.status_code != 404
def get_urls(package_name):
"""Return list of URLs on package's PyPI page that would be crawled"""
response = requests.get('https://pypi.python.org/simple/%s' % package_name)
response.raise_for_status()
page = ElementTree.fromstring(response.content)
crawled_urls = {link.get('href') for link in page.findall('.//a')
if link.get('rel') in ("homepage", "download")}
return crawled_urls
def get_pypi_packages(fileobj):
"""Return all PyPI-hosted packages from file-like object"""
return [p['name'] for p in parse(fileobj) if not p['uri']]
def get_pypi_user_packages(user):
client = xmlrpclib.ServerProxy('https://pypi.python.org/pypi')
return [x[1] for x in client.user_packages(user)]
|
Fix get_pypi_packages for new requirements-parser
|
Fix get_pypi_packages for new requirements-parser
|
Python
|
mit
|
treyhunner/pep438
|
"""Core pep438 utility functions"""
from __future__ import unicode_literals
import requests
try:
import xmlrpclib
except:
import xmlrpc.client as xmlrpclib # noqa
from xml.etree import ElementTree
from requirements import parse
def valid_package(package_name):
"""Return bool if package_name is a valid package on PyPI"""
response = requests.head('https://pypi.python.org/pypi/%s' % package_name)
if response.status_code != 404:
response.raise_for_status()
return response.status_code != 404
def get_urls(package_name):
"""Return list of URLs on package's PyPI page that would be crawled"""
response = requests.get('https://pypi.python.org/simple/%s' % package_name)
response.raise_for_status()
page = ElementTree.fromstring(response.content)
crawled_urls = {link.get('href') for link in page.findall('.//a')
if link.get('rel') in ("homepage", "download")}
return crawled_urls
def get_pypi_packages(fileobj):
"""Return all PyPI-hosted packages from file-like object"""
return [p['name'] for p in parse(fileobj) if not p.get('uri')]
def get_pypi_user_packages(user):
client = xmlrpclib.ServerProxy('https://pypi.python.org/pypi')
return [x[1] for x in client.user_packages(user)]
Fix get_pypi_packages for new requirements-parser
|
"""Core pep438 utility functions"""
from __future__ import unicode_literals
import requests
try:
import xmlrpclib
except:
import xmlrpc.client as xmlrpclib # noqa
from xml.etree import ElementTree
from requirements import parse
def valid_package(package_name):
"""Return bool if package_name is a valid package on PyPI"""
response = requests.head('https://pypi.python.org/pypi/%s' % package_name)
if response.status_code != 404:
response.raise_for_status()
return response.status_code != 404
def get_urls(package_name):
"""Return list of URLs on package's PyPI page that would be crawled"""
response = requests.get('https://pypi.python.org/simple/%s' % package_name)
response.raise_for_status()
page = ElementTree.fromstring(response.content)
crawled_urls = {link.get('href') for link in page.findall('.//a')
if link.get('rel') in ("homepage", "download")}
return crawled_urls
def get_pypi_packages(fileobj):
"""Return all PyPI-hosted packages from file-like object"""
return [p['name'] for p in parse(fileobj) if not p['uri']]
def get_pypi_user_packages(user):
client = xmlrpclib.ServerProxy('https://pypi.python.org/pypi')
return [x[1] for x in client.user_packages(user)]
|
<commit_before>"""Core pep438 utility functions"""
from __future__ import unicode_literals
import requests
try:
import xmlrpclib
except:
import xmlrpc.client as xmlrpclib # noqa
from xml.etree import ElementTree
from requirements import parse
def valid_package(package_name):
"""Return bool if package_name is a valid package on PyPI"""
response = requests.head('https://pypi.python.org/pypi/%s' % package_name)
if response.status_code != 404:
response.raise_for_status()
return response.status_code != 404
def get_urls(package_name):
"""Return list of URLs on package's PyPI page that would be crawled"""
response = requests.get('https://pypi.python.org/simple/%s' % package_name)
response.raise_for_status()
page = ElementTree.fromstring(response.content)
crawled_urls = {link.get('href') for link in page.findall('.//a')
if link.get('rel') in ("homepage", "download")}
return crawled_urls
def get_pypi_packages(fileobj):
"""Return all PyPI-hosted packages from file-like object"""
return [p['name'] for p in parse(fileobj) if not p.get('uri')]
def get_pypi_user_packages(user):
client = xmlrpclib.ServerProxy('https://pypi.python.org/pypi')
return [x[1] for x in client.user_packages(user)]
<commit_msg>Fix get_pypi_packages for new requirements-parser<commit_after>
|
"""Core pep438 utility functions"""
from __future__ import unicode_literals
import requests
try:
import xmlrpclib
except:
import xmlrpc.client as xmlrpclib # noqa
from xml.etree import ElementTree
from requirements import parse
def valid_package(package_name):
"""Return bool if package_name is a valid package on PyPI"""
response = requests.head('https://pypi.python.org/pypi/%s' % package_name)
if response.status_code != 404:
response.raise_for_status()
return response.status_code != 404
def get_urls(package_name):
"""Return list of URLs on package's PyPI page that would be crawled"""
response = requests.get('https://pypi.python.org/simple/%s' % package_name)
response.raise_for_status()
page = ElementTree.fromstring(response.content)
crawled_urls = {link.get('href') for link in page.findall('.//a')
if link.get('rel') in ("homepage", "download")}
return crawled_urls
def get_pypi_packages(fileobj):
"""Return all PyPI-hosted packages from file-like object"""
return [p['name'] for p in parse(fileobj) if not p['uri']]
def get_pypi_user_packages(user):
client = xmlrpclib.ServerProxy('https://pypi.python.org/pypi')
return [x[1] for x in client.user_packages(user)]
|
"""Core pep438 utility functions"""
from __future__ import unicode_literals
import requests
try:
import xmlrpclib
except:
import xmlrpc.client as xmlrpclib # noqa
from xml.etree import ElementTree
from requirements import parse
def valid_package(package_name):
"""Return bool if package_name is a valid package on PyPI"""
response = requests.head('https://pypi.python.org/pypi/%s' % package_name)
if response.status_code != 404:
response.raise_for_status()
return response.status_code != 404
def get_urls(package_name):
"""Return list of URLs on package's PyPI page that would be crawled"""
response = requests.get('https://pypi.python.org/simple/%s' % package_name)
response.raise_for_status()
page = ElementTree.fromstring(response.content)
crawled_urls = {link.get('href') for link in page.findall('.//a')
if link.get('rel') in ("homepage", "download")}
return crawled_urls
def get_pypi_packages(fileobj):
"""Return all PyPI-hosted packages from file-like object"""
return [p['name'] for p in parse(fileobj) if not p.get('uri')]
def get_pypi_user_packages(user):
client = xmlrpclib.ServerProxy('https://pypi.python.org/pypi')
return [x[1] for x in client.user_packages(user)]
Fix get_pypi_packages for new requirements-parser"""Core pep438 utility functions"""
from __future__ import unicode_literals
import requests
try:
import xmlrpclib
except:
import xmlrpc.client as xmlrpclib # noqa
from xml.etree import ElementTree
from requirements import parse
def valid_package(package_name):
"""Return bool if package_name is a valid package on PyPI"""
response = requests.head('https://pypi.python.org/pypi/%s' % package_name)
if response.status_code != 404:
response.raise_for_status()
return response.status_code != 404
def get_urls(package_name):
"""Return list of URLs on package's PyPI page that would be crawled"""
response = requests.get('https://pypi.python.org/simple/%s' % package_name)
response.raise_for_status()
page = ElementTree.fromstring(response.content)
crawled_urls = {link.get('href') for link in page.findall('.//a')
if link.get('rel') in ("homepage", "download")}
return crawled_urls
def get_pypi_packages(fileobj):
"""Return all PyPI-hosted packages from file-like object"""
return [p['name'] for p in parse(fileobj) if not p['uri']]
def get_pypi_user_packages(user):
client = xmlrpclib.ServerProxy('https://pypi.python.org/pypi')
return [x[1] for x in client.user_packages(user)]
|
<commit_before>"""Core pep438 utility functions"""
from __future__ import unicode_literals
import requests
try:
import xmlrpclib
except:
import xmlrpc.client as xmlrpclib # noqa
from xml.etree import ElementTree
from requirements import parse
def valid_package(package_name):
"""Return bool if package_name is a valid package on PyPI"""
response = requests.head('https://pypi.python.org/pypi/%s' % package_name)
if response.status_code != 404:
response.raise_for_status()
return response.status_code != 404
def get_urls(package_name):
"""Return list of URLs on package's PyPI page that would be crawled"""
response = requests.get('https://pypi.python.org/simple/%s' % package_name)
response.raise_for_status()
page = ElementTree.fromstring(response.content)
crawled_urls = {link.get('href') for link in page.findall('.//a')
if link.get('rel') in ("homepage", "download")}
return crawled_urls
def get_pypi_packages(fileobj):
"""Return all PyPI-hosted packages from file-like object"""
return [p['name'] for p in parse(fileobj) if not p.get('uri')]
def get_pypi_user_packages(user):
client = xmlrpclib.ServerProxy('https://pypi.python.org/pypi')
return [x[1] for x in client.user_packages(user)]
<commit_msg>Fix get_pypi_packages for new requirements-parser<commit_after>"""Core pep438 utility functions"""
from __future__ import unicode_literals
import requests
try:
import xmlrpclib
except:
import xmlrpc.client as xmlrpclib # noqa
from xml.etree import ElementTree
from requirements import parse
def valid_package(package_name):
"""Return bool if package_name is a valid package on PyPI"""
response = requests.head('https://pypi.python.org/pypi/%s' % package_name)
if response.status_code != 404:
response.raise_for_status()
return response.status_code != 404
def get_urls(package_name):
"""Return list of URLs on package's PyPI page that would be crawled"""
response = requests.get('https://pypi.python.org/simple/%s' % package_name)
response.raise_for_status()
page = ElementTree.fromstring(response.content)
crawled_urls = {link.get('href') for link in page.findall('.//a')
if link.get('rel') in ("homepage", "download")}
return crawled_urls
def get_pypi_packages(fileobj):
"""Return all PyPI-hosted packages from file-like object"""
return [p['name'] for p in parse(fileobj) if not p['uri']]
def get_pypi_user_packages(user):
client = xmlrpclib.ServerProxy('https://pypi.python.org/pypi')
return [x[1] for x in client.user_packages(user)]
|
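A usage sketch for the record above, assuming the pep438 core functions are importable; requirements-parser accepts a file-like object, so an in-memory buffer is enough for a quick check (the pinned versions below are illustrative):

from io import StringIO

requirements_txt = StringIO("requests==2.0\nDjango>=1.5\n")
for name in get_pypi_packages(requirements_txt):
    print(name, valid_package(name))  # network call; True for real packages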
746439a977cd556e91424c80cf532e1da8551ae7
|
imager/imager_images/urls.py
|
imager/imager_images/urls.py
|
from django.conf.urls import patterns, include, url
from django.contrib.auth.decorators import login_required
from imager_images.views import (AlbumCreate, AlbumUpdate, AlbumDelete,
PhotoCreate, PhotoDelete)
urlpatterns = patterns('imager_images.views',
url(r'^upload/$', login_required(PhotoCreate.as_view()), name='upload'),
url(r'^delete/(?P<id>\d+)/$', login_required(PhotoDelete.as_view()), name='delete'),
url(r'^album_create/$', login_required(AlbumCreate.as_view()), name='album_create'),
url(r'^album_update/(?P<id>\d+)/$', login_required(AlbumUpdate.as_view()), name='album_update'),
url(r'^album_delete/(?P<id>\d+)/$', login_required(AlbumDelete.as_view()), name='album_delete'),
)
|
from django.conf.urls import patterns, include, url
from django.contrib.auth.decorators import login_required
from imager_images.views import (AlbumCreate, AlbumUpdate, AlbumDelete,
PhotoCreate, PhotoDelete)
urlpatterns = patterns('imager_images.views',
url(r'^upload/$', login_required(PhotoCreate.as_view()), name='upload'),
url(r'^delete/(?P<pk>\d+)/$', login_required(PhotoDelete.as_view()), name='delete'),
url(r'^album_create/$', login_required(AlbumCreate.as_view()), name='album_create'),
url(r'^album_update/(?P<pk>\d+)/$', login_required(AlbumUpdate.as_view()), name='album_update'),
url(r'^album_delete/(?P<pk>\d+)/$', login_required(AlbumDelete.as_view()), name='album_delete'),
)
|
Change url routes to use primary key instead of id
|
Change url routes to use primary key instead of id
|
Python
|
mit
|
nbeck90/django-imager,nbeck90/django-imager
|
from django.conf.urls import patterns, include, url
from django.contrib.auth.decorators import login_required
from imager_images.views import (AlbumCreate, AlbumUpdate, AlbumDelete,
PhotoCreate, PhotoDelete)
urlpatterns = patterns('imager_images.views',
url(r'^upload/$', login_required(PhotoCreate.as_view()), name='upload'),
url(r'^delete/(?P<id>\d+)/$', login_required(PhotoDelete.as_view()), name='delete'),
url(r'^album_create/$', login_required(AlbumCreate.as_view()), name='album_create'),
url(r'^album_update/(?P<id>\d+)/$', login_required(AlbumUpdate.as_view()), name='album_update'),
url(r'^album_delete/(?P<id>\d+)/$', login_required(AlbumDelete.as_view()), name='album_delete'),
)
Change url routes to use primary key instead of id
|
from django.conf.urls import patterns, include, url
from django.contrib.auth.decorators import login_required
from imager_images.views import (AlbumCreate, AlbumUpdate, AlbumDelete,
PhotoCreate, PhotoDelete)
urlpatterns = patterns('imager_images.views',
url(r'^upload/$', login_required(PhotoCreate.as_view()), name='upload'),
url(r'^delete/(?P<pk>\d+)/$', login_required(PhotoDelete.as_view()), name='delete'),
url(r'^album_create/$', login_required(AlbumCreate.as_view()), name='album_create'),
url(r'^album_update/(?P<pk>\d+)/$', login_required(AlbumUpdate.as_view()), name='album_update'),
url(r'^album_delete/(?P<pk>\d+)/$', login_required(AlbumDelete.as_view()), name='album_delete'),
)
|
<commit_before>from django.conf.urls import patterns, include, url
from django.contrib.auth.decorators import login_required
from imager_images.views import (AlbumCreate, AlbumUpdate, AlbumDelete,
PhotoCreate, PhotoDelete)
urlpatterns = patterns('imager_images.views',
url(r'^upload/$', login_required(PhotoCreate.as_view()), name='upload'),
url(r'^delete/(?P<id>\d+)/$', login_required(PhotoDelete.as_view()), name='delete'),
url(r'^album_create/$', login_required(AlbumCreate.as_view()), name='album_create'),
url(r'^album_update/(?P<id>\d+)/$', login_required(AlbumUpdate.as_view()), name='album_update'),
url(r'^album_delete/(?P<id>\d+)/$', login_required(AlbumDelete.as_view()), name='album_delete'),
)
<commit_msg>Change url routes to use primary key instead of id<commit_after>
|
from django.conf.urls import patterns, include, url
from django.contrib.auth.decorators import login_required
from imager_images.views import (AlbumCreate, AlbumUpdate, AlbumDelete,
PhotoCreate, PhotoDelete)
urlpatterns = patterns('imager_images.views',
url(r'^upload/$', login_required(PhotoCreate.as_view()), name='upload'),
url(r'^delete/(?P<pk>\d+)/$', login_required(PhotoDelete.as_view()), name='delete'),
url(r'^album_create/$', login_required(AlbumCreate.as_view()), name='album_create'),
url(r'^album_update/(?P<pk>\d+)/$', login_required(AlbumUpdate.as_view()), name='album_update'),
url(r'^album_delete/(?P<pk>\d+)/$', login_required(AlbumDelete.as_view()), name='album_delete'),
)
|
from django.conf.urls import patterns, include, url
from django.contrib.auth.decorators import login_required
from imager_images.views import (AlbumCreate, AlbumUpdate, AlbumDelete,
PhotoCreate, PhotoDelete)
urlpatterns = patterns('imager_images.views',
url(r'^upload/$', login_required(PhotoCreate.as_view()), name='upload'),
url(r'^delete/(?P<id>\d+)/$', login_required(PhotoDelete.as_view()), name='delete'),
url(r'^album_create/$', login_required(AlbumCreate.as_view()), name='album_create'),
url(r'^album_update/(?P<id>\d+)/$', login_required(AlbumUpdate.as_view()), name='album_update'),
url(r'^album_delete/(?P<id>\d+)/$', login_required(AlbumDelete.as_view()), name='album_delete'),
)
Change url routes to use primary key instead of idfrom django.conf.urls import patterns, include, url
from django.contrib.auth.decorators import login_required
from imager_images.views import (AlbumCreate, AlbumUpdate, AlbumDelete,
PhotoCreate, PhotoDelete)
urlpatterns = patterns('imager_images.views',
url(r'^upload/$', login_required(PhotoCreate.as_view()), name='upload'),
url(r'^delete/(?P<pk>\d+)/$', login_required(PhotoDelete.as_view()), name='delete'),
url(r'^album_create/$', login_required(AlbumCreate.as_view()), name='album_create'),
url(r'^album_update/(?P<pk>\d+)/$', login_required(AlbumUpdate.as_view()), name='album_update'),
url(r'^album_delete/(?P<pk>\d+)/$', login_required(AlbumDelete.as_view()), name='album_delete'),
)
|
<commit_before>from django.conf.urls import patterns, include, url
from django.contrib.auth.decorators import login_required
from imager_images.views import (AlbumCreate, AlbumUpdate, AlbumDelete,
PhotoCreate, PhotoDelete)
urlpatterns = patterns('imager_images.views',
url(r'^upload/$', login_required(PhotoCreate.as_view()), name='upload'),
url(r'^delete/(?P<id>\d+)/$', login_required(PhotoDelete.as_view()), name='delete'),
url(r'^album_create/$', login_required(AlbumCreate.as_view()), name='album_create'),
url(r'^album_update/(?P<id>\d+)/$', login_required(AlbumUpdate.as_view()), name='album_update'),
url(r'^album_delete/(?P<id>\d+)/$', login_required(AlbumDelete.as_view()), name='album_delete'),
)
<commit_msg>Change url routes to use primary key instead of id<commit_after>from django.conf.urls import patterns, include, url
from django.contrib.auth.decorators import login_required
from imager_images.views import (AlbumCreate, AlbumUpdate, AlbumDelete,
PhotoCreate, PhotoDelete)
urlpatterns = patterns('imager_images.views',
url(r'^upload/$', login_required(PhotoCreate.as_view()), name='upload'),
url(r'^delete/(?P<pk>\d+)/$', login_required(PhotoDelete.as_view()), name='delete'),
url(r'^album_create/$', login_required(AlbumCreate.as_view()), name='album_create'),
url(r'^album_update/(?P<pk>\d+)/$', login_required(AlbumUpdate.as_view()), name='album_update'),
url(r'^album_delete/(?P<pk>\d+)/$', login_required(AlbumDelete.as_view()), name='album_delete'),
)
|
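The rename in the record above is not cosmetic: Django's generic editing views resolve objects through SingleObjectMixin, whose pk_url_kwarg defaults to 'pk', so a pattern capturing <id> never reaches get_object(). A sketch of the alternative knob (the Photo import path is assumed from the app name, not shown in the record):

from django.views.generic import DeleteView
from imager_images.models import Photo  # hypothetical import

class PhotoDelete(DeleteView):
    model = Photo
    pk_url_kwarg = 'id'  # would make the old <id> capture group work instead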
92c70c5f54b6822f0f3815b66852a2771ef5d49c
|
scheduling/student_invite.py
|
scheduling/student_invite.py
|
from aiohttp.web import Application
from db_helper import get_most_recent_group
from mail import send_user_email
from permissions import get_users_with_permission
async def student_invite(app: Application) -> None:
print("Inviting students")
session = app["session"]
group = get_most_recent_group(session)
group.student_viewable = True
group.student_choosable = True
for user in get_users_with_permission(app, "join_projects"):
await send_user_email(app,
user,
"invite_sent",
group=group)
|
from aiohttp.web import Application
from db_helper import get_most_recent_group
from mail import send_user_email
from permissions import get_users_with_permission
async def student_invite(app: Application) -> None:
print("Inviting students")
session = app["session"]
group = get_most_recent_group(session)
group.student_viewable = True
group.student_choosable = True
group.read_only = True
for user in get_users_with_permission(app, "join_projects"):
await send_user_email(app,
user,
"invite_sent",
group=group)
|
Set group as read only when inviting students
|
Set group as read only when inviting students
|
Python
|
agpl-3.0
|
wtsi-hgi/CoGS-Webapp,wtsi-hgi/CoGS-Webapp,wtsi-hgi/CoGS-Webapp
|
from aiohttp.web import Application
from db_helper import get_most_recent_group
from mail import send_user_email
from permissions import get_users_with_permission
async def student_invite(app: Application) -> None:
print("Inviting students")
session = app["session"]
group = get_most_recent_group(session)
group.student_viewable = True
group.student_choosable = True
for user in get_users_with_permission(app, "join_projects"):
await send_user_email(app,
user,
"invite_sent",
group=group)
Set group as read only when inviting students
|
from aiohttp.web import Application
from db_helper import get_most_recent_group
from mail import send_user_email
from permissions import get_users_with_permission
async def student_invite(app: Application) -> None:
print("Inviting students")
session = app["session"]
group = get_most_recent_group(session)
group.student_viewable = True
group.student_choosable = True
group.read_only = True
for user in get_users_with_permission(app, "join_projects"):
await send_user_email(app,
user,
"invite_sent",
group=group)
|
<commit_before>from aiohttp.web import Application
from db_helper import get_most_recent_group
from mail import send_user_email
from permissions import get_users_with_permission
async def student_invite(app: Application) -> None:
print("Inviting students")
session = app["session"]
group = get_most_recent_group(session)
group.student_viewable = True
group.student_choosable = True
for user in get_users_with_permission(app, "join_projects"):
await send_user_email(app,
user,
"invite_sent",
group=group)
<commit_msg>Set group as read only when inviting students<commit_after>
|
from aiohttp.web import Application
from db_helper import get_most_recent_group
from mail import send_user_email
from permissions import get_users_with_permission
async def student_invite(app: Application) -> None:
print("Inviting students")
session = app["session"]
group = get_most_recent_group(session)
group.student_viewable = True
group.student_choosable = True
group.read_only = True
for user in get_users_with_permission(app, "join_projects"):
await send_user_email(app,
user,
"invite_sent",
group=group)
|
from aiohttp.web import Application
from db_helper import get_most_recent_group
from mail import send_user_email
from permissions import get_users_with_permission
async def student_invite(app: Application) -> None:
print("Inviting students")
session = app["session"]
group = get_most_recent_group(session)
group.student_viewable = True
group.student_choosable = True
for user in get_users_with_permission(app, "join_projects"):
await send_user_email(app,
user,
"invite_sent",
group=group)
Set group as read only when inviting studentsfrom aiohttp.web import Application
from db_helper import get_most_recent_group
from mail import send_user_email
from permissions import get_users_with_permission
async def student_invite(app: Application) -> None:
print("Inviting students")
session = app["session"]
group = get_most_recent_group(session)
group.student_viewable = True
group.student_choosable = True
group.read_only = True
for user in get_users_with_permission(app, "join_projects"):
await send_user_email(app,
user,
"invite_sent",
group=group)
|
<commit_before>from aiohttp.web import Application
from db_helper import get_most_recent_group
from mail import send_user_email
from permissions import get_users_with_permission
async def student_invite(app: Application) -> None:
print("Inviting students")
session = app["session"]
group = get_most_recent_group(session)
group.student_viewable = True
group.student_choosable = True
for user in get_users_with_permission(app, "join_projects"):
await send_user_email(app,
user,
"invite_sent",
group=group)
<commit_msg>Set group as read only when inviting students<commit_after>from aiohttp.web import Application
from db_helper import get_most_recent_group
from mail import send_user_email
from permissions import get_users_with_permission
async def student_invite(app: Application) -> None:
print("Inviting students")
session = app["session"]
group = get_most_recent_group(session)
group.student_viewable = True
group.student_choosable = True
group.read_only = True
for user in get_users_with_permission(app, "join_projects"):
await send_user_email(app,
user,
"invite_sent",
group=group)
|
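A companion sketch for the record above: the dump shows read_only being set but not enforced anywhere. A hypothetical guard that write paths could call before mutating a frozen group:

def ensure_writable(group) -> None:
    # Hypothetical helper; raises before any mutation once student_invite()
    # has frozen the group.
    if group.read_only:
        raise PermissionError("group is read-only after student invites")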
027ae8b8029d01622e3a9647f3ec6b1fca4c4d9d
|
chainerrl/wrappers/__init__.py
|
chainerrl/wrappers/__init__.py
|
from chainerrl.wrappers.cast_observation import CastObservation # NOQA
from chainerrl.wrappers.cast_observation import CastObservationToFloat32 # NOQA
from chainerrl.wrappers.randomize_action import RandomizeAction # NOQA
from chainerrl.wrappers.scale_reward import ScaleReward # NOQA
|
from chainerrl.wrappers.cast_observation import CastObservation # NOQA
from chainerrl.wrappers.cast_observation import CastObservationToFloat32 # NOQA
from chainerrl.wrappers.randomize_action import RandomizeAction # NOQA
from chainerrl.wrappers.render import Render # NOQA
from chainerrl.wrappers.scale_reward import ScaleReward # NOQA
|
Make Render available under chainerrl.wrappers
|
Make Render available under chainerrl.wrappers
|
Python
|
mit
|
toslunar/chainerrl,toslunar/chainerrl
|
from chainerrl.wrappers.cast_observation import CastObservation # NOQA
from chainerrl.wrappers.cast_observation import CastObservationToFloat32 # NOQA
from chainerrl.wrappers.randomize_action import RandomizeAction # NOQA
from chainerrl.wrappers.scale_reward import ScaleReward # NOQA
Make Render available under chainerrl.wrappers
|
from chainerrl.wrappers.cast_observation import CastObservation # NOQA
from chainerrl.wrappers.cast_observation import CastObservationToFloat32 # NOQA
from chainerrl.wrappers.randomize_action import RandomizeAction # NOQA
from chainerrl.wrappers.render import Render # NOQA
from chainerrl.wrappers.scale_reward import ScaleReward # NOQA
|
<commit_before>from chainerrl.wrappers.cast_observation import CastObservation # NOQA
from chainerrl.wrappers.cast_observation import CastObservationToFloat32 # NOQA
from chainerrl.wrappers.randomize_action import RandomizeAction # NOQA
from chainerrl.wrappers.scale_reward import ScaleReward # NOQA
<commit_msg>Make Render available under chainerrl.wrappers<commit_after>
|
from chainerrl.wrappers.cast_observation import CastObservation # NOQA
from chainerrl.wrappers.cast_observation import CastObservationToFloat32 # NOQA
from chainerrl.wrappers.randomize_action import RandomizeAction # NOQA
from chainerrl.wrappers.render import Render # NOQA
from chainerrl.wrappers.scale_reward import ScaleReward # NOQA
|
from chainerrl.wrappers.cast_observation import CastObservation # NOQA
from chainerrl.wrappers.cast_observation import CastObservationToFloat32 # NOQA
from chainerrl.wrappers.randomize_action import RandomizeAction # NOQA
from chainerrl.wrappers.scale_reward import ScaleReward # NOQA
Make Render available under chainerrl.wrappersfrom chainerrl.wrappers.cast_observation import CastObservation # NOQA
from chainerrl.wrappers.cast_observation import CastObservationToFloat32 # NOQA
from chainerrl.wrappers.randomize_action import RandomizeAction # NOQA
from chainerrl.wrappers.render import Render # NOQA
from chainerrl.wrappers.scale_reward import ScaleReward # NOQA
|
<commit_before>from chainerrl.wrappers.cast_observation import CastObservation # NOQA
from chainerrl.wrappers.cast_observation import CastObservationToFloat32 # NOQA
from chainerrl.wrappers.randomize_action import RandomizeAction # NOQA
from chainerrl.wrappers.scale_reward import ScaleReward # NOQA
<commit_msg>Make Render available under chainerrl.wrappers<commit_after>from chainerrl.wrappers.cast_observation import CastObservation # NOQA
from chainerrl.wrappers.cast_observation import CastObservationToFloat32 # NOQA
from chainerrl.wrappers.randomize_action import RandomizeAction # NOQA
from chainerrl.wrappers.render import Render # NOQA
from chainerrl.wrappers.scale_reward import ScaleReward # NOQA
|
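A usage sketch for the re-export above: after the commit, Render imports from the package root alongside the other wrappers. The environment id and stacking order are illustrative assumptions:

import gym
from chainerrl.wrappers import CastObservationToFloat32, Render

# Render draws every step; the cast wrapper hands the agent float32 arrays.
env = Render(CastObservationToFloat32(gym.make("CartPole-v0")))
obs = env.reset()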
79aa9edde1bba39a433475929970dd519fecfdf3
|
requests/_oauth.py
|
requests/_oauth.py
|
# -*- coding: utf-8 -*-
"""
requests._oauth
~~~~~~~~~~~~~~~
This module contains the path hack necessary for oauthlib to be vendored into requests
while allowing upstream changes.
"""
import os
import sys
try:
from oauthlib.oauth1 import rfc5849
from oauthlib.common import extract_params
from oauthlib.oauth1.rfc5849 import (Client, SIGNATURE_HMAC, SIGNATURE_TYPE_AUTH_HEADER)
except ImportError:
path = os.path.abspath('/'.join(__file__.split('/')[:-1]+['packages']))
sys.path.insert(0, path)
from oauthlib.oauth1 import rfc5849
from oauthlib.common import extract_params
from oauthlib.oauth1.rfc5849 import (Client, SIGNATURE_HMAC, SIGNATURE_TYPE_AUTH_HEADER)
|
# -*- coding: utf-8 -*-
"""
requests._oauth
~~~~~~~~~~~~~~~
This module contains the path hack necessary for oauthlib to be vendored into requests
while allowing upstream changes.
"""
import os
import sys
try:
from oauthlib.oauth1 import rfc5849
from oauthlib.common import extract_params
from oauthlib.oauth1.rfc5849 import (Client, SIGNATURE_HMAC, SIGNATURE_TYPE_AUTH_HEADER)
except ImportError:
directory = os.path.dirname(__file__)
path = os.path.join(directory, 'packages')
sys.path.insert(0, path)
from oauthlib.oauth1 import rfc5849
from oauthlib.common import extract_params
from oauthlib.oauth1.rfc5849 import (Client, SIGNATURE_HMAC, SIGNATURE_TYPE_AUTH_HEADER)
|
Make OAuth path hack platform independent.
|
Make OAuth path hack platform independent.
|
Python
|
isc
|
Bluehorn/requests,revolunet/requests,psf/requests,revolunet/requests
|
# -*- coding: utf-8 -*-
"""
requests._oauth
~~~~~~~~~~~~~~~
This module contains the path hack necessary for oauthlib to be vendored into requests
while allowing upstream changes.
"""
import os
import sys
try:
from oauthlib.oauth1 import rfc5849
from oauthlib.common import extract_params
from oauthlib.oauth1.rfc5849 import (Client, SIGNATURE_HMAC, SIGNATURE_TYPE_AUTH_HEADER)
except ImportError:
path = os.path.abspath('/'.join(__file__.split('/')[:-1]+['packages']))
sys.path.insert(0, path)
from oauthlib.oauth1 import rfc5849
from oauthlib.common import extract_params
from oauthlib.oauth1.rfc5849 import (Client, SIGNATURE_HMAC, SIGNATURE_TYPE_AUTH_HEADER)Make OAuth path hack platform independent.
|
# -*- coding: utf-8 -*-
"""
requests._oauth
~~~~~~~~~~~~~~~
This module contains the path hack necessary for oauthlib to be vendored into requests
while allowing upstream changes.
"""
import os
import sys
try:
from oauthlib.oauth1 import rfc5849
from oauthlib.common import extract_params
from oauthlib.oauth1.rfc5849 import (Client, SIGNATURE_HMAC, SIGNATURE_TYPE_AUTH_HEADER)
except ImportError:
directory = os.path.dirname(__file__)
path = os.path.join(directory, 'packages')
sys.path.insert(0, path)
from oauthlib.oauth1 import rfc5849
from oauthlib.common import extract_params
from oauthlib.oauth1.rfc5849 import (Client, SIGNATURE_HMAC, SIGNATURE_TYPE_AUTH_HEADER)
|
<commit_before># -*- coding: utf-8 -*-
"""
requests._oauth
~~~~~~~~~~~~~~~
This module contains the path hack necessary for oauthlib to be vendored into requests
while allowing upstream changes.
"""
import os
import sys
try:
from oauthlib.oauth1 import rfc5849
from oauthlib.common import extract_params
from oauthlib.oauth1.rfc5849 import (Client, SIGNATURE_HMAC, SIGNATURE_TYPE_AUTH_HEADER)
except ImportError:
path = os.path.abspath('/'.join(__file__.split('/')[:-1]+['packages']))
sys.path.insert(0, path)
from oauthlib.oauth1 import rfc5849
from oauthlib.common import extract_params
from oauthlib.oauth1.rfc5849 import (Client, SIGNATURE_HMAC, SIGNATURE_TYPE_AUTH_HEADER)<commit_msg>Make OAuth path hack platform independent.<commit_after>
|
# -*- coding: utf-8 -*-
"""
requests._oauth
~~~~~~~~~~~~~~~
This module contains the path hack necessary for oauthlib to be vendored into requests
while allowing upstream changes.
"""
import os
import sys
try:
from oauthlib.oauth1 import rfc5849
from oauthlib.common import extract_params
from oauthlib.oauth1.rfc5849 import (Client, SIGNATURE_HMAC, SIGNATURE_TYPE_AUTH_HEADER)
except ImportError:
directory = os.path.dirname(__file__)
path = os.path.join(directory, 'packages')
sys.path.insert(0, path)
from oauthlib.oauth1 import rfc5849
from oauthlib.common import extract_params
from oauthlib.oauth1.rfc5849 import (Client, SIGNATURE_HMAC, SIGNATURE_TYPE_AUTH_HEADER)
|
# -*- coding: utf-8 -*-
"""
requests._oauth
~~~~~~~~~~~~~~~
This module contains the path hack necessary for oauthlib to be vendored into requests
while allowing upstream changes.
"""
import os
import sys
try:
from oauthlib.oauth1 import rfc5849
from oauthlib.common import extract_params
from oauthlib.oauth1.rfc5849 import (Client, SIGNATURE_HMAC, SIGNATURE_TYPE_AUTH_HEADER)
except ImportError:
path = os.path.abspath('/'.join(__file__.split('/')[:-1]+['packages']))
sys.path.insert(0, path)
from oauthlib.oauth1 import rfc5849
from oauthlib.common import extract_params
from oauthlib.oauth1.rfc5849 import (Client, SIGNATURE_HMAC, SIGNATURE_TYPE_AUTH_HEADER)Make OAuth path hack platform independent.# -*- coding: utf-8 -*-
"""
requests._oauth
~~~~~~~~~~~~~~~
This module contains the path hack necessary for oauthlib to be vendored into requests
while allowing upstream changes.
"""
import os
import sys
try:
from oauthlib.oauth1 import rfc5849
from oauthlib.common import extract_params
from oauthlib.oauth1.rfc5849 import (Client, SIGNATURE_HMAC, SIGNATURE_TYPE_AUTH_HEADER)
except ImportError:
directory = os.path.dirname(__file__)
path = os.path.join(directory, 'packages')
sys.path.insert(0, path)
from oauthlib.oauth1 import rfc5849
from oauthlib.common import extract_params
from oauthlib.oauth1.rfc5849 import (Client, SIGNATURE_HMAC, SIGNATURE_TYPE_AUTH_HEADER)
|
<commit_before># -*- coding: utf-8 -*-
"""
requests._oauth
~~~~~~~~~~~~~~~
This module contains the path hack necessary for oauthlib to be vendored into requests
while allowing upstream changes.
"""
import os
import sys
try:
from oauthlib.oauth1 import rfc5849
from oauthlib.common import extract_params
from oauthlib.oauth1.rfc5849 import (Client, SIGNATURE_HMAC, SIGNATURE_TYPE_AUTH_HEADER)
except ImportError:
path = os.path.abspath('/'.join(__file__.split('/')[:-1]+['packages']))
sys.path.insert(0, path)
from oauthlib.oauth1 import rfc5849
from oauthlib.common import extract_params
from oauthlib.oauth1.rfc5849 import (Client, SIGNATURE_HMAC, SIGNATURE_TYPE_AUTH_HEADER)<commit_msg>Make OAuth path hack platform independent.<commit_after># -*- coding: utf-8 -*-
"""
requests._oauth
~~~~~~~~~~~~~~~
This module contains the path hack necessary for oauthlib to be vendored into requests
while allowing upstream changes.
"""
import os
import sys
try:
from oauthlib.oauth1 import rfc5849
from oauthlib.common import extract_params
from oauthlib.oauth1.rfc5849 import (Client, SIGNATURE_HMAC, SIGNATURE_TYPE_AUTH_HEADER)
except ImportError:
directory = os.path.dirname(__file__)
path = os.path.join(directory, 'packages')
sys.path.insert(0, path)
from oauthlib.oauth1 import rfc5849
from oauthlib.common import extract_params
from oauthlib.oauth1.rfc5849 import (Client, SIGNATURE_HMAC, SIGNATURE_TYPE_AUTH_HEADER)
|
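The essence of the fix above, isolated: os.path.dirname and os.path.join delegate separator handling to the OS, where the old '/'-split path broke on Windows. Standalone sketch:

import os

directory = os.path.dirname(__file__)           # platform-aware dirname
vendored = os.path.join(directory, 'packages')  # no hard-coded '/'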
429d840a34c4ec2e3b475e412b53e99ffe2a5677
|
studygroups/tasks.py
|
studygroups/tasks.py
|
from django.utils import timezone
from django.conf import settings
from studygroups.models import StudyGroup
from studygroups.models import Reminder
from studygroups.models import generate_reminder
from studygroups.models import send_reminder
from django.utils import translation
import datetime
def send_reminders():
now = timezone.now()
for reminder in Reminder.objects.filter(sent_at__isnull=True):
if reminder.meeting_time - now < datetime.timedelta(days=2):
send_reminder(reminder)
def gen_reminders():
for study_group in StudyGroup.objects.all():
translation.activate(settings.LANGUAGE_CODE)
generate_reminder(study_group)
|
from django.utils import timezone
from django.conf import settings
from studygroups.models import StudyGroup
from studygroups.models import Reminder
from studygroups.models import generate_reminder
from studygroups.models import send_reminder
from django.utils import translation
import datetime
def send_reminders():
now = timezone.now()
translation.activate(settings.LANGUAGE_CODE)
for reminder in Reminder.objects.filter(sent_at__isnull=True):
if reminder.study_group_meeting.meeting_time - now < datetime.timedelta(days=2):
send_reminder(reminder)
def gen_reminders():
for study_group in StudyGroup.objects.all():
translation.activate(settings.LANGUAGE_CODE)
generate_reminder(study_group)
|
Fix task and set language code when sending reminders
|
Fix task and set language code when sending reminders
|
Python
|
mit
|
p2pu/learning-circles,p2pu/learning-circles,p2pu/learning-circles,p2pu/learning-circles
|
from django.utils import timezone
from django.conf import settings
from studygroups.models import StudyGroup
from studygroups.models import Reminder
from studygroups.models import generate_reminder
from studygroups.models import send_reminder
from django.utils import translation
import datetime
def send_reminders():
now = timezone.now()
for reminder in Reminder.objects.filter(sent_at__isnull=True):
if reminder.meeting_time - now < datetime.timedelta(days=2):
send_reminder(reminder)
def gen_reminders():
for study_group in StudyGroup.objects.all():
translation.activate(settings.LANGUAGE_CODE)
generate_reminder(study_group)
Fix task and set language code when sending reminders
|
from django.utils import timezone
from django.conf import settings
from studygroups.models import StudyGroup
from studygroups.models import Reminder
from studygroups.models import generate_reminder
from studygroups.models import send_reminder
from django.utils import translation
import datetime
def send_reminders():
now = timezone.now()
translation.activate(settings.LANGUAGE_CODE)
for reminder in Reminder.objects.filter(sent_at__isnull=True):
if reminder.study_group_meeting.meeting_time - now < datetime.timedelta(days=2):
send_reminder(reminder)
def gen_reminders():
for study_group in StudyGroup.objects.all():
translation.activate(settings.LANGUAGE_CODE)
generate_reminder(study_group)
|
<commit_before>from django.utils import timezone
from django.conf import settings
from studygroups.models import StudyGroup
from studygroups.models import Reminder
from studygroups.models import generate_reminder
from studygroups.models import send_reminder
from django.utils import translation
import datetime
def send_reminders():
now = timezone.now()
for reminder in Reminder.objects.filter(sent_at__isnull=True):
if reminder.meeting_time - now < datetime.timedelta(days=2):
send_reminder(reminder)
def gen_reminders():
for study_group in StudyGroup.objects.all():
translation.activate(settings.LANGUAGE_CODE)
generate_reminder(study_group)
<commit_msg>Fix task and set language code when sending reminders<commit_after>
|
from django.utils import timezone
from django.conf import settings
from studygroups.models import StudyGroup
from studygroups.models import Reminder
from studygroups.models import generate_reminder
from studygroups.models import send_reminder
from django.utils import translation
import datetime
def send_reminders():
now = timezone.now()
translation.activate(settings.LANGUAGE_CODE)
for reminder in Reminder.objects.filter(sent_at__isnull=True):
if reminder.study_group_meeting.meeting_time - now < datetime.timedelta(days=2):
send_reminder(reminder)
def gen_reminders():
for study_group in StudyGroup.objects.all():
translation.activate(settings.LANGUAGE_CODE)
generate_reminder(study_group)
|
from django.utils import timezone
from django.conf import settings
from studygroups.models import StudyGroup
from studygroups.models import Reminder
from studygroups.models import generate_reminder
from studygroups.models import send_reminder
from django.utils import translation
import datetime
def send_reminders():
now = timezone.now()
for reminder in Reminder.objects.filter(sent_at__isnull=True):
if reminder.meeting_time - now < datetime.timedelta(days=2):
send_reminder(reminder)
def gen_reminders():
for study_group in StudyGroup.objects.all():
translation.activate(settings.LANGUAGE_CODE)
generate_reminder(study_group)
Fix task and set language code when sending remindersfrom django.utils import timezone
from django.conf import settings
from studygroups.models import StudyGroup
from studygroups.models import Reminder
from studygroups.models import generate_reminder
from studygroups.models import send_reminder
from django.utils import translation
import datetime
def send_reminders():
now = timezone.now()
translation.activate(settings.LANGUAGE_CODE)
for reminder in Reminder.objects.filter(sent_at__isnull=True):
if reminder.study_group_meeting.meeting_time - now < datetime.timedelta(days=2):
send_reminder(reminder)
def gen_reminders():
for study_group in StudyGroup.objects.all():
translation.activate(settings.LANGUAGE_CODE)
generate_reminder(study_group)
|
<commit_before>from django.utils import timezone
from django.conf import settings
from studygroups.models import StudyGroup
from studygroups.models import Reminder
from studygroups.models import generate_reminder
from studygroups.models import send_reminder
from django.utils import translation
import datetime
def send_reminders():
now = timezone.now()
for reminder in Reminder.objects.filter(sent_at__isnull=True):
if reminder.meeting_time - now < datetime.timedelta(days=2):
send_reminder(reminder)
def gen_reminders():
for study_group in StudyGroup.objects.all():
translation.activate(settings.LANGUAGE_CODE)
generate_reminder(study_group)
<commit_msg>Fix task and set language code when sending reminders<commit_after>from django.utils import timezone
from django.conf import settings
from studygroups.models import StudyGroup
from studygroups.models import Reminder
from studygroups.models import generate_reminder
from studygroups.models import send_reminder
from django.utils import translation
import datetime
def send_reminders():
now = timezone.now()
translation.activate(settings.LANGUAGE_CODE)
for reminder in Reminder.objects.filter(sent_at__isnull=True):
if reminder.study_group_meeting.meeting_time - now < datetime.timedelta(days=2):
send_reminder(reminder)
def gen_reminders():
for study_group in StudyGroup.objects.all():
translation.activate(settings.LANGUAGE_CODE)
generate_reminder(study_group)
|
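The window test from the record above, extracted as a pure function for unit testing. Note the recorded comparison also matches meetings already in the past, since a negative delta is below the threshold; the sketch keeps that behaviour:

import datetime

def due_for_reminder(meeting_time, now, window=datetime.timedelta(days=2)):
    # True for anything less than `window` away, past meetings included.
    return meeting_time - now < window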
fa19a6ec882727bb96f27993d7ac765797c19556
|
logger/utilities.py
|
logger/utilities.py
|
#!/usr/bin/env python3
"""Small utility functions for use in various places."""
__all__ = ["pick", "is_dunder"]
def pick(arg, default):
"""Handler for default versus given argument."""
return default if arg is None else arg
def is_dunder(name):
"""Return True if a __dunder__ name, False otherwise."""
return name[:2] == name[-2:] == "__" and "_" not in (name[2:3], name[-3:-2])
|
#!/usr/bin/env python3
"""Small utility functions for use in various places."""
__all__ = ["pick", "is_dunder", "find_name"]
import sys
def pick(arg, default):
"""Handler for default versus given argument."""
return default if arg is None else arg
def is_dunder(name):
"""Return True if a __dunder__ name, False otherwise."""
return name[:2] == name[-2:] == "__" and "_" not in (name[2:3], name[-3:-2])
def find_name(name, depth=0):
"""Find a name in the calling frame's scopes."""
calling_frame = sys._getframe(depth + 2)
if name in calling_frame.f_locals:
return calling_frame.f_locals[name]
if name in calling_frame.f_globals:
return calling_frame.f_globals[name]
if name in calling_frame.f_builtins:
return calling_frame.f_builtins[name]
raise NameError("could not find {!r}".format(name))
|
Add a find_name utility function
|
Add a find_name utility function
|
Python
|
bsd-2-clause
|
Vgr255/logging
|
#!/usr/bin/env python3
"""Small utility functions for use in various places."""
__all__ = ["pick", "is_dunder"]
def pick(arg, default):
"""Handler for default versus given argument."""
return default if arg is None else arg
def is_dunder(name):
"""Return True if a __dunder__ name, False otherwise."""
return name[:2] == name[-2:] == "__" and "_" not in (name[2:3], name[-3:-2])
Add a find_name utility function
|
#!/usr/bin/env python3
"""Small utility functions for use in various places."""
__all__ = ["pick", "is_dunder", "find_name"]
import sys
def pick(arg, default):
"""Handler for default versus given argument."""
return default if arg is None else arg
def is_dunder(name):
"""Return True if a __dunder__ name, False otherwise."""
return name[:2] == name[-2:] == "__" and "_" not in (name[2:3], name[-3:-2])
def find_name(name, depth=0):
"""Find a name in the calling frame's scopes."""
calling_frame = sys._getframe(depth + 2)
if name in calling_frame.f_locals:
return calling_frame.f_locals[name]
if name in calling_frame.f_globals:
return calling_frame.f_globals[name]
if name in calling_frame.f_builtins:
return calling_frame.f_builtins[name]
raise NameError("could not find {!r}".format(name))
|
<commit_before>#!/usr/bin/env python3
"""Small utility functions for use in various places."""
__all__ = ["pick", "is_dunder"]
def pick(arg, default):
"""Handler for default versus given argument."""
return default if arg is None else arg
def is_dunder(name):
"""Return True if a __dunder__ name, False otherwise."""
return name[:2] == name[-2:] == "__" and "_" not in (name[2:3], name[-3:-2])
<commit_msg>Add a find_name utility function<commit_after>
|
#!/usr/bin/env python3
"""Small utility functions for use in various places."""
__all__ = ["pick", "is_dunder", "find_name"]
import sys
def pick(arg, default):
"""Handler for default versus given argument."""
return default if arg is None else arg
def is_dunder(name):
"""Return True if a __dunder__ name, False otherwise."""
return name[:2] == name[-2:] == "__" and "_" not in (name[2:3], name[-3:-2])
def find_name(name, depth=0):
"""Find a name in the calling frame's scopes."""
calling_frame = sys._getframe(depth + 2)
if name in calling_frame.f_locals:
return calling_frame.f_locals[name]
if name in calling_frame.f_globals:
return calling_frame.f_globals[name]
if name in calling_frame.f_builtins:
return calling_frame.f_builtins[name]
raise NameError("could not find {!r}".format(name))
|
#!/usr/bin/env python3
"""Small utility functions for use in various places."""
__all__ = ["pick", "is_dunder"]
def pick(arg, default):
"""Handler for default versus given argument."""
return default if arg is None else arg
def is_dunder(name):
"""Return True if a __dunder__ name, False otherwise."""
return name[:2] == name[-2:] == "__" and "_" not in (name[2:3], name[-3:-2])
Add a find_name utility function#!/usr/bin/env python3
"""Small utility functions for use in various places."""
__all__ = ["pick", "is_dunder", "find_name"]
import sys
def pick(arg, default):
"""Handler for default versus given argument."""
return default if arg is None else arg
def is_dunder(name):
"""Return True if a __dunder__ name, False otherwise."""
return name[:2] == name[-2:] == "__" and "_" not in (name[2:3], name[-3:-2])
def find_name(name, depth=0):
"""Find a name in the calling frame's scopes."""
calling_frame = sys._getframe(depth + 2)
if name in calling_frame.f_locals:
return calling_frame.f_locals[name]
if name in calling_frame.f_globals:
return calling_frame.f_globals[name]
if name in calling_frame.f_builtins:
return calling_frame.f_builtins[name]
raise NameError("could not find {!r}".format(name))
|
<commit_before>#!/usr/bin/env python3
"""Small utility functions for use in various places."""
__all__ = ["pick", "is_dunder"]
def pick(arg, default):
"""Handler for default versus given argument."""
return default if arg is None else arg
def is_dunder(name):
"""Return True if a __dunder__ name, False otherwise."""
return name[:2] == name[-2:] == "__" and "_" not in (name[2:3], name[-3:-2])
<commit_msg>Add a find_name utility function<commit_after>#!/usr/bin/env python3
"""Small utility functions for use in various places."""
__all__ = ["pick", "is_dunder", "find_name"]
import sys
def pick(arg, default):
"""Handler for default versus given argument."""
return default if arg is None else arg
def is_dunder(name):
"""Return True if a __dunder__ name, False otherwise."""
return name[:2] == name[-2:] == "__" and "_" not in (name[2:3], name[-3:-2])
def find_name(name, depth=0):
"""Find a name in the calling frame's scopes."""
calling_frame = sys._getframe(depth + 2)
if name in calling_frame.f_locals:
return calling_frame.f_locals[name]
if name in calling_frame.f_globals:
return calling_frame.f_globals[name]
if name in calling_frame.f_builtins:
return calling_frame.f_builtins[name]
raise NameError("could not find {!r}".format(name))
|
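A demonstration sketch for find_name from the record above: the depth + 2 frame offset skips find_name itself plus its direct caller, so a helper can resolve names in its own caller's scope. Helper and variable names are illustrative:

def pick_from_caller(name, default=None):
    try:
        return find_name(name)  # inspects pick_from_caller's caller
    except NameError:
        return default

def demo():
    answer = 42
    return pick_from_caller("answer")  # resolves demo's local -> 42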
9eb07a5b7d2875cf79bb698864d11ef29576133e
|
comics/utils/hash.py
|
comics/utils/hash.py
|
import hashlib
def sha256sum(filename):
"""Returns sha256sum for file"""
f = file(filename, 'rb')
m = hashlib.sha256()
while True:
b = f.read(8096)
if not b:
break
m.update(b)
f.close()
return m.hexdigest()
|
import hashlib
def sha256sum(filename=None, filehandle=None):
"""Returns sha256sum for file"""
if filename is not None:
f = file(filename, 'rb')
else:
f = filehandle
m = hashlib.sha256()
while True:
b = f.read(8096)
if not b:
break
m.update(b)
if filename is not None:
f.close()
return m.hexdigest()
|
Make sha256sum work with open filehandles too
|
Make sha256sum work with open filehandles too
|
Python
|
agpl-3.0
|
datagutten/comics,jodal/comics,jodal/comics,datagutten/comics,jodal/comics,datagutten/comics,klette/comics,klette/comics,klette/comics,jodal/comics,datagutten/comics
|
import hashlib
def sha256sum(filename):
"""Returns sha256sum for file"""
f = file(filename, 'rb')
m = hashlib.sha256()
while True:
b = f.read(8096)
if not b:
break
m.update(b)
f.close()
return m.hexdigest()
Make sha256sum work with open filehandles too
|
import hashlib
def sha256sum(filename=None, filehandle=None):
"""Returns sha256sum for file"""
if filename is not None:
f = file(filename, 'rb')
else:
f = filehandle
m = hashlib.sha256()
while True:
b = f.read(8096)
if not b:
break
m.update(b)
if filename is not None:
f.close()
return m.hexdigest()
|
<commit_before>import hashlib
def sha256sum(filename):
"""Returns sha256sum for file"""
f = file(filename, 'rb')
m = hashlib.sha256()
while True:
b = f.read(8096)
if not b:
break
m.update(b)
f.close()
return m.hexdigest()
<commit_msg>Make sha256sum work with open filehandles too<commit_after>
|
import hashlib
def sha256sum(filename=None, filehandle=None):
"""Returns sha256sum for file"""
if filename is not None:
f = file(filename, 'rb')
else:
f = filehandle
m = hashlib.sha256()
while True:
b = f.read(8096)
if not b:
break
m.update(b)
if filename is not None:
f.close()
return m.hexdigest()
|
import hashlib
def sha256sum(filename):
"""Returns sha256sum for file"""
f = file(filename, 'rb')
m = hashlib.sha256()
while True:
b = f.read(8096)
if not b:
break
m.update(b)
f.close()
return m.hexdigest()
Make sha256sum work with open filehandles tooimport hashlib
def sha256sum(filename=None, filehandle=None):
"""Returns sha256sum for file"""
if filename is not None:
f = file(filename, 'rb')
else:
f = filehandle
m = hashlib.sha256()
while True:
b = f.read(8096)
if not b:
break
m.update(b)
if filename is not None:
f.close()
return m.hexdigest()
|
<commit_before>import hashlib
def sha256sum(filename):
"""Returns sha256sum for file"""
f = file(filename, 'rb')
m = hashlib.sha256()
while True:
b = f.read(8096)
if not b:
break
m.update(b)
f.close()
return m.hexdigest()
<commit_msg>Make sha256sum work with open filehandles too<commit_after>import hashlib
def sha256sum(filename=None, filehandle=None):
"""Returns sha256sum for file"""
if filename is not None:
f = file(filename, 'rb')
else:
f = filehandle
m = hashlib.sha256()
while True:
b = f.read(8096)
if not b:
break
m.update(b)
if filename is not None:
f.close()
return m.hexdigest()
|
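A hedged Python 3 rewrite of the filename-or-filehandle pattern from the sha256sum record above. The record's code is Python 2 (the file() builtin and the 8096-byte read are preserved exactly as committed); this sketch only shows how the same dual interface might look today and is not the project's actual code.

import hashlib

def sha256sum(filename=None, filehandle=None):
    """Return the SHA-256 hex digest of a file given by path or by open handle."""
    if filename is not None:
        f = open(filename, 'rb')  # Python 3: open() replaces the old file() builtin
    else:
        f = filehandle
    m = hashlib.sha256()
    try:
        # iter() with a sentinel keeps reading fixed-size chunks until EOF
        for chunk in iter(lambda: f.read(8192), b''):
            m.update(chunk)
    finally:
        if filename is not None:
            f.close()  # only close handles this function opened itself
    return m.hexdigest()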
e99976cbee1e43e7112b4759cf5a1a17a1be8170
|
utils/gyb_syntax_support/protocolsMap.py
|
utils/gyb_syntax_support/protocolsMap.py
|
SYNTAX_BUILDABLE_EXPRESSIBLE_AS_CONFORMANCES = {
'DeclBuildable': [
'CodeBlockItem',
'MemberDeclListItem',
'SyntaxBuildable'
],
'ExprList': [
'ConditionElement',
'SyntaxBuildable'
],
'IdentifierPattern': [
'PatternBuildable'
],
'SimpleTypeIdentifier': [
'TypeAnnotation',
'TypeBuildable',
'TypeExpr'
],
'StmtBuildable': [
'CodeBlockItem',
'SyntaxBuildable'
]
}
|
SYNTAX_BUILDABLE_EXPRESSIBLE_AS_CONFORMANCES = {
'DeclBuildable': [
'CodeBlockItem',
'MemberDeclListItem',
'SyntaxBuildable'
],
'ExprList': [
'ConditionElement',
'SyntaxBuildable'
],
'IdentifierPattern': [
'PatternBuildable'
],
'MemberDeclList': [
'MemberDeclBlock'
],
'SimpleTypeIdentifier': [
'TypeAnnotation',
'TypeBuildable',
'TypeExpr'
],
'StmtBuildable': [
'CodeBlockItem',
'SyntaxBuildable'
]
}
|
Add convenience initializers for `MemberDeclList`
|
Add convenience initializers for `MemberDeclList`
|
Python
|
apache-2.0
|
glessard/swift,atrick/swift,apple/swift,benlangmuir/swift,rudkx/swift,glessard/swift,atrick/swift,rudkx/swift,gregomni/swift,glessard/swift,apple/swift,atrick/swift,glessard/swift,JGiola/swift,atrick/swift,ahoppen/swift,gregomni/swift,JGiola/swift,glessard/swift,ahoppen/swift,ahoppen/swift,JGiola/swift,rudkx/swift,atrick/swift,gregomni/swift,ahoppen/swift,benlangmuir/swift,glessard/swift,atrick/swift,gregomni/swift,apple/swift,roambotics/swift,benlangmuir/swift,roambotics/swift,rudkx/swift,roambotics/swift,roambotics/swift,apple/swift,ahoppen/swift,JGiola/swift,gregomni/swift,roambotics/swift,apple/swift,rudkx/swift,benlangmuir/swift,rudkx/swift,gregomni/swift,roambotics/swift,JGiola/swift,ahoppen/swift,apple/swift,benlangmuir/swift,benlangmuir/swift,JGiola/swift
|
SYNTAX_BUILDABLE_EXPRESSIBLE_AS_CONFORMANCES = {
'DeclBuildable': [
'CodeBlockItem',
'MemberDeclListItem',
'SyntaxBuildable'
],
'ExprList': [
'ConditionElement',
'SyntaxBuildable'
],
'IdentifierPattern': [
'PatternBuildable'
],
'SimpleTypeIdentifier': [
'TypeAnnotation',
'TypeBuildable',
'TypeExpr'
],
'StmtBuildable': [
'CodeBlockItem',
'SyntaxBuildable'
]
}
Add convenience initializers for `MemberDeclList`
|
SYNTAX_BUILDABLE_EXPRESSIBLE_AS_CONFORMANCES = {
'DeclBuildable': [
'CodeBlockItem',
'MemberDeclListItem',
'SyntaxBuildable'
],
'ExprList': [
'ConditionElement',
'SyntaxBuildable'
],
'IdentifierPattern': [
'PatternBuildable'
],
'MemberDeclList': [
'MemberDeclBlock'
],
'SimpleTypeIdentifier': [
'TypeAnnotation',
'TypeBuildable',
'TypeExpr'
],
'StmtBuildable': [
'CodeBlockItem',
'SyntaxBuildable'
]
}
|
<commit_before>SYNTAX_BUILDABLE_EXPRESSIBLE_AS_CONFORMANCES = {
'DeclBuildable': [
'CodeBlockItem',
'MemberDeclListItem',
'SyntaxBuildable'
],
'ExprList': [
'ConditionElement',
'SyntaxBuildable'
],
'IdentifierPattern': [
'PatternBuildable'
],
'SimpleTypeIdentifier': [
'TypeAnnotation',
'TypeBuildable',
'TypeExpr'
],
'StmtBuildable': [
'CodeBlockItem',
'SyntaxBuildable'
]
}
<commit_msg>Add convenience initializers for `MemberDeclList`<commit_after>
|
SYNTAX_BUILDABLE_EXPRESSIBLE_AS_CONFORMANCES = {
'DeclBuildable': [
'CodeBlockItem',
'MemberDeclListItem',
'SyntaxBuildable'
],
'ExprList': [
'ConditionElement',
'SyntaxBuildable'
],
'IdentifierPattern': [
'PatternBuildable'
],
'MemberDeclList': [
'MemberDeclBlock'
],
'SimpleTypeIdentifier': [
'TypeAnnotation',
'TypeBuildable',
'TypeExpr'
],
'StmtBuildable': [
'CodeBlockItem',
'SyntaxBuildable'
]
}
|
SYNTAX_BUILDABLE_EXPRESSIBLE_AS_CONFORMANCES = {
'DeclBuildable': [
'CodeBlockItem',
'MemberDeclListItem',
'SyntaxBuildable'
],
'ExprList': [
'ConditionElement',
'SyntaxBuildable'
],
'IdentifierPattern': [
'PatternBuildable'
],
'SimpleTypeIdentifier': [
'TypeAnnotation',
'TypeBuildable',
'TypeExpr'
],
'StmtBuildable': [
'CodeBlockItem',
'SyntaxBuildable'
]
}
Add convenience initializers for `MemberDeclList`SYNTAX_BUILDABLE_EXPRESSIBLE_AS_CONFORMANCES = {
'DeclBuildable': [
'CodeBlockItem',
'MemberDeclListItem',
'SyntaxBuildable'
],
'ExprList': [
'ConditionElement',
'SyntaxBuildable'
],
'IdentifierPattern': [
'PatternBuildable'
],
'MemberDeclList': [
'MemberDeclBlock'
],
'SimpleTypeIdentifier': [
'TypeAnnotation',
'TypeBuildable',
'TypeExpr'
],
'StmtBuildable': [
'CodeBlockItem',
'SyntaxBuildable'
]
}
|
<commit_before>SYNTAX_BUILDABLE_EXPRESSIBLE_AS_CONFORMANCES = {
'DeclBuildable': [
'CodeBlockItem',
'MemberDeclListItem',
'SyntaxBuildable'
],
'ExprList': [
'ConditionElement',
'SyntaxBuildable'
],
'IdentifierPattern': [
'PatternBuildable'
],
'SimpleTypeIdentifier': [
'TypeAnnotation',
'TypeBuildable',
'TypeExpr'
],
'StmtBuildable': [
'CodeBlockItem',
'SyntaxBuildable'
]
}
<commit_msg>Add convenience initializers for `MemberDeclList`<commit_after>SYNTAX_BUILDABLE_EXPRESSIBLE_AS_CONFORMANCES = {
'DeclBuildable': [
'CodeBlockItem',
'MemberDeclListItem',
'SyntaxBuildable'
],
'ExprList': [
'ConditionElement',
'SyntaxBuildable'
],
'IdentifierPattern': [
'PatternBuildable'
],
'MemberDeclList': [
'MemberDeclBlock'
],
'SimpleTypeIdentifier': [
'TypeAnnotation',
'TypeBuildable',
'TypeExpr'
],
'StmtBuildable': [
'CodeBlockItem',
'SyntaxBuildable'
]
}
|
46b00107e90df8f34a9cce5c4b010fdfb88f5f52
|
shovel/code.py
|
shovel/code.py
|
# coding: utf-8
from __future__ import absolute_import, division, print_function
from pathlib import Path
from isort import SortImports
from shovel import task
# isort multi_line_output modes
GRID = 0
VERTICAL = 1
HANGING_INDENT = 2
VERTICAL_HANGING_INDENT = 3
HANGING_GRID = 4
HANGING_GRID_GROUPED = 5
@task
def format_imports():
"""Sort imports into a consistent style."""
astrodynamics_dir = Path('astrodynamics')
constants_dir = astrodynamics_dir / 'constants'
for initfile in astrodynamics_dir.glob('**/__init__.py'):
if constants_dir in initfile.parents:
continue
SortImports(str(initfile),
multi_line_output=VERTICAL_HANGING_INDENT,
not_skip=['__init__.py'])
# Exclude __init__.py
# Exclude generated constants/ python files
for pyfile in astrodynamics_dir.glob('**/*.py'):
if constants_dir in pyfile.parents and pyfile.stem != 'constant':
continue
SortImports(str(pyfile),
multi_line_output=HANGING_GRID,
skip=['__init__.py'])
|
# coding: utf-8
from __future__ import absolute_import, division, print_function
from pathlib import Path
from isort import SortImports
from shovel import task
# isort multi_line_output modes
GRID = 0
VERTICAL = 1
HANGING_INDENT = 2
VERTICAL_HANGING_INDENT = 3
HANGING_GRID = 4
HANGING_GRID_GROUPED = 5
@task
def format_imports():
"""Sort imports into a consistent style."""
astrodynamics_dir = Path('astrodynamics')
constants_dir = astrodynamics_dir / 'constants'
for initfile in astrodynamics_dir.glob('**/__init__.py'):
if constants_dir in initfile.parents:
continue
SortImports(str(initfile),
multi_line_output=VERTICAL_HANGING_INDENT,
not_skip=['__init__.py'])
# Exclude __init__.py
# Exclude generated constants/ python files
for pyfile in astrodynamics_dir.glob('**/*.py'):
if constants_dir in pyfile.parents and pyfile.stem != 'constant':
continue
SortImports(str(pyfile),
multi_line_output=HANGING_GRID,
skip=['__init__.py'],
known_third_party=['six'])
|
Add 'six' to known_third_party for SortImports
|
Add 'six' to known_third_party for SortImports
six was being sorted incorrectly due to being classed as first party.
|
Python
|
mit
|
python-astrodynamics/astrodynamics,python-astrodynamics/astrodynamics
|
# coding: utf-8
from __future__ import absolute_import, division, print_function
from pathlib import Path
from isort import SortImports
from shovel import task
# isort multi_line_output modes
GRID = 0
VERTICAL = 1
HANGING_INDENT = 2
VERTICAL_HANGING_INDENT = 3
HANGING_GRID = 4
HANGING_GRID_GROUPED = 5
@task
def format_imports():
"""Sort imports into a consistent style."""
astrodynamics_dir = Path('astrodynamics')
constants_dir = astrodynamics_dir / 'constants'
for initfile in astrodynamics_dir.glob('**/__init__.py'):
if constants_dir in initfile.parents:
continue
SortImports(str(initfile),
multi_line_output=VERTICAL_HANGING_INDENT,
not_skip=['__init__.py'])
# Exclude __init__.py
# Exclude generated constants/ python files
for pyfile in astrodynamics_dir.glob('**/*.py'):
if constants_dir in pyfile.parents and pyfile.stem != 'constant':
continue
SortImports(str(pyfile),
multi_line_output=HANGING_GRID,
skip=['__init__.py'])
Add 'six' to known_third_party for SortImports
six was being sorted incorrectly due to being classed as first party.
|
# coding: utf-8
from __future__ import absolute_import, division, print_function
from pathlib import Path
from isort import SortImports
from shovel import task
# isort multi_line_output modes
GRID = 0
VERTICAL = 1
HANGING_INDENT = 2
VERTICAL_HANGING_INDENT = 3
HANGING_GRID = 4
HANGING_GRID_GROUPED = 5
@task
def format_imports():
"""Sort imports into a consistent style."""
astrodynamics_dir = Path('astrodynamics')
constants_dir = astrodynamics_dir / 'constants'
for initfile in astrodynamics_dir.glob('**/__init__.py'):
if constants_dir in initfile.parents:
continue
SortImports(str(initfile),
multi_line_output=VERTICAL_HANGING_INDENT,
not_skip=['__init__.py'])
# Exclude __init__.py
# Exclude generated constants/ python files
for pyfile in astrodynamics_dir.glob('**/*.py'):
if constants_dir in pyfile.parents and pyfile.stem != 'constant':
continue
SortImports(str(pyfile),
multi_line_output=HANGING_GRID,
skip=['__init__.py'],
known_third_party=['six'])
|
<commit_before># coding: utf-8
from __future__ import absolute_import, division, print_function
from pathlib import Path
from isort import SortImports
from shovel import task
# isort multi_line_output modes
GRID = 0
VERTICAL = 1
HANGING_INDENT = 2
VERTICAL_HANGING_INDENT = 3
HANGING_GRID = 4
HANGING_GRID_GROUPED = 5
@task
def format_imports():
"""Sort imports into a consistent style."""
astrodynamics_dir = Path('astrodynamics')
constants_dir = astrodynamics_dir / 'constants'
for initfile in astrodynamics_dir.glob('**/__init__.py'):
if constants_dir in initfile.parents:
continue
SortImports(str(initfile),
multi_line_output=VERTICAL_HANGING_INDENT,
not_skip=['__init__.py'])
# Exclude __init__.py
# Exclude generated constants/ python files
for pyfile in astrodynamics_dir.glob('**/*.py'):
if constants_dir in pyfile.parents and pyfile.stem != 'constant':
continue
SortImports(str(pyfile),
multi_line_output=HANGING_GRID,
skip=['__init__.py'])
<commit_msg>Add 'six' to known_third_party for SortImports
six was being sorted incorrectly due to being classed as first party.<commit_after>
|
# coding: utf-8
from __future__ import absolute_import, division, print_function
from pathlib import Path
from isort import SortImports
from shovel import task
# isort multi_line_output modes
GRID = 0
VERTICAL = 1
HANGING_INDENT = 2
VERTICAL_HANGING_INDENT = 3
HANGING_GRID = 4
HANGING_GRID_GROUPED = 5
@task
def format_imports():
"""Sort imports into a consistent style."""
astrodynamics_dir = Path('astrodynamics')
constants_dir = astrodynamics_dir / 'constants'
for initfile in astrodynamics_dir.glob('**/__init__.py'):
if constants_dir in initfile.parents:
continue
SortImports(str(initfile),
multi_line_output=VERTICAL_HANGING_INDENT,
not_skip=['__init__.py'])
# Exclude __init__.py
# Exclude generated constants/ python files
for pyfile in astrodynamics_dir.glob('**/*.py'):
if constants_dir in pyfile.parents and pyfile.stem != 'constant':
continue
SortImports(str(pyfile),
multi_line_output=HANGING_GRID,
skip=['__init__.py'],
known_third_party=['six'])
|
# coding: utf-8
from __future__ import absolute_import, division, print_function
from pathlib import Path
from isort import SortImports
from shovel import task
# isort multi_line_output modes
GRID = 0
VERTICAL = 1
HANGING_INDENT = 2
VERTICAL_HANGING_INDENT = 3
HANGING_GRID = 4
HANGING_GRID_GROUPED = 5
@task
def format_imports():
"""Sort imports into a consistent style."""
astrodynamics_dir = Path('astrodynamics')
constants_dir = astrodynamics_dir / 'constants'
for initfile in astrodynamics_dir.glob('**/__init__.py'):
if constants_dir in initfile.parents:
continue
SortImports(str(initfile),
multi_line_output=VERTICAL_HANGING_INDENT,
not_skip=['__init__.py'])
# Exclude __init__.py
# Exclude generated constants/ python files
for pyfile in astrodynamics_dir.glob('**/*.py'):
if constants_dir in pyfile.parents and pyfile.stem != 'constant':
continue
SortImports(str(pyfile),
multi_line_output=HANGING_GRID,
skip=['__init__.py'])
Add 'six' to known_third_party for SortImports
six was being sorted incorrectly due to being classed as first party.# coding: utf-8
from __future__ import absolute_import, division, print_function
from pathlib import Path
from isort import SortImports
from shovel import task
# isort multi_line_output modes
GRID = 0
VERTICAL = 1
HANGING_INDENT = 2
VERTICAL_HANGING_INDENT = 3
HANGING_GRID = 4
HANGING_GRID_GROUPED = 5
@task
def format_imports():
"""Sort imports into a consistent style."""
astrodynamics_dir = Path('astrodynamics')
constants_dir = astrodynamics_dir / 'constants'
for initfile in astrodynamics_dir.glob('**/__init__.py'):
if constants_dir in initfile.parents:
continue
SortImports(str(initfile),
multi_line_output=VERTICAL_HANGING_INDENT,
not_skip=['__init__.py'])
# Exclude __init__.py
# Exclude generated constants/ python files
for pyfile in astrodynamics_dir.glob('**/*.py'):
if constants_dir in pyfile.parents and pyfile.stem != 'constant':
continue
SortImports(str(pyfile),
multi_line_output=HANGING_GRID,
skip=['__init__.py'],
known_third_party=['six'])
|
<commit_before># coding: utf-8
from __future__ import absolute_import, division, print_function
from pathlib import Path
from isort import SortImports
from shovel import task
# isort multi_line_output modes
GRID = 0
VERTICAL = 1
HANGING_INDENT = 2
VERTICAL_HANGING_INDENT = 3
HANGING_GRID = 4
HANGING_GRID_GROUPED = 5
@task
def format_imports():
"""Sort imports into a consistent style."""
astrodynamics_dir = Path('astrodynamics')
constants_dir = astrodynamics_dir / 'constants'
for initfile in astrodynamics_dir.glob('**/__init__.py'):
if constants_dir in initfile.parents:
continue
SortImports(str(initfile),
multi_line_output=VERTICAL_HANGING_INDENT,
not_skip=['__init__.py'])
# Exclude __init__.py
# Exclude generated constants/ python files
for pyfile in astrodynamics_dir.glob('**/*.py'):
if constants_dir in pyfile.parents and pyfile.stem != 'constant':
continue
SortImports(str(pyfile),
multi_line_output=HANGING_GRID,
skip=['__init__.py'])
<commit_msg>Add 'six' to known_third_party for SortImports
six was being sorted incorrectly due to being classed as first party.<commit_after># coding: utf-8
from __future__ import absolute_import, division, print_function
from pathlib import Path
from isort import SortImports
from shovel import task
# isort multi_line_output modes
GRID = 0
VERTICAL = 1
HANGING_INDENT = 2
VERTICAL_HANGING_INDENT = 3
HANGING_GRID = 4
HANGING_GRID_GROUPED = 5
@task
def format_imports():
"""Sort imports into a consistent style."""
astrodynamics_dir = Path('astrodynamics')
constants_dir = astrodynamics_dir / 'constants'
for initfile in astrodynamics_dir.glob('**/__init__.py'):
if constants_dir in initfile.parents:
continue
SortImports(str(initfile),
multi_line_output=VERTICAL_HANGING_INDENT,
not_skip=['__init__.py'])
# Exclude __init__.py
# Exclude generated constants/ python files
for pyfile in astrodynamics_dir.glob('**/*.py'):
if constants_dir in pyfile.parents and pyfile.stem != 'constant':
continue
SortImports(str(pyfile),
multi_line_output=HANGING_GRID,
skip=['__init__.py'],
known_third_party=['six'])
|
4d27a526dc6d76989ce65cc60991a7156b333fac
|
tests/test_format.py
|
tests/test_format.py
|
import unittest
import ipuz
import puz
import crossword
class FormatUnitTest(unittest.TestCase):
def test_to_ipuz_only_include_ipuz_specific_data(self):
puz_object = puz.read('fixtures/chronicle_20140815.puz')
puzzle = crossword.from_puz(puz_object)
ipuz_dict = crossword.to_ipuz(puzzle)
self.assertNotIn('puzzletype', ipuz_dict)
self.assertNotIn('fileversion', ipuz_dict)
self.assertNotIn('extensions', ipuz_dict)
def test_to_puz_only_include_puz_specific_data(self):
with open('fixtures/first.ipuz') as f:
ipuz_dict = ipuz.read(f.read())
puzzle = crossword.from_ipuz(ipuz_dict)
puz_object = crossword.to_puz(puzzle)
self.assertFalse(hasattr(puz_object, "kind"))
|
import unittest
import ipuz
import puz
import crossword
class FormatUnitTest(unittest.TestCase):
def test_to_ipuz_only_include_ipuz_specific_data(self):
puz_object = puz.read('fixtures/chronicle_20140815.puz')
puzzle = crossword.from_puz(puz_object)
ipuz_dict = crossword.to_ipuz(puzzle)
self.assertNotIn('puzzletype', ipuz_dict)
self.assertNotIn('fileversion', ipuz_dict)
self.assertNotIn('extensions', ipuz_dict)
def test_to_puz_only_include_puz_specific_data(self):
with open('fixtures/example.ipuz') as f:
ipuz_dict = ipuz.read(f.read())
puzzle = crossword.from_ipuz(ipuz_dict)
puz_object = crossword.to_puz(puzzle)
self.assertFalse(hasattr(puz_object, "kind"))
|
Use a different puzzle in this test as it uses a number that .puz accepts
|
Use a different puzzle in this test as it uses a number that .puz accepts
|
Python
|
mit
|
svisser/crossword
|
import unittest
import ipuz
import puz
import crossword
class FormatUnitTest(unittest.TestCase):
def test_to_ipuz_only_include_ipuz_specific_data(self):
puz_object = puz.read('fixtures/chronicle_20140815.puz')
puzzle = crossword.from_puz(puz_object)
ipuz_dict = crossword.to_ipuz(puzzle)
self.assertNotIn('puzzletype', ipuz_dict)
self.assertNotIn('fileversion', ipuz_dict)
self.assertNotIn('extensions', ipuz_dict)
def test_to_puz_only_include_puz_specific_data(self):
with open('fixtures/first.ipuz') as f:
ipuz_dict = ipuz.read(f.read())
puzzle = crossword.from_ipuz(ipuz_dict)
puz_object = crossword.to_puz(puzzle)
self.assertFalse(hasattr(puz_object, "kind"))
Use a different puzzle in this test as it uses a number that .puz accepts
|
import unittest
import ipuz
import puz
import crossword
class FormatUnitTest(unittest.TestCase):
def test_to_ipuz_only_include_ipuz_specific_data(self):
puz_object = puz.read('fixtures/chronicle_20140815.puz')
puzzle = crossword.from_puz(puz_object)
ipuz_dict = crossword.to_ipuz(puzzle)
self.assertNotIn('puzzletype', ipuz_dict)
self.assertNotIn('fileversion', ipuz_dict)
self.assertNotIn('extensions', ipuz_dict)
def test_to_puz_only_include_puz_specific_data(self):
with open('fixtures/example.ipuz') as f:
ipuz_dict = ipuz.read(f.read())
puzzle = crossword.from_ipuz(ipuz_dict)
puz_object = crossword.to_puz(puzzle)
self.assertFalse(hasattr(puz_object, "kind"))
|
<commit_before>import unittest
import ipuz
import puz
import crossword
class FormatUnitTest(unittest.TestCase):
def test_to_ipuz_only_include_ipuz_specific_data(self):
puz_object = puz.read('fixtures/chronicle_20140815.puz')
puzzle = crossword.from_puz(puz_object)
ipuz_dict = crossword.to_ipuz(puzzle)
self.assertNotIn('puzzletype', ipuz_dict)
self.assertNotIn('fileversion', ipuz_dict)
self.assertNotIn('extensions', ipuz_dict)
def test_to_puz_only_include_puz_specific_data(self):
with open('fixtures/first.ipuz') as f:
ipuz_dict = ipuz.read(f.read())
puzzle = crossword.from_ipuz(ipuz_dict)
puz_object = crossword.to_puz(puzzle)
self.assertFalse(hasattr(puz_object, "kind"))
<commit_msg>Use a different puzzle in this test as it uses a number that .puz accepts<commit_after>
|
import unittest
import ipuz
import puz
import crossword
class FormatUnitTest(unittest.TestCase):
def test_to_ipuz_only_include_ipuz_specific_data(self):
puz_object = puz.read('fixtures/chronicle_20140815.puz')
puzzle = crossword.from_puz(puz_object)
ipuz_dict = crossword.to_ipuz(puzzle)
self.assertNotIn('puzzletype', ipuz_dict)
self.assertNotIn('fileversion', ipuz_dict)
self.assertNotIn('extensions', ipuz_dict)
def test_to_puz_only_include_puz_specific_data(self):
with open('fixtures/example.ipuz') as f:
ipuz_dict = ipuz.read(f.read())
puzzle = crossword.from_ipuz(ipuz_dict)
puz_object = crossword.to_puz(puzzle)
self.assertFalse(hasattr(puz_object, "kind"))
|
import unittest
import ipuz
import puz
import crossword
class FormatUnitTest(unittest.TestCase):
def test_to_ipuz_only_include_ipuz_specific_data(self):
puz_object = puz.read('fixtures/chronicle_20140815.puz')
puzzle = crossword.from_puz(puz_object)
ipuz_dict = crossword.to_ipuz(puzzle)
self.assertNotIn('puzzletype', ipuz_dict)
self.assertNotIn('fileversion', ipuz_dict)
self.assertNotIn('extensions', ipuz_dict)
def test_to_puz_only_include_puz_specific_data(self):
with open('fixtures/first.ipuz') as f:
ipuz_dict = ipuz.read(f.read())
puzzle = crossword.from_ipuz(ipuz_dict)
puz_object = crossword.to_puz(puzzle)
self.assertFalse(hasattr(puz_object, "kind"))
Use a different puzzle in this test as it uses a number that .puz acceptsimport unittest
import ipuz
import puz
import crossword
class FormatUnitTest(unittest.TestCase):
def test_to_ipuz_only_include_ipuz_specific_data(self):
puz_object = puz.read('fixtures/chronicle_20140815.puz')
puzzle = crossword.from_puz(puz_object)
ipuz_dict = crossword.to_ipuz(puzzle)
self.assertNotIn('puzzletype', ipuz_dict)
self.assertNotIn('fileversion', ipuz_dict)
self.assertNotIn('extensions', ipuz_dict)
def test_to_puz_only_include_puz_specific_data(self):
with open('fixtures/example.ipuz') as f:
ipuz_dict = ipuz.read(f.read())
puzzle = crossword.from_ipuz(ipuz_dict)
puz_object = crossword.to_puz(puzzle)
self.assertFalse(hasattr(puz_object, "kind"))
|
<commit_before>import unittest
import ipuz
import puz
import crossword
class FormatUnitTest(unittest.TestCase):
def test_to_ipuz_only_include_ipuz_specific_data(self):
puz_object = puz.read('fixtures/chronicle_20140815.puz')
puzzle = crossword.from_puz(puz_object)
ipuz_dict = crossword.to_ipuz(puzzle)
self.assertNotIn('puzzletype', ipuz_dict)
self.assertNotIn('fileversion', ipuz_dict)
self.assertNotIn('extensions', ipuz_dict)
def test_to_puz_only_include_puz_specific_data(self):
with open('fixtures/first.ipuz') as f:
ipuz_dict = ipuz.read(f.read())
puzzle = crossword.from_ipuz(ipuz_dict)
puz_object = crossword.to_puz(puzzle)
self.assertFalse(hasattr(puz_object, "kind"))
<commit_msg>Use a different puzzle in this test as it uses a number that .puz accepts<commit_after>import unittest
import ipuz
import puz
import crossword
class FormatUnitTest(unittest.TestCase):
def test_to_ipuz_only_include_ipuz_specific_data(self):
puz_object = puz.read('fixtures/chronicle_20140815.puz')
puzzle = crossword.from_puz(puz_object)
ipuz_dict = crossword.to_ipuz(puzzle)
self.assertNotIn('puzzletype', ipuz_dict)
self.assertNotIn('fileversion', ipuz_dict)
self.assertNotIn('extensions', ipuz_dict)
def test_to_puz_only_include_puz_specific_data(self):
with open('fixtures/example.ipuz') as f:
ipuz_dict = ipuz.read(f.read())
puzzle = crossword.from_ipuz(ipuz_dict)
puz_object = crossword.to_puz(puzzle)
self.assertFalse(hasattr(puz_object, "kind"))
|
9b12a9cdab0021fea7e5f2d8fd8ffe11d065f0d0
|
tests/test_replay.py
|
tests/test_replay.py
|
# -*- coding: utf-8 -*-
"""
test_replay
-----------
"""
def test_get_user_config():
config_dict = get_user_config()
assert 'replay_dir' in config_dict
expected_dir = os.path.expanduser('~/.cookiecutter_replay/')
assert config_dict['replay_dir'] == expected_dir
|
# -*- coding: utf-8 -*-
"""
test_replay
-----------
"""
import os
import pytest
from cookiecutter import replay
from cookiecutter.config import get_user_config
def test_get_user_config():
config_dict = get_user_config()
assert 'replay_dir' in config_dict
expected_dir = os.path.expanduser('~/.cookiecutter_replay/')
assert config_dict['replay_dir'] == expected_dir
@pytest.fixture
def template_name():
return 'cookiedozer'
@pytest.fixture
def context():
return {
u'email': u'raphael@hackebrot.de',
u'full_name': u'Raphael Pierzina',
u'github_username': u'hackebrot',
u'version': u'0.1.0',
}
def test_dump_value_error_if_no_template_name(context):
with pytest.raises(ValueError):
replay.dump(None, context)
def test_dump_type_error_if_not_dict_context(template_name):
with pytest.raises(TypeError):
replay.dump(template_name, 'not_a_dict')
|
Implement tests for replay.dump args
|
Implement tests for replay.dump args
|
Python
|
bsd-3-clause
|
agconti/cookiecutter,luzfcb/cookiecutter,christabor/cookiecutter,terryjbates/cookiecutter,benthomasson/cookiecutter,christabor/cookiecutter,audreyr/cookiecutter,audreyr/cookiecutter,benthomasson/cookiecutter,Springerle/cookiecutter,ramiroluz/cookiecutter,terryjbates/cookiecutter,luzfcb/cookiecutter,takeflight/cookiecutter,moi65/cookiecutter,dajose/cookiecutter,cguardia/cookiecutter,venumech/cookiecutter,moi65/cookiecutter,michaeljoseph/cookiecutter,pjbull/cookiecutter,willingc/cookiecutter,pjbull/cookiecutter,stevepiercy/cookiecutter,ramiroluz/cookiecutter,dajose/cookiecutter,agconti/cookiecutter,willingc/cookiecutter,venumech/cookiecutter,stevepiercy/cookiecutter,hackebrot/cookiecutter,takeflight/cookiecutter,cguardia/cookiecutter,hackebrot/cookiecutter,michaeljoseph/cookiecutter,Springerle/cookiecutter
|
# -*- coding: utf-8 -*-
"""
test_replay
-----------
"""
def test_get_user_config():
config_dict = get_user_config()
assert 'replay_dir' in config_dict
expected_dir = os.path.expanduser('~/.cookiecutter_replay/')
assert config_dict['replay_dir'] == expected_dir
Implement tests for replay.dump args
|
# -*- coding: utf-8 -*-
"""
test_replay
-----------
"""
import os
import pytest
from cookiecutter import replay
from cookiecutter.config import get_user_config
def test_get_user_config():
config_dict = get_user_config()
assert 'replay_dir' in config_dict
expected_dir = os.path.expanduser('~/.cookiecutter_replay/')
assert config_dict['replay_dir'] == expected_dir
@pytest.fixture
def template_name():
return 'cookiedozer'
@pytest.fixture
def context():
return {
u'email': u'raphael@hackebrot.de',
u'full_name': u'Raphael Pierzina',
u'github_username': u'hackebrot',
u'version': u'0.1.0',
}
def test_dump_value_error_if_no_template_name(context):
with pytest.raises(ValueError):
replay.dump(None, context)
def test_dump_type_error_if_not_dict_context(template_name):
with pytest.raises(TypeError):
replay.dump(template_name, 'not_a_dict')
|
<commit_before># -*- coding: utf-8 -*-
"""
test_replay
-----------
"""
def test_get_user_config():
config_dict = get_user_config()
assert 'replay_dir' in config_dict
expected_dir = os.path.expanduser('~/.cookiecutter_replay/')
assert config_dict['replay_dir'] == expected_dir
<commit_msg>Implement tests for replay.dump args<commit_after>
|
# -*- coding: utf-8 -*-
"""
test_replay
-----------
"""
import os
import pytest
from cookiecutter import replay
from cookiecutter.config import get_user_config
def test_get_user_config():
config_dict = get_user_config()
assert 'replay_dir' in config_dict
expected_dir = os.path.expanduser('~/.cookiecutter_replay/')
assert config_dict['replay_dir'] == expected_dir
@pytest.fixture
def template_name():
return 'cookiedozer'
@pytest.fixture
def context():
return {
u'email': u'raphael@hackebrot.de',
u'full_name': u'Raphael Pierzina',
u'github_username': u'hackebrot',
u'version': u'0.1.0',
}
def test_dump_value_error_if_no_template_name(context):
with pytest.raises(ValueError):
replay.dump(None, context)
def test_dump_type_error_if_not_dict_context(template_name):
with pytest.raises(TypeError):
replay.dump(template_name, 'not_a_dict')
|
# -*- coding: utf-8 -*-
"""
test_replay
-----------
"""
def test_get_user_config():
config_dict = get_user_config()
assert 'replay_dir' in config_dict
expected_dir = os.path.expanduser('~/.cookiecutter_replay/')
assert config_dict['replay_dir'] == expected_dir
Implement tests for replay.dump args# -*- coding: utf-8 -*-
"""
test_replay
-----------
"""
import os
import pytest
from cookiecutter import replay
from cookiecutter.config import get_user_config
def test_get_user_config():
config_dict = get_user_config()
assert 'replay_dir' in config_dict
expected_dir = os.path.expanduser('~/.cookiecutter_replay/')
assert config_dict['replay_dir'] == expected_dir
@pytest.fixture
def template_name():
return 'cookiedozer'
@pytest.fixture
def context():
return {
u'email': u'raphael@hackebrot.de',
u'full_name': u'Raphael Pierzina',
u'github_username': u'hackebrot',
u'version': u'0.1.0',
}
def test_dump_value_error_if_no_template_name(context):
with pytest.raises(ValueError):
replay.dump(None, context)
def test_dump_type_error_if_not_dict_context(template_name):
with pytest.raises(TypeError):
replay.dump(template_name, 'not_a_dict')
|
<commit_before># -*- coding: utf-8 -*-
"""
test_replay
-----------
"""
def test_get_user_config():
config_dict = get_user_config()
assert 'replay_dir' in config_dict
expected_dir = os.path.expanduser('~/.cookiecutter_replay/')
assert config_dict['replay_dir'] == expected_dir
<commit_msg>Implement tests for replay.dump args<commit_after># -*- coding: utf-8 -*-
"""
test_replay
-----------
"""
import os
import pytest
from cookiecutter import replay
from cookiecutter.config import get_user_config
def test_get_user_config():
config_dict = get_user_config()
assert 'replay_dir' in config_dict
expected_dir = os.path.expanduser('~/.cookiecutter_replay/')
assert config_dict['replay_dir'] == expected_dir
@pytest.fixture
def template_name():
return 'cookiedozer'
@pytest.fixture
def context():
return {
u'email': u'raphael@hackebrot.de',
u'full_name': u'Raphael Pierzina',
u'github_username': u'hackebrot',
u'version': u'0.1.0',
}
def test_dump_value_error_if_no_template_name(context):
with pytest.raises(ValueError):
replay.dump(None, context)
def test_dump_type_error_if_not_dict_context(template_name):
with pytest.raises(TypeError):
replay.dump(template_name, 'not_a_dict')
|
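The record above adds tests but not the implementation, so the following is only a guess at the replay.dump contract those tests pin down; the storage path and file naming are assumptions borrowed from the replay_dir default seen in the tests, not the library's real API.

import json
import os

def dump(template_name, context):
    """Persist a context dict, validating args as tests/test_replay.py expects."""
    if not template_name:
        raise ValueError('template_name is required')  # replay.dump(None, context)
    if not isinstance(context, dict):
        raise TypeError('context must be a dict')      # replay.dump(name, 'not_a_dict')
    replay_dir = os.path.expanduser('~/.cookiecutter_replay/')
    os.makedirs(replay_dir, exist_ok=True)
    with open(os.path.join(replay_dir, template_name + '.json'), 'w') as f:
        json.dump(context, f, indent=2)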
6b025a122e0b6ac4761f5e821b0f5465f867fe61
|
crypto-square/crypto_square.py
|
crypto-square/crypto_square.py
|
import math
def encode(s):
s = list(filter(str.isalnum, s.lower()))
size = math.ceil(math.sqrt(len(s)))
s += "." * (size**2 - len(s))
parts = [s[i*size:(i+1)*size] for i in range(size)]
return " ".join(map("".join, zip(*parts))).replace(".", "")
|
import math
def encode(s):
s = "".join(filter(str.isalnum, s.lower()))
size = math.ceil(math.sqrt(len(s)))
return " ".join(s[i::size] for i in range(size))
|
Use string slices with a stride
|
Use string slices with a stride
|
Python
|
agpl-3.0
|
CubicComet/exercism-python-solutions
|
import math
def encode(s):
s = list(filter(str.isalnum, s.lower()))
size = math.ceil(math.sqrt(len(s)))
s += "." * (size**2 - len(s))
parts = [s[i*size:(i+1)*size] for i in range(size)]
return " ".join(map("".join, zip(*parts))).replace(".", "")
Use string slices with a stride
|
import math
def encode(s):
s = "".join(filter(str.isalnum, s.lower()))
size = math.ceil(math.sqrt(len(s)))
return " ".join(s[i::size] for i in range(size))
|
<commit_before>import math
def encode(s):
s = list(filter(str.isalnum, s.lower()))
size = math.ceil(math.sqrt(len(s)))
s += "." * (size**2 - len(s))
parts = [s[i*size:(i+1)*size] for i in range(size)]
return " ".join(map("".join, zip(*parts))).replace(".", "")
<commit_msg>Use string slices with a stride<commit_after>
|
import math
def encode(s):
s = "".join(filter(str.isalnum, s.lower()))
size = math.ceil(math.sqrt(len(s)))
return " ".join(s[i::size] for i in range(size))
|
import math
def encode(s):
s = list(filter(str.isalnum, s.lower()))
size = math.ceil(math.sqrt(len(s)))
s += "." * (size**2 - len(s))
parts = [s[i*size:(i+1)*size] for i in range(size)]
return " ".join(map("".join, zip(*parts))).replace(".", "")
Use string slices with a strideimport math
def encode(s):
s = "".join(filter(str.isalnum, s.lower()))
size = math.ceil(math.sqrt(len(s)))
return " ".join(s[i::size] for i in range(size))
|
<commit_before>import math
def encode(s):
s = list(filter(str.isalnum, s.lower()))
size = math.ceil(math.sqrt(len(s)))
s += "." * (size**2 - len(s))
parts = [s[i*size:(i+1)*size] for i in range(size)]
return " ".join(map("".join, zip(*parts))).replace(".", "")
<commit_msg>Use string slices with a stride<commit_after>import math
def encode(s):
s = "".join(filter(str.isalnum, s.lower()))
size = math.ceil(math.sqrt(len(s)))
return " ".join(s[i::size] for i in range(size))
|
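A quick runnable demonstration of the stride-slice version from the record above; the sample sentence is an arbitrary illustrative input. Each slice s[i::size] reads column i of the conceptual size-wide grid directly, which is why the padded rows and the zip(*parts) transpose of the old version become unnecessary.

import math

def encode(s):
    s = "".join(filter(str.isalnum, s.lower()))
    size = math.ceil(math.sqrt(len(s)))
    # s[i::size] picks every size-th character starting at offset i,
    # i.e. one column of the square, so no explicit transpose is needed.
    return " ".join(s[i::size] for i in range(size))

print(encode("If man was meant to stay on the ground, god would have given us roots"))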
dfbdf5d55a2c8d243b09828ef05c7c3c3ffc8d50
|
dags/longitudinal.py
|
dags/longitudinal.py
|
from airflow import DAG
from datetime import datetime, timedelta
from operators.emr_spark_operator import EMRSparkOperator
default_args = {
'owner': 'rvitillo@mozilla.com',
'depends_on_past': False,
'start_date': datetime(2016, 6, 30),
'email': ['telemetry-alerts@mozilla.com', 'rvitillo@mozilla.com'],
'email_on_failure': True,
'email_on_retry': True,
'retries': 2,
'retry_delay': timedelta(minutes=30),
}
dag = DAG('longitudinal', default_args=default_args, schedule_interval='@weekly')
t0 = EMRSparkOperator(task_id="longitudinal",
job_name="Longitudinal View",
execution_timeout=timedelta(hours=10),
instance_count=30,
env = {"date": "{{ ds_nodash }}", "bucket": "{{ task.__class__.airflow_bucket }}"},
uri="https://raw.githubusercontent.com/mozilla/telemetry-airflow/master/jobs/longitudinal_view.sh",
dag=dag)
|
from airflow import DAG
from datetime import datetime, timedelta
from operators.emr_spark_operator import EMRSparkOperator
default_args = {
'owner': 'rvitillo@mozilla.com',
'depends_on_past': False,
'start_date': datetime(2016, 6, 30),
'email': ['telemetry-alerts@mozilla.com', 'rvitillo@mozilla.com'],
'email_on_failure': True,
'email_on_retry': True,
'retries': 2,
'retry_delay': timedelta(minutes=30),
}
dag = DAG('longitudinal', default_args=default_args, schedule_interval='@weekly')
t0 = EMRSparkOperator(task_id="longitudinal",
job_name="Longitudinal View",
execution_timeout=timedelta(hours=10),
instance_count=30,
env={"date": "{{ ds_nodash }}", "bucket": "{{ task.__class__.airflow_bucket }}"},
uri="https://raw.githubusercontent.com/mozilla/telemetry-airflow/master/jobs/longitudinal_view.sh",
dag=dag)
t1 = EMRSparkOperator(task_id="update_orphaning",
job_name="Update Orphaning View",
execution_timeout=timedelta(hours=10),
instance_count=1,
owner="spohl@mozilla.com",
email=["telemetry-alerts@mozilla.com", "spohl@mozilla.com",
"mhowell@mozilla.com"],
env={"date": "{{ ds_nodash }}"},
uri="https://raw.githubusercontent.com/mozilla-services/data-pipeline/master/reports/update-orphaning/Update%20orphaning%20analysis%20using%20longitudinal%20dataset.ipynb",
dag=dag)
t1.set_upstream(t0)
|
Add update orphaning job to Airflow
|
Add update orphaning job to Airflow
This depends on the longitudinal job completing successfully before it can run.
|
Python
|
mpl-2.0
|
opentrials/opentrials-airflow,opentrials/opentrials-airflow
|
from airflow import DAG
from datetime import datetime, timedelta
from operators.emr_spark_operator import EMRSparkOperator
default_args = {
'owner': 'rvitillo@mozilla.com',
'depends_on_past': False,
'start_date': datetime(2016, 6, 30),
'email': ['telemetry-alerts@mozilla.com', 'rvitillo@mozilla.com'],
'email_on_failure': True,
'email_on_retry': True,
'retries': 2,
'retry_delay': timedelta(minutes=30),
}
dag = DAG('longitudinal', default_args=default_args, schedule_interval='@weekly')
t0 = EMRSparkOperator(task_id="longitudinal",
job_name="Longitudinal View",
execution_timeout=timedelta(hours=10),
instance_count=30,
env = {"date": "{{ ds_nodash }}", "bucket": "{{ task.__class__.airflow_bucket }}"},
uri="https://raw.githubusercontent.com/mozilla/telemetry-airflow/master/jobs/longitudinal_view.sh",
dag=dag)
Add update orphaning job to Airflow
This depends on the longitudinal job completing successfully before it can run.
|
from airflow import DAG
from datetime import datetime, timedelta
from operators.emr_spark_operator import EMRSparkOperator
default_args = {
'owner': 'rvitillo@mozilla.com',
'depends_on_past': False,
'start_date': datetime(2016, 6, 30),
'email': ['telemetry-alerts@mozilla.com', 'rvitillo@mozilla.com'],
'email_on_failure': True,
'email_on_retry': True,
'retries': 2,
'retry_delay': timedelta(minutes=30),
}
dag = DAG('longitudinal', default_args=default_args, schedule_interval='@weekly')
t0 = EMRSparkOperator(task_id="longitudinal",
job_name="Longitudinal View",
execution_timeout=timedelta(hours=10),
instance_count=30,
env={"date": "{{ ds_nodash }}", "bucket": "{{ task.__class__.airflow_bucket }}"},
uri="https://raw.githubusercontent.com/mozilla/telemetry-airflow/master/jobs/longitudinal_view.sh",
dag=dag)
t1 = EMRSparkOperator(task_id="update_orphaning",
job_name="Update Orphaning View",
execution_timeout=timedelta(hours=10),
instance_count=1,
owner="spohl@mozilla.com",
email=["telemetry-alerts@mozilla.com", "spohl@mozilla.com",
"mhowell@mozilla.com"],
env={"date": "{{ ds_nodash }}"},
uri="https://raw.githubusercontent.com/mozilla-services/data-pipeline/master/reports/update-orphaning/Update%20orphaning%20analysis%20using%20longitudinal%20dataset.ipynb",
dag=dag)
t1.set_upstream(t0)
|
<commit_before>from airflow import DAG
from datetime import datetime, timedelta
from operators.emr_spark_operator import EMRSparkOperator
default_args = {
'owner': 'rvitillo@mozilla.com',
'depends_on_past': False,
'start_date': datetime(2016, 6, 30),
'email': ['telemetry-alerts@mozilla.com', 'rvitillo@mozilla.com'],
'email_on_failure': True,
'email_on_retry': True,
'retries': 2,
'retry_delay': timedelta(minutes=30),
}
dag = DAG('longitudinal', default_args=default_args, schedule_interval='@weekly')
t0 = EMRSparkOperator(task_id="longitudinal",
job_name="Longitudinal View",
execution_timeout=timedelta(hours=10),
instance_count=30,
env = {"date": "{{ ds_nodash }}", "bucket": "{{ task.__class__.airflow_bucket }}"},
uri="https://raw.githubusercontent.com/mozilla/telemetry-airflow/master/jobs/longitudinal_view.sh",
dag=dag)
<commit_msg>Add update orphaning job to Airflow
This depends on the longitudinal job completing successfully before it can run.<commit_after>
|
from airflow import DAG
from datetime import datetime, timedelta
from operators.emr_spark_operator import EMRSparkOperator
default_args = {
'owner': 'rvitillo@mozilla.com',
'depends_on_past': False,
'start_date': datetime(2016, 6, 30),
'email': ['telemetry-alerts@mozilla.com', 'rvitillo@mozilla.com'],
'email_on_failure': True,
'email_on_retry': True,
'retries': 2,
'retry_delay': timedelta(minutes=30),
}
dag = DAG('longitudinal', default_args=default_args, schedule_interval='@weekly')
t0 = EMRSparkOperator(task_id="longitudinal",
job_name="Longitudinal View",
execution_timeout=timedelta(hours=10),
instance_count=30,
env={"date": "{{ ds_nodash }}", "bucket": "{{ task.__class__.airflow_bucket }}"},
uri="https://raw.githubusercontent.com/mozilla/telemetry-airflow/master/jobs/longitudinal_view.sh",
dag=dag)
t1 = EMRSparkOperator(task_id="update_orphaning",
job_name="Update Orphaning View",
execution_timeout=timedelta(hours=10),
instance_count=1,
owner="spohl@mozilla.com",
email=["telemetry-alerts@mozilla.com", "spohl@mozilla.com",
"mhowell@mozilla.com"],
env={"date": "{{ ds_nodash }}"},
uri="https://raw.githubusercontent.com/mozilla-services/data-pipeline/master/reports/update-orphaning/Update%20orphaning%20analysis%20using%20longitudinal%20dataset.ipynb",
dag=dag)
t1.set_upstream(t0)
|
from airflow import DAG
from datetime import datetime, timedelta
from operators.emr_spark_operator import EMRSparkOperator
default_args = {
'owner': 'rvitillo@mozilla.com',
'depends_on_past': False,
'start_date': datetime(2016, 6, 30),
'email': ['telemetry-alerts@mozilla.com', 'rvitillo@mozilla.com'],
'email_on_failure': True,
'email_on_retry': True,
'retries': 2,
'retry_delay': timedelta(minutes=30),
}
dag = DAG('longitudinal', default_args=default_args, schedule_interval='@weekly')
t0 = EMRSparkOperator(task_id="longitudinal",
job_name="Longitudinal View",
execution_timeout=timedelta(hours=10),
instance_count=30,
env = {"date": "{{ ds_nodash }}", "bucket": "{{ task.__class__.airflow_bucket }}"},
uri="https://raw.githubusercontent.com/mozilla/telemetry-airflow/master/jobs/longitudinal_view.sh",
dag=dag)
Add update orphaning job to Airflow
This depends on the longitudinal job completing successfully before it can run.from airflow import DAG
from datetime import datetime, timedelta
from operators.emr_spark_operator import EMRSparkOperator
default_args = {
'owner': 'rvitillo@mozilla.com',
'depends_on_past': False,
'start_date': datetime(2016, 6, 30),
'email': ['telemetry-alerts@mozilla.com', 'rvitillo@mozilla.com'],
'email_on_failure': True,
'email_on_retry': True,
'retries': 2,
'retry_delay': timedelta(minutes=30),
}
dag = DAG('longitudinal', default_args=default_args, schedule_interval='@weekly')
t0 = EMRSparkOperator(task_id="longitudinal",
job_name="Longitudinal View",
execution_timeout=timedelta(hours=10),
instance_count=30,
env={"date": "{{ ds_nodash }}", "bucket": "{{ task.__class__.airflow_bucket }}"},
uri="https://raw.githubusercontent.com/mozilla/telemetry-airflow/master/jobs/longitudinal_view.sh",
dag=dag)
t1 = EMRSparkOperator(task_id="update_orphaning",
job_name="Update Orphaning View",
execution_timeout=timedelta(hours=10),
instance_count=1,
owner="spohl@mozilla.com",
email=["telemetry-alerts@mozilla.com", "spohl@mozilla.com",
"mhowell@mozilla.com"],
env={"date": "{{ ds_nodash }}"},
uri="https://raw.githubusercontent.com/mozilla-services/data-pipeline/master/reports/update-orphaning/Update%20orphaning%20analysis%20using%20longitudinal%20dataset.ipynb",
dag=dag)
t1.set_upstream(t0)
|
<commit_before>from airflow import DAG
from datetime import datetime, timedelta
from operators.emr_spark_operator import EMRSparkOperator
default_args = {
'owner': 'rvitillo@mozilla.com',
'depends_on_past': False,
'start_date': datetime(2016, 6, 30),
'email': ['telemetry-alerts@mozilla.com', 'rvitillo@mozilla.com'],
'email_on_failure': True,
'email_on_retry': True,
'retries': 2,
'retry_delay': timedelta(minutes=30),
}
dag = DAG('longitudinal', default_args=default_args, schedule_interval='@weekly')
t0 = EMRSparkOperator(task_id="longitudinal",
job_name="Longitudinal View",
execution_timeout=timedelta(hours=10),
instance_count=30,
env = {"date": "{{ ds_nodash }}", "bucket": "{{ task.__class__.airflow_bucket }}"},
uri="https://raw.githubusercontent.com/mozilla/telemetry-airflow/master/jobs/longitudinal_view.sh",
dag=dag)
<commit_msg>Add update orphaning job to Airflow
This depends on the longitudinal job completing successfully before it can run.<commit_after>from airflow import DAG
from datetime import datetime, timedelta
from operators.emr_spark_operator import EMRSparkOperator
default_args = {
'owner': 'rvitillo@mozilla.com',
'depends_on_past': False,
'start_date': datetime(2016, 6, 30),
'email': ['telemetry-alerts@mozilla.com', 'rvitillo@mozilla.com'],
'email_on_failure': True,
'email_on_retry': True,
'retries': 2,
'retry_delay': timedelta(minutes=30),
}
dag = DAG('longitudinal', default_args=default_args, schedule_interval='@weekly')
t0 = EMRSparkOperator(task_id="longitudinal",
job_name="Longitudinal View",
execution_timeout=timedelta(hours=10),
instance_count=30,
env={"date": "{{ ds_nodash }}", "bucket": "{{ task.__class__.airflow_bucket }}"},
uri="https://raw.githubusercontent.com/mozilla/telemetry-airflow/master/jobs/longitudinal_view.sh",
dag=dag)
t1 = EMRSparkOperator(task_id="update_orphaning",
job_name="Update Orphaning View",
execution_timeout=timedelta(hours=10),
instance_count=1,
owner="spohl@mozilla.com",
email=["telemetry-alerts@mozilla.com", "spohl@mozilla.com",
"mhowell@mozilla.com"],
env={"date": "{{ ds_nodash }}"},
uri="https://raw.githubusercontent.com/mozilla-services/data-pipeline/master/reports/update-orphaning/Update%20orphaning%20analysis%20using%20longitudinal%20dataset.ipynb",
dag=dag)
t1.set_upstream(t0)
|
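A side note on the dependency wiring at the end of the record above: in Airflow, t1.set_upstream(t0) and the >> bitshift form express the same edge. The fragment below is a schematic sketch assuming an Airflow 1.x environment where DummyOperator is available; it is not runnable without Airflow installed and is not part of the original DAG.

from datetime import datetime
from airflow import DAG
from airflow.operators.dummy_operator import DummyOperator  # assumes Airflow 1.x

dag = DAG('dependency_demo',
          start_date=datetime(2016, 6, 30),
          schedule_interval='@weekly')

t0 = DummyOperator(task_id='longitudinal', dag=dag)
t1 = DummyOperator(task_id='update_orphaning', dag=dag)

t1.set_upstream(t0)  # identical in effect to: t0 >> t1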
0c6bf8eca2d4cfd08cc98df3cb0ab706a6fbf7a2
|
cxfreeze-setup.py
|
cxfreeze-setup.py
|
import sys
from glob import glob
from cx_Freeze import setup, Executable
from version import VERSION
# Dependencies are automatically detected, but it might need
# fine tuning.
excludes = ["Tkinter"]
includes = ["logview", "colorwidget"]
includeFiles = [("data/icons/gitc.svg", "data/icons/gitc.svg"),
("data/licenses/Apache-2.0.html", "data/licenses/Apache-2.0.html"),
("LICENSE", "data/licenses/LICENSE")]
for qm in glob("data/translations/*.qm"):
includeFiles.append((qm, qm))
buildOptions = dict(
packages=[],
excludes=excludes,
includes=includes,
include_files=includeFiles,
include_msvcr=True,
silent=True)
base = None
icon = None
if sys.platform == "win32":
base = "Win32GUI"
icon = "data/icons/gitc.ico"
executables = [
Executable('gitc',
base=base,
icon=icon
)]
setup(name='gitc',
version=VERSION,
description='A file conflict viewer for git',
options=dict(build_exe=buildOptions),
executables=executables,
)
|
import sys
from glob import glob
from cx_Freeze import setup, Executable
from version import VERSION
# Dependencies are automatically detected, but it might need
# fine tuning.
excludes = ["Tkinter"]
includes = ["logview", "colorwidget", "pickle"]
includeFiles = [("data/icons/gitc.svg", "data/icons/gitc.svg"),
("data/licenses/Apache-2.0.html", "data/licenses/Apache-2.0.html"),
("LICENSE", "data/licenses/LICENSE")]
for qm in glob("data/translations/*.qm"):
includeFiles.append((qm, qm))
buildOptions = dict(
packages=[],
excludes=excludes,
includes=includes,
include_files=includeFiles,
include_msvcr=True,
silent=True)
base = None
icon = None
if sys.platform == "win32":
base = "Win32GUI"
icon = "data/icons/gitc.ico"
executables = [
Executable('gitc',
base=base,
icon=icon
)]
setup(name='gitc',
version=VERSION,
description='A file conflict viewer for git',
options=dict(build_exe=buildOptions),
executables=executables,
)
|
Fix a TypeError when loading the mergeTool setting
|
Fix a TypeError when loading the mergeTool setting
cx-freeze doesn't include the pickle module on the Windows platform,
which causes "TypeError: unable to convert a C++ 'QVariantList'
instance to a Python object"
|
Python
|
apache-2.0
|
timxx/gitc,timxx/gitc
|
import sys
from glob import glob
from cx_Freeze import setup, Executable
from version import VERSION
# Dependencies are automatically detected, but it might need
# fine tuning.
excludes = ["Tkinter"]
includes = ["logview", "colorwidget"]
includeFiles = [("data/icons/gitc.svg", "data/icons/gitc.svg"),
("data/licenses/Apache-2.0.html", "data/licenses/Apache-2.0.html"),
("LICENSE", "data/licenses/LICENSE")]
for qm in glob("data/translations/*.qm"):
includeFiles.append((qm, qm))
buildOptions = dict(
packages=[],
excludes=excludes,
includes=includes,
include_files=includeFiles,
include_msvcr=True,
silent=True)
base = None
icon = None
if sys.platform == "win32":
base = "Win32GUI"
icon = "data/icons/gitc.ico"
executables = [
Executable('gitc',
base=base,
icon=icon
)]
setup(name='gitc',
version=VERSION,
description='A file conflict viewer for git',
options=dict(build_exe=buildOptions),
executables=executables,
)
Fix a TypeError when loading the mergeTool setting
cx-freeze doesn't include the pickle module on the Windows platform,
which causes "TypeError: unable to convert a C++ 'QVariantList'
instance to a Python object"
|
import sys
from glob import glob
from cx_Freeze import setup, Executable
from version import VERSION
# Dependencies are automatically detected, but it might need
# fine tuning.
excludes = ["Tkinter"]
includes = ["logview", "colorwidget", "pickle"]
includeFiles = [("data/icons/gitc.svg", "data/icons/gitc.svg"),
("data/licenses/Apache-2.0.html", "data/licenses/Apache-2.0.html"),
("LICENSE", "data/licenses/LICENSE")]
for qm in glob("data/translations/*.qm"):
includeFiles.append((qm, qm))
buildOptions = dict(
packages=[],
excludes=excludes,
includes=includes,
include_files=includeFiles,
include_msvcr=True,
silent=True)
base = None
icon = None
if sys.platform == "win32":
base = "Win32GUI"
icon = "data/icons/gitc.ico"
executables = [
Executable('gitc',
base=base,
icon=icon
)]
setup(name='gitc',
version=VERSION,
description='A file conflict viewer for git',
options=dict(build_exe=buildOptions),
executables=executables,
)
|
<commit_before>import sys
from glob import glob
from cx_Freeze import setup, Executable
from version import VERSION
# Dependencies are automatically detected, but it might need
# fine tuning.
excludes = ["Tkinter"]
includes = ["logview", "colorwidget"]
includeFiles = [("data/icons/gitc.svg", "data/icons/gitc.svg"),
("data/licenses/Apache-2.0.html", "data/licenses/Apache-2.0.html"),
("LICENSE", "data/licenses/LICENSE")]
for qm in glob("data/translations/*.qm"):
includeFiles.append((qm, qm))
buildOptions = dict(
packages=[],
excludes=excludes,
includes=includes,
include_files=includeFiles,
include_msvcr=True,
silent=True)
base = None
icon = None
if sys.platform == "win32":
base = "Win32GUI"
icon = "data/icons/gitc.ico"
executables = [
Executable('gitc',
base=base,
icon=icon
)]
setup(name='gitc',
version=VERSION,
description='A file conflict viewer for git',
options=dict(build_exe=buildOptions),
executables=executables,
)
<commit_msg>Fix a TypeError when loading the mergeTool setting
cx-freeze doesn't include the pickle module on the Windows platform,
which causes "TypeError: unable to convert a C++ 'QVariantList'
instance to a Python object"<commit_after>
|
import sys
from glob import glob
from cx_Freeze import setup, Executable
from version import VERSION
# Dependencies are automatically detected, but it might need
# fine tuning.
excludes = ["Tkinter"]
includes = ["logview", "colorwidget", "pickle"]
includeFiles = [("data/icons/gitc.svg", "data/icons/gitc.svg"),
("data/licenses/Apache-2.0.html", "data/licenses/Apache-2.0.html"),
("LICENSE", "data/licenses/LICENSE")]
for qm in glob("data/translations/*.qm"):
includeFiles.append((qm, qm))
buildOptions = dict(
packages=[],
excludes=excludes,
includes=includes,
include_files=includeFiles,
include_msvcr=True,
silent=True)
base = None
icon = None
if sys.platform == "win32":
base = "Win32GUI"
icon = "data/icons/gitc.ico"
executables = [
Executable('gitc',
base=base,
icon=icon
)]
setup(name='gitc',
version=VERSION,
description='A file conflict viewer for git',
options=dict(build_exe=buildOptions),
executables=executables,
)
|
import sys
from glob import glob
from cx_Freeze import setup, Executable
from version import VERSION
# Dependencies are automatically detected, but it might need
# fine tuning.
excludes = ["Tkinter"]
includes = ["logview", "colorwidget"]
includeFiles = [("data/icons/gitc.svg", "data/icons/gitc.svg"),
("data/licenses/Apache-2.0.html", "data/licenses/Apache-2.0.html"),
("LICENSE", "data/licenses/LICENSE")]
for qm in glob("data/translations/*.qm"):
includeFiles.append((qm, qm))
buildOptions = dict(
packages=[],
excludes=excludes,
includes=includes,
include_files=includeFiles,
include_msvcr=True,
silent=True)
base = None
icon = None
if sys.platform == "win32":
base = "Win32GUI"
icon = "data/icons/gitc.ico"
executables = [
Executable('gitc',
base=base,
icon=icon
)]
setup(name='gitc',
version=VERSION,
description='A file conflict viewer for git',
options=dict(build_exe=buildOptions),
executables=executables,
)
Fix a TypeError when loading the mergeTool setting
cx-freeze doesn't include the pickle module on the Windows platform,
which causes "TypeError: unable to convert a C++ 'QVariantList'
instance to a Python object"import sys
from glob import glob
from cx_Freeze import setup, Executable
from version import VERSION
# Dependencies are automatically detected, but it might need
# fine tuning.
excludes = ["Tkinter"]
includes = ["logview", "colorwidget", "pickle"]
includeFiles = [("data/icons/gitc.svg", "data/icons/gitc.svg"),
("data/licenses/Apache-2.0.html", "data/licenses/Apache-2.0.html"),
("LICENSE", "data/licenses/LICENSE")]
for qm in glob("data/translations/*.qm"):
includeFiles.append((qm, qm))
buildOptions = dict(
packages=[],
excludes=excludes,
includes=includes,
include_files=includeFiles,
include_msvcr=True,
silent=True)
base = None
icon = None
if sys.platform == "win32":
base = "Win32GUI"
icon = "data/icons/gitc.ico"
executables = [
Executable('gitc',
base=base,
icon=icon
)]
setup(name='gitc',
version=VERSION,
description='A file conflict viewer for git',
options=dict(build_exe=buildOptions),
executables=executables,
)
|
<commit_before>import sys
from glob import glob
from cx_Freeze import setup, Executable
from version import VERSION
# Dependencies are automatically detected, but it might need
# fine tuning.
excludes = ["Tkinter"]
includes = ["logview", "colorwidget"]
includeFiles = [("data/icons/gitc.svg", "data/icons/gitc.svg"),
("data/licenses/Apache-2.0.html", "data/licenses/Apache-2.0.html"),
("LICENSE", "data/licenses/LICENSE")]
for qm in glob("data/translations/*.qm"):
includeFiles.append((qm, qm))
buildOptions = dict(
packages=[],
excludes=excludes,
includes=includes,
include_files=includeFiles,
include_msvcr=True,
silent=True)
base = None
icon = None
if sys.platform == "win32":
base = "Win32GUI"
icon = "data/icons/gitc.ico"
executables = [
Executable('gitc',
base=base,
icon=icon
)]
setup(name='gitc',
version=VERSION,
description='A file conflict viewer for git',
options=dict(build_exe=buildOptions),
executables=executables,
)
<commit_msg>Fix a TypeError when loading the mergeTool setting
cx-freeze doesn't include the pickle module on the Windows platform,
which causes "TypeError: unable to convert a C++ 'QVariantList'
instance to a Python object"<commit_after>import sys
from glob import glob
from cx_Freeze import setup, Executable
from version import VERSION
# Dependencies are automatically detected, but it might need
# fine tuning.
excludes = ["Tkinter"]
includes = ["logview", "colorwidget", "pickle"]
includeFiles = [("data/icons/gitc.svg", "data/icons/gitc.svg"),
("data/licenses/Apache-2.0.html", "data/licenses/Apache-2.0.html"),
("LICENSE", "data/licenses/LICENSE")]
for qm in glob("data/translations/*.qm"):
includeFiles.append((qm, qm))
buildOptions = dict(
packages=[],
excludes=excludes,
includes=includes,
include_files=includeFiles,
include_msvcr=True,
silent=True)
base = None
icon = None
if sys.platform == "win32":
base = "Win32GUI"
icon = "data/icons/gitc.ico"
executables = [
Executable('gitc',
base=base,
icon=icon
)]
setup(name='gitc',
version=VERSION,
description='A file conflict viewer for git',
options=dict(build_exe=buildOptions),
executables=executables,
)
|
64303ae1a02b707ff11231a9e3c46405a8a591e7
|
host/cgi-bin/liberator.py
|
host/cgi-bin/liberator.py
|
#!/usr/bin/env python
import cgi, subprocess, json
arguments = cgi.FieldStorage()
body = arguments.getvalue('body', '')
messageTo = arguments.getvalue('messageTo', '')
exitCode = subprocess.call(['./SendImessage.applescript', messageTo, body])
print 'Content-Type: application/json'
print ''
print json.dumps({'ok': exitCode == 0, 'body': body, 'messageTo': messageTo})
|
#!/usr/bin/env python
from os.path import expanduser
from time import sleep
import subprocess, json, sys, os
messagesDbPath = '%s/Library/Messages/chat.db' % expanduser('~')
# manually parse the QUERY_STRING because "+" is being weirdly decoded via FieldStorage
queryParameters = {}
keyValues = os.environ['QUERY_STRING'].split('&')
for pair in keyValues:
key, value = pair.split('=')
queryParameters[key] = value
body = queryParameters['body'] or ''
messageTo = queryParameters['messageTo'] or ''
payload = {'body': body, 'messageTo': messageTo}
sendExitCode = subprocess.call(['./SendImessage.applescript', messageTo, body])
verificationError = None
# monitor send status of message
if sendExitCode == 0:
iterations = 0
whereClause = 'handle.id = "%s" AND message.text = "%s"' % (messageTo, body)
query = 'SELECT message.ROWID, message.error, message.is_delivered, datetime(message.date + 978307200, "unixepoch", "localtime") FROM message JOIN handle ON message.handle_id = handle.ROWID WHERE %s ORDER BY message.date DESC LIMIT 1'
chatId = None
while iterations < 5:
if chatId is not None:
builtQuery = query % ('message.ROWID = %s' % chatId)
else:
builtQuery = query % whereClause
#builtQuery = 'select * from message limit 1'
try:
output = subprocess.check_output(['sqlite3', messagesDbPath, builtQuery], stderr=subprocess.STDOUT)
#print >> sys.stderr, builtQuery
#print >> sys.stderr, output
except subprocess.CalledProcessError as e:
print >> sys.stderr, e.output
verificationError = e.output
break
if output:
chatId, verificationError, isDelivered, date = output.split('|')
verificationError = int(verificationError)
if int(isDelivered) == 1 or verificationError != 0:
break
iterations += 1
sleep(1)
payload['ok'] = sendExitCode == 0 and verificationError == 0
if payload['ok'] is False:
if verificationError == 22:
payload['error'] = 'invalid handle'
elif sendExitCode != 0:
payload['error'] = 'message not sent'
else:
payload['error'] = 'imessage error `%d`' % verificationError
print 'Content-Type: application/json'
print ''
print json.dumps(payload)
|
Verify message sent via SQL
|
Verify message sent via SQL
|
Python
|
mit
|
chainsawsalad/imessage-liberator,chainsawsalad/imessage-liberator,chainsawsalad/imessage-liberator
|
#!/usr/bin/env python
import cgi, subprocess, json
arguments = cgi.FieldStorage()
body = arguments.getvalue('body', '')
messageTo = arguments.getvalue('messageTo', '')
exitCode = subprocess.call(['./SendImessage.applescript', messageTo, body])
print 'Content-Type: application/json'
print ''
print json.dumps({'ok': exitCode == 0, 'body': body, 'messageTo': messageTo})
Verify message sent via SQL
|
#!/usr/bin/env python
from os.path import expanduser
from time import sleep
import subprocess, json, sys, os
messagesDbPath = '%s/Library/Messages/chat.db' % expanduser('~')
# manually parse the QUERY_STRING because "+" is being weirdly decoded via FieldStorage
queryParameters = {}
keyValues = os.environ['QUERY_STRING'].split('&')
for pair in keyValues:
key, value = pair.split('=')
queryParameters[key] = value
body = queryParameters['body'] or ''
messageTo = queryParameters['messageTo'] or ''
payload = {'body': body, 'messageTo': messageTo}
sendExitCode = subprocess.call(['./SendImessage.applescript', messageTo, body])
verificationError = None
# monitor send status of message
if sendExitCode == 0:
iterations = 0
whereClause = 'handle.id = "%s" AND message.text = "%s"' % (messageTo, body)
query = 'SELECT message.ROWID, message.error, message.is_delivered, datetime(message.date + 978307200, "unixepoch", "localtime") FROM message JOIN handle ON message.handle_id = handle.ROWID WHERE %s ORDER BY message.date DESC LIMIT 1'
chatId = None
while iterations < 5:
if chatId is not None:
builtQuery = query % ('message.ROWID = %s' % chatId)
else:
builtQuery = query % whereClause
#builtQuery = 'select * from message limit 1'
try:
output = subprocess.check_output(['sqlite3', messagesDbPath, builtQuery], stderr=subprocess.STDOUT)
#print >> sys.stderr, builtQuery
#print >> sys.stderr, output
except subprocess.CalledProcessError as e:
print >> sys.stderr, e.output
verificationError = e.output
break
if output:
chatId, verificationError, isDelivered, date = output.split('|')
verificationError = int(verificationError)
if int(isDelivered) == 1 or verificationError != 0:
break
iterations += 1
sleep(1)
payload['ok'] = sendExitCode == 0 and verificationError == 0
if payload['ok'] is False:
if verificationError == 22:
payload['error'] = 'invalid handle'
elif sendExitCode != 0:
payload['error'] = 'message not sent'
else:
payload['error'] = 'imessage error `%d`' % verificationError
print 'Content-Type: application/json'
print ''
print json.dumps(payload)
|
<commit_before>#!/usr/bin/env python
import cgi, subprocess, json
arguments = cgi.FieldStorage()
body = arguments.getvalue('body', '')
messageTo = arguments.getvalue('messageTo', '')
exitCode = subprocess.call(['./SendImessage.applescript', messageTo, body])
print 'Content-Type: application/json'
print ''
print json.dumps({'ok': exitCode == 0, 'body': body, 'messageTo': messageTo})
<commit_msg>Verify message sent via SQL<commit_after>
|
#!/usr/bin/env python
from os.path import expanduser
from time import sleep
import subprocess, json, sys, os
messagesDbPath = '%s/Library/Messages/chat.db' % expanduser('~')
# manually parse the QUERY_STRING because "+" is being weirdly decoded via FieldStorage
queryParameters = {}
keyValues = os.environ['QUERY_STRING'].split('&')
for pair in keyValues:
key, value = pair.split('=')
queryParameters[key] = value
body = queryParameters['body'] or ''
messageTo = queryParameters['messageTo'] or ''
payload = {'body': body, 'messageTo': messageTo}
sendExitCode = subprocess.call(['./SendImessage.applescript', messageTo, body])
verificationError = None
# monitor send status of message
if sendExitCode == 0:
iterations = 0
whereClause = 'handle.id = "%s" AND message.text = "%s"' % (messageTo, body)
query = 'SELECT message.ROWID, message.error, message.is_delivered, datetime(message.date + 978307200, "unixepoch", "localtime") FROM message JOIN handle ON message.handle_id = handle.ROWID WHERE %s ORDER BY message.date DESC LIMIT 1'
chatId = None
while iterations < 5:
if chatId is not None:
builtQuery = query % ('message.ROWID = %s' % chatId)
else:
builtQuery = query % whereClause
#builtQuery = 'select * from message limit 1'
try:
output = subprocess.check_output(['sqlite3', messagesDbPath, builtQuery], stderr=subprocess.STDOUT)
#print >> sys.stderr, builtQuery
#print >> sys.stderr, output
except subprocess.CalledProcessError as e:
print >> sys.stderr, e.output
verificationError = e.output
break
if output:
chatId, verificationError, isDelivered, date = output.split('|')
verificationError = int(verificationError)
if int(isDelivered) == 1 or verificationError != 0:
break
iterations += 1
sleep(1)
payload['ok'] = sendExitCode == 0 and verificationError == 0
if payload['ok'] is False:
if verificationError == 22:
payload['error'] = 'invalid handle'
elif sendExitCode != 0:
payload['error'] = 'message not sent'
else:
payload['error'] = 'imessage error `%d`' % verificationError
print 'Content-Type: application/json'
print ''
print json.dumps(payload)
|
#!/usr/bin/env python
import cgi, subprocess, json
arguments = cgi.FieldStorage()
body = arguments.getvalue('body', '')
messageTo = arguments.getvalue('messageTo', '')
exitCode = subprocess.call(['./SendImessage.applescript', messageTo, body])
print 'Content-Type: application/json'
print ''
print json.dumps({'ok': exitCode == 0, 'body': body, 'messageTo': messageTo})
Verify message sent via SQL
#!/usr/bin/env python
from os.path import expanduser
from time import sleep
import subprocess, json, sys, os
messagesDbPath = '%s/Library/Messages/chat.db' % expanduser('~')
# manually parse the QUERY_STRING because "+" is being weirdly decoded via FieldStorage
queryParameters = {}
keyValues = os.environ['QUERY_STRING'].split('&')
for pair in keyValues:
key, value = pair.split('=')
queryParameters[key] = value
body = queryParameters['body'] or ''
messageTo = queryParameters['messageTo'] or ''
payload = {'body': body, 'messageTo': messageTo}
sendExitCode = subprocess.call(['./SendImessage.applescript', messageTo, body])
verificationError = None
# monitor send status of message
if sendExitCode == 0:
iterations = 0
whereClause = 'handle.id = "%s" AND message.text = "%s"' % (messageTo, body)
query = 'SELECT message.ROWID, message.error, message.is_delivered, datetime(message.date + 978307200, "unixepoch", "localtime") FROM message JOIN handle ON message.handle_id = handle.ROWID WHERE %s ORDER BY message.date DESC LIMIT 1'
chatId = None
while iterations < 5:
if chatId is not None:
builtQuery = query % ('message.ROWID = %s' % chatId)
else:
builtQuery = query % whereClause
#builtQuery = 'select * from message limit 1'
try:
output = subprocess.check_output(['sqlite3', messagesDbPath, builtQuery], stderr=subprocess.STDOUT)
#print >> sys.stderr, builtQuery
#print >> sys.stderr, output
except subprocess.CalledProcessError as e:
print >> sys.stderr, e.output
verificationError = e.output
break
if output:
chatId, verificationError, isDelivered, date = output.split('|')
verificationError = int(verificationError)
if int(isDelivered) == 1 or verificationError != 0:
break
iterations += 1
sleep(1)
payload['ok'] = sendExitCode == 0 and verificationError == 0
if payload['ok'] is False:
if verificationError == 22:
payload['error'] = 'invalid handle'
elif sendExitCode != 0:
payload['error'] = 'message not sent'
else:
payload['error'] = 'imessage error `%d`' % verificationError
print 'Content-Type: application/json'
print ''
print json.dumps(payload)
|
<commit_before>#!/usr/bin/env python
import cgi, subprocess, json
arguments = cgi.FieldStorage()
body = arguments.getvalue('body', '')
messageTo = arguments.getvalue('messageTo', '')
exitCode = subprocess.call(['./SendImessage.applescript', messageTo, body])
print 'Content-Type: application/json'
print ''
print json.dumps({'ok': exitCode == 0, 'body': body, 'messageTo': messageTo})
<commit_msg>Verify message sent via SQL<commit_after>#!/usr/bin/env python
from os.path import expanduser
from time import sleep
import subprocess, json, sys, os
messagesDbPath = '%s/Library/Messages/chat.db' % expanduser('~')
# manually parse the QUERY_STRING because "+" is being weirdly decoded via FieldStorage
queryParameters = {}
keyValues = os.environ['QUERY_STRING'].split('&')
for pair in keyValues:
key, value = pair.split('=')
queryParameters[key] = value
body = queryParameters['body'] or ''
messageTo = queryParameters['messageTo'] or ''
payload = {'body': body, 'messageTo': messageTo}
sendExitCode = subprocess.call(['./SendImessage.applescript', messageTo, body])
verificationError = None
# monitor send status of message
if sendExitCode == 0:
iterations = 0
whereClause = 'handle.id = "%s" AND message.text = "%s"' % (messageTo, body)
query = 'SELECT message.ROWID, message.error, message.is_delivered, datetime(message.date + 978307200, "unixepoch", "localtime") FROM message JOIN handle ON message.handle_id = handle.ROWID WHERE %s ORDER BY message.date DESC LIMIT 1'
chatId = None
while iterations < 5:
if chatId is not None:
builtQuery = query % ('message.ROWID = %s' % chatId)
else:
builtQuery = query % whereClause
#builtQuery = 'select * from message limit 1'
try:
output = subprocess.check_output(['sqlite3', messagesDbPath, builtQuery], stderr=subprocess.STDOUT)
#print >> sys.stderr, builtQuery
#print >> sys.stderr, output
except subprocess.CalledProcessError as e:
print >> sys.stderr, e.output
verificationError = e.output
break
if output:
chatId, verificationError, isDelivered, date = output.split('|')
verificationError = int(verificationError)
if int(isDelivered) == 1 or verificationError != 0:
break
iterations += 1
sleep(1)
payload['ok'] = sendExitCode == 0 and verificationError == 0
if payload['ok'] is False:
if verificationError == 22:
payload['error'] = 'invalid handle'
elif sendExitCode != 0:
payload['error'] = 'message not sent'
else:
payload['error'] = 'imessage error `%d`' % verificationError
print 'Content-Type: application/json'
print ''
print json.dumps(payload)
|
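A note on the verification query in this record: the script interpolates messageTo and body into the SQL with % formatting and shells out to the sqlite3 binary, so any quote character in the message text breaks the query. The same lookup can be done in-process with Python's sqlite3 module and bound parameters; a sketch under the same chat.db layout the script assumes:
import sqlite3
from os.path import expanduser

def last_message_status(handle, body):
    db_path = '%s/Library/Messages/chat.db' % expanduser('~')
    conn = sqlite3.connect(db_path)
    try:
        # bound parameters avoid quoting problems in the message text
        return conn.execute(
            'SELECT message.ROWID, message.error, message.is_delivered'
            ' FROM message JOIN handle ON message.handle_id = handle.ROWID'
            ' WHERE handle.id = ? AND message.text = ?'
            ' ORDER BY message.date DESC LIMIT 1',
            (handle, body)).fetchone()  # None if not recorded yet
    finally:
        conn.close()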
3f3818e4a21ffc4e1b8d4426093fc093396b5a5b
|
pandas_finance.py
|
pandas_finance.py
|
#!/usr/bin/env python
import datetime
import scraperwiki
import numpy
import pandas.io.data as web
def get_stock(stock, start, end, service):
"""
Return data frame of finance data for stock.
Takes start and end datetimes, and service name of 'google' or 'yahoo'.
"""
return web.DataReader(stock, service, start, end)
def parse_finance_frame(stock, start, end, service='google'):
"""
Return rows of dicts from a finance data frame for scraperwiki.sqlite.
service can also be 'yahoo', start and end are datetimes.
"""
frame = get_stock(stock, start, end, service)
rows = []
for idx in range(len(frame)):
current_row_as_dict = frame.ix[idx].to_dict()
# have to convert dates because these are Pandas timestamps and
# dumptruck doesn't support them
current_row_as_dict['Date'] = frame.index[idx].to_datetime()
current_row_as_dict['Stock'] = stock
# horrible hack because data values are numpy.float64 and dumptruck
# doesn't support them
for key in current_row_as_dict:
if isinstance(current_row_as_dict[key], numpy.float64):
current_row_as_dict[key] = float(current_row_as_dict[key])
rows.append(current_row_as_dict)
return rows
def main():
"""
Dump stock data into scraperwiki.sqlite using pandas.io.data.
"""
# arbitrary start chosen
start = datetime.datetime(2014, 3, 1)
end = datetime.datetime.today()
stock_list = ['TWTR', 'FB']
rows = []
for stock in stock_list:
rows.extend(parse_finance_frame(stock, start, end))
scraperwiki.sqlite.save(data=rows, unique_keys=['Stock', 'Date'])
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
import datetime
import sqlite3
import pandas.io.data as web
import pandas.io.sql as sql
def get_stock(stock, start, end):
"""
Return data frame of Yahoo Finance data for stock.
Takes start and end datetimes.
"""
return web.DataReader(stock, 'yahoo', start, end)
def scrape_stock(stock, start, end):
sqlite_db.execute("drop table if exists {};".format(stock))
frame = (get_stock(stock, start, end))
# make Date not an index so it appears in table
frame = frame.reset_index()
# force Date datetime to string
frame[['Date']] = frame[['Date']].applymap(lambda x: x.isoformat())
sql.write_frame(frame, stock, sqlite_db)
def main():
global sqlite_db
sqlite_db = sqlite3.connect("scraperwiki.sqlite")
start = datetime.datetime(2014, 3, 1)
end = datetime.datetime.today()
for ticker in ['TWTR', 'FB']:
scrape_stock(ticker, start, end)
if __name__ == '__main__':
main()
|
Use pandas native saving by forcing date to not be index, and be string
|
Use pandas native saving by forcing date to not be index, and be string
|
Python
|
agpl-3.0
|
scraperwiki/stock-tool,scraperwiki/stock-tool
|
#!/usr/bin/env python
import datetime
import scraperwiki
import numpy
import pandas.io.data as web
def get_stock(stock, start, end, service):
"""
Return data frame of finance data for stock.
Takes start and end datetimes, and service name of 'google' or 'yahoo'.
"""
return web.DataReader(stock, service, start, end)
def parse_finance_frame(stock, start, end, service='google'):
"""
Return rows of dicts from a finance data frame for scraperwiki.sqlite.
service can also be 'yahoo', start and end are datetimes.
"""
frame = get_stock(stock, start, end, service)
rows = []
for idx in range(len(frame)):
current_row_as_dict = frame.ix[idx].to_dict()
# have to convert dates because these are Pandas timestamps and
# dumptruck doesn't support them
current_row_as_dict['Date'] = frame.index[idx].to_datetime()
current_row_as_dict['Stock'] = stock
# horrible hack because data values are numpy.float64 and dumptruck
# doesn't support them
for key in current_row_as_dict:
if isinstance(current_row_as_dict[key], numpy.float64):
current_row_as_dict[key] = float(current_row_as_dict[key])
rows.append(current_row_as_dict)
return rows
def main():
"""
Dump stock data into scraperwiki.sqlite using pandas.io.data.
"""
# arbitrary start chosen
start = datetime.datetime(2014, 3, 1)
end = datetime.datetime.today()
stock_list = ['TWTR', 'FB']
rows = []
for stock in stock_list:
rows.extend(parse_finance_frame(stock, start, end))
scraperwiki.sqlite.save(data=rows, unique_keys=['Stock', 'Date'])
if __name__ == '__main__':
main()
Use pandas native saving by forcing date to not be index, and be string
|
#!/usr/bin/env python
import datetime
import sqlite3
import pandas.io.data as web
import pandas.io.sql as sql
def get_stock(stock, start, end):
"""
Return data frame of Yahoo Finance data for stock.
Takes start and end datetimes.
"""
return web.DataReader(stock, 'yahoo', start, end)
def scrape_stock(stock, start, end):
sqlite_db.execute("drop table if exists {};".format(stock))
frame = (get_stock(stock, start, end))
# make Date not an index so it appears in table
frame = frame.reset_index()
# force Date datetime to string
frame[['Date']] = frame[['Date']].applymap(lambda x: x.isoformat())
sql.write_frame(frame, stock, sqlite_db)
def main():
global sqlite_db
sqlite_db = sqlite3.connect("scraperwiki.sqlite")
start = datetime.datetime(2014, 3, 1)
end = datetime.datetime.today()
for ticker in ['TWTR', 'FB']:
scrape_stock(ticker, start, end)
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python
import datetime
import scraperwiki
import numpy
import pandas.io.data as web
def get_stock(stock, start, end, service):
"""
Return data frame of finance data for stock.
Takes start and end datetimes, and service name of 'google' or 'yahoo'.
"""
return web.DataReader(stock, service, start, end)
def parse_finance_frame(stock, start, end, service='google'):
"""
Return rows of dicts from a finance data frame for scraperwiki.sqlite.
service can also be 'yahoo', start and end are datetimes.
"""
frame = get_stock(stock, start, end, service)
rows = []
for idx in range(len(frame)):
current_row_as_dict = frame.ix[idx].to_dict()
# have to convert dates because these are Pandas timestamps and
# dumptruck doesn't support them
current_row_as_dict['Date'] = frame.index[idx].to_datetime()
current_row_as_dict['Stock'] = stock
# horrible hack because data values are numpy.float64 and dumptruck
# doesn't support them
for key in current_row_as_dict:
if isinstance(current_row_as_dict[key], numpy.float64):
current_row_as_dict[key] = float(current_row_as_dict[key])
rows.append(current_row_as_dict)
return rows
def main():
"""
Dump stock data into scraperwiki.sqlite using pandas.io.data.
"""
# arbitrary start chosen
start = datetime.datetime(2014, 3, 1)
end = datetime.datetime.today()
stock_list = ['TWTR', 'FB']
rows = []
for stock in stock_list:
rows.extend(parse_finance_frame(stock, start, end))
scraperwiki.sqlite.save(data=rows, unique_keys=['Stock', 'Date'])
if __name__ == '__main__':
main()
<commit_msg>Use pandas native saving by forcing date to not be index, and be string<commit_after>
|
#!/usr/bin/env python
import datetime
import sqlite3
import pandas.io.data as web
import pandas.io.sql as sql
def get_stock(stock, start, end):
"""
Return data frame of Yahoo Finance data for stock.
Takes start and end datetimes.
"""
return web.DataReader(stock, 'yahoo', start, end)
def scrape_stock(stock, start, end):
sqlite_db.execute("drop table if exists {};".format(stock))
frame = (get_stock(stock, start, end))
# make Date not an index so it appears in table
frame = frame.reset_index()
# force Date datetime to string
frame[['Date']] = frame[['Date']].applymap(lambda x: x.isoformat())
sql.write_frame(frame, stock, sqlite_db)
def main():
global sqlite_db
sqlite_db = sqlite3.connect("scraperwiki.sqlite")
start = datetime.datetime(2014, 3, 1)
end = datetime.datetime.today()
for ticker in ['TWTR', 'FB']:
scrape_stock(ticker, start, end)
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
import datetime
import scraperwiki
import numpy
import pandas.io.data as web
def get_stock(stock, start, end, service):
"""
Return data frame of finance data for stock.
Takes start and end datetimes, and service name of 'google' or 'yahoo'.
"""
return web.DataReader(stock, service, start, end)
def parse_finance_frame(stock, start, end, service='google'):
"""
Return rows of dicts from a finance data frame for scraperwiki.sqlite.
service can also be 'yahoo', start and end are datetimes.
"""
frame = get_stock(stock, start, end, service)
rows = []
for idx in range(len(frame)):
current_row_as_dict = frame.ix[idx].to_dict()
# have to convert dates because these are Pandas timestamps and
# dumptruck doesn't support them
current_row_as_dict['Date'] = frame.index[idx].to_datetime()
current_row_as_dict['Stock'] = stock
# horrible hack because data values are numpy.float64 and dumptruck
# doesn't support them
for key in current_row_as_dict:
if isinstance(current_row_as_dict[key], numpy.float64):
current_row_as_dict[key] = float(current_row_as_dict[key])
rows.append(current_row_as_dict)
return rows
def main():
"""
Dump stock data into scraperwiki.sqlite using pandas.io.data.
"""
# arbitrary start chosen
start = datetime.datetime(2014, 3, 1)
end = datetime.datetime.today()
stock_list = ['TWTR', 'FB']
rows = []
for stock in stock_list:
rows.extend(parse_finance_frame(stock, start, end))
scraperwiki.sqlite.save(data=rows, unique_keys=['Stock', 'Date'])
if __name__ == '__main__':
main()
Use pandas native saving by forcing date to not be index, and be string
#!/usr/bin/env python
import datetime
import sqlite3
import pandas.io.data as web
import pandas.io.sql as sql
def get_stock(stock, start, end):
"""
Return data frame of Yahoo Finance data for stock.
Takes start and end datetimes.
"""
return web.DataReader(stock, 'yahoo', start, end)
def scrape_stock(stock, start, end):
sqlite_db.execute("drop table if exists {};".format(stock))
frame = (get_stock(stock, start, end))
# make Date not an index so it appears in table
frame = frame.reset_index()
# force Date datetime to string
frame[['Date']] = frame[['Date']].applymap(lambda x: x.isoformat())
sql.write_frame(frame, stock, sqlite_db)
def main():
global sqlite_db
sqlite_db = sqlite3.connect("scraperwiki.sqlite")
start = datetime.datetime(2014, 3, 1)
end = datetime.datetime.today()
for ticker in ['TWTR', 'FB']:
scrape_stock(ticker, start, end)
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python
import datetime
import scraperwiki
import numpy
import pandas.io.data as web
def get_stock(stock, start, end, service):
"""
Return data frame of finance data for stock.
Takes start and end datetimes, and service name of 'google' or 'yahoo'.
"""
return web.DataReader(stock, service, start, end)
def parse_finance_frame(stock, start, end, service='google'):
"""
Return rows of dicts from a finance data frame for scraperwiki.sqlite.
service can also be 'yahoo', start and end are datetimes.
"""
frame = get_stock(stock, start, end, service)
rows = []
for idx in range(len(frame)):
current_row_as_dict = frame.ix[idx].to_dict()
# have to convert dates because these are Pandas timestamps and
# dumptruck doesn't support them
current_row_as_dict['Date'] = frame.index[idx].to_datetime()
current_row_as_dict['Stock'] = stock
# horrible hack because data values are numpy.float64 and dumptruck
# doesn't support them
for key in current_row_as_dict:
if isinstance(current_row_as_dict[key], numpy.float64):
current_row_as_dict[key] = float(current_row_as_dict[key])
rows.append(current_row_as_dict)
return rows
def main():
"""
Dump stock data into scraperwiki.sqlite using pandas.io.data.
"""
# arbitrary start chosen
start = datetime.datetime(2014, 3, 1)
end = datetime.datetime.today()
stock_list = ['TWTR', 'FB']
rows = []
for stock in stock_list:
rows.extend(parse_finance_frame(stock, start, end))
scraperwiki.sqlite.save(data=rows, unique_keys=['Stock', 'Date'])
if __name__ == '__main__':
main()
<commit_msg>Use pandas native saving by forcing date to not be index, and be string<commit_after>#!/usr/bin/env python
import datetime
import sqlite3
import pandas.io.data as web
import pandas.io.sql as sql
def get_stock(stock, start, end):
"""
Return data frame of Yahoo Finance data for stock.
Takes start and end datetimes.
"""
return web.DataReader(stock, 'yahoo', start, end)
def scrape_stock(stock, start, end):
sqlite_db.execute("drop table if exists {};".format(stock))
frame = (get_stock(stock, start, end))
# make Date not an index so it appears in table
frame = frame.reset_index()
# force Date datetime to string
frame[['Date']] = frame[['Date']].applymap(lambda x: x.isoformat())
sql.write_frame(frame, stock, sqlite_db)
def main():
global sqlite_db
sqlite_db = sqlite3.connect("scraperwiki.sqlite")
start = datetime.datetime(2014, 3, 1)
end = datetime.datetime.today()
for ticker in ['TWTR', 'FB']:
scrape_stock(ticker, start, end)
if __name__ == '__main__':
main()
|
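Both pandas.io.data and pandas.io.sql.write_frame used in this record were later removed from pandas; on a current install, the native-saving half of the commit maps onto DataFrame.to_sql. A sketch, assuming frame is any price DataFrame with a DatetimeIndex named Date:
import sqlite3

def save_frame(frame, table, db_path='scraperwiki.sqlite'):
    frame = frame.reset_index()  # Date index -> plain column
    frame['Date'] = frame['Date'].map(lambda ts: ts.isoformat())
    with sqlite3.connect(db_path) as conn:
        frame.to_sql(table, conn, if_exists='replace', index=False)

# e.g. save_frame(twtr_frame, 'TWTR')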
3919d64370825d8931672011af4b99355e52ef63
|
motobot/core_plugins/help.py
|
motobot/core_plugins/help.py
|
from motobot import IRCBot, command, Notice
def get_command_help(bot, command, modifier):
responses = []
func = lambda x: x.type == IRCBot.command_plugin and x.arg.lower() == command.lower()
for plugin in filter(func, bot.plugins):
func = plugin.func
if func.__doc__ is not None:
responses.append((' '.join(func.__doc__.split()), modifier))
return responses
@command('help')
def help_command(bot, database, nick, channel, message, args):
""" Print help messages for the user.
Takes a single argument for a command name.
No arguments gives a generic help message.
"""
response = None
modifier = Notice(nick)
if len(args) <= 1:
default_help = "For help on a specific command use '!help command'."
response = bot.default_help \
if bot.default_help is not None else default_help
response = (response, modifier)
else:
response = get_command_help(bot, args[1], modifier)
if response == []:
response = ("There is no help entry for the command: {}.".format(args[1]), modifier)
return response
|
from motobot import IRCBot, command, Notice
def get_command_help(bot, command):
responses = []
func = lambda x: x.type == IRCBot.command_plugin and x.arg.lower() == command.lower()
for plugin in filter(func, bot.plugins):
func = plugin.func
if func.__doc__ is not None:
responses.append(' '.join(func.__doc__.split()))
return responses
@command('help')
def help_command(bot, database, nick, channel, message, args):
""" Print help messages for the user.
Takes a single argument for a command name.
No arguments gives a generic help message.
"""
response = None
if len(args) <= 1:
default_help = "For help on a specific command use '!help command'."
response = bot.default_help \
if bot.default_help is not None else default_help
else:
response = get_command_help(bot, args[1])
if response == []:
response = "There is no help entry for the command: {}.".format(args[1])
return response, Notice(nick)
|
Update to new response handling
|
Update to new response handling
|
Python
|
mit
|
Motoko11/MotoBot
|
from motobot import IRCBot, command, Notice
def get_command_help(bot, command, modifier):
responses = []
func = lambda x: x.type == IRCBot.command_plugin and x.arg.lower() == command.lower()
for plugin in filter(func, bot.plugins):
func = plugin.func
if func.__doc__ is not None:
responses.append((' '.join(func.__doc__.split()), modifier))
return responses
@command('help')
def help_command(bot, database, nick, channel, message, args):
""" Print help messages for the user.
Takes a single argument for a command name.
No arguments gives a generic help message.
"""
response = None
modifier = Notice(nick)
if len(args) <= 1:
default_help = "For help on a specific command use '!help command'."
response = bot.default_help \
if bot.default_help is not None else default_help
response = (response, modifier)
else:
response = get_command_help(bot, args[1], modifier)
if response == []:
response = ("There is no help entry for the command: {}.".format(args[1]), modifier)
return response
Update to new response handling
|
from motobot import IRCBot, command, Notice
def get_command_help(bot, command):
responses = []
func = lambda x: x.type == IRCBot.command_plugin and x.arg.lower() == command.lower()
for plugin in filter(func, bot.plugins):
func = plugin.func
if func.__doc__ is not None:
responses.append(' '.join(func.__doc__.split()))
return responses
@command('help')
def help_command(bot, database, nick, channel, message, args):
""" Print help messages for the user.
Takes a single argument for a command name.
No arguments gives a generic help message.
"""
response = None
if len(args) <= 1:
default_help = "For help on a specific command use '!help command'."
response = bot.default_help \
if bot.default_help is not None else default_help
else:
response = get_command_help(bot, args[1])
if response == []:
response = "There is no help entry for the command: {}.".format(args[1])
return response, Notice(nick)
|
<commit_before>from motobot import IRCBot, command, Notice
def get_command_help(bot, command, modifier):
responses = []
func = lambda x: x.type == IRCBot.command_plugin and x.arg.lower() == command.lower()
for plugin in filter(func, bot.plugins):
func = plugin.func
if func.__doc__ is not None:
responses.append((' '.join(func.__doc__.split()), modifier))
return responses
@command('help')
def help_command(bot, database, nick, channel, message, args):
""" Print help messages for the user.
Takes a single argument for a command name.
No arguments gives a generic help message.
"""
response = None
modifier = Notice(nick)
if len(args) <= 1:
default_help = "For help on a specific command use '!help command'."
response = bot.default_help \
if bot.default_help is not None else default_help
response = (response, modifier)
else:
response = get_command_help(bot, args[1], modifier)
if response == []:
response = ("There is no help entry for the command: {}.".format(args[1]), modifier)
return response
<commit_msg>Update to new response handling<commit_after>
|
from motobot import IRCBot, command, Notice
def get_command_help(bot, command):
responses = []
func = lambda x: x.type == IRCBot.command_plugin and x.arg.lower() == command.lower()
for plugin in filter(func, bot.plugins):
func = plugin.func
if func.__doc__ is not None:
responses.append(' '.join(func.__doc__.split()))
return responses
@command('help')
def help_command(bot, database, nick, channel, message, args):
""" Print help messages for the user.
Takes a single argument for a command name.
No arguments gives a generic help message.
"""
response = None
if len(args) <= 1:
default_help = "For help on a specific command use '!help command'."
response = bot.default_help \
if bot.default_help is not None else default_help
else:
response = get_command_help(bot, args[1])
if response == []:
response = "There is no help entry for the command: {}.".format(args[1])
return response, Notice(nick)
|
from motobot import IRCBot, command, Notice
def get_command_help(bot, command, modifier):
responses = []
func = lambda x: x.type == IRCBot.command_plugin and x.arg.lower() == command.lower()
for plugin in filter(func, bot.plugins):
func = plugin.func
if func.__doc__ is not None:
responses.append((' '.join(func.__doc__.split()), modifier))
return responses
@command('help')
def help_command(bot, database, nick, channel, message, args):
""" Print help messages for the user.
Takes a single argument for a command name.
No arguments gives a generic help message.
"""
response = None
modifier = Notice(nick)
if len(args) <= 1:
default_help = "For help on a specific command use '!help command'."
response = bot.default_help \
if bot.default_help is not None else default_help
response = (response, modifier)
else:
response = get_command_help(bot, args[1], modifier)
if response == []:
response = ("There is no help entry for the command: {}.".format(args[1]), modifier)
return response
Update to new response handling
from motobot import IRCBot, command, Notice
def get_command_help(bot, command):
responses = []
func = lambda x: x.type == IRCBot.command_plugin and x.arg.lower() == command.lower()
for plugin in filter(func, bot.plugins):
func = plugin.func
if func.__doc__ is not None:
responses.append(' '.join(func.__doc__.split()))
return responses
@command('help')
def help_command(bot, database, nick, channel, message, args):
""" Print help messages for the user.
Takes a single argument for a command name.
No arguments gives a generic help message.
"""
response = None
if len(args) <= 1:
default_help = "For help on a specific command use '!help command'."
response = bot.default_help \
if bot.default_help is not None else default_help
else:
response = get_command_help(bot, args[1])
if response == []:
response = "There is no help entry for the command: {}.".format(args[1])
return response, Notice(nick)
|
<commit_before>from motobot import IRCBot, command, Notice
def get_command_help(bot, command, modifier):
responses = []
func = lambda x: x.type == IRCBot.command_plugin and x.arg.lower() == command.lower()
for plugin in filter(func, bot.plugins):
func = plugin.func
if func.__doc__ is not None:
responses.append((' '.join(func.__doc__.split()), modifier))
return responses
@command('help')
def help_command(bot, database, nick, channel, message, args):
""" Print help messages for the user.
Takes a single argument for a command name.
No arguments gives a generic help message.
"""
response = None
modifier = Notice(nick)
if len(args) <= 1:
default_help = "For help on a specific command use '!help command'."
response = bot.default_help \
if bot.default_help is not None else default_help
response = (response, modifier)
else:
response = get_command_help(bot, args[1], modifier)
if response == []:
response = ("There is no help entry for the command: {}.".format(args[1]), modifier)
return response
<commit_msg>Update to new response handling<commit_after>from motobot import IRCBot, command, Notice
def get_command_help(bot, command):
responses = []
func = lambda x: x.type == IRCBot.command_plugin and x.arg.lower() == command.lower()
for plugin in filter(func, bot.plugins):
func = plugin.func
if func.__doc__ is not None:
responses.append(' '.join(func.__doc__.split()))
return responses
@command('help')
def help_command(bot, database, nick, channel, message, args):
""" Print help messages for the user.
Takes a single argument for a command name.
No arguments gives a generic help message.
"""
response = None
if len(args) <= 1:
default_help = "For help on a specific command use '!help command'."
response = bot.default_help \
if bot.default_help is not None else default_help
else:
response = get_command_help(bot, args[1])
if response == []:
response = "There is no help entry for the command: {}.".format(args[1])
return response, Notice(nick)
|
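The change in this record moves the Notice modifier out of every response tuple and into a single trailing value, so a plugin now returns either a bare response or (responses, modifier). A dispatcher consuming that shape might normalise it roughly as below (a sketch; send stands in for the bot's actual IRC write):
def dispatch(result, send):
    # plugin may return (responses, modifier) or a bare response
    if isinstance(result, tuple):
        responses, modifier = result
    else:
        responses, modifier = result, None
    if not isinstance(responses, list):
        responses = [responses]
    for response in responses:
        if response:  # skip None / empty responses
            send(response, modifier)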
0eb5966ec3261e6c6101b4c9874321a105fb4426
|
dj-resume/urls.py
|
dj-resume/urls.py
|
from django.conf.urls.defaults import patterns, include, url
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = patterns('',
(r'apply/', 'resume.views.cs_apply_handler'),
(r'cs/', 'resume.views.index_handler'),
(r'cap/.*', 'belaylibs.dj_belay.proxyHandler'),
(r'applicant/', 'resume.views.applicant_handler'),
(r'new-account/', 'resume.views.new_account_handler'),
(r'admin/', 'resume.views.admin_handler'),
(r'review/', 'resume.views.review_handler'),
(r'appreview/', 'resume.view.appreview_handler'),
(r'^common.js$', 'lib.py.common_js.common_js_handler')
)
|
from django.conf.urls.defaults import patterns, include, url
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = patterns('',
(r'cs/', 'resume.views.cs_index_handler'),
(r'cap/.*', 'belaylibs.dj_belay.proxyHandler'),
(r'applicant/', 'resume.views.applicant_handler'),
(r'new-account/', 'resume.views.new_account_handler'),
(r'admin/', 'resume.views.admin_handler'),
(r'review/', 'resume.views.review_handler'),
(r'appreview/', 'resume.view.appreview_handler'),
(r'^common.js$', 'lib.py.common_js.common_js_handler'),
(r'generate/', 'resume.generate.generate')
)
|
Make /cs/ point to the correct handler, no more apply handler
|
Make /cs/ point to the correct handler, no more apply handler
|
Python
|
apache-2.0
|
brownplt/k3,brownplt/k3,brownplt/k3
|
from django.conf.urls.defaults import patterns, include, url
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = patterns('',
(r'apply/', 'resume.views.cs_apply_handler'),
(r'cs/', 'resume.views.index_handler'),
(r'cap/.*', 'belaylibs.dj_belay.proxyHandler'),
(r'applicant/', 'resume.views.applicant_handler'),
(r'new-account/', 'resume.views.new_account_handler'),
(r'admin/', 'resume.views.admin_handler'),
(r'review/', 'resume.views.review_handler'),
(r'appreview/', 'resume.view.appreview_handler'),
(r'^common.js$', 'lib.py.common_js.common_js_handler')
)
Make /cs/ point to the correct handler, no more apply handler
|
from django.conf.urls.defaults import patterns, include, url
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = patterns('',
(r'cs/', 'resume.views.cs_index_handler'),
(r'cap/.*', 'belaylibs.dj_belay.proxyHandler'),
(r'applicant/', 'resume.views.applicant_handler'),
(r'new-account/', 'resume.views.new_account_handler'),
(r'admin/', 'resume.views.admin_handler'),
(r'review/', 'resume.views.review_handler'),
(r'appreview/', 'resume.view.appreview_handler'),
(r'^common.js$', 'lib.py.common_js.common_js_handler'),
(r'generate/', 'resume.generate.generate')
)
|
<commit_before>from django.conf.urls.defaults import patterns, include, url
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = patterns('',
(r'apply/', 'resume.views.cs_apply_handler'),
(r'cs/', 'resume.views.index_handler'),
(r'cap/.*', 'belaylibs.dj_belay.proxyHandler'),
(r'applicant/', 'resume.views.applicant_handler'),
(r'new-account/', 'resume.views.new_account_handler'),
(r'admin/', 'resume.views.admin_handler'),
(r'review/', 'resume.views.review_handler'),
(r'appreview/', 'resume.view.appreview_handler'),
(r'^common.js$', 'lib.py.common_js.common_js_handler')
)
<commit_msg>Make /cs/ point to the correct handler, no more apply handler<commit_after>
|
from django.conf.urls.defaults import patterns, include, url
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = patterns('',
(r'cs/', 'resume.views.cs_index_handler'),
(r'cap/.*', 'belaylibs.dj_belay.proxyHandler'),
(r'applicant/', 'resume.views.applicant_handler'),
(r'new-account/', 'resume.views.new_account_handler'),
(r'admin/', 'resume.views.admin_handler'),
(r'review/', 'resume.views.review_handler'),
(r'appreview/', 'resume.view.appreview_handler'),
(r'^common.js$', 'lib.py.common_js.common_js_handler'),
(r'generate/', 'resume.generate.generate')
)
|
from django.conf.urls.defaults import patterns, include, url
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = patterns('',
(r'apply/', 'resume.views.cs_apply_handler'),
(r'cs/', 'resume.views.index_handler'),
(r'cap/.*', 'belaylibs.dj_belay.proxyHandler'),
(r'applicant/', 'resume.views.applicant_handler'),
(r'new-account/', 'resume.views.new_account_handler'),
(r'admin/', 'resume.views.admin_handler'),
(r'review/', 'resume.views.review_handler'),
(r'appreview/', 'resume.view.appreview_handler'),
(r'^common.js$', 'lib.py.common_js.common_js_handler')
)
Make /cs/ point to the correct handler, no more apply handler
from django.conf.urls.defaults import patterns, include, url
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = patterns('',
(r'cs/', 'resume.views.cs_index_handler'),
(r'cap/.*', 'belaylibs.dj_belay.proxyHandler'),
(r'applicant/', 'resume.views.applicant_handler'),
(r'new-account/', 'resume.views.new_account_handler'),
(r'admin/', 'resume.views.admin_handler'),
(r'review/', 'resume.views.review_handler'),
(r'appreview/', 'resume.view.appreview_handler'),
(r'^common.js$', 'lib.py.common_js.common_js_handler'),
(r'generate/', 'resume.generate.generate')
)
|
<commit_before>from django.conf.urls.defaults import patterns, include, url
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = patterns('',
(r'apply/', 'resume.views.cs_apply_handler'),
(r'cs/', 'resume.views.index_handler'),
(r'cap/.*', 'belaylibs.dj_belay.proxyHandler'),
(r'applicant/', 'resume.views.applicant_handler'),
(r'new-account/', 'resume.views.new_account_handler'),
(r'admin/', 'resume.views.admin_handler'),
(r'review/', 'resume.views.review_handler'),
(r'appreview/', 'resume.view.appreview_handler'),
(r'^common.js$', 'lib.py.common_js.common_js_handler')
)
<commit_msg>Make /cs/ point to the correct handler, no more apply handler<commit_after>from django.conf.urls.defaults import patterns, include, url
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = patterns('',
(r'cs/', 'resume.views.cs_index_handler'),
(r'cap/.*', 'belaylibs.dj_belay.proxyHandler'),
(r'applicant/', 'resume.views.applicant_handler'),
(r'new-account/', 'resume.views.new_account_handler'),
(r'admin/', 'resume.views.admin_handler'),
(r'review/', 'resume.views.review_handler'),
(r'appreview/', 'resume.view.appreview_handler'),
(r'^common.js$', 'lib.py.common_js.common_js_handler'),
(r'generate/', 'resume.generate.generate')
)
|
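The patterns() helper and dotted-string view references in this record were deprecated in Django 1.8 and removed in 1.10; on current Django the same table is a plain list of entries with imported callables. A rough modern equivalent, with the regexes and view names taken from the record:
from django.urls import re_path
from belaylibs.dj_belay import proxyHandler
from resume import generate, views

urlpatterns = [
    re_path(r'cs/', views.cs_index_handler),
    re_path(r'cap/.*', proxyHandler),
    re_path(r'applicant/', views.applicant_handler),
    re_path(r'new-account/', views.new_account_handler),
    re_path(r'admin/', views.admin_handler),
    re_path(r'review/', views.review_handler),
    re_path(r'generate/', generate.generate),
]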
9be2846e408699308798b698754634ce7f370710
|
openedx/stanford/cms/urls.py
|
openedx/stanford/cms/urls.py
|
from django.conf import settings
from django.conf.urls import url
import contentstore.views
urlpatterns = [
url(
r'^settings/send_test_enrollment_email/{}$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.send_test_enrollment_email,
name='send_test_enrollment_email',
),
url(
r'^utilities/{}$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.utility_handler,
),
url(
r'^utility/captions/{}$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.utility_captions_handler,
),
url(
r'^utility/bulksettings/{}$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.utility_bulksettings_handler,
),
]
if settings.SHIB_ONLY_SITE:
urlpatterns += [
url(
r'^backup_signup$',
contentstore.views.signup,
name='backup_signup',
),
url(
r'^backup_signin$',
contentstore.views.login_page,
name='backup_login',
),
]
|
from django.conf import settings
from django.conf.urls import url
import contentstore.views
urlpatterns = [
url(
r'^settings/send_test_enrollment_email/{}$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.send_test_enrollment_email,
name='send_test_enrollment_email',
),
url(
r'^utilities/{}$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.utility_handler,
name='utility_handler',
),
url(
r'^utility/captions/{}$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.utility_captions_handler,
name='utility_captions_handler',
),
url(
r'^utility/bulksettings/{}$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.utility_bulksettings_handler,
name='utility_bulksettings_handler',
),
]
if settings.SHIB_ONLY_SITE:
urlpatterns += [
url(
r'^backup_signup$',
contentstore.views.signup,
name='backup_signup',
),
url(
r'^backup_signin$',
contentstore.views.login_page,
name='backup_login',
),
]
|
Add names to stanford view handlers
|
Add names to stanford view handlers
|
Python
|
agpl-3.0
|
Stanford-Online/edx-platform,Stanford-Online/edx-platform,Stanford-Online/edx-platform,Stanford-Online/edx-platform
|
from django.conf import settings
from django.conf.urls import url
import contentstore.views
urlpatterns = [
url(
r'^settings/send_test_enrollment_email/{}$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.send_test_enrollment_email,
name='send_test_enrollment_email',
),
url(
r'^utilities/{}$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.utility_handler,
),
url(
r'^utility/captions/{}$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.utility_captions_handler,
),
url(
r'^utility/bulksettings/{}$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.utility_bulksettings_handler,
),
]
if settings.SHIB_ONLY_SITE:
urlpatterns += [
url(
r'^backup_signup$',
contentstore.views.signup,
name='backup_signup',
),
url(
r'^backup_signin$',
contentstore.views.login_page,
name='backup_login',
),
]
Add names to stanford view handlers
|
from django.conf import settings
from django.conf.urls import url
import contentstore.views
urlpatterns = [
url(
r'^settings/send_test_enrollment_email/{}$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.send_test_enrollment_email,
name='send_test_enrollment_email',
),
url(
r'^utilities/{}$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.utility_handler,
name='utility_handler',
),
url(
r'^utility/captions/{}$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.utility_captions_handler,
name='utility_captions_handler',
),
url(
r'^utility/bulksettings/{}$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.utility_bulksettings_handler,
name='utility_bulksettings_handler',
),
]
if settings.SHIB_ONLY_SITE:
urlpatterns += [
url(
r'^backup_signup$',
contentstore.views.signup,
name='backup_signup',
),
url(
r'^backup_signin$',
contentstore.views.login_page,
name='backup_login',
),
]
|
<commit_before>from django.conf import settings
from django.conf.urls import url
import contentstore.views
urlpatterns = [
url(
r'^settings/send_test_enrollment_email/{}$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.send_test_enrollment_email,
name='send_test_enrollment_email',
),
url(
r'^utilities/{}$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.utility_handler,
),
url(
r'^utility/captions/{}$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.utility_captions_handler,
),
url(
r'^utility/bulksettings/{}$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.utility_bulksettings_handler,
),
]
if settings.SHIB_ONLY_SITE:
urlpatterns += [
url(
r'^backup_signup$',
contentstore.views.signup,
name='backup_signup',
),
url(
r'^backup_signin$',
contentstore.views.login_page,
name='backup_login',
),
]
<commit_msg>Add names to stanford view handlers<commit_after>
|
from django.conf import settings
from django.conf.urls import url
import contentstore.views
urlpatterns = [
url(
r'^settings/send_test_enrollment_email/{}$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.send_test_enrollment_email,
name='send_test_enrollment_email',
),
url(
r'^utilities/{}$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.utility_handler,
name='utility_handler',
),
url(
r'^utility/captions/{}$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.utility_captions_handler,
name='utility_captions_handler',
),
url(
r'^utility/bulksettings/{}$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.utility_bulksettings_handler,
name='utility_bulksettings_handler',
),
]
if settings.SHIB_ONLY_SITE:
urlpatterns += [
url(
r'^backup_signup$',
contentstore.views.signup,
name='backup_signup',
),
url(
r'^backup_signin$',
contentstore.views.login_page,
name='backup_login',
),
]
|
from django.conf import settings
from django.conf.urls import url
import contentstore.views
urlpatterns = [
url(
r'^settings/send_test_enrollment_email/{}$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.send_test_enrollment_email,
name='send_test_enrollment_email',
),
url(
r'^utilities/{}$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.utility_handler,
),
url(
r'^utility/captions/{}$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.utility_captions_handler,
),
url(
r'^utility/bulksettings/{}$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.utility_bulksettings_handler,
),
]
if settings.SHIB_ONLY_SITE:
urlpatterns += [
url(
r'^backup_signup$',
contentstore.views.signup,
name='backup_signup',
),
url(
r'^backup_signin$',
contentstore.views.login_page,
name='backup_login',
),
]
Add names to stanford view handlersfrom django.conf import settings
from django.conf.urls import url
import contentstore.views
urlpatterns = [
url(
r'^settings/send_test_enrollment_email/{}$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.send_test_enrollment_email,
name='send_test_enrollment_email',
),
url(
r'^utilities/{}$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.utility_handler,
name='utility_handler',
),
url(
r'^utility/captions/{}$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.utility_captions_handler,
name='utility_captions_handler',
),
url(
r'^utility/bulksettings/{}$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.utility_bulksettings_handler,
name='utility_bulksettings_handler',
),
]
if settings.SHIB_ONLY_SITE:
urlpatterns += [
url(
r'^backup_signup$',
contentstore.views.signup,
name='backup_signup',
),
url(
r'^backup_signin$',
contentstore.views.login_page,
name='backup_login',
),
]
|
<commit_before>from django.conf import settings
from django.conf.urls import url
import contentstore.views
urlpatterns = [
url(
r'^settings/send_test_enrollment_email/{}$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.send_test_enrollment_email,
name='send_test_enrollment_email',
),
url(
r'^utilities/{}$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.utility_handler,
),
url(
r'^utility/captions/{}$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.utility_captions_handler,
),
url(
r'^utility/bulksettings/{}$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.utility_bulksettings_handler,
),
]
if settings.SHIB_ONLY_SITE:
urlpatterns += [
url(
r'^backup_signup$',
contentstore.views.signup,
name='backup_signup',
),
url(
r'^backup_signin$',
contentstore.views.login_page,
name='backup_login',
),
]
<commit_msg>Add names to stanford view handlers<commit_after>from django.conf import settings
from django.conf.urls import url
import contentstore.views
urlpatterns = [
url(
r'^settings/send_test_enrollment_email/{}$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.send_test_enrollment_email,
name='send_test_enrollment_email',
),
url(
r'^utilities/{}$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.utility_handler,
name='utility_handler',
),
url(
r'^utility/captions/{}$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.utility_captions_handler,
name='utility_captions_handler',
),
url(
r'^utility/bulksettings/{}$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.utility_bulksettings_handler,
name='utility_bulksettings_handler',
),
]
if settings.SHIB_ONLY_SITE:
urlpatterns += [
url(
r'^backup_signup$',
contentstore.views.signup,
name='backup_signup',
),
url(
r'^backup_signin$',
contentstore.views.login_page,
name='backup_login',
),
]
|
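The point of the names added in this commit is reversibility: a named pattern can be resolved from code or templates instead of hard-coding the URL. A sketch (the kwargs key depends on the named group inside settings.COURSE_KEY_PATTERN, so course_key_string here is an assumption):
from django.urls import reverse  # django.core.urlresolvers on older Django

# kwargs key must match the named group in COURSE_KEY_PATTERN
url = reverse(
    'utility_captions_handler',
    kwargs={'course_key_string': 'course-v1:Org+Course+Run'},
)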
89526f9af257096d2253b00b72d0cea1493fec52
|
django_prices_openexchangerates/management/commands/update_exchange_rates.py
|
django_prices_openexchangerates/management/commands/update_exchange_rates.py
|
from django.core.management.base import BaseCommand
from ...tasks import update_conversion_rates, create_conversion_dates
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument(
'--all',
action='store_true',
dest='all_currencies',
default=False,
help='Create entries for all currencies')
def handle(self, *args, **options):
if options['all_currencies']:
all_rates = create_conversion_dates()
else:
all_rates = update_conversion_rates()
for conversion_rate in all_rates:
self.stdout.write('%s' % (conversion_rate, ))
|
from django.core.management.base import BaseCommand
from ...tasks import update_conversion_rates, create_conversion_rates
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument(
'--all',
action='store_true',
dest='all_currencies',
default=False,
help='Create entries for all currencies')
def handle(self, *args, **options):
if options['all_currencies']:
all_rates = create_conversion_rates()
else:
all_rates = update_conversion_rates()
for conversion_rate in all_rates:
self.stdout.write('%s' % (conversion_rate, ))
|
Fix typo create_conversion_rates in commands
|
Fix typo create_conversion_rates in commands
|
Python
|
bsd-3-clause
|
mirumee/django-prices-openexchangerates
|
from django.core.management.base import BaseCommand
from ...tasks import update_conversion_rates, create_conversion_dates
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument(
'--all',
action='store_true',
dest='all_currencies',
default=False,
help='Create entries for all currencies')
def handle(self, *args, **options):
if options['all_currencies']:
all_rates = create_conversion_dates()
else:
all_rates = update_conversion_rates()
for conversion_rate in all_rates:
self.stdout.write('%s' % (conversion_rate, ))
Fix typo create_conversion_rates in commands
|
from django.core.management.base import BaseCommand
from ...tasks import update_conversion_rates, create_conversion_rates
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument(
'--all',
action='store_true',
dest='all_currencies',
default=False,
help='Create entries for all currencies')
def handle(self, *args, **options):
if options['all_currencies']:
all_rates = create_conversion_rates()
else:
all_rates = update_conversion_rates()
for conversion_rate in all_rates:
self.stdout.write('%s' % (conversion_rate, ))
|
<commit_before>from django.core.management.base import BaseCommand
from ...tasks import update_conversion_rates, create_conversion_dates
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument(
'--all',
action='store_true',
dest='all_currencies',
default=False,
help='Create entries for all currencies')
def handle(self, *args, **options):
if options['all_currencies']:
all_rates = create_conversion_dates()
else:
all_rates = update_conversion_rates()
for conversion_rate in all_rates:
self.stdout.write('%s' % (conversion_rate, ))
<commit_msg>Fix typo create_conversion_rates in commands<commit_after>
|
from django.core.management.base import BaseCommand
from ...tasks import update_conversion_rates, create_conversion_rates
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument(
'--all',
action='store_true',
dest='all_currencies',
default=False,
help='Create entries for all currencies')
def handle(self, *args, **options):
if options['all_currencies']:
all_rates = create_conversion_rates()
else:
all_rates = update_conversion_rates()
for conversion_rate in all_rates:
self.stdout.write('%s' % (conversion_rate, ))
|
from django.core.management.base import BaseCommand
from ...tasks import update_conversion_rates, create_conversion_dates
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument(
'--all',
action='store_true',
dest='all_currencies',
default=False,
help='Create entries for all currencies')
def handle(self, *args, **options):
if options['all_currencies']:
all_rates = create_conversion_dates()
else:
all_rates = update_conversion_rates()
for conversion_rate in all_rates:
self.stdout.write('%s' % (conversion_rate, ))
Fix typo create_conversion_rates in commands
from django.core.management.base import BaseCommand
from ...tasks import update_conversion_rates, create_conversion_rates
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument(
'--all',
action='store_true',
dest='all_currencies',
default=False,
help='Create entries for all currencies')
def handle(self, *args, **options):
if options['all_currencies']:
all_rates = create_conversion_rates()
else:
all_rates = update_conversion_rates()
for conversion_rate in all_rates:
self.stdout.write('%s' % (conversion_rate, ))
|
<commit_before>from django.core.management.base import BaseCommand
from ...tasks import update_conversion_rates, create_conversion_dates
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument(
'--all',
action='store_true',
dest='all_currencies',
default=False,
help='Create entries for all currencies')
def handle(self, *args, **options):
if options['all_currencies']:
all_rates = create_conversion_dates()
else:
all_rates = update_conversion_rates()
for conversion_rate in all_rates:
self.stdout.write('%s' % (conversion_rate, ))
<commit_msg>Fix typo create_conversion_rates in commands<commit_after>from django.core.management.base import BaseCommand
from ...tasks import update_conversion_rates, create_conversion_rates
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument(
'--all',
action='store_true',
dest='all_currencies',
default=False,
help='Create entries for all currencies')
def handle(self, *args, **options):
if options['all_currencies']:
all_rates = create_conversion_rates()
else:
all_rates = update_conversion_rates()
for conversion_rate in all_rates:
self.stdout.write('%s' % (conversion_rate, ))
|
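For completeness, a management command like this one is driven either from the shell (python manage.py update_exchange_rates --all) or programmatically through call_command, where options are passed by their dest; a sketch:
from django.core.management import call_command

call_command('update_exchange_rates')                       # update existing rates
call_command('update_exchange_rates', all_currencies=True)  # same as --all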
d74b524cec824e77adbcf9cc23e28a6efba02985
|
takePicture.py
|
takePicture.py
|
import picamera as p
import os
import time
os.chdir('/home/pi/Desktop')
cam = p.PiCamera()
cam.resolution = (320,240)
cam.hflip = True
cam.vflip = True
x = 0
while x < 15:
img = cam.capture('gregTest.jpg')
time.sleep(.25)
x +=1
exit()
|
import picamera as p
import os
import time
os.chdir('/home/pi/Desktop')
cam = p.PiCamera()
cam.resolution = (320,240)
cam.hflip = True
cam.vflip = True
x = 0
while x < 50:
img = cam.capture('tempGregTest.jpg')
os.unlink('gregTest.jpg')
os.rename('tempGregTest.jpg','gregTest.jpg')
time.sleep(.25)
x +=1
exit()
|
Add temp pic file sequence to takepicture file
|
Add temp pic file sequence to takepicture file
|
Python
|
mit
|
jwarshaw/RaspberryDrive
|
import picamera as p
import os
import time
os.chdir('/home/pi/Desktop')
cam = p.PiCamera()
cam.resolution = (320,240)
cam.hflip = True
cam.vflip = True
x = 0
while x < 15:
img = cam.capture('gregTest.jpg')
time.sleep(.25)
x +=1
exit()
Add temp pic file sequence to takepicture file
|
import picamera as p
import os
import time
os.chdir('/home/pi/Desktop')
cam = p.PiCamera()
cam.resolution = (320,240)
cam.hflip = True
cam.vflip = True
x = 0
while x < 50:
img = cam.capture('tempGregTest.jpg')
os.unlink('gregTest.jpg')
os.rename('tempGregTest.jpg','gregTest.jpg')
time.sleep(.25)
x +=1
exit()
|
<commit_before>import picamera as p
import os
import time
os.chdir('/home/pi/Desktop')
cam = p.PiCamera()
cam.resolution = (320,240)
cam.hflip = True
cam.vflip = True
x = 0
while x < 15:
img = cam.capture('gregTest.jpg')
time.sleep(.25)
x +=1
exit()
<commit_msg>Add temp pic file sequence to takepicture file<commit_after>
|
import picamera as p
import os
import time
os.chdir('/home/pi/Desktop')
cam = p.PiCamera()
cam.resolution = (320,240)
cam.hflip = True
cam.vflip = True
x = 0
while x < 50:
img = cam.capture('tempGregTest.jpg')
os.unlink('gregTest.jpg')
os.rename('tempGregTest.jpg','gregTest.jpg')
time.sleep(.25)
x +=1
exit()
|
import picamera as p
import os
import time
os.chdir('/home/pi/Desktop')
cam = p.PiCamera()
cam.resolution = (320,240)
cam.hflip = True
cam.vflip = True
x = 0
while x < 15:
img = cam.capture('gregTest.jpg')
time.sleep(.25)
x +=1
exit()
Add temp pic file sequence to takepicture fileimport picamera as p
import os
import time
os.chdir('/home/pi/Desktop')
cam = p.PiCamera()
cam.resolution = (320,240)
cam.hflip = True
cam.vflip = True
x = 0
while x < 50:
img = cam.capture('tempGregTest.jpg')
os.unlink('gregTest.jpg')
os.rename('tempGregTest.jpg','gregTest.jpg')
time.sleep(.25)
x +=1
exit()
|
<commit_before>import picamera as p
import os
import time
os.chdir('/home/pi/Desktop')
cam = p.PiCamera()
cam.resolution = (320,240)
cam.hflip = True
cam.vflip = True
x = 0
while x < 15:
img = cam.capture('gregTest.jpg')
time.sleep(.25)
x +=1
exit()
<commit_msg>Add temp pic file sequence to takepicture file<commit_after>import picamera as p
import os
import time
os.chdir('/home/pi/Desktop')
cam = p.PiCamera()
cam.resolution = (320,240)
cam.hflip = True
cam.vflip = True
x = 0
while x < 50:
img = cam.capture('tempGregTest.jpg')
os.unlink('gregTest.jpg')
os.rename('tempGregTest.jpg','gregTest.jpg')
time.sleep(.25)
x +=1
exit()
|
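One caveat with the unlink-plus-rename pair above: it is not atomic, and os.unlink raises FileNotFoundError if gregTest.jpg does not already exist from a previous run. A sketch of the same publish-by-rename idea using os.replace, which overwrites the destination in a single atomic step on POSIX:
import os
def publish(tmp_path='tempGregTest.jpg', final_path='gregTest.jpg'):
    # os.replace renames tmp_path onto final_path and silently overwrites any
    # existing file, so no separate unlink is needed and readers never observe
    # a missing or half-written image.
    os.replace(tmp_path, final_path)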
0c5abad8259cccfd1ce50b27a124089d9ea946dd
|
copr_build.py
|
copr_build.py
|
#!/usr/bin/env python3
import json, os, sys
import requests
api_url = "https://copr.fedorainfracloud.org/api_2"
api_login = os.environ["copr_login"]
api_token = os.environ["copr_token"]
r = requests.get("%s/projects/%s/chroots" % (api_url, os.environ["copr_projectid"])).json()
chroots = []
for i in r.get("chroots"):
chroots.append(i.get("chroot").get("name"))
metadata = {
'chroots': chroots,
'project_id': int(os.environ["copr_projectid"]),
}
files = {
"srpm": (os.path.basename(sys.argv[1]), open(sys.argv[1], 'rb'), 'application/x-rpm'),
"metadata": ('', json.dumps(metadata))
}
r = requests.post("%s/builds" % api_url, auth=(api_login, api_token), files=files)
|
#!/usr/bin/env python3
import os
import sys
import requests
api_url = "https://copr.fedorainfracloud.org/api_2"
api_login = os.environ["copr_login"]
api_token = os.environ["copr_token"]
project_id = int(os.environ["copr_projectid"])
r = requests.get("%s/projects/%s/chroots" % (api_url, project_id))
if not r.ok:
print(r.json().get("message", "Error returned, but no message"))
sys.exit(1)
chroots = [i.get("chroot").get("name") for i in r.json().get("chroots")]
gh_url = "https://api.github.com/repos/{}/{}/releases/latest".format(
os.environ["CIRCLE_PROJECT_USERNAME"],
os.environ["CIRCLE_PROJECT_REPONAME"]
)
gh = requests.get(gh_url)
if not gh.ok:
print("Failed to fetch latest Github release")
print(gh.json())
sys.exit(1)
assets = gh.json().get("assets")
if len(assets) > 1:
print("More than 1 asset uploaded to Github, unexpected")
sys.exit(1)
asset = assets[0].get("browser_download_url")
if not asset.endswith(".src.rpm"):
print("Github asset is not a .src.rpm")
sys.exit(1)
metadata = {
'chroots': chroots,
'project_id': project_id,
'srpm_url': asset,
}
r = requests.post("%s/builds" % api_url,
auth=(api_login, api_token),
json=metadata)
if r.status_code != 201:
print(r.json().get("message", "Error returned, but no message"))
sys.exit(1)
print("Build started at {}".format(r.headers["Location"]))
|
Fix copr build trigger script
|
Fix copr build trigger script
|
Python
|
mit
|
kyl191/nginx-pagespeed,kyl191/nginx-pagespeed,kyl191/nginx-pagespeed
|
#!/usr/bin/env python3
import json, os, sys
import requests
api_url = "https://copr.fedorainfracloud.org/api_2"
api_login = os.environ["copr_login"]
api_token = os.environ["copr_token"]
r = requests.get("%s/projects/%s/chroots" % (api_url, os.environ["copr_projectid"])).json()
chroots = []
for i in r.get("chroots"):
chroots.append(i.get("chroot").get("name"))
metadata = {
'chroots': chroots,
'project_id': int(os.environ["copr_projectid"]),
}
files = {
"srpm": (os.path.basename(sys.argv[1]), open(sys.argv[1], 'rb'), 'application/x-rpm'),
"metadata": ('', json.dumps(metadata))
}
r = requests.post("%s/builds" % api_url, auth=(api_login, api_token), files=files)
Fix copr build trigger script
|
#!/usr/bin/env python3
import os
import sys
import requests
api_url = "https://copr.fedorainfracloud.org/api_2"
api_login = os.environ["copr_login"]
api_token = os.environ["copr_token"]
project_id = int(os.environ["copr_projectid"])
r = requests.get("%s/projects/%s/chroots" % (api_url, project_id))
if not r.ok:
print(r.json().get("message", "Error returned, but no message"))
sys.exit(1)
chroots = [i.get("chroot").get("name") for i in r.json().get("chroots")]
gh_url = "https://api.github.com/repos/{}/{}/releases/latest".format(
os.environ["CIRCLE_PROJECT_USERNAME"],
os.environ["CIRCLE_PROJECT_REPONAME"]
)
gh = requests.get(gh_url)
if not gh.ok:
print("Failed to fetch latest Github release")
print(gh.json())
sys.exit(1)
assets = gh.json().get("assets")
if len(assets) > 1:
print("More than 1 asset uploaded to Github, unexpected")
sys.exit(1)
asset = assets[0].get("browser_download_url")
if not asset.endswith(".src.rpm"):
print("Github asset is not a .src.rpm")
sys.exit(1)
metadata = {
'chroots': chroots,
'project_id': project_id,
'srpm_url': asset,
}
r = requests.post("%s/builds" % api_url,
auth=(api_login, api_token),
json=metadata)
if r.status_code != 201:
print(r.json().get("message", "Error returned, but no message"))
sys.exit(1)
print("Build started at {}".format(r.headers["Location"]))
|
<commit_before>#!/usr/bin/env python3
import json, os, sys
import requests
api_url = "https://copr.fedorainfracloud.org/api_2"
api_login = os.environ["copr_login"]
api_token = os.environ["copr_token"]
r = requests.get("%s/projects/%s/chroots" % (api_url, os.environ["copr_projectid"])).json()
chroots = []
for i in r.get("chroots"):
chroots.append(i.get("chroot").get("name"))
metadata = {
'chroots': chroots,
'project_id': int(os.environ["copr_projectid"]),
}
files = {
"srpm": (os.path.basename(sys.argv[1]), open(sys.argv[1], 'rb'), 'application/x-rpm'),
"metadata": ('', json.dumps(metadata))
}
r = requests.post("%s/builds" % api_url, auth=(api_login, api_token), files=files)
<commit_msg>Fix copr build trigger script<commit_after>
|
#!/usr/bin/env python3
import os
import sys
import requests
api_url = "https://copr.fedorainfracloud.org/api_2"
api_login = os.environ["copr_login"]
api_token = os.environ["copr_token"]
project_id = int(os.environ["copr_projectid"])
r = requests.get("%s/projects/%s/chroots" % (api_url, project_id))
if not r.ok:
print(r.json().get("message", "Error returned, but no message"))
sys.exit(1)
chroots = [i.get("chroot").get("name") for i in r.json().get("chroots")]
gh_url = "https://api.github.com/repos/{}/{}/releases/latest".format(
os.environ["CIRCLE_PROJECT_USERNAME"],
os.environ["CIRCLE_PROJECT_REPONAME"]
)
gh = requests.get(gh_url)
if not gh.ok:
print("Failed to fetch latest Github release")
print(gh.json())
sys.exit(1)
assets = gh.json().get("assets")
if len(assets) > 1:
print("More than 1 asset uploaded to Github, unexpected")
sys.exit(1)
asset = assets[0].get("browser_download_url")
if not asset.endswith(".src.rpm"):
print("Github asset is not a .src.rpm")
sys.exit(1)
metadata = {
'chroots': chroots,
'project_id': project_id,
'srpm_url': asset,
}
r = requests.post("%s/builds" % api_url,
auth=(api_login, api_token),
json=metadata)
if r.status_code != 201:
print(r.json().get("message", "Error returned, but no message"))
sys.exit(1)
print("Build started at {}".format(r.headers["Location"]))
|
#!/usr/bin/env python3
import json, os, sys
import requests
api_url = "https://copr.fedorainfracloud.org/api_2"
api_login = os.environ["copr_login"]
api_token = os.environ["copr_token"]
r = requests.get("%s/projects/%s/chroots" % (api_url, os.environ["copr_projectid"])).json()
chroots = []
for i in r.get("chroots"):
chroots.append(i.get("chroot").get("name"))
metadata = {
'chroots': chroots,
'project_id': int(os.environ["copr_projectid"]),
}
files = {
"srpm": (os.path.basename(sys.argv[1]), open(sys.argv[1], 'rb'), 'application/x-rpm'),
"metadata": ('', json.dumps(metadata))
}
r = requests.post("%s/builds" % api_url, auth=(api_login, api_token), files=files)
Fix copr build trigger script#!/usr/bin/env python3
import os
import sys
import requests
api_url = "https://copr.fedorainfracloud.org/api_2"
api_login = os.environ["copr_login"]
api_token = os.environ["copr_token"]
project_id = int(os.environ["copr_projectid"])
r = requests.get("%s/projects/%s/chroots" % (api_url, project_id))
if not r.ok:
print(r.json().get("message", "Error returned, but no message"))
sys.exit(1)
chroots = [i.get("chroot").get("name") for i in r.json().get("chroots")]
gh_url = "https://api.github.com/repos/{}/{}/releases/latest".format(
os.environ["CIRCLE_PROJECT_USERNAME"],
os.environ["CIRCLE_PROJECT_REPONAME"]
)
gh = requests.get(gh_url)
if not gh.ok:
print("Failed to fetch latest Github release")
print(gh.json())
sys.exit(1)
assets = gh.json().get("assets")
if len(assets) > 1:
print("More than 1 asset uploaded to Github, unexpected")
sys.exit(1)
asset = assets[0].get("browser_download_url")
if not asset.endswith(".src.rpm"):
print("Github asset is not a .src.rpm")
sys.exit(1)
metadata = {
'chroots': chroots,
'project_id': project_id,
'srpm_url': asset,
}
r = requests.post("%s/builds" % api_url,
auth=(api_login, api_token),
json=metadata)
if r.status_code != 201:
print(r.json().get("message", "Error returned, but no message"))
sys.exit(1)
print("Build started at {}".format(r.headers["Location"]))
|
<commit_before>#!/usr/bin/env python3
import json, os, sys
import requests
api_url = "https://copr.fedorainfracloud.org/api_2"
api_login = os.environ["copr_login"]
api_token = os.environ["copr_token"]
r = requests.get("%s/projects/%s/chroots" % (api_url, os.environ["copr_projectid"])).json()
chroots = []
for i in r.get("chroots"):
chroots.append(i.get("chroot").get("name"))
metadata = {
'chroots': chroots,
'project_id': int(os.environ["copr_projectid"]),
}
files = {
"srpm": (os.path.basename(sys.argv[1]), open(sys.argv[1], 'rb'), 'application/x-rpm'),
"metadata": ('', json.dumps(metadata))
}
r = requests.post("%s/builds" % api_url, auth=(api_login, api_token), files=files)
<commit_msg>Fix copr build trigger script<commit_after>#!/usr/bin/env python3
import os
import sys
import requests
api_url = "https://copr.fedorainfracloud.org/api_2"
api_login = os.environ["copr_login"]
api_token = os.environ["copr_token"]
project_id = int(os.environ["copr_projectid"])
r = requests.get("%s/projects/%s/chroots" % (api_url, project_id))
if not r.ok:
print(r.json().get("message", "Error returned, but no message"))
sys.exit(1)
chroots = [i.get("chroot").get("name") for i in r.json().get("chroots")]
gh_url = "https://api.github.com/repos/{}/{}/releases/latest".format(
os.environ["CIRCLE_PROJECT_USERNAME"],
os.environ["CIRCLE_PROJECT_REPONAME"]
)
gh = requests.get(gh_url)
if not gh.ok:
print("Failed to fetch latest Github release")
print(gh.json())
sys.exit(1)
assets = gh.json().get("assets")
if len(assets) > 1:
print("More than 1 asset uploaded to Github, unexpected")
sys.exit(1)
asset = assets[0].get("browser_download_url")
if not asset.endswith(".src.rpm"):
print("Github asset is not a .src.rpm")
sys.exit(1)
metadata = {
'chroots': chroots,
'project_id': project_id,
'srpm_url': asset,
}
r = requests.post("%s/builds" % api_url,
auth=(api_login, api_token),
json=metadata)
if r.status_code != 201:
print(r.json().get("message", "Error returned, but no message"))
sys.exit(1)
print("Build started at {}".format(r.headers["Location"]))
|
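The asset-selection rule in the fixed script can also be factored into a pure function and unit-tested without touching the GitHub API; this is a sketch, not part of the original script, and the payload shape mirrors GitHub's /releases/latest response.
def pick_srpm_url(release):
    # Expect exactly one uploaded asset and require it to be a source RPM;
    # checking != 1 also catches the zero-asset case, which the original
    # script would hit as an IndexError.
    assets = release.get("assets", [])
    if len(assets) != 1:
        raise ValueError("expected exactly 1 asset, got %d" % len(assets))
    url = assets[0]["browser_download_url"]
    if not url.endswith(".src.rpm"):
        raise ValueError("asset is not a .src.rpm: %s" % url)
    return url
# Example: pick_srpm_url({"assets": [{"browser_download_url": "pkg-1.src.rpm"}]})
# returns "pkg-1.src.rpm".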
bff8d72c83c7f8a9321e4a065daa621c9e7d0539
|
pipeline_dart2js/compiler.py
|
pipeline_dart2js/compiler.py
|
from os.path import dirname
from django.conf import settings
from pipeline.compilers import SubProcessCompiler
class Dart2jsCompiler(SubProcessCompiler):
output_extension = 'js'
def match_file(self, filename):
return filename.endswith('.dart')
def compile_file(self, infile, outfile, outdated=False, force=False):
if not outdated and not force:
            return # No need to recompile the file
command = "%s %s --out=%s %s" % (
getattr(settings, 'PIPELINE_DART2JS_BINARY', '/usr/bin/env dart2js'),
getattr(settings, 'PIPELINE_DART2JS_ARGUMENTS', ''),
outfile,
infile,
)
print(command)
return self.execute_command(command, cwd=dirname(infile))
|
from os.path import dirname
from django.conf import settings
from pipeline.compilers import SubProcessCompiler
class Dart2jsCompiler(SubProcessCompiler):
output_extension = 'js'
def match_file(self, filename):
return filename.endswith('.dart')
def compile_file(self, infile, outfile, outdated=False, force=False):
if not outdated and not force:
            return # No need to recompile the file
command = "%s %s -o %s %s" % (
getattr(settings, 'PIPELINE_DART2JS_BINARY', '/usr/bin/env dart2js'),
getattr(settings, 'PIPELINE_DART2JS_ARGUMENTS', ''),
outfile,
infile,
)
print(command)
return self.execute_command(command, cwd=dirname(infile))
|
Update to new command line
|
Update to new command line
|
Python
|
apache-2.0
|
wienczny/django-pipeline-dart2js
|
from os.path import dirname
from django.conf import settings
from pipeline.compilers import SubProcessCompiler
class Dart2jsCompiler(SubProcessCompiler):
output_extension = 'js'
def match_file(self, filename):
return filename.endswith('.dart')
def compile_file(self, infile, outfile, outdated=False, force=False):
if not outdated and not force:
            return # No need to recompile the file
command = "%s %s --out=%s %s" % (
getattr(settings, 'PIPELINE_DART2JS_BINARY', '/usr/bin/env dart2js'),
getattr(settings, 'PIPELINE_DART2JS_ARGUMENTS', ''),
outfile,
infile,
)
print(command)
return self.execute_command(command, cwd=dirname(infile))
Update to new command line
|
from os.path import dirname
from django.conf import settings
from pipeline.compilers import SubProcessCompiler
class Dart2jsCompiler(SubProcessCompiler):
output_extension = 'js'
def match_file(self, filename):
return filename.endswith('.dart')
def compile_file(self, infile, outfile, outdated=False, force=False):
if not outdated and not force:
            return # No need to recompile the file
command = "%s %s -o %s %s" % (
getattr(settings, 'PIPELINE_DART2JS_BINARY', '/usr/bin/env dart2js'),
getattr(settings, 'PIPELINE_DART2JS_ARGUMENTS', ''),
outfile,
infile,
)
print(command)
return self.execute_command(command, cwd=dirname(infile))
|
<commit_before>from os.path import dirname
from django.conf import settings
from pipeline.compilers import SubProcessCompiler
class Dart2jsCompiler(SubProcessCompiler):
output_extension = 'js'
def match_file(self, filename):
return filename.endswith('.dart')
def compile_file(self, infile, outfile, outdated=False, force=False):
if not outdated and not force:
            return # No need to recompile the file
command = "%s %s --out=%s %s" % (
getattr(settings, 'PIPELINE_DART2JS_BINARY', '/usr/bin/env dart2js'),
getattr(settings, 'PIPELINE_DART2JS_ARGUMENTS', ''),
outfile,
infile,
)
print(command)
return self.execute_command(command, cwd=dirname(infile))
<commit_msg>Update to new command line<commit_after>
|
from os.path import dirname
from django.conf import settings
from pipeline.compilers import SubProcessCompiler
class Dart2jsCompiler(SubProcessCompiler):
output_extension = 'js'
def match_file(self, filename):
return filename.endswith('.dart')
def compile_file(self, infile, outfile, outdated=False, force=False):
if not outdated and not force:
            return # No need to recompile the file
command = "%s %s -o %s %s" % (
getattr(settings, 'PIPELINE_DART2JS_BINARY', '/usr/bin/env dart2js'),
getattr(settings, 'PIPELINE_DART2JS_ARGUMENTS', ''),
outfile,
infile,
)
print(command)
return self.execute_command(command, cwd=dirname(infile))
|
from os.path import dirname
from django.conf import settings
from pipeline.compilers import SubProcessCompiler
class Dart2jsCompiler(SubProcessCompiler):
output_extension = 'js'
def match_file(self, filename):
return filename.endswith('.dart')
def compile_file(self, infile, outfile, outdated=False, force=False):
if not outdated and not force:
            return # No need to recompile the file
command = "%s %s --out=%s %s" % (
getattr(settings, 'PIPELINE_DART2JS_BINARY', '/usr/bin/env dart2js'),
getattr(settings, 'PIPELINE_DART2JS_ARGUMENTS', ''),
outfile,
infile,
)
print(command)
return self.execute_command(command, cwd=dirname(infile))
Update to new command linefrom os.path import dirname
from django.conf import settings
from pipeline.compilers import SubProcessCompiler
class Dart2jsCompiler(SubProcessCompiler):
output_extension = 'js'
def match_file(self, filename):
return filename.endswith('.dart')
def compile_file(self, infile, outfile, outdated=False, force=False):
if not outdated and not force:
            return # No need to recompile the file
command = "%s %s -o %s %s" % (
getattr(settings, 'PIPELINE_DART2JS_BINARY', '/usr/bin/env dart2js'),
getattr(settings, 'PIPELINE_DART2JS_ARGUMENTS', ''),
outfile,
infile,
)
print(command)
return self.execute_command(command, cwd=dirname(infile))
|
<commit_before>from os.path import dirname
from django.conf import settings
from pipeline.compilers import SubProcessCompiler
class Dart2jsCompiler(SubProcessCompiler):
output_extension = 'js'
def match_file(self, filename):
return filename.endswith('.dart')
def compile_file(self, infile, outfile, outdated=False, force=False):
if not outdated and not force:
            return # No need to recompile the file
command = "%s %s --out=%s %s" % (
getattr(settings, 'PIPELINE_DART2JS_BINARY', '/usr/bin/env dart2js'),
getattr(settings, 'PIPELINE_DART2JS_ARGUMENTS', ''),
outfile,
infile,
)
print(command)
return self.execute_command(command, cwd=dirname(infile))
<commit_msg>Update to new command line<commit_after>from os.path import dirname
from django.conf import settings
from pipeline.compilers import SubProcessCompiler
class Dart2jsCompiler(SubProcessCompiler):
output_extension = 'js'
def match_file(self, filename):
return filename.endswith('.dart')
def compile_file(self, infile, outfile, outdated=False, force=False):
if not outdated and not force:
            return # No need to recompile the file
command = "%s %s -o %s %s" % (
getattr(settings, 'PIPELINE_DART2JS_BINARY', '/usr/bin/env dart2js'),
getattr(settings, 'PIPELINE_DART2JS_ARGUMENTS', ''),
outfile,
infile,
)
print(command)
return self.execute_command(command, cwd=dirname(infile))
|
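A sketch of the Django settings a project might use to drive the compiler above; both values are placeholders, and if either setting is omitted the getattr defaults in compile_file apply.
# settings.py (hypothetical values for illustration)
PIPELINE_DART2JS_BINARY = '/usr/local/bin/dart2js'  # assumed install path
PIPELINE_DART2JS_ARGUMENTS = '--minify'             # extra dart2js flags, if any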
9f1964f9f83c493f9bc6e08e2058d1e14ace031f
|
synapse/tests/test_cells.py
|
synapse/tests/test_cells.py
|
import synapse.axon as s_axon
import synapse.cells as s_cells
import synapse.cryotank as s_cryotank
from synapse.tests.common import *
class CellTest(SynTest):
def test_getcells(self):
data = s_cells.getCells()
data = {k: v for k, v in data}
self.isin('cortex', data)
|
import synapse.axon as s_axon
import synapse.cells as s_cells
import synapse.cryotank as s_cryotank
from synapse.tests.common import *
class CellTest(SynTest):
def test_getcells(self):
data = s_cells.getCells()
data = {k: v for k, v in data}
self.isin('cortex', data)
def test_deploy(self):
with self.getTestDir() as dirn:
s_cells.deploy('cortex', dirn, {'test': 1})
d = s_common.yamlload(dirn, 'boot.yaml')
self.eq(d, {'type': 'cortex', 'test': 1, })
|
Add a test for s_cells.deploy()
|
Add a test for s_cells.deploy()
|
Python
|
apache-2.0
|
vertexproject/synapse,vertexproject/synapse,vertexproject/synapse
|
import synapse.axon as s_axon
import synapse.cells as s_cells
import synapse.cryotank as s_cryotank
from synapse.tests.common import *
class CellTest(SynTest):
def test_getcells(self):
data = s_cells.getCells()
data = {k: v for k, v in data}
self.isin('cortex', data)
Add a test for s_cells.deploy()
|
import synapse.axon as s_axon
import synapse.cells as s_cells
import synapse.cryotank as s_cryotank
from synapse.tests.common import *
class CellTest(SynTest):
def test_getcells(self):
data = s_cells.getCells()
data = {k: v for k, v in data}
self.isin('cortex', data)
def test_deploy(self):
with self.getTestDir() as dirn:
s_cells.deploy('cortex', dirn, {'test': 1})
d = s_common.yamlload(dirn, 'boot.yaml')
self.eq(d, {'type': 'cortex', 'test': 1, })
|
<commit_before>import synapse.axon as s_axon
import synapse.cells as s_cells
import synapse.cryotank as s_cryotank
from synapse.tests.common import *
class CellTest(SynTest):
def test_getcells(self):
data = s_cells.getCells()
data = {k: v for k, v in data}
self.isin('cortex', data)
<commit_msg>Add a test for s_cells.deploy()<commit_after>
|
import synapse.axon as s_axon
import synapse.cells as s_cells
import synapse.cryotank as s_cryotank
from synapse.tests.common import *
class CellTest(SynTest):
def test_getcells(self):
data = s_cells.getCells()
data = {k: v for k, v in data}
self.isin('cortex', data)
def test_deploy(self):
with self.getTestDir() as dirn:
s_cells.deploy('cortex', dirn, {'test': 1})
d = s_common.yamlload(dirn, 'boot.yaml')
self.eq(d, {'type': 'cortex', 'test': 1, })
|
import synapse.axon as s_axon
import synapse.cells as s_cells
import synapse.cryotank as s_cryotank
from synapse.tests.common import *
class CellTest(SynTest):
def test_getcells(self):
data = s_cells.getCells()
data = {k: v for k, v in data}
self.isin('cortex', data)
Add a test for s_cells.deploy()import synapse.axon as s_axon
import synapse.cells as s_cells
import synapse.cryotank as s_cryotank
from synapse.tests.common import *
class CellTest(SynTest):
def test_getcells(self):
data = s_cells.getCells()
data = {k: v for k, v in data}
self.isin('cortex', data)
def test_deploy(self):
with self.getTestDir() as dirn:
s_cells.deploy('cortex', dirn, {'test': 1})
d = s_common.yamlload(dirn, 'boot.yaml')
self.eq(d, {'type': 'cortex', 'test': 1, })
|
<commit_before>import synapse.axon as s_axon
import synapse.cells as s_cells
import synapse.cryotank as s_cryotank
from synapse.tests.common import *
class CellTest(SynTest):
def test_getcells(self):
data = s_cells.getCells()
data = {k: v for k, v in data}
self.isin('cortex', data)
<commit_msg>Add a test for s_cells.deploy()<commit_after>import synapse.axon as s_axon
import synapse.cells as s_cells
import synapse.cryotank as s_cryotank
from synapse.tests.common import *
class CellTest(SynTest):
def test_getcells(self):
data = s_cells.getCells()
data = {k: v for k, v in data}
self.isin('cortex', data)
def test_deploy(self):
with self.getTestDir() as dirn:
s_cells.deploy('cortex', dirn, {'test': 1})
d = s_common.yamlload(dirn, 'boot.yaml')
self.eq(d, {'type': 'cortex', 'test': 1, })
|
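A rough illustration of what test_deploy asserts: deploy() leaves a boot.yaml in the cell directory holding the cell type merged with the extra config. This approximation uses plain PyYAML and a temporary directory; the real internals of s_cells.deploy() are not shown in the record.
import os
import tempfile
import yaml
with tempfile.TemporaryDirectory() as dirn:
    conf = {'type': 'cortex', 'test': 1}
    with open(os.path.join(dirn, 'boot.yaml'), 'w') as fd:
        yaml.safe_dump(conf, fd)           # stand-in for s_cells.deploy()
    with open(os.path.join(dirn, 'boot.yaml')) as fd:
        assert yaml.safe_load(fd) == conf  # what self.eq() checks above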
e08b4f1b74f0fcd987299d4786b5374fe86a21bc
|
Lib/idlelib/macosxSupport.py
|
Lib/idlelib/macosxSupport.py
|
"""
A number of functions that enhance IDLE on MacOSX when it is used as a normal
GUI application (as opposed to an X11 application).
"""
import sys
def runningAsOSXApp():
""" Returns True iff running from the IDLE.app bundle on OSX """
return (sys.platform == 'darwin' and 'IDLE.app' in sys.argv[0])
def addOpenEventSupport(root, flist):
"""
    This ensures that the application will respond to open AppleEvents, which
    makes it feasible to use IDLE as the default application for Python files.
"""
def doOpenFile(*args):
for fn in args:
flist.open(fn)
# The command below is a hook in aquatk that is called whenever the app
# receives a file open event. The callback can have multiple arguments,
# one for every file that should be opened.
root.createcommand("::tk::mac::OpenDocument", doOpenFile)
def hideTkConsole(root):
root.tk.call('console', 'hide')
def setupApp(root, flist):
"""
Perform setup for the OSX application bundle.
"""
if not runningAsOSXApp(): return
hideTkConsole(root)
addOpenEventSupport(root, flist)
|
"""
A number of functions that enhance IDLE on MacOSX when it is used as a normal
GUI application (as opposed to an X11 application).
"""
import sys
def runningAsOSXApp():
""" Returns True iff running from the IDLE.app bundle on OSX """
return (sys.platform == 'darwin' and 'IDLE.app' in sys.argv[0])
def addOpenEventSupport(root, flist):
"""
    This ensures that the application will respond to open AppleEvents, which
    makes it feasible to use IDLE as the default application for Python files.
"""
def doOpenFile(*args):
for fn in args:
flist.open(fn)
# The command below is a hook in aquatk that is called whenever the app
# receives a file open event. The callback can have multiple arguments,
# one for every file that should be opened.
root.createcommand("::tk::mac::OpenDocument", doOpenFile)
def hideTkConsole(root):
root.tk.call('console', 'hide')
def setupApp(root, flist):
"""
Perform setup for the OSX application bundle.
"""
if not runningAsOSXApp(): return
hideTkConsole(root)
addOpenEventSupport(root, flist)
|
Add missing svn:eol-style property to text files.
|
Add missing svn:eol-style property to text files.
|
Python
|
mit
|
sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
|
"""
A number of functions that enhance IDLE on MacOSX when it is used as a normal
GUI application (as opposed to an X11 application).
"""
import sys
def runningAsOSXApp():
""" Returns True iff running from the IDLE.app bundle on OSX """
return (sys.platform == 'darwin' and 'IDLE.app' in sys.argv[0])
def addOpenEventSupport(root, flist):
"""
    This ensures that the application will respond to open AppleEvents, which
    makes it feasible to use IDLE as the default application for Python files.
"""
def doOpenFile(*args):
for fn in args:
flist.open(fn)
# The command below is a hook in aquatk that is called whenever the app
# receives a file open event. The callback can have multiple arguments,
# one for every file that should be opened.
root.createcommand("::tk::mac::OpenDocument", doOpenFile)
def hideTkConsole(root):
root.tk.call('console', 'hide')
def setupApp(root, flist):
"""
Perform setup for the OSX application bundle.
"""
if not runningAsOSXApp(): return
hideTkConsole(root)
addOpenEventSupport(root, flist)
Add missing svn:eol-style property to text files.
|
"""
A number of functions that enhance IDLE on MacOSX when it is used as a normal
GUI application (as opposed to an X11 application).
"""
import sys
def runningAsOSXApp():
""" Returns True iff running from the IDLE.app bundle on OSX """
return (sys.platform == 'darwin' and 'IDLE.app' in sys.argv[0])
def addOpenEventSupport(root, flist):
"""
    This ensures that the application will respond to open AppleEvents, which
    makes it feasible to use IDLE as the default application for Python files.
"""
def doOpenFile(*args):
for fn in args:
flist.open(fn)
# The command below is a hook in aquatk that is called whenever the app
# receives a file open event. The callback can have multiple arguments,
# one for every file that should be opened.
root.createcommand("::tk::mac::OpenDocument", doOpenFile)
def hideTkConsole(root):
root.tk.call('console', 'hide')
def setupApp(root, flist):
"""
Perform setup for the OSX application bundle.
"""
if not runningAsOSXApp(): return
hideTkConsole(root)
addOpenEventSupport(root, flist)
|
<commit_before>"""
A number of functions that enhance IDLE on MacOSX when it is used as a normal
GUI application (as opposed to an X11 application).
"""
import sys
def runningAsOSXApp():
""" Returns True iff running from the IDLE.app bundle on OSX """
return (sys.platform == 'darwin' and 'IDLE.app' in sys.argv[0])
def addOpenEventSupport(root, flist):
"""
    This ensures that the application will respond to open AppleEvents, which
    makes it feasible to use IDLE as the default application for Python files.
"""
def doOpenFile(*args):
for fn in args:
flist.open(fn)
# The command below is a hook in aquatk that is called whenever the app
# receives a file open event. The callback can have multiple arguments,
# one for every file that should be opened.
root.createcommand("::tk::mac::OpenDocument", doOpenFile)
def hideTkConsole(root):
root.tk.call('console', 'hide')
def setupApp(root, flist):
"""
Perform setup for the OSX application bundle.
"""
if not runningAsOSXApp(): return
hideTkConsole(root)
addOpenEventSupport(root, flist)
<commit_msg>Add missing svn:eol-style property to text files.<commit_after>
|
"""
A number of functions that enhance IDLE on MacOSX when it is used as a normal
GUI application (as opposed to an X11 application).
"""
import sys
def runningAsOSXApp():
""" Returns True iff running from the IDLE.app bundle on OSX """
return (sys.platform == 'darwin' and 'IDLE.app' in sys.argv[0])
def addOpenEventSupport(root, flist):
"""
    This ensures that the application will respond to open AppleEvents, which
    makes it feasible to use IDLE as the default application for Python files.
"""
def doOpenFile(*args):
for fn in args:
flist.open(fn)
# The command below is a hook in aquatk that is called whenever the app
# receives a file open event. The callback can have multiple arguments,
# one for every file that should be opened.
root.createcommand("::tk::mac::OpenDocument", doOpenFile)
def hideTkConsole(root):
root.tk.call('console', 'hide')
def setupApp(root, flist):
"""
Perform setup for the OSX application bundle.
"""
if not runningAsOSXApp(): return
hideTkConsole(root)
addOpenEventSupport(root, flist)
|
"""
A number of functions that enhance IDLE on MacOSX when it is used as a normal
GUI application (as opposed to an X11 application).
"""
import sys
def runningAsOSXApp():
""" Returns True iff running from the IDLE.app bundle on OSX """
return (sys.platform == 'darwin' and 'IDLE.app' in sys.argv[0])
def addOpenEventSupport(root, flist):
"""
    This ensures that the application will respond to open AppleEvents, which
    makes it feasible to use IDLE as the default application for Python files.
"""
def doOpenFile(*args):
for fn in args:
flist.open(fn)
# The command below is a hook in aquatk that is called whenever the app
# receives a file open event. The callback can have multiple arguments,
# one for every file that should be opened.
root.createcommand("::tk::mac::OpenDocument", doOpenFile)
def hideTkConsole(root):
root.tk.call('console', 'hide')
def setupApp(root, flist):
"""
Perform setup for the OSX application bundle.
"""
if not runningAsOSXApp(): return
hideTkConsole(root)
addOpenEventSupport(root, flist)
Add missing svn:eol-style property to text files."""
A number of functions that enhance IDLE on MacOSX when it is used as a normal
GUI application (as opposed to an X11 application).
"""
import sys
def runningAsOSXApp():
""" Returns True iff running from the IDLE.app bundle on OSX """
return (sys.platform == 'darwin' and 'IDLE.app' in sys.argv[0])
def addOpenEventSupport(root, flist):
"""
    This ensures that the application will respond to open AppleEvents, which
    makes it feasible to use IDLE as the default application for Python files.
"""
def doOpenFile(*args):
for fn in args:
flist.open(fn)
# The command below is a hook in aquatk that is called whenever the app
# receives a file open event. The callback can have multiple arguments,
# one for every file that should be opened.
root.createcommand("::tk::mac::OpenDocument", doOpenFile)
def hideTkConsole(root):
root.tk.call('console', 'hide')
def setupApp(root, flist):
"""
Perform setup for the OSX application bundle.
"""
if not runningAsOSXApp(): return
hideTkConsole(root)
addOpenEventSupport(root, flist)
|
<commit_before>"""
A number of functions that enhance IDLE on MacOSX when it is used as a normal
GUI application (as opposed to an X11 application).
"""
import sys
def runningAsOSXApp():
""" Returns True iff running from the IDLE.app bundle on OSX """
return (sys.platform == 'darwin' and 'IDLE.app' in sys.argv[0])
def addOpenEventSupport(root, flist):
"""
    This ensures that the application will respond to open AppleEvents, which
    makes it feasible to use IDLE as the default application for Python files.
"""
def doOpenFile(*args):
for fn in args:
flist.open(fn)
# The command below is a hook in aquatk that is called whenever the app
# receives a file open event. The callback can have multiple arguments,
# one for every file that should be opened.
root.createcommand("::tk::mac::OpenDocument", doOpenFile)
def hideTkConsole(root):
root.tk.call('console', 'hide')
def setupApp(root, flist):
"""
Perform setup for the OSX application bundle.
"""
if not runningAsOSXApp(): return
hideTkConsole(root)
addOpenEventSupport(root, flist)
<commit_msg>Add missing svn:eol-style property to text files.<commit_after>"""
A number of functions that enhance IDLE on MacOSX when it is used as a normal
GUI application (as opposed to an X11 application).
"""
import sys
def runningAsOSXApp():
""" Returns True iff running from the IDLE.app bundle on OSX """
return (sys.platform == 'darwin' and 'IDLE.app' in sys.argv[0])
def addOpenEventSupport(root, flist):
"""
    This ensures that the application will respond to open AppleEvents, which
    makes it feasible to use IDLE as the default application for Python files.
"""
def doOpenFile(*args):
for fn in args:
flist.open(fn)
# The command below is a hook in aquatk that is called whenever the app
# receives a file open event. The callback can have multiple arguments,
# one for every file that should be opened.
root.createcommand("::tk::mac::OpenDocument", doOpenFile)
def hideTkConsole(root):
root.tk.call('console', 'hide')
def setupApp(root, flist):
"""
Perform setup for the OSX application bundle.
"""
if not runningAsOSXApp(): return
hideTkConsole(root)
addOpenEventSupport(root, flist)
|
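A minimal usage sketch for the module above, written against Python 3's tkinter for brevity; the stand-in file list is invented, and setupApp() is a no-op unless sys.argv[0] points into the IDLE.app bundle.
import tkinter
class FileListStub:
    def open(self, path):
        # Echo the path delivered by the ::tk::mac::OpenDocument AppleEvent.
        print(path)
root = tkinter.Tk()  # needs a display to run
setupApp(root, FileListStub())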
831c810b96c0ed1b6c254b0b6b2c3b1b259b51bb
|
src/pyckson/model/union.py
|
src/pyckson/model/union.py
|
from typing import Tuple, Union
def inspect_optional_typing(annotation) -> Tuple[bool, type]:
# seems like at some point internal behavior on typing Union changed
# https://bugs.launchpad.net/ubuntu/+source/python3.5/+bug/1650202
if "Union" not in str(annotation):
return False, type(None)
if hasattr(annotation, '__origin__'):
is_union = annotation.__origin__ == Union
else:
is_union = issubclass(annotation, Union)
if not is_union:
return False, type(None)
if hasattr(annotation, '__args__'):
union_params = annotation.__args__
else:
union_params = annotation.__union_params__
is_optional = len(union_params) == 2 and isinstance(None, union_params[1])
return is_optional, union_params[0]
|
from typing import Tuple, Union
def inspect_optional_typing(annotation) -> Tuple[bool, type]:
# seems like at some point internal behavior on typing Union changed
# https://bugs.launchpad.net/ubuntu/+source/python3.5/+bug/1650202
if "Union" not in str(annotation) and "Optional" not in str(annotation):
return False, type(None)
if hasattr(annotation, '__origin__'):
is_union = annotation.__origin__ == Union
else:
is_union = issubclass(annotation, Union)
if not is_union:
return False, type(None)
if hasattr(annotation, '__args__'):
union_params = annotation.__args__
else:
union_params = annotation.__union_params__
is_optional = len(union_params) == 2 and isinstance(None, union_params[1])
return is_optional, union_params[0]
|
Fix optional inspection in 3.9
|
Fix optional inspection in 3.9
|
Python
|
lgpl-2.1
|
antidot/Pyckson
|
from typing import Tuple, Union
def inspect_optional_typing(annotation) -> Tuple[bool, type]:
# seems like at some point internal behavior on typing Union changed
# https://bugs.launchpad.net/ubuntu/+source/python3.5/+bug/1650202
if "Union" not in str(annotation):
return False, type(None)
if hasattr(annotation, '__origin__'):
is_union = annotation.__origin__ == Union
else:
is_union = issubclass(annotation, Union)
if not is_union:
return False, type(None)
if hasattr(annotation, '__args__'):
union_params = annotation.__args__
else:
union_params = annotation.__union_params__
is_optional = len(union_params) == 2 and isinstance(None, union_params[1])
return is_optional, union_params[0]
Fix optional inspection in 3.9
|
from typing import Tuple, Union
def inspect_optional_typing(annotation) -> Tuple[bool, type]:
# seems like at some point internal behavior on typing Union changed
# https://bugs.launchpad.net/ubuntu/+source/python3.5/+bug/1650202
if "Union" not in str(annotation) and "Optional" not in str(annotation):
return False, type(None)
if hasattr(annotation, '__origin__'):
is_union = annotation.__origin__ == Union
else:
is_union = issubclass(annotation, Union)
if not is_union:
return False, type(None)
if hasattr(annotation, '__args__'):
union_params = annotation.__args__
else:
union_params = annotation.__union_params__
is_optional = len(union_params) == 2 and isinstance(None, union_params[1])
return is_optional, union_params[0]
|
<commit_before>from typing import Tuple, Union
def inspect_optional_typing(annotation) -> Tuple[bool, type]:
# seems like at some point internal behavior on typing Union changed
# https://bugs.launchpad.net/ubuntu/+source/python3.5/+bug/1650202
if "Union" not in str(annotation):
return False, type(None)
if hasattr(annotation, '__origin__'):
is_union = annotation.__origin__ == Union
else:
is_union = issubclass(annotation, Union)
if not is_union:
return False, type(None)
if hasattr(annotation, '__args__'):
union_params = annotation.__args__
else:
union_params = annotation.__union_params__
is_optional = len(union_params) == 2 and isinstance(None, union_params[1])
return is_optional, union_params[0]
<commit_msg>Fix optional inspection in 3.9<commit_after>
|
from typing import Tuple, Union
def inspect_optional_typing(annotation) -> Tuple[bool, type]:
# seems like at some point internal behavior on typing Union changed
# https://bugs.launchpad.net/ubuntu/+source/python3.5/+bug/1650202
if "Union" not in str(annotation) and "Optional" not in str(annotation):
return False, type(None)
if hasattr(annotation, '__origin__'):
is_union = annotation.__origin__ == Union
else:
is_union = issubclass(annotation, Union)
if not is_union:
return False, type(None)
if hasattr(annotation, '__args__'):
union_params = annotation.__args__
else:
union_params = annotation.__union_params__
is_optional = len(union_params) == 2 and isinstance(None, union_params[1])
return is_optional, union_params[0]
|
from typing import Tuple, Union
def inspect_optional_typing(annotation) -> Tuple[bool, type]:
# seems like at some point internal behavior on typing Union changed
# https://bugs.launchpad.net/ubuntu/+source/python3.5/+bug/1650202
if "Union" not in str(annotation):
return False, type(None)
if hasattr(annotation, '__origin__'):
is_union = annotation.__origin__ == Union
else:
is_union = issubclass(annotation, Union)
if not is_union:
return False, type(None)
if hasattr(annotation, '__args__'):
union_params = annotation.__args__
else:
union_params = annotation.__union_params__
is_optional = len(union_params) == 2 and isinstance(None, union_params[1])
return is_optional, union_params[0]
Fix optional inspection in 3.9from typing import Tuple, Union
def inspect_optional_typing(annotation) -> Tuple[bool, type]:
# seems like at some point internal behavior on typing Union changed
# https://bugs.launchpad.net/ubuntu/+source/python3.5/+bug/1650202
if "Union" not in str(annotation) and "Optional" not in str(annotation):
return False, type(None)
if hasattr(annotation, '__origin__'):
is_union = annotation.__origin__ == Union
else:
is_union = issubclass(annotation, Union)
if not is_union:
return False, type(None)
if hasattr(annotation, '__args__'):
union_params = annotation.__args__
else:
union_params = annotation.__union_params__
is_optional = len(union_params) == 2 and isinstance(None, union_params[1])
return is_optional, union_params[0]
|
<commit_before>from typing import Tuple, Union
def inspect_optional_typing(annotation) -> Tuple[bool, type]:
# seems like at some point internal behavior on typing Union changed
# https://bugs.launchpad.net/ubuntu/+source/python3.5/+bug/1650202
if "Union" not in str(annotation):
return False, type(None)
if hasattr(annotation, '__origin__'):
is_union = annotation.__origin__ == Union
else:
is_union = issubclass(annotation, Union)
if not is_union:
return False, type(None)
if hasattr(annotation, '__args__'):
union_params = annotation.__args__
else:
union_params = annotation.__union_params__
is_optional = len(union_params) == 2 and isinstance(None, union_params[1])
return is_optional, union_params[0]
<commit_msg>Fix optional inspection in 3.9<commit_after>from typing import Tuple, Union
def inspect_optional_typing(annotation) -> Tuple[bool, type]:
# seems like at some point internal behavior on typing Union changed
# https://bugs.launchpad.net/ubuntu/+source/python3.5/+bug/1650202
if "Union" not in str(annotation) and "Optional" not in str(annotation):
return False, type(None)
if hasattr(annotation, '__origin__'):
is_union = annotation.__origin__ == Union
else:
is_union = issubclass(annotation, Union)
if not is_union:
return False, type(None)
if hasattr(annotation, '__args__'):
union_params = annotation.__args__
else:
union_params = annotation.__union_params__
is_optional = len(union_params) == 2 and isinstance(None, union_params[1])
return is_optional, union_params[0]
|
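For comparison, on Python 3.8+ the same Optional inspection can lean on typing.get_origin() and typing.get_args() instead of string matching on the annotation; this is an alternative sketch, not pyckson's implementation.
import typing
from typing import Optional, Tuple, Union
def inspect_optional(annotation) -> Tuple[bool, type]:
    # Optional[X] is represented as Union[X, None], so check the origin first.
    if typing.get_origin(annotation) is not Union:
        return False, type(None)
    args = typing.get_args(annotation)
    non_none = [a for a in args if a is not type(None)]
    if len(args) == 2 and len(non_none) == 1:
        return True, non_none[0]
    return False, type(None)
assert inspect_optional(Optional[int]) == (True, int)
assert inspect_optional(int) == (False, type(None))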
a753841d01eb3e9493e08e20e8a28c9b08fdef53
|
comics/sets/models.py
|
comics/sets/models.py
|
from django.db import models
from django.utils import timezone
from comics.core.models import Comic
class Set(models.Model):
name = models.SlugField(max_length=100, unique=True,
help_text='The set identifier')
add_new_comics = models.BooleanField(default=False,
help_text='Automatically add new comics to the set')
hide_empty_comics = models.BooleanField(default=False,
help_text='Hide comics without matching releases from view')
created = models.DateTimeField(auto_now_add=True)
last_modified = models.DateTimeField()
last_loaded = models.DateTimeField()
comics = models.ManyToManyField(Comic)
class Meta:
db_table = 'comics_set'
ordering = ['name']
def __unicode__(self):
return self.name
def get_slug(self):
return self.name
def set_slug(self, slug):
self.name = slug
slug = property(get_slug, set_slug)
def set_loaded(self):
self.last_loaded = timezone.now()
self.save()
|
from django.db import models
from django.utils import timezone
from comics.core.models import Comic
class Set(models.Model):
name = models.SlugField(
max_length=100, unique=True,
help_text='The set identifier')
add_new_comics = models.BooleanField(
default=False,
help_text='Automatically add new comics to the set')
hide_empty_comics = models.BooleanField(
default=False,
help_text='Hide comics without matching releases from view')
created = models.DateTimeField(auto_now_add=True)
last_modified = models.DateTimeField()
last_loaded = models.DateTimeField()
comics = models.ManyToManyField(Comic)
class Meta:
db_table = 'comics_set'
ordering = ['name']
def __unicode__(self):
return self.name
def get_slug(self):
return self.name
def set_slug(self, slug):
self.name = slug
slug = property(get_slug, set_slug)
def set_loaded(self):
self.last_loaded = timezone.now()
self.save()
|
Fix all warnings in sets app
|
flake8: Fix all warnings in sets app
|
Python
|
agpl-3.0
|
datagutten/comics,jodal/comics,jodal/comics,datagutten/comics,datagutten/comics,datagutten/comics,jodal/comics,jodal/comics
|
from django.db import models
from django.utils import timezone
from comics.core.models import Comic
class Set(models.Model):
name = models.SlugField(max_length=100, unique=True,
help_text='The set identifier')
add_new_comics = models.BooleanField(default=False,
help_text='Automatically add new comics to the set')
hide_empty_comics = models.BooleanField(default=False,
help_text='Hide comics without matching releases from view')
created = models.DateTimeField(auto_now_add=True)
last_modified = models.DateTimeField()
last_loaded = models.DateTimeField()
comics = models.ManyToManyField(Comic)
class Meta:
db_table = 'comics_set'
ordering = ['name']
def __unicode__(self):
return self.name
def get_slug(self):
return self.name
def set_slug(self, slug):
self.name = slug
slug = property(get_slug, set_slug)
def set_loaded(self):
self.last_loaded = timezone.now()
self.save()
flake8: Fix all warnings in sets app
|
from django.db import models
from django.utils import timezone
from comics.core.models import Comic
class Set(models.Model):
name = models.SlugField(
max_length=100, unique=True,
help_text='The set identifier')
add_new_comics = models.BooleanField(
default=False,
help_text='Automatically add new comics to the set')
hide_empty_comics = models.BooleanField(
default=False,
help_text='Hide comics without matching releases from view')
created = models.DateTimeField(auto_now_add=True)
last_modified = models.DateTimeField()
last_loaded = models.DateTimeField()
comics = models.ManyToManyField(Comic)
class Meta:
db_table = 'comics_set'
ordering = ['name']
def __unicode__(self):
return self.name
def get_slug(self):
return self.name
def set_slug(self, slug):
self.name = slug
slug = property(get_slug, set_slug)
def set_loaded(self):
self.last_loaded = timezone.now()
self.save()
|
<commit_before>from django.db import models
from django.utils import timezone
from comics.core.models import Comic
class Set(models.Model):
name = models.SlugField(max_length=100, unique=True,
help_text='The set identifier')
add_new_comics = models.BooleanField(default=False,
help_text='Automatically add new comics to the set')
hide_empty_comics = models.BooleanField(default=False,
help_text='Hide comics without matching releases from view')
created = models.DateTimeField(auto_now_add=True)
last_modified = models.DateTimeField()
last_loaded = models.DateTimeField()
comics = models.ManyToManyField(Comic)
class Meta:
db_table = 'comics_set'
ordering = ['name']
def __unicode__(self):
return self.name
def get_slug(self):
return self.name
def set_slug(self, slug):
self.name = slug
slug = property(get_slug, set_slug)
def set_loaded(self):
self.last_loaded = timezone.now()
self.save()
<commit_msg>flake8: Fix all warnings in sets app<commit_after>
|
from django.db import models
from django.utils import timezone
from comics.core.models import Comic
class Set(models.Model):
name = models.SlugField(
max_length=100, unique=True,
help_text='The set identifier')
add_new_comics = models.BooleanField(
default=False,
help_text='Automatically add new comics to the set')
hide_empty_comics = models.BooleanField(
default=False,
help_text='Hide comics without matching releases from view')
created = models.DateTimeField(auto_now_add=True)
last_modified = models.DateTimeField()
last_loaded = models.DateTimeField()
comics = models.ManyToManyField(Comic)
class Meta:
db_table = 'comics_set'
ordering = ['name']
def __unicode__(self):
return self.name
def get_slug(self):
return self.name
def set_slug(self, slug):
self.name = slug
slug = property(get_slug, set_slug)
def set_loaded(self):
self.last_loaded = timezone.now()
self.save()
|
from django.db import models
from django.utils import timezone
from comics.core.models import Comic
class Set(models.Model):
name = models.SlugField(max_length=100, unique=True,
help_text='The set identifier')
add_new_comics = models.BooleanField(default=False,
help_text='Automatically add new comics to the set')
hide_empty_comics = models.BooleanField(default=False,
help_text='Hide comics without matching releases from view')
created = models.DateTimeField(auto_now_add=True)
last_modified = models.DateTimeField()
last_loaded = models.DateTimeField()
comics = models.ManyToManyField(Comic)
class Meta:
db_table = 'comics_set'
ordering = ['name']
def __unicode__(self):
return self.name
def get_slug(self):
return self.name
def set_slug(self, slug):
self.name = slug
slug = property(get_slug, set_slug)
def set_loaded(self):
self.last_loaded = timezone.now()
self.save()
flake8: Fix all warnings in sets appfrom django.db import models
from django.utils import timezone
from comics.core.models import Comic
class Set(models.Model):
name = models.SlugField(
max_length=100, unique=True,
help_text='The set identifier')
add_new_comics = models.BooleanField(
default=False,
help_text='Automatically add new comics to the set')
hide_empty_comics = models.BooleanField(
default=False,
help_text='Hide comics without matching releases from view')
created = models.DateTimeField(auto_now_add=True)
last_modified = models.DateTimeField()
last_loaded = models.DateTimeField()
comics = models.ManyToManyField(Comic)
class Meta:
db_table = 'comics_set'
ordering = ['name']
def __unicode__(self):
return self.name
def get_slug(self):
return self.name
def set_slug(self, slug):
self.name = slug
slug = property(get_slug, set_slug)
def set_loaded(self):
self.last_loaded = timezone.now()
self.save()
|
<commit_before>from django.db import models
from django.utils import timezone
from comics.core.models import Comic
class Set(models.Model):
name = models.SlugField(max_length=100, unique=True,
help_text='The set identifier')
add_new_comics = models.BooleanField(default=False,
help_text='Automatically add new comics to the set')
hide_empty_comics = models.BooleanField(default=False,
help_text='Hide comics without matching releases from view')
created = models.DateTimeField(auto_now_add=True)
last_modified = models.DateTimeField()
last_loaded = models.DateTimeField()
comics = models.ManyToManyField(Comic)
class Meta:
db_table = 'comics_set'
ordering = ['name']
def __unicode__(self):
return self.name
def get_slug(self):
return self.name
def set_slug(self, slug):
self.name = slug
slug = property(get_slug, set_slug)
def set_loaded(self):
self.last_loaded = timezone.now()
self.save()
<commit_msg>flake8: Fix all warnings in sets app<commit_after>from django.db import models
from django.utils import timezone
from comics.core.models import Comic
class Set(models.Model):
name = models.SlugField(
max_length=100, unique=True,
help_text='The set identifier')
add_new_comics = models.BooleanField(
default=False,
help_text='Automatically add new comics to the set')
hide_empty_comics = models.BooleanField(
default=False,
help_text='Hide comics without matching releases from view')
created = models.DateTimeField(auto_now_add=True)
last_modified = models.DateTimeField()
last_loaded = models.DateTimeField()
comics = models.ManyToManyField(Comic)
class Meta:
db_table = 'comics_set'
ordering = ['name']
def __unicode__(self):
return self.name
def get_slug(self):
return self.name
def set_slug(self, slug):
self.name = slug
slug = property(get_slug, set_slug)
def set_loaded(self):
self.last_loaded = timezone.now()
self.save()
|
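A small interaction sketch for the slug alias on the model above; it assumes a configured Django project with this app installed, and the values are invented.
s = Set(name='favourites')
assert s.slug == 'favourites'    # the property reads .name
s.slug = 'weekend-reading'       # the setter writes back to .name
assert s.name == 'weekend-reading'
# s.set_loaded() would stamp last_loaded with timezone.now() and save(),
# which requires a database, so it is left out of this sketch.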
4410e03df12b99c46467b6fe93f7b8cb206d441c
|
decorators.py
|
decorators.py
|
import time
def retry(ExceptionToCheck, tries=4, delay=3, backoff=2, logger=None):
"""Retry calling the decorated function using an exponential backoff.
http://www.saltycrane.com/blog/2009/11/trying-out-retry-decorator-python/
original from: http://wiki.python.org/moin/PythonDecoratorLibrary#Retry
:param ExceptionToCheck: the exception to check. may be a tuple of
excpetions to check
:type ExceptionToCheck: Exception or tuple
:param tries: number of times to try (not retry) before giving up
:type tries: int
:param delay: initial delay between retries in seconds
:type delay: int
:param backoff: backoff multiplier e.g. value of 2 will double the delay
each retry
:type backoff: int
:param logger: logger to use. If None, print
:type logger: logging.Logger instance
"""
def deco_retry(f):
def f_retry(*args, **kwargs):
mtries, mdelay = tries, delay
try_one_last_time = True
while mtries > 1:
try:
return f(*args, **kwargs)
try_one_last_time = False
break
except ExceptionToCheck, e:
msg = "%s, Retrying in %d seconds..." % (str(e), mdelay)
if logger:
logger.warning(msg)
else:
print msg
time.sleep(mdelay)
mtries -= 1
mdelay *= backoff
if try_one_last_time:
return f(*args, **kwargs)
return
return f_retry # true decorator
return deco_retry
|
import time
from functools import wraps
def retry(ExceptionToCheck, tries=4, delay=3, backoff=2, logger=None):
"""Retry calling the decorated function using an exponential backoff.
http://www.saltycrane.com/blog/2009/11/trying-out-retry-decorator-python/
original from: http://wiki.python.org/moin/PythonDecoratorLibrary#Retry
:param ExceptionToCheck: the exception to check. may be a tuple of
exceptions to check
:type ExceptionToCheck: Exception or tuple
:param tries: number of times to try (not retry) before giving up
:type tries: int
:param delay: initial delay between retries in seconds
:type delay: int
:param backoff: backoff multiplier e.g. value of 2 will double the delay
each retry
:type backoff: int
:param logger: logger to use. If None, print
:type logger: logging.Logger instance
"""
def deco_retry(f):
@wraps(f)
def f_retry(*args, **kwargs):
mtries, mdelay = tries, delay
while mtries > 1:
try:
return f(*args, **kwargs)
except ExceptionToCheck, e:
msg = "%s, Retrying in %d seconds..." % (str(e), mdelay)
if logger:
logger.warning(msg)
else:
print msg
time.sleep(mdelay)
mtries -= 1
mdelay *= backoff
return f(*args, **kwargs)
return f_retry # true decorator
return deco_retry
|
Remove unreachable code. Use functools.wraps.
|
Remove unreachable code. Use functools.wraps.
- Remove code that was unreachable. Thanks Jaskirat
(http://www.saltycrane.com/blog/2009/11/trying-out-retry-decorator-python/#c24691)
- Use functools.wraps to make the retry decorator "well behaved"
- Fix typo
|
Python
|
bsd-3-clause
|
saltycrane/retry-decorator
|
import time
def retry(ExceptionToCheck, tries=4, delay=3, backoff=2, logger=None):
"""Retry calling the decorated function using an exponential backoff.
http://www.saltycrane.com/blog/2009/11/trying-out-retry-decorator-python/
original from: http://wiki.python.org/moin/PythonDecoratorLibrary#Retry
:param ExceptionToCheck: the exception to check. may be a tuple of
excpetions to check
:type ExceptionToCheck: Exception or tuple
:param tries: number of times to try (not retry) before giving up
:type tries: int
:param delay: initial delay between retries in seconds
:type delay: int
:param backoff: backoff multiplier e.g. value of 2 will double the delay
each retry
:type backoff: int
:param logger: logger to use. If None, print
:type logger: logging.Logger instance
"""
def deco_retry(f):
def f_retry(*args, **kwargs):
mtries, mdelay = tries, delay
try_one_last_time = True
while mtries > 1:
try:
return f(*args, **kwargs)
try_one_last_time = False
break
except ExceptionToCheck, e:
msg = "%s, Retrying in %d seconds..." % (str(e), mdelay)
if logger:
logger.warning(msg)
else:
print msg
time.sleep(mdelay)
mtries -= 1
mdelay *= backoff
if try_one_last_time:
return f(*args, **kwargs)
return
return f_retry # true decorator
return deco_retry
Remove unreachable code. Use functools.wraps.
- Remove code that was unreachable. Thanks Jaskirat
(http://www.saltycrane.com/blog/2009/11/trying-out-retry-decorator-python/#c24691)
- Use functools.wraps to make the retry decorator "well behaved"
- Fix typo
|
import time
from functools import wraps
def retry(ExceptionToCheck, tries=4, delay=3, backoff=2, logger=None):
"""Retry calling the decorated function using an exponential backoff.
http://www.saltycrane.com/blog/2009/11/trying-out-retry-decorator-python/
original from: http://wiki.python.org/moin/PythonDecoratorLibrary#Retry
:param ExceptionToCheck: the exception to check. may be a tuple of
exceptions to check
:type ExceptionToCheck: Exception or tuple
:param tries: number of times to try (not retry) before giving up
:type tries: int
:param delay: initial delay between retries in seconds
:type delay: int
:param backoff: backoff multiplier e.g. value of 2 will double the delay
each retry
:type backoff: int
:param logger: logger to use. If None, print
:type logger: logging.Logger instance
"""
def deco_retry(f):
@wraps(f)
def f_retry(*args, **kwargs):
mtries, mdelay = tries, delay
while mtries > 1:
try:
return f(*args, **kwargs)
except ExceptionToCheck, e:
msg = "%s, Retrying in %d seconds..." % (str(e), mdelay)
if logger:
logger.warning(msg)
else:
print msg
time.sleep(mdelay)
mtries -= 1
mdelay *= backoff
return f(*args, **kwargs)
return f_retry # true decorator
return deco_retry
|
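A quick usage sketch for the decorator above, kept in the module's own Python 2 style (its `except ExceptionToCheck, e` syntax is Python 2 only); flaky_fetch and its failure mode are invented for illustration.
import random
@retry(ValueError, tries=3, delay=0, backoff=1)
def flaky_fetch():
    # Fails roughly half the time to exercise the retry loop; delay=0 keeps
    # the example fast. If every attempt fails, the last exception propagates.
    if random.random() < 0.5:
        raise ValueError("transient failure")
    return "ok"
print flaky_fetch()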
<commit_before>import time
def retry(ExceptionToCheck, tries=4, delay=3, backoff=2, logger=None):
"""Retry calling the decorated function using an exponential backoff.
http://www.saltycrane.com/blog/2009/11/trying-out-retry-decorator-python/
original from: http://wiki.python.org/moin/PythonDecoratorLibrary#Retry
:param ExceptionToCheck: the exception to check. may be a tuple of
excpetions to check
:type ExceptionToCheck: Exception or tuple
:param tries: number of times to try (not retry) before giving up
:type tries: int
:param delay: initial delay between retries in seconds
:type delay: int
:param backoff: backoff multiplier e.g. value of 2 will double the delay
each retry
:type backoff: int
:param logger: logger to use. If None, print
:type logger: logging.Logger instance
"""
def deco_retry(f):
def f_retry(*args, **kwargs):
mtries, mdelay = tries, delay
try_one_last_time = True
while mtries > 1:
try:
return f(*args, **kwargs)
try_one_last_time = False
break
except ExceptionToCheck, e:
msg = "%s, Retrying in %d seconds..." % (str(e), mdelay)
if logger:
logger.warning(msg)
else:
print msg
time.sleep(mdelay)
mtries -= 1
mdelay *= backoff
if try_one_last_time:
return f(*args, **kwargs)
return
return f_retry # true decorator
return deco_retry
<commit_msg>Remove unreachable code. Use functools.wraps.
- Remove code that was unreachable. Thanks Jaskirat
(http://www.saltycrane.com/blog/2009/11/trying-out-retry-decorator-python/#c24691)
- Use functools.wraps to make the retry decorator "well behaved"
- Fix typo<commit_after>
|
import time
from functools import wraps
def retry(ExceptionToCheck, tries=4, delay=3, backoff=2, logger=None):
"""Retry calling the decorated function using an exponential backoff.
http://www.saltycrane.com/blog/2009/11/trying-out-retry-decorator-python/
original from: http://wiki.python.org/moin/PythonDecoratorLibrary#Retry
:param ExceptionToCheck: the exception to check. may be a tuple of
exceptions to check
:type ExceptionToCheck: Exception or tuple
:param tries: number of times to try (not retry) before giving up
:type tries: int
:param delay: initial delay between retries in seconds
:type delay: int
:param backoff: backoff multiplier e.g. value of 2 will double the delay
each retry
:type backoff: int
:param logger: logger to use. If None, print
:type logger: logging.Logger instance
"""
def deco_retry(f):
@wraps(f)
def f_retry(*args, **kwargs):
mtries, mdelay = tries, delay
while mtries > 1:
try:
return f(*args, **kwargs)
except ExceptionToCheck, e:
msg = "%s, Retrying in %d seconds..." % (str(e), mdelay)
if logger:
logger.warning(msg)
else:
print msg
time.sleep(mdelay)
mtries -= 1
mdelay *= backoff
return f(*args, **kwargs)
return f_retry # true decorator
return deco_retry
|
import time
def retry(ExceptionToCheck, tries=4, delay=3, backoff=2, logger=None):
"""Retry calling the decorated function using an exponential backoff.
http://www.saltycrane.com/blog/2009/11/trying-out-retry-decorator-python/
original from: http://wiki.python.org/moin/PythonDecoratorLibrary#Retry
:param ExceptionToCheck: the exception to check. may be a tuple of
excpetions to check
:type ExceptionToCheck: Exception or tuple
:param tries: number of times to try (not retry) before giving up
:type tries: int
:param delay: initial delay between retries in seconds
:type delay: int
:param backoff: backoff multiplier e.g. value of 2 will double the delay
each retry
:type backoff: int
:param logger: logger to use. If None, print
:type logger: logging.Logger instance
"""
def deco_retry(f):
def f_retry(*args, **kwargs):
mtries, mdelay = tries, delay
try_one_last_time = True
while mtries > 1:
try:
return f(*args, **kwargs)
try_one_last_time = False
break
except ExceptionToCheck, e:
msg = "%s, Retrying in %d seconds..." % (str(e), mdelay)
if logger:
logger.warning(msg)
else:
print msg
time.sleep(mdelay)
mtries -= 1
mdelay *= backoff
if try_one_last_time:
return f(*args, **kwargs)
return
return f_retry # true decorator
return deco_retry
Remove unreachable code. Use functools.wraps.
- Remove code that was unreachable. Thanks Jaskirat
(http://www.saltycrane.com/blog/2009/11/trying-out-retry-decorator-python/#c24691)
- Use functools.wraps to make the retry decorator "well behaved"
- Fix typo
import time
from functools import wraps
def retry(ExceptionToCheck, tries=4, delay=3, backoff=2, logger=None):
"""Retry calling the decorated function using an exponential backoff.
http://www.saltycrane.com/blog/2009/11/trying-out-retry-decorator-python/
original from: http://wiki.python.org/moin/PythonDecoratorLibrary#Retry
:param ExceptionToCheck: the exception to check. may be a tuple of
exceptions to check
:type ExceptionToCheck: Exception or tuple
:param tries: number of times to try (not retry) before giving up
:type tries: int
:param delay: initial delay between retries in seconds
:type delay: int
:param backoff: backoff multiplier e.g. value of 2 will double the delay
each retry
:type backoff: int
:param logger: logger to use. If None, print
:type logger: logging.Logger instance
"""
def deco_retry(f):
@wraps(f)
def f_retry(*args, **kwargs):
mtries, mdelay = tries, delay
while mtries > 1:
try:
return f(*args, **kwargs)
except ExceptionToCheck, e:
msg = "%s, Retrying in %d seconds..." % (str(e), mdelay)
if logger:
logger.warning(msg)
else:
print msg
time.sleep(mdelay)
mtries -= 1
mdelay *= backoff
return f(*args, **kwargs)
return f_retry # true decorator
return deco_retry
|
<commit_before>import time
def retry(ExceptionToCheck, tries=4, delay=3, backoff=2, logger=None):
"""Retry calling the decorated function using an exponential backoff.
http://www.saltycrane.com/blog/2009/11/trying-out-retry-decorator-python/
original from: http://wiki.python.org/moin/PythonDecoratorLibrary#Retry
:param ExceptionToCheck: the exception to check. may be a tuple of
excpetions to check
:type ExceptionToCheck: Exception or tuple
:param tries: number of times to try (not retry) before giving up
:type tries: int
:param delay: initial delay between retries in seconds
:type delay: int
:param backoff: backoff multiplier e.g. value of 2 will double the delay
each retry
:type backoff: int
:param logger: logger to use. If None, print
:type logger: logging.Logger instance
"""
def deco_retry(f):
def f_retry(*args, **kwargs):
mtries, mdelay = tries, delay
try_one_last_time = True
while mtries > 1:
try:
return f(*args, **kwargs)
try_one_last_time = False
break
except ExceptionToCheck, e:
msg = "%s, Retrying in %d seconds..." % (str(e), mdelay)
if logger:
logger.warning(msg)
else:
print msg
time.sleep(mdelay)
mtries -= 1
mdelay *= backoff
if try_one_last_time:
return f(*args, **kwargs)
return
return f_retry # true decorator
return deco_retry
<commit_msg>Remove unreachable code. Use functools.wraps.
- Remove code that was unreachable. Thanks Jaskirat
(http://www.saltycrane.com/blog/2009/11/trying-out-retry-decorator-python/#c24691)
- Use functools.wraps to make the retry decorator "well behaved"
- Fix typo<commit_after>import time
from functools import wraps
def retry(ExceptionToCheck, tries=4, delay=3, backoff=2, logger=None):
"""Retry calling the decorated function using an exponential backoff.
http://www.saltycrane.com/blog/2009/11/trying-out-retry-decorator-python/
original from: http://wiki.python.org/moin/PythonDecoratorLibrary#Retry
:param ExceptionToCheck: the exception to check. may be a tuple of
exceptions to check
:type ExceptionToCheck: Exception or tuple
:param tries: number of times to try (not retry) before giving up
:type tries: int
:param delay: initial delay between retries in seconds
:type delay: int
:param backoff: backoff multiplier e.g. value of 2 will double the delay
each retry
:type backoff: int
:param logger: logger to use. If None, print
:type logger: logging.Logger instance
"""
def deco_retry(f):
@wraps(f)
def f_retry(*args, **kwargs):
mtries, mdelay = tries, delay
while mtries > 1:
try:
return f(*args, **kwargs)
except ExceptionToCheck, e:
msg = "%s, Retrying in %d seconds..." % (str(e), mdelay)
if logger:
logger.warning(msg)
else:
print msg
time.sleep(mdelay)
mtries -= 1
mdelay *= backoff
return f(*args, **kwargs)
return f_retry # true decorator
return deco_retry
|
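One point worth illustrating from the commit above: the `try_one_last_time` bookkeeping was unreachable because the `return` inside the `try` already exits the loop, and `functools.wraps` keeps the decorated function's identity intact. Below is a minimal Python 3 sketch of the same exponential-backoff idea, written fresh for illustration (names and parameters are not from the original):
import functools
import time

def retry(exc, tries=3, delay=0.1, backoff=2):
    # Minimal Python 3 sketch of the decorator above; illustrative only.
    def deco(f):
        @functools.wraps(f)  # copies f.__name__, f.__doc__, etc. onto the wrapper
        def wrapper(*args, **kwargs):
            mtries, mdelay = tries, delay
            while mtries > 1:
                try:
                    return f(*args, **kwargs)
                except exc as e:
                    print("%s, retrying in %.1f seconds..." % (e, mdelay))
                    time.sleep(mdelay)
                    mtries -= 1
                    mdelay *= backoff
            return f(*args, **kwargs)  # final attempt; exceptions propagate
        return wrapper
    return deco

@retry(ValueError)
def flaky():
    """Docstring preserved by wraps."""
    raise ValueError("boom")

print(flaky.__name__)  # prints 'flaky', not 'wrapper', thanks to wraps
|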
ebd024b4f8ee6e490c183da0bada28a2aaf328d8
|
comrade/users/urls.py
|
comrade/users/urls.py
|
from django.conf.urls.defaults import *
from django.core.urlresolvers import reverse
from django.utils.functional import lazy
reverse_lazy = lazy(reverse, unicode)
urlpatterns = patterns('django.contrib.auth.views',
url(r'^login/', 'login', name='login'),
url(r'^logout/', 'logout', {'next_page':'/'}, name='logout'),
url(r'^password/forgot/$', 'password_reset',
{'post_reset_redirect':reverse_lazy('users:password_reset_done')},
name='password_reset'),
url(r'^password/forgot/done/$', 'password_reset_done',
name='password_reset_done'),
url(r'^password/reset/(?P<uidb36>[0-9A-Za-z]+)-(?P<token>.+)/$',
'password_reset_confirm',
{'post_reset_redirect':
reverse_lazy('users:password_reset_complete')},
name='password_reset_confirm'),
url(r'^password/reset/done/$', 'password_reset_complete',
name='password_reset_complete'),
url(r'^password/change/', 'password_change',
name='password_change'),
url(r'^password/change/done', 'password_change_done',
name='password_change'),
)
|
from django.conf.urls.defaults import *
from django.core.urlresolvers import reverse
from django.utils.functional import lazy
reverse_lazy = lazy(reverse, unicode)
urlpatterns = patterns('django.contrib.auth.views',
url(r'^login/', 'login', name='login'),
url(r'^logout/', 'logout', {'next_page':'/'}, name='logout'),
url(r'^password/forgot/$', 'password_reset',
{'post_reset_redirect':reverse_lazy('account:password_reset_done')},
name='password_reset'),
url(r'^password/forgot/done/$', 'password_reset_done',
name='password_reset_done'),
url(r'^password/reset/(?P<uidb36>[0-9A-Za-z]+)-(?P<token>.+)/$',
'password_reset_confirm',
{'post_reset_redirect':
reverse_lazy('account:password_reset_complete')},
name='password_reset_confirm'),
url(r'^password/reset/done/$', 'password_reset_complete',
name='password_reset_complete'),
url(r'^password/change/', 'password_change',
name='password_change'),
url(r'^password/change/done', 'password_change_done',
name='password_change'),
)
|
Rename url namespace users -> account.
|
Rename url namespace users -> account.
|
Python
|
mit
|
bueda/django-comrade
|
from django.conf.urls.defaults import *
from django.core.urlresolvers import reverse
from django.utils.functional import lazy
reverse_lazy = lazy(reverse, unicode)
urlpatterns = patterns('django.contrib.auth.views',
url(r'^login/', 'login', name='login'),
url(r'^logout/', 'logout', {'next_page':'/'}, name='logout'),
url(r'^password/forgot/$', 'password_reset',
{'post_reset_redirect':reverse_lazy('users:password_reset_done')},
name='password_reset'),
url(r'^password/forgot/done/$', 'password_reset_done',
name='password_reset_done'),
url(r'^password/reset/(?P<uidb36>[0-9A-Za-z]+)-(?P<token>.+)/$',
'password_reset_confirm',
{'post_reset_redirect':
reverse_lazy('users:password_reset_complete')},
name='password_reset_confirm'),
url(r'^password/reset/done/$', 'password_reset_complete',
name='password_reset_complete'),
url(r'^password/change/', 'password_change',
name='password_change'),
url(r'^password/change/done', 'password_change_done',
name='password_change'),
)
Rename url namespace users -> account.
|
from django.conf.urls.defaults import *
from django.core.urlresolvers import reverse
from django.utils.functional import lazy
reverse_lazy = lazy(reverse, unicode)
urlpatterns = patterns('django.contrib.auth.views',
url(r'^login/', 'login', name='login'),
url(r'^logout/', 'logout', {'next_page':'/'}, name='logout'),
url(r'^password/forgot/$', 'password_reset',
{'post_reset_redirect':reverse_lazy('account:password_reset_done')},
name='password_reset'),
url(r'^password/forgot/done/$', 'password_reset_done',
name='password_reset_done'),
url(r'^password/reset/(?P<uidb36>[0-9A-Za-z]+)-(?P<token>.+)/$',
'password_reset_confirm',
{'post_reset_redirect':
reverse_lazy('account:password_reset_complete')},
name='password_reset_confirm'),
url(r'^password/reset/done/$', 'password_reset_complete',
name='password_reset_complete'),
url(r'^password/change/', 'password_change',
name='password_change'),
url(r'^password/change/done', 'password_change_done',
name='password_change'),
)
|
<commit_before>from django.conf.urls.defaults import *
from django.core.urlresolvers import reverse
from django.utils.functional import lazy
reverse_lazy = lazy(reverse, unicode)
urlpatterns = patterns('django.contrib.auth.views',
url(r'^login/', 'login', name='login'),
url(r'^logout/', 'logout', {'next_page':'/'}, name='logout'),
url(r'^password/forgot/$', 'password_reset',
{'post_reset_redirect':reverse_lazy('users:password_reset_done')},
name='password_reset'),
url(r'^password/forgot/done/$', 'password_reset_done',
name='password_reset_done'),
url(r'^password/reset/(?P<uidb36>[0-9A-Za-z]+)-(?P<token>.+)/$',
'password_reset_confirm',
{'post_reset_redirect':
reverse_lazy('users:password_reset_complete')},
name='password_reset_confirm'),
url(r'^password/reset/done/$', 'password_reset_complete',
name='password_reset_complete'),
url(r'^password/change/', 'password_change',
name='password_change'),
url(r'^password/change/done', 'password_change_done',
name='password_change'),
)
<commit_msg>Rename url namespace users -> account.<commit_after>
|
from django.conf.urls.defaults import *
from django.core.urlresolvers import reverse
from django.utils.functional import lazy
reverse_lazy = lazy(reverse, unicode)
urlpatterns = patterns('django.contrib.auth.views',
url(r'^login/', 'login', name='login'),
url(r'^logout/', 'logout', {'next_page':'/'}, name='logout'),
url(r'^password/forgot/$', 'password_reset',
{'post_reset_redirect':reverse_lazy('account:password_reset_done')},
name='password_reset'),
url(r'^password/forgot/done/$', 'password_reset_done',
name='password_reset_done'),
url(r'^password/reset/(?P<uidb36>[0-9A-Za-z]+)-(?P<token>.+)/$',
'password_reset_confirm',
{'post_reset_redirect':
reverse_lazy('account:password_reset_complete')},
name='password_reset_confirm'),
url(r'^password/reset/done/$', 'password_reset_complete',
name='password_reset_complete'),
url(r'^password/change/', 'password_change',
name='password_change'),
url(r'^password/change/done', 'password_change_done',
name='password_change'),
)
|
from django.conf.urls.defaults import *
from django.core.urlresolvers import reverse
from django.utils.functional import lazy
reverse_lazy = lazy(reverse, unicode)
urlpatterns = patterns('django.contrib.auth.views',
url(r'^login/', 'login', name='login'),
url(r'^logout/', 'logout', {'next_page':'/'}, name='logout'),
url(r'^password/forgot/$', 'password_reset',
{'post_reset_redirect':reverse_lazy('users:password_reset_done')},
name='password_reset'),
url(r'^password/forgot/done/$', 'password_reset_done',
name='password_reset_done'),
url(r'^password/reset/(?P<uidb36>[0-9A-Za-z]+)-(?P<token>.+)/$',
'password_reset_confirm',
{'post_reset_redirect':
reverse_lazy('users:password_reset_complete')},
name='password_reset_confirm'),
url(r'^password/reset/done/$', 'password_reset_complete',
name='password_reset_complete'),
url(r'^password/change/', 'password_change',
name='password_change'),
url(r'^password/change/done', 'password_change_done',
name='password_change'),
)
Rename url namespace users -> account.
from django.conf.urls.defaults import *
from django.core.urlresolvers import reverse
from django.utils.functional import lazy
reverse_lazy = lazy(reverse, unicode)
urlpatterns = patterns('django.contrib.auth.views',
url(r'^login/', 'login', name='login'),
url(r'^logout/', 'logout', {'next_page':'/'}, name='logout'),
url(r'^password/forgot/$', 'password_reset',
{'post_reset_redirect':reverse_lazy('account:password_reset_done')},
name='password_reset'),
url(r'^password/forgot/done/$', 'password_reset_done',
name='password_reset_done'),
url(r'^password/reset/(?P<uidb36>[0-9A-Za-z]+)-(?P<token>.+)/$',
'password_reset_confirm',
{'post_reset_redirect':
reverse_lazy('account:password_reset_complete')},
name='password_reset_confirm'),
url(r'^password/reset/done/$', 'password_reset_complete',
name='password_reset_complete'),
url(r'^password/change/', 'password_change',
name='password_change'),
url(r'^password/change/done', 'password_change_done',
name='password_change'),
)
|
<commit_before>from django.conf.urls.defaults import *
from django.core.urlresolvers import reverse
from django.utils.functional import lazy
reverse_lazy = lazy(reverse, unicode)
urlpatterns = patterns('django.contrib.auth.views',
url(r'^login/', 'login', name='login'),
url(r'^logout/', 'logout', {'next_page':'/'}, name='logout'),
url(r'^password/forgot/$', 'password_reset',
{'post_reset_redirect':reverse_lazy('users:password_reset_done')},
name='password_reset'),
url(r'^password/forgot/done/$', 'password_reset_done',
name='password_reset_done'),
url(r'^password/reset/(?P<uidb36>[0-9A-Za-z]+)-(?P<token>.+)/$',
'password_reset_confirm',
{'post_reset_redirect':
reverse_lazy('users:password_reset_complete')},
name='password_reset_confirm'),
url(r'^password/reset/done/$', 'password_reset_complete',
name='password_reset_complete'),
url(r'^password/change/', 'password_change',
name='password_change'),
url(r'^password/change/done', 'password_change_done',
name='password_change'),
)
<commit_msg>Rename url namespace users -> account.<commit_after>from django.conf.urls.defaults import *
from django.core.urlresolvers import reverse
from django.utils.functional import lazy
reverse_lazy = lazy(reverse, unicode)
urlpatterns = patterns('django.contrib.auth.views',
url(r'^login/', 'login', name='login'),
url(r'^logout/', 'logout', {'next_page':'/'}, name='logout'),
url(r'^password/forgot/$', 'password_reset',
{'post_reset_redirect':reverse_lazy('account:password_reset_done')},
name='password_reset'),
url(r'^password/forgot/done/$', 'password_reset_done',
name='password_reset_done'),
url(r'^password/reset/(?P<uidb36>[0-9A-Za-z]+)-(?P<token>.+)/$',
'password_reset_confirm',
{'post_reset_redirect':
reverse_lazy('account:password_reset_complete')},
name='password_reset_confirm'),
url(r'^password/reset/done/$', 'password_reset_complete',
name='password_reset_complete'),
url(r'^password/change/', 'password_change',
name='password_change'),
url(r'^password/change/done', 'password_change_done',
name='password_change'),
)
|
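A side note on the `reverse_lazy = lazy(reverse, unicode)` line kept by this commit: it defers URL reversal until the value is actually rendered, because the URLconf is not finished loading when this module is imported (modern Django ships this as `django.urls.reverse_lazy`). The sketch below hand-rolls the deferral idea so it runs standalone; the `lazy`, `reverse`, and registry here are illustrative stand-ins, not Django's:
def lazy(func, result_type):
    # Hand-rolled stand-in for django.utils.functional.lazy.
    class Proxy:
        def __init__(self, *args, **kwargs):
            self._args, self._kwargs = args, kwargs
        def __str__(self):
            return result_type(func(*self._args, **self._kwargs))
    return lambda *args, **kwargs: Proxy(*args, **kwargs)

registry = {}  # stands in for the URLconf, which is filled in after import time

def reverse(name):
    return registry[name]  # looking up too early would raise KeyError

url = lazy(reverse, str)("account:password_reset_done")  # nothing resolved yet
registry["account:password_reset_done"] = "/password/forgot/done/"
print(str(url))  # /password/forgot/done/  (resolved only at use time)
|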
37490a9bd916480775271cf7e0c91b11a7eac913
|
distarray/tests/run_ipcluster.py
|
distarray/tests/run_ipcluster.py
|
import six
from subprocess import Popen, PIPE
def run_ipcluster(n=4):
"""Convenient way to start an ipcluster for testing.
You have to wait for it to start, however.
"""
# FIXME: This should be reimplemented to signal when the cluster has
# successfully started
if six.PY2:
cmd = 'ipcluster'
elif six.PY3:
cmd = 'ipcluster3'
else:
raise NotImplementedError("Not run with Python 2 *or* 3?")
engines = "--engines=MPIEngineSetLauncher"
Popen([cmd, 'start', '-n', str(n), engines, str('&')],
stdout=PIPE, stderr=PIPE)
if __name__ == '__main__':
run_ipcluster()
|
import sys
import six
from subprocess import Popen, PIPE
if six.PY2:
ipcluster_cmd = 'ipcluster'
elif six.PY3:
ipcluster_cmd = 'ipcluster3'
else:
raise NotImplementedError("Not run with Python 2 *or* 3?")
def start(n=12):
"""Convenient way to start an ipcluster for testing.
You have to wait for it to start, however.
"""
# FIXME: This should be reimplemented to signal when the cluster has
# successfully started
engines = "--engines=MPIEngineSetLauncher"
Popen([ipcluster_cmd, 'start', '-n', str(n), engines, str('&')],
stdout=PIPE, stderr=PIPE)
def stop():
"""Convenient way to stop an ipcluster."""
Popen([ipcluster_cmd, 'stop'], stdout=PIPE, stderr=PIPE)
if __name__ == '__main__':
cmd = sys.argv[1]
fn = eval(cmd)
fn()
|
Add a `stop` command to the script.
|
Add a `stop` command to the script.
|
Python
|
bsd-3-clause
|
enthought/distarray,enthought/distarray,RaoUmer/distarray,RaoUmer/distarray
|
import six
from subprocess import Popen, PIPE
def run_ipcluster(n=4):
"""Convenient way to start an ipcluster for testing.
You have to wait for it to start, however.
"""
# FIXME: This should be reimplemented to signal when the cluster has
# successfully started
if six.PY2:
cmd = 'ipcluster'
elif six.PY3:
cmd = 'ipcluster3'
else:
raise NotImplementedError("Not run with Python 2 *or* 3?")
engines = "--engines=MPIEngineSetLauncher"
Popen([cmd, 'start', '-n', str(n), engines, str('&')],
stdout=PIPE, stderr=PIPE)
if __name__ == '__main__':
run_ipcluster()
Add a `stop` command to the script.
|
import sys
import six
from subprocess import Popen, PIPE
if six.PY2:
ipcluster_cmd = 'ipcluster'
elif six.PY3:
ipcluster_cmd = 'ipcluster3'
else:
raise NotImplementedError("Not run with Python 2 *or* 3?")
def start(n=12):
"""Convenient way to start an ipcluster for testing.
You have to wait for it to start, however.
"""
# FIXME: This should be reimplemented to signal when the cluster has
# successfully started
engines = "--engines=MPIEngineSetLauncher"
Popen([ipcluster_cmd, 'start', '-n', str(n), engines, str('&')],
stdout=PIPE, stderr=PIPE)
def stop():
"""Convenient way to stop an ipcluster."""
Popen([ipcluster_cmd, 'stop'], stdout=PIPE, stderr=PIPE)
if __name__ == '__main__':
cmd = sys.argv[1]
fn = eval(cmd)
fn()
|
<commit_before>import six
from subprocess import Popen, PIPE
def run_ipcluster(n=4):
"""Convenient way to start an ipcluster for testing.
You have to wait for it to start, however.
"""
# FIXME: This should be reimplemented to signal when the cluster has
# successfully started
if six.PY2:
cmd = 'ipcluster'
elif six.PY3:
cmd = 'ipcluster3'
else:
raise NotImplementedError("Not run with Python 2 *or* 3?")
engines = "--engines=MPIEngineSetLauncher"
Popen([cmd, 'start', '-n', str(n), engines, str('&')],
stdout=PIPE, stderr=PIPE)
if __name__ == '__main__':
run_ipcluster()
<commit_msg>Add a `stop` command to the script.<commit_after>
|
import sys
import six
from subprocess import Popen, PIPE
if six.PY2:
ipcluster_cmd = 'ipcluster'
elif six.PY3:
ipcluster_cmd = 'ipcluster3'
else:
raise NotImplementedError("Not run with Python 2 *or* 3?")
def start(n=12):
"""Convenient way to start an ipcluster for testing.
You have to wait for it to start, however.
"""
# FIXME: This should be reimplemented to signal when the cluster has
# successfully started
engines = "--engines=MPIEngineSetLauncher"
Popen([ipcluster_cmd, 'start', '-n', str(n), engines, str('&')],
stdout=PIPE, stderr=PIPE)
def stop():
"""Convenient way to stop an ipcluster."""
Popen([ipcluster_cmd, 'stop'], stdout=PIPE, stderr=PIPE)
if __name__ == '__main__':
cmd = sys.argv[1]
fn = eval(cmd)
fn()
|
import six
from subprocess import Popen, PIPE
def run_ipcluster(n=4):
"""Convenient way to start an ipcluster for testing.
You have to wait for it to start, however.
"""
# FIXME: This should be reimplemented to signal when the cluster has
# successfully started
if six.PY2:
cmd = 'ipcluster'
elif six.PY3:
cmd = 'ipcluster3'
else:
raise NotImplementedError("Not run with Python 2 *or* 3?")
engines = "--engines=MPIEngineSetLauncher"
Popen([cmd, 'start', '-n', str(n), engines, str('&')],
stdout=PIPE, stderr=PIPE)
if __name__ == '__main__':
run_ipcluster()
Add a `stop` command to the script.
import sys
import six
from subprocess import Popen, PIPE
if six.PY2:
ipcluster_cmd = 'ipcluster'
elif six.PY3:
ipcluster_cmd = 'ipcluster3'
else:
raise NotImplementedError("Not run with Python 2 *or* 3?")
def start(n=12):
"""Convenient way to start an ipcluster for testing.
You have to wait for it to start, however.
"""
# FIXME: This should be reimplemented to signal when the cluster has
# successfully started
engines = "--engines=MPIEngineSetLauncher"
Popen([ipcluster_cmd, 'start', '-n', str(n), engines, str('&')],
stdout=PIPE, stderr=PIPE)
def stop():
"""Convenient way to stop an ipcluster."""
Popen([ipcluster_cmd, 'stop'], stdout=PIPE, stderr=PIPE)
if __name__ == '__main__':
cmd = sys.argv[1]
fn = eval(cmd)
fn()
|
<commit_before>import six
from subprocess import Popen, PIPE
def run_ipcluster(n=4):
"""Convenient way to start an ipcluster for testing.
You have to wait for it to start, however.
"""
# FIXME: This should be reimplemented to signal when the cluster has
# successfully started
if six.PY2:
cmd = 'ipcluster'
elif six.PY3:
cmd = 'ipcluster3'
else:
raise NotImplementedError("Not run with Python 2 *or* 3?")
engines = "--engines=MPIEngineSetLauncher"
Popen([cmd, 'start', '-n', str(n), engines, str('&')],
stdout=PIPE, stderr=PIPE)
if __name__ == '__main__':
run_ipcluster()
<commit_msg>Add a `stop` command to the script.<commit_after>import sys
import six
from subprocess import Popen, PIPE
if six.PY2:
ipcluster_cmd = 'ipcluster'
elif six.PY3:
ipcluster_cmd = 'ipcluster3'
else:
raise NotImplementedError("Not run with Python 2 *or* 3?")
def start(n=12):
"""Convenient way to start an ipcluster for testing.
You have to wait for it to start, however.
"""
# FIXME: This should be reimplemented to signal when the cluster has
# successfully started
engines = "--engines=MPIEngineSetLauncher"
Popen([ipcluster_cmd, 'start', '-n', str(n), engines, str('&')],
stdout=PIPE, stderr=PIPE)
def stop():
"""Convenient way to stop an ipcluster."""
Popen([ipcluster_cmd, 'stop'], stdout=PIPE, stderr=PIPE)
if __name__ == '__main__':
cmd = sys.argv[1]
fn = eval(cmd)
fn()
|
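One design note on the rewrite above: `fn = eval(cmd)` evaluates whatever arrives as `argv[1]`, so any Python expression could run. A dictionary dispatch is the usual safer alternative; a short sketch with illustrative command names:
import sys

def start(n=12):
    print("starting %d engines" % n)

def stop():
    print("stopping cluster")

COMMANDS = {"start": start, "stop": stop}  # explicit whitelist instead of eval()

if __name__ == "__main__":
    cmd = sys.argv[1] if len(sys.argv) > 1 else "start"
    if cmd not in COMMANDS:
        sys.exit("unknown command %r; expected one of %s" % (cmd, sorted(COMMANDS)))
    COMMANDS[cmd]()
|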
b471da17f2e7b13d30746481e5db13f4d88de4d6
|
django/contrib/admin/__init__.py
|
django/contrib/admin/__init__.py
|
from django.contrib.admin.options import ModelAdmin, HORIZONTAL, VERTICAL
from django.contrib.admin.options import StackedInline, TabularInline
from django.contrib.admin.sites import AdminSite, site
def autodiscover():
"""
Auto-discover INSTALLED_APPS admin.py modules and fail silently when
not present. This forces an import on them to register any admin bits they
may want.
"""
import copy
from django.conf import settings
from django.utils.importlib import import_module
from django.utils.module_loading import module_has_submodule
for app in settings.INSTALLED_APPS:
mod = import_module(app)
# Attempt to import the app's admin module.
try:
before_import_registry = copy.copy(site._registry)
import_module('%s.admin' % app)
except:
# Reset the model registry to the state before the last import as
# this import will have to reoccur on the next request and this
# could raise NotRegistered and AlreadyRegistered exceptions
# (see #8245).
site._registry = before_import_registry
# Decide whether to bubble up this error. If the app just
# doesn't have an admin module, we can ignore the error
# attempting to import it, otherwise we want it to bubble up.
if module_has_submodule(mod, 'admin'):
raise
|
# ACTION_CHECKBOX_NAME is unused, but should stay since its import from here
# has been referenced in documentation.
from django.contrib.admin.helpers import ACTION_CHECKBOX_NAME
from django.contrib.admin.options import ModelAdmin, HORIZONTAL, VERTICAL
from django.contrib.admin.options import StackedInline, TabularInline
from django.contrib.admin.sites import AdminSite, site
def autodiscover():
"""
Auto-discover INSTALLED_APPS admin.py modules and fail silently when
not present. This forces an import on them to register any admin bits they
may want.
"""
import copy
from django.conf import settings
from django.utils.importlib import import_module
from django.utils.module_loading import module_has_submodule
for app in settings.INSTALLED_APPS:
mod = import_module(app)
# Attempt to import the app's admin module.
try:
before_import_registry = copy.copy(site._registry)
import_module('%s.admin' % app)
except:
# Reset the model registry to the state before the last import as
# this import will have to reoccur on the next request and this
# could raise NotRegistered and AlreadyRegistered exceptions
# (see #8245).
site._registry = before_import_registry
# Decide whether to bubble up this error. If the app just
# doesn't have an admin module, we can ignore the error
# attempting to import it, otherwise we want it to bubble up.
if module_has_submodule(mod, 'admin'):
raise
|
Revert the removal of an unused import (in [14175]) that was referenced in documentation. Thanks for noticing, clong.
|
Revert the removal of an unused import (in [14175]) that was referenced in documentation. Thanks for noticing, clong.
--HG--
extra : convert_revision : svn%3Abcc190cf-cafb-0310-a4f2-bffc1f526a37/django/trunk%4014359
|
Python
|
bsd-3-clause
|
adieu/django-nonrel,adieu/django-nonrel,adieu/django-nonrel
|
from django.contrib.admin.options import ModelAdmin, HORIZONTAL, VERTICAL
from django.contrib.admin.options import StackedInline, TabularInline
from django.contrib.admin.sites import AdminSite, site
def autodiscover():
"""
Auto-discover INSTALLED_APPS admin.py modules and fail silently when
not present. This forces an import on them to register any admin bits they
may want.
"""
import copy
from django.conf import settings
from django.utils.importlib import import_module
from django.utils.module_loading import module_has_submodule
for app in settings.INSTALLED_APPS:
mod = import_module(app)
# Attempt to import the app's admin module.
try:
before_import_registry = copy.copy(site._registry)
import_module('%s.admin' % app)
except:
# Reset the model registry to the state before the last import as
# this import will have to reoccur on the next request and this
# could raise NotRegistered and AlreadyRegistered exceptions
# (see #8245).
site._registry = before_import_registry
# Decide whether to bubble up this error. If the app just
# doesn't have an admin module, we can ignore the error
# attempting to import it, otherwise we want it to bubble up.
if module_has_submodule(mod, 'admin'):
raise
Revert the removal of an unused import (in [14175]) that was referenced in documentation. Thanks for noticing, clong.
--HG--
extra : convert_revision : svn%3Abcc190cf-cafb-0310-a4f2-bffc1f526a37/django/trunk%4014359
|
# ACTION_CHECKBOX_NAME is unused, but should stay since its import from here
# has been referenced in documentation.
from django.contrib.admin.helpers import ACTION_CHECKBOX_NAME
from django.contrib.admin.options import ModelAdmin, HORIZONTAL, VERTICAL
from django.contrib.admin.options import StackedInline, TabularInline
from django.contrib.admin.sites import AdminSite, site
def autodiscover():
"""
Auto-discover INSTALLED_APPS admin.py modules and fail silently when
not present. This forces an import on them to register any admin bits they
may want.
"""
import copy
from django.conf import settings
from django.utils.importlib import import_module
from django.utils.module_loading import module_has_submodule
for app in settings.INSTALLED_APPS:
mod = import_module(app)
# Attempt to import the app's admin module.
try:
before_import_registry = copy.copy(site._registry)
import_module('%s.admin' % app)
except:
# Reset the model registry to the state before the last import as
# this import will have to reoccur on the next request and this
# could raise NotRegistered and AlreadyRegistered exceptions
# (see #8245).
site._registry = before_import_registry
# Decide whether to bubble up this error. If the app just
# doesn't have an admin module, we can ignore the error
# attempting to import it, otherwise we want it to bubble up.
if module_has_submodule(mod, 'admin'):
raise
|
<commit_before>from django.contrib.admin.options import ModelAdmin, HORIZONTAL, VERTICAL
from django.contrib.admin.options import StackedInline, TabularInline
from django.contrib.admin.sites import AdminSite, site
def autodiscover():
"""
Auto-discover INSTALLED_APPS admin.py modules and fail silently when
not present. This forces an import on them to register any admin bits they
may want.
"""
import copy
from django.conf import settings
from django.utils.importlib import import_module
from django.utils.module_loading import module_has_submodule
for app in settings.INSTALLED_APPS:
mod = import_module(app)
# Attempt to import the app's admin module.
try:
before_import_registry = copy.copy(site._registry)
import_module('%s.admin' % app)
except:
# Reset the model registry to the state before the last import as
# this import will have to reoccur on the next request and this
# could raise NotRegistered and AlreadyRegistered exceptions
# (see #8245).
site._registry = before_import_registry
# Decide whether to bubble up this error. If the app just
# doesn't have an admin module, we can ignore the error
# attempting to import it, otherwise we want it to bubble up.
if module_has_submodule(mod, 'admin'):
raise
<commit_msg>Revert the removal of an unused import (in [14175]) that was referenced in documentation. Thanks for noticing, clong.
--HG--
extra : convert_revision : svn%3Abcc190cf-cafb-0310-a4f2-bffc1f526a37/django/trunk%4014359<commit_after>
|
# ACTION_CHECKBOX_NAME is unused, but should stay since its import from here
# has been referenced in documentation.
from django.contrib.admin.helpers import ACTION_CHECKBOX_NAME
from django.contrib.admin.options import ModelAdmin, HORIZONTAL, VERTICAL
from django.contrib.admin.options import StackedInline, TabularInline
from django.contrib.admin.sites import AdminSite, site
def autodiscover():
"""
Auto-discover INSTALLED_APPS admin.py modules and fail silently when
not present. This forces an import on them to register any admin bits they
may want.
"""
import copy
from django.conf import settings
from django.utils.importlib import import_module
from django.utils.module_loading import module_has_submodule
for app in settings.INSTALLED_APPS:
mod = import_module(app)
# Attempt to import the app's admin module.
try:
before_import_registry = copy.copy(site._registry)
import_module('%s.admin' % app)
except:
# Reset the model registry to the state before the last import as
# this import will have to reoccur on the next request and this
# could raise NotRegistered and AlreadyRegistered exceptions
# (see #8245).
site._registry = before_import_registry
# Decide whether to bubble up this error. If the app just
# doesn't have an admin module, we can ignore the error
# attempting to import it, otherwise we want it to bubble up.
if module_has_submodule(mod, 'admin'):
raise
|
from django.contrib.admin.options import ModelAdmin, HORIZONTAL, VERTICAL
from django.contrib.admin.options import StackedInline, TabularInline
from django.contrib.admin.sites import AdminSite, site
def autodiscover():
"""
Auto-discover INSTALLED_APPS admin.py modules and fail silently when
not present. This forces an import on them to register any admin bits they
may want.
"""
import copy
from django.conf import settings
from django.utils.importlib import import_module
from django.utils.module_loading import module_has_submodule
for app in settings.INSTALLED_APPS:
mod = import_module(app)
# Attempt to import the app's admin module.
try:
before_import_registry = copy.copy(site._registry)
import_module('%s.admin' % app)
except:
# Reset the model registry to the state before the last import as
# this import will have to reoccur on the next request and this
# could raise NotRegistered and AlreadyRegistered exceptions
# (see #8245).
site._registry = before_import_registry
# Decide whether to bubble up this error. If the app just
# doesn't have an admin module, we can ignore the error
# attempting to import it, otherwise we want it to bubble up.
if module_has_submodule(mod, 'admin'):
raise
Revert the removal of an unused import (in [14175]) that was referenced in documentation. Thanks for noticing, clong.
--HG--
extra : convert_revision : svn%3Abcc190cf-cafb-0310-a4f2-bffc1f526a37/django/trunk%4014359
# ACTION_CHECKBOX_NAME is unused, but should stay since its import from here
# has been referenced in documentation.
from django.contrib.admin.helpers import ACTION_CHECKBOX_NAME
from django.contrib.admin.options import ModelAdmin, HORIZONTAL, VERTICAL
from django.contrib.admin.options import StackedInline, TabularInline
from django.contrib.admin.sites import AdminSite, site
def autodiscover():
"""
Auto-discover INSTALLED_APPS admin.py modules and fail silently when
not present. This forces an import on them to register any admin bits they
may want.
"""
import copy
from django.conf import settings
from django.utils.importlib import import_module
from django.utils.module_loading import module_has_submodule
for app in settings.INSTALLED_APPS:
mod = import_module(app)
# Attempt to import the app's admin module.
try:
before_import_registry = copy.copy(site._registry)
import_module('%s.admin' % app)
except:
# Reset the model registry to the state before the last import as
# this import will have to reoccur on the next request and this
# could raise NotRegistered and AlreadyRegistered exceptions
# (see #8245).
site._registry = before_import_registry
# Decide whether to bubble up this error. If the app just
# doesn't have an admin module, we can ignore the error
# attempting to import it, otherwise we want it to bubble up.
if module_has_submodule(mod, 'admin'):
raise
|
<commit_before>from django.contrib.admin.options import ModelAdmin, HORIZONTAL, VERTICAL
from django.contrib.admin.options import StackedInline, TabularInline
from django.contrib.admin.sites import AdminSite, site
def autodiscover():
"""
Auto-discover INSTALLED_APPS admin.py modules and fail silently when
not present. This forces an import on them to register any admin bits they
may want.
"""
import copy
from django.conf import settings
from django.utils.importlib import import_module
from django.utils.module_loading import module_has_submodule
for app in settings.INSTALLED_APPS:
mod = import_module(app)
# Attempt to import the app's admin module.
try:
before_import_registry = copy.copy(site._registry)
import_module('%s.admin' % app)
except:
# Reset the model registry to the state before the last import as
# this import will have to reoccur on the next request and this
# could raise NotRegistered and AlreadyRegistered exceptions
# (see #8245).
site._registry = before_import_registry
# Decide whether to bubble up this error. If the app just
# doesn't have an admin module, we can ignore the error
# attempting to import it, otherwise we want it to bubble up.
if module_has_submodule(mod, 'admin'):
raise
<commit_msg>Revert the removal of an unused import (in [14175]) that was referenced in documentation. Thanks for noticing, clong.
--HG--
extra : convert_revision : svn%3Abcc190cf-cafb-0310-a4f2-bffc1f526a37/django/trunk%4014359<commit_after># ACTION_CHECKBOX_NAME is unused, but should stay since its import from here
# has been referenced in documentation.
from django.contrib.admin.helpers import ACTION_CHECKBOX_NAME
from django.contrib.admin.options import ModelAdmin, HORIZONTAL, VERTICAL
from django.contrib.admin.options import StackedInline, TabularInline
from django.contrib.admin.sites import AdminSite, site
def autodiscover():
"""
Auto-discover INSTALLED_APPS admin.py modules and fail silently when
not present. This forces an import on them to register any admin bits they
may want.
"""
import copy
from django.conf import settings
from django.utils.importlib import import_module
from django.utils.module_loading import module_has_submodule
for app in settings.INSTALLED_APPS:
mod = import_module(app)
# Attempt to import the app's admin module.
try:
before_import_registry = copy.copy(site._registry)
import_module('%s.admin' % app)
except:
# Reset the model registry to the state before the last import as
# this import will have to reoccur on the next request and this
# could raise NotRegistered and AlreadyRegistered exceptions
# (see #8245).
site._registry = before_import_registry
# Decide whether to bubble up this error. If the app just
# doesn't have an admin module, we can ignore the error
# attempting to import it, otherwise we want it to bubble up.
if module_has_submodule(mod, 'admin'):
raise
|
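The pattern worth noting in `autodiscover()` is snapshot-and-restore around a risky import: the registry is copied before importing each app's `admin` module and rolled back on failure, so a retried import on the next request does not trip `AlreadyRegistered`. The stripped-down sketch below shows just that pattern (the registry and module name are invented; the original's `module_has_submodule` check for deciding whether to re-raise is omitted):
import copy
import importlib

registry = {}  # stands in for admin.site._registry

def import_with_rollback(module_name):
    snapshot = copy.copy(registry)
    try:
        importlib.import_module(module_name)
    except ImportError:
        registry.clear()
        registry.update(snapshot)  # restore the pre-import state
        raise

try:
    import_with_rollback("no_such_app.admin")
except ImportError:
    print("registry untouched:", registry)  # {}
|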
0683e96d0cd55797639c84003e03c48ae7211912
|
sidecar/__init__.py
|
sidecar/__init__.py
|
from pyramid.config import Configurator
from pyramid.events import BeforeRender
from sqlalchemy import engine_from_config
from . import helpers
from .model import Session, Base
def add_renderer_globals(event):
event['h'] = helpers
def main(global_config, **settings):
"""
This function returns a Pyramid WSGI application.
"""
engine = engine_from_config(settings, 'sqlalchemy.')
Session.configure(bind=engine)
Base.metadata.bind = engine
config = Configurator(settings=settings)
config.include('.themes')
config.include('.views')
config.add_subscriber(add_renderer_globals, BeforeRender)
config.scan()
return config.make_wsgi_app()
|
from pyramid.config import Configurator
from pyramid.events import BeforeRender
from sqlalchemy import engine_from_config
from . import helpers
from .model import Session, Base
def add_renderer_globals(event):
event['h'] = helpers
def main(global_config, **settings):
"""
This function returns a Pyramid WSGI application.
"""
engine = engine_from_config(settings, 'sqlalchemy.')
Session.configure(bind=engine)
Base.metadata.bind = engine
config = Configurator(settings=settings)
config.include('.themes')
config.include('.views')
config.add_subscriber(add_renderer_globals, BeforeRender)
#config.scan()
return config.make_wsgi_app()
|
Disable config.scan() as well for now
|
Disable config.scan() as well for now
|
Python
|
mit
|
storborg/sidecar,storborg/sidecar
|
from pyramid.config import Configurator
from pyramid.events import BeforeRender
from sqlalchemy import engine_from_config
from . import helpers
from .model import Session, Base
def add_renderer_globals(event):
event['h'] = helpers
def main(global_config, **settings):
"""
This function returns a Pyramid WSGI application.
"""
engine = engine_from_config(settings, 'sqlalchemy.')
Session.configure(bind=engine)
Base.metadata.bind = engine
config = Configurator(settings=settings)
config.include('.themes')
config.include('.views')
config.add_subscriber(add_renderer_globals, BeforeRender)
config.scan()
return config.make_wsgi_app()
Disable config.scan() as well for now
|
from pyramid.config import Configurator
from pyramid.events import BeforeRender
from sqlalchemy import engine_from_config
from . import helpers
from .model import Session, Base
def add_renderer_globals(event):
event['h'] = helpers
def main(global_config, **settings):
"""
This function returns a Pyramid WSGI application.
"""
engine = engine_from_config(settings, 'sqlalchemy.')
Session.configure(bind=engine)
Base.metadata.bind = engine
config = Configurator(settings=settings)
config.include('.themes')
config.include('.views')
config.add_subscriber(add_renderer_globals, BeforeRender)
#config.scan()
return config.make_wsgi_app()
|
<commit_before>from pyramid.config import Configurator
from pyramid.events import BeforeRender
from sqlalchemy import engine_from_config
from . import helpers
from .model import Session, Base
def add_renderer_globals(event):
event['h'] = helpers
def main(global_config, **settings):
"""
This function returns a Pyramid WSGI application.
"""
engine = engine_from_config(settings, 'sqlalchemy.')
Session.configure(bind=engine)
Base.metadata.bind = engine
config = Configurator(settings=settings)
config.include('.themes')
config.include('.views')
config.add_subscriber(add_renderer_globals, BeforeRender)
config.scan()
return config.make_wsgi_app()
<commit_msg>Disable config.scan() as well for now<commit_after>
|
from pyramid.config import Configurator
from pyramid.events import BeforeRender
from sqlalchemy import engine_from_config
from . import helpers
from .model import Session, Base
def add_renderer_globals(event):
event['h'] = helpers
def main(global_config, **settings):
"""
This function returns a Pyramid WSGI application.
"""
engine = engine_from_config(settings, 'sqlalchemy.')
Session.configure(bind=engine)
Base.metadata.bind = engine
config = Configurator(settings=settings)
config.include('.themes')
config.include('.views')
config.add_subscriber(add_renderer_globals, BeforeRender)
#config.scan()
return config.make_wsgi_app()
|
from pyramid.config import Configurator
from pyramid.events import BeforeRender
from sqlalchemy import engine_from_config
from . import helpers
from .model import Session, Base
def add_renderer_globals(event):
event['h'] = helpers
def main(global_config, **settings):
"""
This function returns a Pyramid WSGI application.
"""
engine = engine_from_config(settings, 'sqlalchemy.')
Session.configure(bind=engine)
Base.metadata.bind = engine
config = Configurator(settings=settings)
config.include('.themes')
config.include('.views')
config.add_subscriber(add_renderer_globals, BeforeRender)
config.scan()
return config.make_wsgi_app()
Disable config.scan() as well for now
from pyramid.config import Configurator
from pyramid.events import BeforeRender
from sqlalchemy import engine_from_config
from . import helpers
from .model import Session, Base
def add_renderer_globals(event):
event['h'] = helpers
def main(global_config, **settings):
"""
This function returns a Pyramid WSGI application.
"""
engine = engine_from_config(settings, 'sqlalchemy.')
Session.configure(bind=engine)
Base.metadata.bind = engine
config = Configurator(settings=settings)
config.include('.themes')
config.include('.views')
config.add_subscriber(add_renderer_globals, BeforeRender)
#config.scan()
return config.make_wsgi_app()
|
<commit_before>from pyramid.config import Configurator
from pyramid.events import BeforeRender
from sqlalchemy import engine_from_config
from . import helpers
from .model import Session, Base
def add_renderer_globals(event):
event['h'] = helpers
def main(global_config, **settings):
"""
This function returns a Pyramid WSGI application.
"""
engine = engine_from_config(settings, 'sqlalchemy.')
Session.configure(bind=engine)
Base.metadata.bind = engine
config = Configurator(settings=settings)
config.include('.themes')
config.include('.views')
config.add_subscriber(add_renderer_globals, BeforeRender)
config.scan()
return config.make_wsgi_app()
<commit_msg>Disable config.scan() as well for now<commit_after>from pyramid.config import Configurator
from pyramid.events import BeforeRender
from sqlalchemy import engine_from_config
from . import helpers
from .model import Session, Base
def add_renderer_globals(event):
event['h'] = helpers
def main(global_config, **settings):
"""
This function returns a Pyramid WSGI application.
"""
engine = engine_from_config(settings, 'sqlalchemy.')
Session.configure(bind=engine)
Base.metadata.bind = engine
config = Configurator(settings=settings)
config.include('.themes')
config.include('.views')
config.add_subscriber(add_renderer_globals, BeforeRender)
#config.scan()
return config.make_wsgi_app()
|
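`add_renderer_globals` plus `config.add_subscriber(..., BeforeRender)` is the standard Pyramid idiom for exposing a helpers namespace to every template: the `BeforeRender` event is dict-like, so setting a key makes it a renderer global. A minimal sketch, assuming the `pyramid` package is installed (route and helper names are made up):
from pyramid.config import Configurator
from pyramid.events import BeforeRender
from pyramid.response import Response

def add_renderer_globals(event):
    event['h'] = {'shout': str.upper}  # every renderer now sees 'h'

def hello(request):
    return Response('hi')

config = Configurator()
config.add_subscriber(add_renderer_globals, BeforeRender)
config.add_route('hello', '/')
config.add_view(hello, route_name='hello')
app = config.make_wsgi_app()  # hand to any WSGI server
|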
2d5710dfc8361bb4c383e8fe8a2e2ca267c11875
|
luhn/luhn.py
|
luhn/luhn.py
|
# File: luhn.py
# Purpose: Write a program that can take a number and determine whether
# or not it is valid per the Luhn formula.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Sunday 18 September 2016, 09:55 PM
class Luhn(object):
def __init__(self, card_number):
self.card_number = card_number
def addends(self):
luhn_function = lambda x: (2 * x - 9) if (x > 4) else (2 * x)
|
# File: luhn.py
# Purpose: Write a program that can take a number and determine whether
# or not it is valid per the Luhn formula.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Sunday 18 September 2016, 09:55 PM
class Luhn(object):
def __init__(self, card_number):
self.card_number = card_number
def addends(self):
def luhn_function(x): return (2 * x - 9) if (x > 4) else (2 * x)
prev_digits = [item for item in str(self.card_number)]
return int(prev_digits)
cdn = Luhn(1212)
print (cdn.addends())
|
Convert card number to string for iteration
|
Convert card number to string for iteration
|
Python
|
mit
|
amalshehu/exercism-python
|
# File: luhn.py
# Purpose: Write a program that can take a number and determine whether
# or not it is valid per the Luhn formula.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Sunday 18 September 2016, 09:55 PM
class Luhn(object):
def __init__(self, card_number):
self.card_number = card_number
def addends(self):
luhn_function = lambda x: (2 * x - 9) if (x > 4) else (2 * x)
Convert card number to string for iteration
|
# File: luhn.py
# Purpose: Write a program that can take a number and determine whether
# or not it is valid per the Luhn formula.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Sunday 18 September 2016, 09:55 PM
class Luhn(object):
def __init__(self, card_number):
self.card_number = card_number
def addends(self):
def luhn_function(x): return (2 * x - 9) if (x > 4) else (2 * x)
prev_digits = [item for item in str(self.card_number)]
return int(prev_digits)
cdn = Luhn(1212)
print (cdn.addends())
|
<commit_before># File: luhn.py
# Purpose: Write a program that can take a number and determine whether
# or not it is valid per the Luhn formula.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Sunday 18 September 2016, 09:55 PM
class Luhn(object):
def __init__(self, card_number):
self.card_number = card_number
def addends(self):
luhn_function = lambda x: (2 * x - 9) if (x > 4) else (2 * x)
<commit_msg>Convert card number to string for iteration<commit_after>
|
# File: luhn.py
# Purpose: Write a program that can take a number and determine whether
# or not it is valid per the Luhn formula.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Sunday 18 September 2016, 09:55 PM
class Luhn(object):
def __init__(self, card_number):
self.card_number = card_number
def addends(self):
def luhn_function(x): return (2 * x - 9) if (x > 4) else (2 * x)
prev_digits = [item for item in str(self.card_number)]
return int(prev_digits)
cdn = Luhn(1212)
print (cdn.addends())
|
# File: luhn.py
# Purpose: Write a program that can take a number and determine whether
# or not it is valid per the Luhn formula.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Sunday 18 September 2016, 09:55 PM
class Luhn(object):
def __init__(self, card_number):
self.card_number = card_number
def addends(self):
luhn_function = lambda x: (2 * x - 9) if (x > 4) else (2 * x)
Convert card number to string for iteration
# File: luhn.py
# Purpose: Write a program that can take a number and determine whether
# or not it is valid per the Luhn formula.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Sunday 18 September 2016, 09:55 PM
class Luhn(object):
def __init__(self, card_number):
self.card_number = card_number
def addends(self):
def luhn_function(x): return (2 * x - 9) if (x > 4) else (2 * x)
prev_digits = [item for item in str(self.card_number)]
return int(prev_digits)
cdn = Luhn(1212)
print (cdn.addends())
|
<commit_before># File: luhn.py
# Purpose: Write a program that can take a number and determine whether
# or not it is valid per the Luhn formula.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Sunday 18 September 2016, 09:55 PM
class Luhn(object):
def __init__(self, card_number):
self.card_number = card_number
def addends(self):
luhn_function = lambda x: (2 * x - 9) if (x > 4) else (2 * x)
<commit_msg>Convert card number to string for iteration<commit_after># File: luhn.py
# Purpose: Write a program that can take a number and determine whether
# or not it is valid per the Luhn formula.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Sunday 18 September 2016, 09:55 PM
class Luhn(object):
def __init__(self, card_number):
self.card_number = card_number
def addends(self):
def luhn_function(x): return (2 * x - 9) if (x > 4) else (2 * x)
prev_digits = [item for item in str(self.card_number)]
return int(prev_digits)
cdn = Luhn(1212)
print (cdn.addends())
|
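For reference, the committed `addends()` above is a work-in-progress step: `luhn_function` is defined but never applied, and `int(prev_digits)` calls `int()` on a list, which raises `TypeError`. A minimal working sketch of the Luhn formula the exercise targets, written fresh here rather than taken from the original:
def luhn_addends(card_number):
    digits = [int(d) for d in str(card_number)]
    for i in range(len(digits) - 2, -1, -2):  # every second digit from the right
        digits[i] = digits[i] * 2 - 9 if digits[i] > 4 else digits[i] * 2
    return digits

def luhn_valid(card_number):
    return sum(luhn_addends(card_number)) % 10 == 0

print(luhn_addends(1212))            # [2, 2, 2, 2]
print(luhn_valid(4539319503436467))  # True: checksum is a multiple of 10
|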
88d93f580ce2f587ac5fa1b41e7ab3f67a9c6be4
|
avalonstar/apps/live/urls.py
|
avalonstar/apps/live/urls.py
|
# -*- coding: utf-8 -*-
from django.conf import settings
from django.conf.urls import patterns, include, url
from .views import AwayView, BumperView, GameView, PrologueView
urlpatterns = patterns('',
url(r'^away/$', name='live-away', view=AwayView.as_view()),
url(r'^bumper/$', name='live-bumper', view=BumperView.as_view()),
url(r'^game/$', name='live-game', view=GameView.as_view()),
url(r'^prologue/$', name='live-prologue', view=PrologueView.as_view()),
)
|
# -*- coding: utf-8 -*-
from django.conf import settings
from django.conf.urls import patterns, include, url
from .views import (AwayView, BumperView, DiscussionView, GameView,
PrologueView)
urlpatterns = patterns('',
url(r'^away/$', name='live-away', view=AwayView.as_view()),
url(r'^bumper/$', name='live-bumper', view=BumperView.as_view()),
url(r'^discussion/$', name='live-discussion', view=DiscussionView.as_view()),
url(r'^game/$', name='live-game', view=GameView.as_view()),
url(r'^prologue/$', name='live-prologue', view=PrologueView.as_view()),
)
|
Add DiscussionView to the URLs.
|
Add DiscussionView to the URLs.
|
Python
|
apache-2.0
|
bryanveloso/avalonstar-tv,bryanveloso/avalonstar-tv,bryanveloso/avalonstar-tv
|
# -*- coding: utf-8 -*-
from django.conf import settings
from django.conf.urls import patterns, include, url
from .views import AwayView, BumperView, GameView, PrologueView
urlpatterns = patterns('',
url(r'^away/$', name='live-away', view=AwayView.as_view()),
url(r'^bumper/$', name='live-bumper', view=BumperView.as_view()),
url(r'^game/$', name='live-game', view=GameView.as_view()),
url(r'^prologue/$', name='live-prologue', view=PrologueView.as_view()),
)
Add DiscussionView to the URLs.
|
# -*- coding: utf-8 -*-
from django.conf import settings
from django.conf.urls import patterns, include, url
from .views import (AwayView, BumperView, DiscussionView, GameView,
PrologueView)
urlpatterns = patterns('',
url(r'^away/$', name='live-away', view=AwayView.as_view()),
url(r'^bumper/$', name='live-bumper', view=BumperView.as_view()),
url(r'^discussion/$', name='live-discussion', view=DiscussionView.as_view()),
url(r'^game/$', name='live-game', view=GameView.as_view()),
url(r'^prologue/$', name='live-prologue', view=PrologueView.as_view()),
)
|
<commit_before># -*- coding: utf-8 -*-
from django.conf import settings
from django.conf.urls import patterns, include, url
from .views import AwayView, BumperView, GameView, PrologueView
urlpatterns = patterns('',
url(r'^away/$', name='live-away', view=AwayView.as_view()),
url(r'^bumper/$', name='live-bumper', view=BumperView.as_view()),
url(r'^game/$', name='live-game', view=GameView.as_view()),
url(r'^prologue/$', name='live-prologue', view=PrologueView.as_view()),
)
<commit_msg>Add DiscussionView to the URLs.<commit_after>
|
# -*- coding: utf-8 -*-
from django.conf import settings
from django.conf.urls import patterns, include, url
from .views import (AwayView, BumperView, DiscussionView, GameView,
PrologueView)
urlpatterns = patterns('',
url(r'^away/$', name='live-away', view=AwayView.as_view()),
url(r'^bumper/$', name='live-bumper', view=BumperView.as_view()),
url(r'^discussion/$', name='live-discussion', view=DiscussionView.as_view()),
url(r'^game/$', name='live-game', view=GameView.as_view()),
url(r'^prologue/$', name='live-prologue', view=PrologueView.as_view()),
)
|
# -*- coding: utf-8 -*-
from django.conf import settings
from django.conf.urls import patterns, include, url
from .views import AwayView, BumperView, GameView, PrologueView
urlpatterns = patterns('',
url(r'^away/$', name='live-away', view=AwayView.as_view()),
url(r'^bumper/$', name='live-bumper', view=BumperView.as_view()),
url(r'^game/$', name='live-game', view=GameView.as_view()),
url(r'^prologue/$', name='live-prologue', view=PrologueView.as_view()),
)
Add DiscussionView to the URLs.
# -*- coding: utf-8 -*-
from django.conf import settings
from django.conf.urls import patterns, include, url
from .views import (AwayView, BumperView, DiscussionView, GameView,
PrologueView)
urlpatterns = patterns('',
url(r'^away/$', name='live-away', view=AwayView.as_view()),
url(r'^bumper/$', name='live-bumper', view=BumperView.as_view()),
url(r'^discussion/$', name='live-discussion', view=DiscussionView.as_view()),
url(r'^game/$', name='live-game', view=GameView.as_view()),
url(r'^prologue/$', name='live-prologue', view=PrologueView.as_view()),
)
|
<commit_before># -*- coding: utf-8 -*-
from django.conf import settings
from django.conf.urls import patterns, include, url
from .views import AwayView, BumperView, GameView, PrologueView
urlpatterns = patterns('',
url(r'^away/$', name='live-away', view=AwayView.as_view()),
url(r'^bumper/$', name='live-bumper', view=BumperView.as_view()),
url(r'^game/$', name='live-game', view=GameView.as_view()),
url(r'^prologue/$', name='live-prologue', view=PrologueView.as_view()),
)
<commit_msg>Add DiscussionView to the URLs.<commit_after># -*- coding: utf-8 -*-
from django.conf import settings
from django.conf.urls import patterns, include, url
from .views import (AwayView, BumperView, DiscussionView, GameView,
PrologueView)
urlpatterns = patterns('',
url(r'^away/$', name='live-away', view=AwayView.as_view()),
url(r'^bumper/$', name='live-bumper', view=BumperView.as_view()),
url(r'^discussion/$', name='live-discussion', view=DiscussionView.as_view()),
url(r'^game/$', name='live-game', view=GameView.as_view()),
url(r'^prologue/$', name='live-prologue', view=PrologueView.as_view()),
)
|
0fed581409f0dfa4788964d02d066e8e30f1387f
|
webapp/byceps/blueprints/shop_admin/service.py
|
webapp/byceps/blueprints/shop_admin/service.py
|
# -*- coding: utf-8 -*-
"""
byceps.blueprints.shop_admin.service
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2014 Jochen Kupperschmidt
"""
from collections import Counter
from ..shop.models import PaymentState
def count_ordered_articles(article):
"""Count how often the article has been ordered, grouped by the
order's payment state.
"""
counter = Counter({state: 0 for state in PaymentState})
for order_item in article.order_items:
counter[order_item.order.payment_state] += order_item.quantity
return dict(counter)
|
# -*- coding: utf-8 -*-
"""
byceps.blueprints.shop_admin.service
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2014 Jochen Kupperschmidt
"""
from collections import Counter
from ..shop.models import PaymentState
def count_ordered_articles(article):
"""Count how often the article has been ordered, grouped by the
order's payment state.
"""
# Ensure every payment state is present in the resulting dictionary,
# even if no orders of the corresponding payment state exist for the
# article.
counter = Counter({state: 0 for state in PaymentState})
for order_item in article.order_items:
counter[order_item.order.payment_state] += order_item.quantity
return dict(counter)
|
Make sure every payment state is present in the counter.
|
Make sure every payment state is present in the counter.
|
Python
|
bsd-3-clause
|
m-ober/byceps,m-ober/byceps,homeworkprod/byceps,m-ober/byceps,homeworkprod/byceps,homeworkprod/byceps
|
# -*- coding: utf-8 -*-
"""
byceps.blueprints.shop_admin.service
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2014 Jochen Kupperschmidt
"""
from collections import Counter
from ..shop.models import PaymentState
def count_ordered_articles(article):
"""Count how often the article has been ordered, grouped by the
order's payment state.
"""
counter = Counter({state: 0 for state in PaymentState})
for order_item in article.order_items:
counter[order_item.order.payment_state] += order_item.quantity
return dict(counter)
Make sure every payment state is present in the counter.
|
# -*- coding: utf-8 -*-
"""
byceps.blueprints.shop_admin.service
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2014 Jochen Kupperschmidt
"""
from collections import Counter
from ..shop.models import PaymentState
def count_ordered_articles(article):
"""Count how often the article has been ordered, grouped by the
order's payment state.
"""
# Ensure every payment state is present in the resulting dictionary,
# even if no orders of the corresponding payment state exist for the
# article.
counter = Counter({state: 0 for state in PaymentState})
for order_item in article.order_items:
counter[order_item.order.payment_state] += order_item.quantity
return dict(counter)
|
<commit_before># -*- coding: utf-8 -*-
"""
byceps.blueprints.shop_admin.service
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2014 Jochen Kupperschmidt
"""
from collections import Counter
from ..shop.models import PaymentState
def count_ordered_articles(article):
"""Count how often the article has been ordered, grouped by the
order's payment state.
"""
counter = Counter({state: 0 for state in PaymentState})
for order_item in article.order_items:
counter[order_item.order.payment_state] += order_item.quantity
return dict(counter)
<commit_msg>Make sure every payment state is present in the counter.<commit_after>
|
# -*- coding: utf-8 -*-
"""
byceps.blueprints.shop_admin.service
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2014 Jochen Kupperschmidt
"""
from collections import Counter
from ..shop.models import PaymentState
def count_ordered_articles(article):
"""Count how often the article has been ordered, grouped by the
order's payment state.
"""
# Ensure every payment state is present in the resulting dictionary,
# even if no orders of the corresponding payment state exist for the
# article.
counter = Counter({state: 0 for state in PaymentState})
for order_item in article.order_items:
counter[order_item.order.payment_state] += order_item.quantity
return dict(counter)
|
# -*- coding: utf-8 -*-
"""
byceps.blueprints.shop_admin.service
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2014 Jochen Kupperschmidt
"""
from collections import Counter
from ..shop.models import PaymentState
def count_ordered_articles(article):
"""Count how often the article has been ordered, grouped by the
order's payment state.
"""
counter = Counter({state: 0 for state in PaymentState})
for order_item in article.order_items:
counter[order_item.order.payment_state] += order_item.quantity
return dict(counter)
Make sure every payment state is present in the counter.# -*- coding: utf-8 -*-
"""
byceps.blueprints.shop_admin.service
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2014 Jochen Kupperschmidt
"""
from collections import Counter
from ..shop.models import PaymentState
def count_ordered_articles(article):
"""Count how often the article has been ordered, grouped by the
order's payment state.
"""
# Ensure every payment state is present in the resulting dictionary,
# even if no orders of the corresponding payment state exist for the
# article.
counter = Counter({state: 0 for state in PaymentState})
for order_item in article.order_items:
counter[order_item.order.payment_state] += order_item.quantity
return dict(counter)
|
<commit_before># -*- coding: utf-8 -*-
"""
byceps.blueprints.shop_admin.service
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2014 Jochen Kupperschmidt
"""
from collections import Counter
from ..shop.models import PaymentState
def count_ordered_articles(article):
"""Count how often the article has been ordered, grouped by the
order's payment state.
"""
counter = Counter({state: 0 for state in PaymentState})
for order_item in article.order_items:
counter[order_item.order.payment_state] += order_item.quantity
return dict(counter)
<commit_msg>Make sure every payment state is present in the counter.<commit_after># -*- coding: utf-8 -*-
"""
byceps.blueprints.shop_admin.service
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2014 Jochen Kupperschmidt
"""
from collections import Counter
from ..shop.models import PaymentState
def count_ordered_articles(article):
"""Count how often the article has been ordered, grouped by the
order's payment state.
"""
# Ensure every payment state is present in the resulting dictionary,
# even if no orders of the corresponding payment state exist for the
# article.
counter = Counter({state: 0 for state in PaymentState})
for order_item in article.order_items:
counter[order_item.order.payment_state] += order_item.quantity
return dict(counter)
|
0cb5447de992389be9587d7706637212bfe3b90b
|
tests/events/tests.py
|
tests/events/tests.py
|
# -*- coding: utf-8 -*-
from mock import Mock
from unittest2 import TestCase
from raven.events import Message
class MessageTest(TestCase):
def test_to_string(self):
unformatted_message = 'My message from %s about %s'
client = Mock()
message = Message(client)
message.logger = Mock()
data = {
'sentry.interfaces.Message': {
'message': unformatted_message,
}
}
self.assertEqual(message.to_string(data), unformatted_message)
self.assertEqual(message.logger.warn.call_count, 1)
args, kwargs = message.logger.warn.call_args
self.assertEqual(args, ('Unable to find params for message',))
self.assertEqual(kwargs,
{'extra': {'msg': {'message': unformatted_message}}})
data['sentry.interfaces.Message']['params'] = (1, 2)
self.assertEqual(message.to_string(data),
unformatted_message % (1, 2))
|
# -*- coding: utf-8 -*-
from mock import Mock
from unittest2 import TestCase
from raven.events import Message
class MessageTest(TestCase):
def test_to_string(self):
unformatted_message = 'My message from %s about %s'
client = Mock()
message = Message(client)
message.logger = Mock()
data = {
'sentry.interfaces.Message': {
'message': unformatted_message,
}
}
self.assertEqual(message.to_string(data), unformatted_message)
data['sentry.interfaces.Message']['params'] = (1, 2)
self.assertEqual(message.to_string(data),
unformatted_message % (1, 2))
|
Update test to match current behavior
|
Update test to match current behavior
|
Python
|
bsd-3-clause
|
johansteffner/raven-python,Photonomie/raven-python,jbarbuto/raven-python,nikolas/raven-python,lepture/raven-python,smarkets/raven-python,arthurlogilab/raven-python,lepture/raven-python,hzy/raven-python,recht/raven-python,inspirehep/raven-python,nikolas/raven-python,openlabs/raven,patrys/opbeat_python,ewdurbin/raven-python,dbravender/raven-python,akalipetis/raven-python,alex/raven,Photonomie/raven-python,jmp0xf/raven-python,akalipetis/raven-python,jbarbuto/raven-python,smarkets/raven-python,nikolas/raven-python,smarkets/raven-python,Goldmund-Wyldebeast-Wunderliebe/raven-python,lopter/raven-python-old,beniwohli/apm-agent-python,percipient/raven-python,icereval/raven-python,percipient/raven-python,ronaldevers/raven-python,danriti/raven-python,Goldmund-Wyldebeast-Wunderliebe/raven-python,akalipetis/raven-python,akheron/raven-python,dirtycoder/opbeat_python,hzy/raven-python,hzy/raven-python,patrys/opbeat_python,ewdurbin/raven-python,jmagnusson/raven-python,ronaldevers/raven-python,icereval/raven-python,collective/mr.poe,jmp0xf/raven-python,ticosax/opbeat_python,arthurlogilab/raven-python,tarkatronic/opbeat_python,icereval/raven-python,jbarbuto/raven-python,inspirehep/raven-python,ronaldevers/raven-python,akheron/raven-python,danriti/raven-python,daikeren/opbeat_python,johansteffner/raven-python,dbravender/raven-python,tarkatronic/opbeat_python,daikeren/opbeat_python,johansteffner/raven-python,ticosax/opbeat_python,someonehan/raven-python,Photonomie/raven-python,someonehan/raven-python,inspirehep/raven-python,dirtycoder/opbeat_python,beniwohli/apm-agent-python,beniwohli/apm-agent-python,jmagnusson/raven-python,percipient/raven-python,recht/raven-python,jbarbuto/raven-python,someonehan/raven-python,dirtycoder/opbeat_python,getsentry/raven-python,getsentry/raven-python,jmp0xf/raven-python,tarkatronic/opbeat_python,dbravender/raven-python,inspirehep/raven-python,akheron/raven-python,smarkets/raven-python,Goldmund-Wyldebeast-Wunderliebe/raven-python,patrys/opbeat_python,jmagnusson/raven-python,danriti/raven-python,nikolas/raven-python,getsentry/raven-python,icereval/raven-python,Goldmund-Wyldebeast-Wunderliebe/raven-python,arthurlogilab/raven-python,patrys/opbeat_python,arthurlogilab/raven-python,recht/raven-python,lepture/raven-python,daikeren/opbeat_python,ticosax/opbeat_python,beniwohli/apm-agent-python,ewdurbin/raven-python
|
# -*- coding: utf-8 -*-
from mock import Mock
from unittest2 import TestCase
from raven.events import Message
class MessageTest(TestCase):
def test_to_string(self):
unformatted_message = 'My message from %s about %s'
client = Mock()
message = Message(client)
message.logger = Mock()
data = {
'sentry.interfaces.Message': {
'message': unformatted_message,
}
}
self.assertEqual(message.to_string(data), unformatted_message)
self.assertEqual(message.logger.warn.call_count, 1)
args, kwargs = message.logger.warn.call_args
self.assertEqual(args, ('Unable to find params for message',))
self.assertEqual(kwargs,
{'extra': {'msg': {'message': unformatted_message}}})
data['sentry.interfaces.Message']['params'] = (1, 2)
self.assertEqual(message.to_string(data),
unformatted_message % (1, 2))
Update test to match current behavior
|
# -*- coding: utf-8 -*-
from mock import Mock
from unittest2 import TestCase
from raven.events import Message
class MessageTest(TestCase):
def test_to_string(self):
unformatted_message = 'My message from %s about %s'
client = Mock()
message = Message(client)
message.logger = Mock()
data = {
'sentry.interfaces.Message': {
'message': unformatted_message,
}
}
self.assertEqual(message.to_string(data), unformatted_message)
data['sentry.interfaces.Message']['params'] = (1, 2)
self.assertEqual(message.to_string(data),
unformatted_message % (1, 2))
|
<commit_before># -*- coding: utf-8 -*-
from mock import Mock
from unittest2 import TestCase
from raven.events import Message
class MessageTest(TestCase):
def test_to_string(self):
unformatted_message = 'My message from %s about %s'
client = Mock()
message = Message(client)
message.logger = Mock()
data = {
'sentry.interfaces.Message': {
'message': unformatted_message,
}
}
self.assertEqual(message.to_string(data), unformatted_message)
self.assertEqual(message.logger.warn.call_count, 1)
args, kwargs = message.logger.warn.call_args
self.assertEqual(args, ('Unable to find params for message',))
self.assertEqual(kwargs,
{'extra': {'msg': {'message': unformatted_message}}})
data['sentry.interfaces.Message']['params'] = (1, 2)
self.assertEqual(message.to_string(data),
unformatted_message % (1, 2))
<commit_msg>Update test to match current behavior<commit_after>
|
# -*- coding: utf-8 -*-
from mock import Mock
from unittest2 import TestCase
from raven.events import Message
class MessageTest(TestCase):
def test_to_string(self):
unformatted_message = 'My message from %s about %s'
client = Mock()
message = Message(client)
message.logger = Mock()
data = {
'sentry.interfaces.Message': {
'message': unformatted_message,
}
}
self.assertEqual(message.to_string(data), unformatted_message)
data['sentry.interfaces.Message']['params'] = (1, 2)
self.assertEqual(message.to_string(data),
unformatted_message % (1, 2))
|
# -*- coding: utf-8 -*-
from mock import Mock
from unittest2 import TestCase
from raven.events import Message
class MessageTest(TestCase):
def test_to_string(self):
unformatted_message = 'My message from %s about %s'
client = Mock()
message = Message(client)
message.logger = Mock()
data = {
'sentry.interfaces.Message': {
'message': unformatted_message,
}
}
self.assertEqual(message.to_string(data), unformatted_message)
self.assertEqual(message.logger.warn.call_count, 1)
args, kwargs = message.logger.warn.call_args
self.assertEqual(args, ('Unable to find params for message',))
self.assertEqual(kwargs,
{'extra': {'msg': {'message': unformatted_message}}})
data['sentry.interfaces.Message']['params'] = (1, 2)
self.assertEqual(message.to_string(data),
unformatted_message % (1, 2))
Update test to match current behavior# -*- coding: utf-8 -*-
from mock import Mock
from unittest2 import TestCase
from raven.events import Message
class MessageTest(TestCase):
def test_to_string(self):
unformatted_message = 'My message from %s about %s'
client = Mock()
message = Message(client)
message.logger = Mock()
data = {
'sentry.interfaces.Message': {
'message': unformatted_message,
}
}
self.assertEqual(message.to_string(data), unformatted_message)
data['sentry.interfaces.Message']['params'] = (1, 2)
self.assertEqual(message.to_string(data),
unformatted_message % (1, 2))
|
<commit_before># -*- coding: utf-8 -*-
from mock import Mock
from unittest2 import TestCase
from raven.events import Message
class MessageTest(TestCase):
def test_to_string(self):
unformatted_message = 'My message from %s about %s'
client = Mock()
message = Message(client)
message.logger = Mock()
data = {
'sentry.interfaces.Message': {
'message': unformatted_message,
}
}
self.assertEqual(message.to_string(data), unformatted_message)
self.assertEqual(message.logger.warn.call_count, 1)
args, kwargs = message.logger.warn.call_args
self.assertEqual(args, ('Unable to find params for message',))
self.assertEqual(kwargs,
{'extra': {'msg': {'message': unformatted_message}}})
data['sentry.interfaces.Message']['params'] = (1, 2)
self.assertEqual(message.to_string(data),
unformatted_message % (1, 2))
<commit_msg>Update test to match current behavior<commit_after># -*- coding: utf-8 -*-
from mock import Mock
from unittest2 import TestCase
from raven.events import Message
class MessageTest(TestCase):
def test_to_string(self):
unformatted_message = 'My message from %s about %s'
client = Mock()
message = Message(client)
message.logger = Mock()
data = {
'sentry.interfaces.Message': {
'message': unformatted_message,
}
}
self.assertEqual(message.to_string(data), unformatted_message)
data['sentry.interfaces.Message']['params'] = (1, 2)
self.assertEqual(message.to_string(data),
unformatted_message % (1, 2))
|
edf2388300b0c0b230cd1b1ec91268a13ee6e6ba
|
homepage/views.py
|
homepage/views.py
|
from django.views.generic import FormView, RedirectView
from django.contrib.auth.forms import AuthenticationForm
from django.core.urlresolvers import reverse
from django.contrib.auth import login, logout
class LoginView(FormView):
template_name = 'homepage/login.html'
form_class = AuthenticationForm
def form_valid(self, form):
user = form.get_user()
login(self.request, user)
return super().form_valid(form)
def get_success_url(self):
return reverse('tasks-project-detail', args=['none'])
class LogoutView(RedirectView):
permanent = False
pattern_name = 'homepage-login'
def get_redirect_url(self, *args, **kwargs):
self._logout_user()
return super().get_redirect_url(*args, **kwargs)
def _logout_user(self):
if self.request.user.is_authenticated():
logout(self.request)
|
from django.views.generic import FormView, RedirectView
from django.contrib.auth.forms import AuthenticationForm
from django.core.urlresolvers import reverse
from django.contrib.auth import login, logout
class LoginView(FormView):
template_name = 'homepage/login.html'
form_class = AuthenticationForm
def form_valid(self, form):
user = form.get_user()
login(self.request, user)
return super().form_valid(form)
def get_success_url(self):
return reverse('tasks-project-detail', args=['general'])
class LogoutView(RedirectView):
permanent = False
pattern_name = 'homepage-login'
def get_redirect_url(self, *args, **kwargs):
self._logout_user()
return super().get_redirect_url(*args, **kwargs)
def _logout_user(self):
if self.request.user.is_authenticated():
logout(self.request)
|
Fix wrong URL for success login
|
Fix wrong URL for success login
|
Python
|
mit
|
polarkac/TaskTracker,polarkac/TaskTracker
|
from django.views.generic import FormView, RedirectView
from django.contrib.auth.forms import AuthenticationForm
from django.core.urlresolvers import reverse
from django.contrib.auth import login, logout
class LoginView(FormView):
template_name = 'homepage/login.html'
form_class = AuthenticationForm
def form_valid(self, form):
user = form.get_user()
login(self.request, user)
return super().form_valid(form)
def get_success_url(self):
return reverse('tasks-project-detail', args=['none'])
class LogoutView(RedirectView):
permanent = False
pattern_name = 'homepage-login'
def get_redirect_url(self, *args, **kwargs):
self._logout_user()
return super().get_redirect_url(*args, **kwargs)
def _logout_user(self):
if self.request.user.is_authenticated():
logout(self.request)
Fix wrong URL for success login
|
from django.views.generic import FormView, RedirectView
from django.contrib.auth.forms import AuthenticationForm
from django.core.urlresolvers import reverse
from django.contrib.auth import login, logout
class LoginView(FormView):
template_name = 'homepage/login.html'
form_class = AuthenticationForm
def form_valid(self, form):
user = form.get_user()
login(self.request, user)
return super().form_valid(form)
def get_success_url(self):
return reverse('tasks-project-detail', args=['general'])
class LogoutView(RedirectView):
permanent = False
pattern_name = 'homepage-login'
def get_redirect_url(self, *args, **kwargs):
self._logout_user()
return super().get_redirect_url(*args, **kwargs)
def _logout_user(self):
if self.request.user.is_authenticated():
logout(self.request)
|
<commit_before>from django.views.generic import FormView, RedirectView
from django.contrib.auth.forms import AuthenticationForm
from django.core.urlresolvers import reverse
from django.contrib.auth import login, logout
class LoginView(FormView):
template_name = 'homepage/login.html'
form_class = AuthenticationForm
def form_valid(self, form):
user = form.get_user()
login(self.request, user)
return super().form_valid(form)
def get_success_url(self):
return reverse('tasks-project-detail', args=['none'])
class LogoutView(RedirectView):
permanent = False
pattern_name = 'homepage-login'
def get_redirect_url(self, *args, **kwargs):
self._logout_user()
return super().get_redirect_url(*args, **kwargs)
def _logout_user(self):
if self.request.user.is_authenticated():
logout(self.request)
<commit_msg>Fix wrong URL for success login<commit_after>
|
from django.views.generic import FormView, RedirectView
from django.contrib.auth.forms import AuthenticationForm
from django.core.urlresolvers import reverse
from django.contrib.auth import login, logout
class LoginView(FormView):
template_name = 'homepage/login.html'
form_class = AuthenticationForm
def form_valid(self, form):
user = form.get_user()
login(self.request, user)
return super().form_valid(form)
def get_success_url(self):
return reverse('tasks-project-detail', args=['general'])
class LogoutView(RedirectView):
permanent = False
pattern_name = 'homepage-login'
def get_redirect_url(self, *args, **kwargs):
self._logout_user()
return super().get_redirect_url(*args, **kwargs)
def _logout_user(self):
if self.request.user.is_authenticated():
logout(self.request)
|
from django.views.generic import FormView, RedirectView
from django.contrib.auth.forms import AuthenticationForm
from django.core.urlresolvers import reverse
from django.contrib.auth import login, logout
class LoginView(FormView):
template_name = 'homepage/login.html'
form_class = AuthenticationForm
def form_valid(self, form):
user = form.get_user()
login(self.request, user)
return super().form_valid(form)
def get_success_url(self):
return reverse('tasks-project-detail', args=['none'])
class LogoutView(RedirectView):
permanent = False
pattern_name = 'homepage-login'
def get_redirect_url(self, *args, **kwargs):
self._logout_user()
return super().get_redirect_url(*args, **kwargs)
def _logout_user(self):
if self.request.user.is_authenticated():
logout(self.request)
Fix wrong URL for success loginfrom django.views.generic import FormView, RedirectView
from django.contrib.auth.forms import AuthenticationForm
from django.core.urlresolvers import reverse
from django.contrib.auth import login, logout
class LoginView(FormView):
template_name = 'homepage/login.html'
form_class = AuthenticationForm
def form_valid(self, form):
user = form.get_user()
login(self.request, user)
return super().form_valid(form)
def get_success_url(self):
return reverse('tasks-project-detail', args=['general'])
class LogoutView(RedirectView):
permanent = False
pattern_name = 'homepage-login'
def get_redirect_url(self, *args, **kwargs):
self._logout_user()
return super().get_redirect_url(*args, **kwargs)
def _logout_user(self):
if self.request.user.is_authenticated():
logout(self.request)
|
<commit_before>from django.views.generic import FormView, RedirectView
from django.contrib.auth.forms import AuthenticationForm
from django.core.urlresolvers import reverse
from django.contrib.auth import login, logout
class LoginView(FormView):
template_name = 'homepage/login.html'
form_class = AuthenticationForm
def form_valid(self, form):
user = form.get_user()
login(self.request, user)
return super().form_valid(form)
def get_success_url(self):
return reverse('tasks-project-detail', args=['none'])
class LogoutView(RedirectView):
permanent = False
pattern_name = 'homepage-login'
def get_redirect_url(self, *args, **kwargs):
self._logout_user()
return super().get_redirect_url(*args, **kwargs)
def _logout_user(self):
if self.request.user.is_authenticated():
logout(self.request)
<commit_msg>Fix wrong URL for success login<commit_after>from django.views.generic import FormView, RedirectView
from django.contrib.auth.forms import AuthenticationForm
from django.core.urlresolvers import reverse
from django.contrib.auth import login, logout
class LoginView(FormView):
template_name = 'homepage/login.html'
form_class = AuthenticationForm
def form_valid(self, form):
user = form.get_user()
login(self.request, user)
return super().form_valid(form)
def get_success_url(self):
return reverse('tasks-project-detail', args=['general'])
class LogoutView(RedirectView):
permanent = False
pattern_name = 'homepage-login'
def get_redirect_url(self, *args, **kwargs):
self._logout_user()
return super().get_redirect_url(*args, **kwargs)
def _logout_user(self):
if self.request.user.is_authenticated():
logout(self.request)
|
e7f97bf1ddcf05bee3c3b6fc79c5cefb36af280a
|
lingcod/layers/urls.py
|
lingcod/layers/urls.py
|
from django.conf.urls.defaults import *
urlpatterns = patterns('lingcod.layers.views',
url(r'^public/',
'get_public_layers',
name='public-data-layers'),
url(r'^kml_file/(?P<session_key>\w+)/(?P<uid>[\w_]+).kml',
'get_kml_file',
name='kml-file'),
url(r'^privatekml/(?P<session_key>\w+)/$',
'get_privatekml_list',
name='layers-privatekml-list'),
url(r'^privatekml/(?P<session_key>\w+)/(?P<pk>\d+)/$',
'get_privatekml',
name='layers-privatekml'),
url(r'^privatekml/(?P<session_key>\w+)/(?P<pk>\d+)/(?P<path>[^\z]+)$',
'get_relative_to_privatekml',
name='layers-privatekml-relative'),
)
|
from django.conf.urls.defaults import *
import time
urlpatterns = patterns('lingcod.layers.views',
url(r'^public/$',
'get_public_layers',
name='public-data-layers'),
# Useful for debugging, avoids GE caching interference
url(r'^public/cachebuster/%s' % str(time.time()),
'get_public_layers',
name='public-data-layers-cachebuster'),
url(r'^kml_file/(?P<session_key>\w+)/(?P<uid>[\w_]+).kml',
'get_kml_file',
name='kml-file'),
url(r'^privatekml/(?P<session_key>\w+)/$',
'get_privatekml_list',
name='layers-privatekml-list'),
url(r'^privatekml/(?P<session_key>\w+)/(?P<pk>\d+)/$',
'get_privatekml',
name='layers-privatekml'),
url(r'^privatekml/(?P<session_key>\w+)/(?P<pk>\d+)/(?P<path>[^\z]+)$',
'get_relative_to_privatekml',
name='layers-privatekml-relative'),
)
|
Add another url pattern for debugging public layers
|
Add another url pattern for debugging public layers
|
Python
|
bsd-3-clause
|
Ecotrust/madrona_addons,Ecotrust/madrona_addons
|
from django.conf.urls.defaults import *
urlpatterns = patterns('lingcod.layers.views',
url(r'^public/',
'get_public_layers',
name='public-data-layers'),
url(r'^kml_file/(?P<session_key>\w+)/(?P<uid>[\w_]+).kml',
'get_kml_file',
name='kml-file'),
url(r'^privatekml/(?P<session_key>\w+)/$',
'get_privatekml_list',
name='layers-privatekml-list'),
url(r'^privatekml/(?P<session_key>\w+)/(?P<pk>\d+)/$',
'get_privatekml',
name='layers-privatekml'),
url(r'^privatekml/(?P<session_key>\w+)/(?P<pk>\d+)/(?P<path>[^\z]+)$',
'get_relative_to_privatekml',
name='layers-privatekml-relative'),
)
Add another url pattern for debugging public layers
|
from django.conf.urls.defaults import *
import time
urlpatterns = patterns('lingcod.layers.views',
url(r'^public/$',
'get_public_layers',
name='public-data-layers'),
# Useful for debugging, avoids GE caching interference
url(r'^public/cachebuster/%s' % str(time.time()),
'get_public_layers',
name='public-data-layers-cachebuster'),
url(r'^kml_file/(?P<session_key>\w+)/(?P<uid>[\w_]+).kml',
'get_kml_file',
name='kml-file'),
url(r'^privatekml/(?P<session_key>\w+)/$',
'get_privatekml_list',
name='layers-privatekml-list'),
url(r'^privatekml/(?P<session_key>\w+)/(?P<pk>\d+)/$',
'get_privatekml',
name='layers-privatekml'),
url(r'^privatekml/(?P<session_key>\w+)/(?P<pk>\d+)/(?P<path>[^\z]+)$',
'get_relative_to_privatekml',
name='layers-privatekml-relative'),
)
|
<commit_before>from django.conf.urls.defaults import *
urlpatterns = patterns('lingcod.layers.views',
url(r'^public/',
'get_public_layers',
name='public-data-layers'),
url(r'^kml_file/(?P<session_key>\w+)/(?P<uid>[\w_]+).kml',
'get_kml_file',
name='kml-file'),
url(r'^privatekml/(?P<session_key>\w+)/$',
'get_privatekml_list',
name='layers-privatekml-list'),
url(r'^privatekml/(?P<session_key>\w+)/(?P<pk>\d+)/$',
'get_privatekml',
name='layers-privatekml'),
url(r'^privatekml/(?P<session_key>\w+)/(?P<pk>\d+)/(?P<path>[^\z]+)$',
'get_relative_to_privatekml',
name='layers-privatekml-relative'),
)
<commit_msg>Add another url pattern for debugging public layers<commit_after>
|
from django.conf.urls.defaults import *
import time
urlpatterns = patterns('lingcod.layers.views',
url(r'^public/$',
'get_public_layers',
name='public-data-layers'),
# Useful for debugging, avoids GE caching interference
url(r'^public/cachebuster/%s' % str(time.time()),
'get_public_layers',
name='public-data-layers-cachebuster'),
url(r'^kml_file/(?P<session_key>\w+)/(?P<uid>[\w_]+).kml',
'get_kml_file',
name='kml-file'),
url(r'^privatekml/(?P<session_key>\w+)/$',
'get_privatekml_list',
name='layers-privatekml-list'),
url(r'^privatekml/(?P<session_key>\w+)/(?P<pk>\d+)/$',
'get_privatekml',
name='layers-privatekml'),
url(r'^privatekml/(?P<session_key>\w+)/(?P<pk>\d+)/(?P<path>[^\z]+)$',
'get_relative_to_privatekml',
name='layers-privatekml-relative'),
)
|
from django.conf.urls.defaults import *
urlpatterns = patterns('lingcod.layers.views',
url(r'^public/',
'get_public_layers',
name='public-data-layers'),
url(r'^kml_file/(?P<session_key>\w+)/(?P<uid>[\w_]+).kml',
'get_kml_file',
name='kml-file'),
url(r'^privatekml/(?P<session_key>\w+)/$',
'get_privatekml_list',
name='layers-privatekml-list'),
url(r'^privatekml/(?P<session_key>\w+)/(?P<pk>\d+)/$',
'get_privatekml',
name='layers-privatekml'),
url(r'^privatekml/(?P<session_key>\w+)/(?P<pk>\d+)/(?P<path>[^\z]+)$',
'get_relative_to_privatekml',
name='layers-privatekml-relative'),
)
Add another url pattern for debugging public layersfrom django.conf.urls.defaults import *
import time
urlpatterns = patterns('lingcod.layers.views',
url(r'^public/$',
'get_public_layers',
name='public-data-layers'),
# Useful for debugging, avoids GE caching interference
url(r'^public/cachebuster/%s' % str(time.time()),
'get_public_layers',
name='public-data-layers-cachebuster'),
url(r'^kml_file/(?P<session_key>\w+)/(?P<uid>[\w_]+).kml',
'get_kml_file',
name='kml-file'),
url(r'^privatekml/(?P<session_key>\w+)/$',
'get_privatekml_list',
name='layers-privatekml-list'),
url(r'^privatekml/(?P<session_key>\w+)/(?P<pk>\d+)/$',
'get_privatekml',
name='layers-privatekml'),
url(r'^privatekml/(?P<session_key>\w+)/(?P<pk>\d+)/(?P<path>[^\z]+)$',
'get_relative_to_privatekml',
name='layers-privatekml-relative'),
)
|
<commit_before>from django.conf.urls.defaults import *
urlpatterns = patterns('lingcod.layers.views',
url(r'^public/',
'get_public_layers',
name='public-data-layers'),
url(r'^kml_file/(?P<session_key>\w+)/(?P<uid>[\w_]+).kml',
'get_kml_file',
name='kml-file'),
url(r'^privatekml/(?P<session_key>\w+)/$',
'get_privatekml_list',
name='layers-privatekml-list'),
url(r'^privatekml/(?P<session_key>\w+)/(?P<pk>\d+)/$',
'get_privatekml',
name='layers-privatekml'),
url(r'^privatekml/(?P<session_key>\w+)/(?P<pk>\d+)/(?P<path>[^\z]+)$',
'get_relative_to_privatekml',
name='layers-privatekml-relative'),
)
<commit_msg>Add another url pattern for debugging public layers<commit_after>from django.conf.urls.defaults import *
import time
urlpatterns = patterns('lingcod.layers.views',
url(r'^public/$',
'get_public_layers',
name='public-data-layers'),
# Useful for debugging, avoids GE caching interference
url(r'^public/cachebuster/%s' % str(time.time()),
'get_public_layers',
name='public-data-layers-cachebuster'),
url(r'^kml_file/(?P<session_key>\w+)/(?P<uid>[\w_]+).kml',
'get_kml_file',
name='kml-file'),
url(r'^privatekml/(?P<session_key>\w+)/$',
'get_privatekml_list',
name='layers-privatekml-list'),
url(r'^privatekml/(?P<session_key>\w+)/(?P<pk>\d+)/$',
'get_privatekml',
name='layers-privatekml'),
url(r'^privatekml/(?P<session_key>\w+)/(?P<pk>\d+)/(?P<path>[^\z]+)$',
'get_relative_to_privatekml',
name='layers-privatekml-relative'),
)
|
f4f5a6ffa1fd60437b83bfc435a180ddf2433ea4
|
tests/test_confirmation.py
|
tests/test_confirmation.py
|
import pytest
import linkatos.confirmation as confirmation
def test_no_confirmation_with_url():
expecting_confirmation = False
parsed_m = {'out': 'http://ex.org', 'channel': 'ch', 'type': 'url'}
assert confirmation.update_confirmation_if_url(parsed_m,
expecting_confirmation) is True
# to do test process_confirmation_if_yn(parsed_message, expecting_confirmation)
def test_confirmation_with_url():
expecting_confirmation = True
parsed_m = {'out': 'http://ex.org', 'channel': 'ch', 'type': 'url'}
assert confirmation.update_confirmation_if_url(parsed_m,
expecting_confirmation) is True
def test_confirmation_without_url():
expecting_confirmation = True
parsed_m = {'out': None, 'channel': 'ch', 'type': None}
assert confirmation.update_confirmation_if_url(parsed_m,
expecting_confirmation) is True
def test_no_confirmation_without_url():
expecting_confirmation = False
parsed_m = {'out': None, 'channel': 'ch', 'type': None}
assert confirmation.update_confirmation_if_url(parsed_m,
expecting_confirmation) is False
|
import pytest
import linkatos.confirmation as confirmation
def test_no_confirmation_with_url():
expecting_confirmation = False
parsed_m = {'out': 'http://ex.org', 'channel': 'ch', 'type': 'url'}
assert confirmation.update_if_url(parsed_m,
expecting_confirmation) is True
# to do test process_confirmation_if_yn(parsed_message, expecting_confirmation)
def test_confirmation_with_url():
expecting_confirmation = True
parsed_m = {'out': 'http://ex.org', 'channel': 'ch', 'type': 'url'}
assert confirmation.update_if_url(parsed_m,
expecting_confirmation) is True
def test_confirmation_without_url():
expecting_confirmation = True
parsed_m = {'out': None, 'channel': 'ch', 'type': None}
assert confirmation.update_if_url(parsed_m,
expecting_confirmation) is True
def test_no_confirmation_without_url():
expecting_confirmation = False
parsed_m = {'out': None, 'channel': 'ch', 'type': None}
assert confirmation.update_if_url(parsed_m,
expecting_confirmation) is False
|
Fix change of function names
|
test: Fix change of function names
|
Python
|
mit
|
iwi/linkatos,iwi/linkatos
|
import pytest
import linkatos.confirmation as confirmation
def test_no_confirmation_with_url():
expecting_confirmation = False
parsed_m = {'out': 'http://ex.org', 'channel': 'ch', 'type': 'url'}
assert confirmation.update_confirmation_if_url(parsed_m,
expecting_confirmation) is True
# to do test process_confirmation_if_yn(parsed_message, expecting_confirmation)
def test_confirmation_with_url():
expecting_confirmation = True
parsed_m = {'out': 'http://ex.org', 'channel': 'ch', 'type': 'url'}
assert confirmation.update_confirmation_if_url(parsed_m,
expecting_confirmation) is True
def test_confirmation_without_url():
expecting_confirmation = True
parsed_m = {'out': None, 'channel': 'ch', 'type': None}
assert confirmation.update_confirmation_if_url(parsed_m,
expecting_confirmation) is True
def test_no_confirmation_without_url():
expecting_confirmation = False
parsed_m = {'out': None, 'channel': 'ch', 'type': None}
assert confirmation.update_confirmation_if_url(parsed_m,
expecting_confirmation) is False
test: Fix change of function names
|
import pytest
import linkatos.confirmation as confirmation
def test_no_confirmation_with_url():
expecting_confirmation = False
parsed_m = {'out': 'http://ex.org', 'channel': 'ch', 'type': 'url'}
assert confirmation.update_if_url(parsed_m,
expecting_confirmation) is True
# to do test process_confirmation_if_yn(parsed_message, expecting_confirmation)
def test_confirmation_with_url():
expecting_confirmation = True
parsed_m = {'out': 'http://ex.org', 'channel': 'ch', 'type': 'url'}
assert confirmation.update_if_url(parsed_m,
expecting_confirmation) is True
def test_confirmation_without_url():
expecting_confirmation = True
parsed_m = {'out': None, 'channel': 'ch', 'type': None}
assert confirmation.update_if_url(parsed_m,
expecting_confirmation) is True
def test_no_confirmation_without_url():
expecting_confirmation = False
parsed_m = {'out': None, 'channel': 'ch', 'type': None}
assert confirmation.update_if_url(parsed_m,
expecting_confirmation) is False
|
<commit_before>import pytest
import linkatos.confirmation as confirmation
def test_no_confirmation_with_url():
expecting_confirmation = False
parsed_m = {'out': 'http://ex.org', 'channel': 'ch', 'type': 'url'}
assert confirmation.update_confirmation_if_url(parsed_m,
expecting_confirmation) is True
# to do test process_confirmation_if_yn(parsed_message, expecting_confirmation)
def test_confirmation_with_url():
expecting_confirmation = True
parsed_m = {'out': 'http://ex.org', 'channel': 'ch', 'type': 'url'}
assert confirmation.update_confirmation_if_url(parsed_m,
expecting_confirmation) is True
def test_confirmation_without_url():
expecting_confirmation = True
parsed_m = {'out': None, 'channel': 'ch', 'type': None}
assert confirmation.update_confirmation_if_url(parsed_m,
expecting_confirmation) is True
def test_no_confirmation_without_url():
expecting_confirmation = False
parsed_m = {'out': None, 'channel': 'ch', 'type': None}
assert confirmation.update_confirmation_if_url(parsed_m,
expecting_confirmation) is False
<commit_msg>test: Fix change of function names<commit_after>
|
import pytest
import linkatos.confirmation as confirmation
def test_no_confirmation_with_url():
expecting_confirmation = False
parsed_m = {'out': 'http://ex.org', 'channel': 'ch', 'type': 'url'}
assert confirmation.update_if_url(parsed_m,
expecting_confirmation) is True
# to do test process_confirmation_if_yn(parsed_message, expecting_confirmation)
def test_confirmation_with_url():
expecting_confirmation = True
parsed_m = {'out': 'http://ex.org', 'channel': 'ch', 'type': 'url'}
assert confirmation.update_if_url(parsed_m,
expecting_confirmation) is True
def test_confirmation_without_url():
expecting_confirmation = True
parsed_m = {'out': None, 'channel': 'ch', 'type': None}
assert confirmation.update_if_url(parsed_m,
expecting_confirmation) is True
def test_no_confirmation_without_url():
expecting_confirmation = False
parsed_m = {'out': None, 'channel': 'ch', 'type': None}
assert confirmation.update_if_url(parsed_m,
expecting_confirmation) is False
|
import pytest
import linkatos.confirmation as confirmation
def test_no_confirmation_with_url():
expecting_confirmation = False
parsed_m = {'out': 'http://ex.org', 'channel': 'ch', 'type': 'url'}
assert confirmation.update_confirmation_if_url(parsed_m,
expecting_confirmation) is True
# to do test process_confirmation_if_yn(parsed_message, expecting_confirmation)
def test_confirmation_with_url():
expecting_confirmation = True
parsed_m = {'out': 'http://ex.org', 'channel': 'ch', 'type': 'url'}
assert confirmation.update_confirmation_if_url(parsed_m,
expecting_confirmation) is True
def test_confirmation_without_url():
expecting_confirmation = True
parsed_m = {'out': None, 'channel': 'ch', 'type': None}
assert confirmation.update_confirmation_if_url(parsed_m,
expecting_confirmation) is True
def test_no_confirmation_without_url():
expecting_confirmation = False
parsed_m = {'out': None, 'channel': 'ch', 'type': None}
assert confirmation.update_confirmation_if_url(parsed_m,
expecting_confirmation) is False
test: Fix change of function namesimport pytest
import linkatos.confirmation as confirmation
def test_no_confirmation_with_url():
expecting_confirmation = False
parsed_m = {'out': 'http://ex.org', 'channel': 'ch', 'type': 'url'}
assert confirmation.update_if_url(parsed_m,
expecting_confirmation) is True
# to do test process_confirmation_if_yn(parsed_message, expecting_confirmation)
def test_confirmation_with_url():
expecting_confirmation = True
parsed_m = {'out': 'http://ex.org', 'channel': 'ch', 'type': 'url'}
assert confirmation.update_if_url(parsed_m,
expecting_confirmation) is True
def test_confirmation_without_url():
expecting_confirmation = True
parsed_m = {'out': None, 'channel': 'ch', 'type': None}
assert confirmation.update_if_url(parsed_m,
expecting_confirmation) is True
def test_no_confirmation_without_url():
expecting_confirmation = False
parsed_m = {'out': None, 'channel': 'ch', 'type': None}
assert confirmation.update_if_url(parsed_m,
expecting_confirmation) is False
|
<commit_before>import pytest
import linkatos.confirmation as confirmation
def test_no_confirmation_with_url():
expecting_confirmation = False
parsed_m = {'out': 'http://ex.org', 'channel': 'ch', 'type': 'url'}
assert confirmation.update_confirmation_if_url(parsed_m,
expecting_confirmation) is True
# to do test process_confirmation_if_yn(parsed_message, expecting_confirmation)
def test_confirmation_with_url():
expecting_confirmation = True
parsed_m = {'out': 'http://ex.org', 'channel': 'ch', 'type': 'url'}
assert confirmation.update_confirmation_if_url(parsed_m,
expecting_confirmation) is True
def test_confirmation_without_url():
expecting_confirmation = True
parsed_m = {'out': None, 'channel': 'ch', 'type': None}
assert confirmation.update_confirmation_if_url(parsed_m,
expecting_confirmation) is True
def test_no_confirmation_without_url():
expecting_confirmation = False
parsed_m = {'out': None, 'channel': 'ch', 'type': None}
assert confirmation.update_confirmation_if_url(parsed_m,
expecting_confirmation) is False
<commit_msg>test: Fix change of function names<commit_after>import pytest
import linkatos.confirmation as confirmation
def test_no_confirmation_with_url():
expecting_confirmation = False
parsed_m = {'out': 'http://ex.org', 'channel': 'ch', 'type': 'url'}
assert confirmation.update_if_url(parsed_m,
expecting_confirmation) is True
# to do test process_confirmation_if_yn(parsed_message, expecting_confirmation)
def test_confirmation_with_url():
expecting_confirmation = True
parsed_m = {'out': 'http://ex.org', 'channel': 'ch', 'type': 'url'}
assert confirmation.update_if_url(parsed_m,
expecting_confirmation) is True
def test_confirmation_without_url():
expecting_confirmation = True
parsed_m = {'out': None, 'channel': 'ch', 'type': None}
assert confirmation.update_if_url(parsed_m,
expecting_confirmation) is True
def test_no_confirmation_without_url():
expecting_confirmation = False
parsed_m = {'out': None, 'channel': 'ch', 'type': None}
assert confirmation.update_if_url(parsed_m,
expecting_confirmation) is False
|
a2c55857cc9d910978c6c1ae963e0669176e061f
|
timezone_field/__init__.py
|
timezone_field/__init__.py
|
__version__ = '1.0'
__all__ = ['TimeZoneField', 'TimeZoneFormField']
from timezone_field.fields import TimeZoneField
from timezone_field.forms import TimeZoneFormField
|
__version__ = '1.1'
__all__ = ['TimeZoneField', 'TimeZoneFormField']
from timezone_field.fields import TimeZoneField
from timezone_field.forms import TimeZoneFormField
|
Bump version number to 1.1
|
Bump version number to 1.1
|
Python
|
bsd-2-clause
|
mfogel/django-timezone-field
|
__version__ = '1.0'
__all__ = ['TimeZoneField', 'TimeZoneFormField']
from timezone_field.fields import TimeZoneField
from timezone_field.forms import TimeZoneFormField
Bump version number to 1.1
|
__version__ = '1.1'
__all__ = ['TimeZoneField', 'TimeZoneFormField']
from timezone_field.fields import TimeZoneField
from timezone_field.forms import TimeZoneFormField
|
<commit_before>__version__ = '1.0'
__all__ = ['TimeZoneField', 'TimeZoneFormField']
from timezone_field.fields import TimeZoneField
from timezone_field.forms import TimeZoneFormField
<commit_msg>Bump version number to 1.1<commit_after>
|
__version__ = '1.1'
__all__ = ['TimeZoneField', 'TimeZoneFormField']
from timezone_field.fields import TimeZoneField
from timezone_field.forms import TimeZoneFormField
|
__version__ = '1.0'
__all__ = ['TimeZoneField', 'TimeZoneFormField']
from timezone_field.fields import TimeZoneField
from timezone_field.forms import TimeZoneFormField
Bump version number to 1.1__version__ = '1.1'
__all__ = ['TimeZoneField', 'TimeZoneFormField']
from timezone_field.fields import TimeZoneField
from timezone_field.forms import TimeZoneFormField
|
<commit_before>__version__ = '1.0'
__all__ = ['TimeZoneField', 'TimeZoneFormField']
from timezone_field.fields import TimeZoneField
from timezone_field.forms import TimeZoneFormField
<commit_msg>Bump version number to 1.1<commit_after>__version__ = '1.1'
__all__ = ['TimeZoneField', 'TimeZoneFormField']
from timezone_field.fields import TimeZoneField
from timezone_field.forms import TimeZoneFormField
|
96bb2ba0dc6e58195b598e03d177114becfeba7a
|
nxpy/util.py
|
nxpy/util.py
|
import re
from lxml import etree
def new_ele(tag, attrs={}, **extra):
etree.Element(tag, attrs, **extra)
def sub_ele(parent, tag, attrs={}, **extra):
etree.SubElement(parent, tag, attrs, **extra)
# Globals
tag_pattern = re.compile(r'({.*})?(.*)')
whitespace_pattern = re.compile(r'[\n\r\s]+')
|
import re
# Globals
tag_pattern = re.compile(r'({.*})?(.*)')
whitespace_pattern = re.compile(r'[\n\r\s]+')
|
Remove new_ele() and sub_ele() functions
|
Remove new_ele() and sub_ele() functions
|
Python
|
apache-2.0
|
Kent1/nxpy
|
import re
from lxml import etree
def new_ele(tag, attrs={}, **extra):
etree.Element(tag, attrs, **extra)
def sub_ele(parent, tag, attrs={}, **extra):
etree.SubElement(parent, tag, attrs, **extra)
# Globals
tag_pattern = re.compile(r'({.*})?(.*)')
whitespace_pattern = re.compile(r'[\n\r\s]+')
Remove new_ele() and sub_ele() functions
|
import re
# Globals
tag_pattern = re.compile(r'({.*})?(.*)')
whitespace_pattern = re.compile(r'[\n\r\s]+')
|
<commit_before>import re
from lxml import etree
def new_ele(tag, attrs={}, **extra):
etree.Element(tag, attrs, **extra)
def sub_ele(parent, tag, attrs={}, **extra):
etree.SubElement(parent, tag, attrs, **extra)
# Globals
tag_pattern = re.compile(r'({.*})?(.*)')
whitespace_pattern = re.compile(r'[\n\r\s]+')
<commit_msg>Remove new_ele() and sub_ele() functions<commit_after>
|
import re
# Globals
tag_pattern = re.compile(r'({.*})?(.*)')
whitespace_pattern = re.compile(r'[\n\r\s]+')
|
import re
from lxml import etree
def new_ele(tag, attrs={}, **extra):
etree.Element(tag, attrs, **extra)
def sub_ele(parent, tag, attrs={}, **extra):
etree.SubElement(parent, tag, attrs, **extra)
# Globals
tag_pattern = re.compile(r'({.*})?(.*)')
whitespace_pattern = re.compile(r'[\n\r\s]+')
Remove new_ele() and sub_ele() functionsimport re
# Globals
tag_pattern = re.compile(r'({.*})?(.*)')
whitespace_pattern = re.compile(r'[\n\r\s]+')
|
<commit_before>import re
from lxml import etree
def new_ele(tag, attrs={}, **extra):
etree.Element(tag, attrs, **extra)
def sub_ele(parent, tag, attrs={}, **extra):
etree.SubElement(parent, tag, attrs, **extra)
# Globals
tag_pattern = re.compile(r'({.*})?(.*)')
whitespace_pattern = re.compile(r'[\n\r\s]+')
<commit_msg>Remove new_ele() and sub_ele() functions<commit_after>import re
# Globals
tag_pattern = re.compile(r'({.*})?(.*)')
whitespace_pattern = re.compile(r'[\n\r\s]+')
|
4e29782102f121cccfbfbcaccc28fe9ccc99e495
|
trackon/update-trackers.py
|
trackon/update-trackers.py
|
from cgi import FieldStorage
from logging import debug, error, info
from time import time
from trackon import tracker
MAX_MIN_INTERVAL = 60*60*5
DEFAULT_CHECK_INTERVAL = 60*15
def main():
args = FieldStorage()
now = int(time())
if 'tracker-address' in args:
t = args['tracker-address'].value
r = tracker.check(t)
nxt = DEFAULT_CHECK_INTERVAL
if 'response' in r and 'min interval' in r['response']:
nxt = r['response']['min interval']
if nxt > MAX_MIN_INTERVAL:
nxt = MAX_MIN_INTERVAL
r['next-check'] = now+nxt
tracker.update(t, r)
if 'error' in r:
info("Update failed for %s: %s" % (t, r['error']))
else:
ti = tracker.allinfo() or {}
for t in ti:
if 'next-check' not in ti[t] or ti[t]['next-check'] < now:
# Gross hack: 1% of the time we try over https
if ti[t].get('ssl', True) or (now%100 == 0):
t = t.replace('http://', 'https://')
tracker.schedule_update(t)
if __name__ == '__main__':
main()
|
from cgi import FieldStorage
from logging import debug, error, info
from time import time
from trackon import tracker
MAX_MIN_INTERVAL = 60*60*5
DEFAULT_CHECK_INTERVAL = 60*15
def main():
args = FieldStorage()
now = int(time())
if 'tracker-address' in args:
t = args['tracker-address'].value
r = tracker.check(t)
nxt = DEFAULT_CHECK_INTERVAL
if 'response' in r and 'min interval' in r['response']:
nxt = r['response']['min interval']
if nxt > MAX_MIN_INTERVAL:
nxt = MAX_MIN_INTERVAL
r['next-check'] = now+nxt
tracker.update(t, r)
if 'error' in r:
info("Update failed for %s: %s" % (t, r['error']))
else:
ti = tracker.allinfo() or {}
for t in ti:
if 'next-check' not in ti[t] or ti[t]['next-check'] < now:
# Gross hack: 0.2% of the time we try over https
if ti[t].get('ssl', True) or (now%500 == 0):
t = t.replace('http://', 'https://')
tracker.schedule_update(t)
if __name__ == '__main__':
main()
|
Reduce frequency of 'random' ssl tests.
|
Reduce frequency of 'random' ssl tests.
|
Python
|
mit
|
CorralPeltzer/newTrackon
|
from cgi import FieldStorage
from logging import debug, error, info
from time import time
from trackon import tracker
MAX_MIN_INTERVAL = 60*60*5
DEFAULT_CHECK_INTERVAL = 60*15
def main():
args = FieldStorage()
now = int(time())
if 'tracker-address' in args:
t = args['tracker-address'].value
r = tracker.check(t)
nxt = DEFAULT_CHECK_INTERVAL
if 'response' in r and 'min interval' in r['response']:
nxt = r['response']['min interval']
if nxt > MAX_MIN_INTERVAL:
nxt = MAX_MIN_INTERVAL
r['next-check'] = now+nxt
tracker.update(t, r)
if 'error' in r:
info("Update failed for %s: %s" % (t, r['error']))
else:
ti = tracker.allinfo() or {}
for t in ti:
if 'next-check' not in ti[t] or ti[t]['next-check'] < now:
# Gross hack: 1% of the time we try over https
if ti[t].get('ssl', True) or (now%100 == 0):
t = t.replace('http://', 'https://')
tracker.schedule_update(t)
if __name__ == '__main__':
main()
Reduce frequency of 'random' ssl tests.
|
from cgi import FieldStorage
from logging import debug, error, info
from time import time
from trackon import tracker
MAX_MIN_INTERVAL = 60*60*5
DEFAULT_CHECK_INTERVAL = 60*15
def main():
args = FieldStorage()
now = int(time())
if 'tracker-address' in args:
t = args['tracker-address'].value
r = tracker.check(t)
nxt = DEFAULT_CHECK_INTERVAL
if 'response' in r and 'min interval' in r['response']:
nxt = r['response']['min interval']
if nxt > MAX_MIN_INTERVAL:
nxt = MAX_MIN_INTERVAL
r['next-check'] = now+nxt
tracker.update(t, r)
if 'error' in r:
info("Update failed for %s: %s" % (t, r['error']))
else:
ti = tracker.allinfo() or {}
for t in ti:
if 'next-check' not in ti[t] or ti[t]['next-check'] < now:
# Gross hack: 0.2% of the time we try over https
if ti[t].get('ssl', True) or (now%500 == 0):
t = t.replace('http://', 'https://')
tracker.schedule_update(t)
if __name__ == '__main__':
main()
|
<commit_before>from cgi import FieldStorage
from logging import debug, error, info
from time import time
from trackon import tracker
MAX_MIN_INTERVAL = 60*60*5
DEFAULT_CHECK_INTERVAL = 60*15
def main():
args = FieldStorage()
now = int(time())
if 'tracker-address' in args:
t = args['tracker-address'].value
r = tracker.check(t)
nxt = DEFAULT_CHECK_INTERVAL
if 'response' in r and 'min interval' in r['response']:
nxt = r['response']['min interval']
if nxt > MAX_MIN_INTERVAL:
nxt = MAX_MIN_INTERVAL
r['next-check'] = now+nxt
tracker.update(t, r)
if 'error' in r:
info("Update failed for %s: %s" % (t, r['error']))
else:
ti = tracker.allinfo() or {}
for t in ti:
if 'next-check' not in ti[t] or ti[t]['next-check'] < now:
# Gross hack: 1% of the time we try over https
if ti[t].get('ssl', True) or (now%100 == 0):
t = t.replace('http://', 'https://')
tracker.schedule_update(t)
if __name__ == '__main__':
main()
<commit_msg>Reduce frequency of 'random' ssl tests.<commit_after>
|
from cgi import FieldStorage
from logging import debug, error, info
from time import time
from trackon import tracker
MAX_MIN_INTERVAL = 60*60*5
DEFAULT_CHECK_INTERVAL = 60*15
def main():
args = FieldStorage()
now = int(time())
if 'tracker-address' in args:
t = args['tracker-address'].value
r = tracker.check(t)
nxt = DEFAULT_CHECK_INTERVAL
if 'response' in r and 'min interval' in r['response']:
nxt = r['response']['min interval']
if nxt > MAX_MIN_INTERVAL:
nxt = MAX_MIN_INTERVAL
r['next-check'] = now+nxt
tracker.update(t, r)
if 'error' in r:
info("Update failed for %s: %s" % (t, r['error']))
else:
ti = tracker.allinfo() or {}
for t in ti:
if 'next-check' not in ti[t] or ti[t]['next-check'] < now:
# Gross hack: 0.2% of the time we try over https
if ti[t].get('ssl', True) or (now%500 == 0):
t = t.replace('http://', 'https://')
tracker.schedule_update(t)
if __name__ == '__main__':
main()
|
from cgi import FieldStorage
from logging import debug, error, info
from time import time
from trackon import tracker
MAX_MIN_INTERVAL = 60*60*5
DEFAULT_CHECK_INTERVAL = 60*15
def main():
args = FieldStorage()
now = int(time())
if 'tracker-address' in args:
t = args['tracker-address'].value
r = tracker.check(t)
nxt = DEFAULT_CHECK_INTERVAL
if 'response' in r and 'min interval' in r['response']:
nxt = r['response']['min interval']
if nxt > MAX_MIN_INTERVAL:
nxt = MAX_MIN_INTERVAL
r['next-check'] = now+nxt
tracker.update(t, r)
if 'error' in r:
info("Update failed for %s: %s" % (t, r['error']))
else:
ti = tracker.allinfo() or {}
for t in ti:
if 'next-check' not in ti[t] or ti[t]['next-check'] < now:
# Gross hack: 1% of the time we try over https
if ti[t].get('ssl', True) or (now%100 == 0):
t = t.replace('http://', 'https://')
tracker.schedule_update(t)
if __name__ == '__main__':
main()
Reduce frequency of 'random' ssl tests.from cgi import FieldStorage
from logging import debug, error, info
from time import time
from trackon import tracker
MAX_MIN_INTERVAL = 60*60*5
DEFAULT_CHECK_INTERVAL = 60*15
def main():
args = FieldStorage()
now = int(time())
if 'tracker-address' in args:
t = args['tracker-address'].value
r = tracker.check(t)
nxt = DEFAULT_CHECK_INTERVAL
if 'response' in r and 'min interval' in r['response']:
nxt = r['response']['min interval']
if nxt > MAX_MIN_INTERVAL:
nxt = MAX_MIN_INTERVAL
r['next-check'] = now+nxt
tracker.update(t, r)
if 'error' in r:
info("Update failed for %s: %s" % (t, r['error']))
else:
ti = tracker.allinfo() or {}
for t in ti:
if 'next-check' not in ti[t] or ti[t]['next-check'] < now:
# Gross hack: 0.2% of the time we try over https
if ti[t].get('ssl', True) or (now%500 == 0):
t = t.replace('http://', 'https://')
tracker.schedule_update(t)
if __name__ == '__main__':
main()
|
<commit_before>from cgi import FieldStorage
from logging import debug, error, info
from time import time
from trackon import tracker
MAX_MIN_INTERVAL = 60*60*5
DEFAULT_CHECK_INTERVAL = 60*15
def main():
args = FieldStorage()
now = int(time())
if 'tracker-address' in args:
t = args['tracker-address'].value
r = tracker.check(t)
nxt = DEFAULT_CHECK_INTERVAL
if 'response' in r and 'min interval' in r['response']:
nxt = r['response']['min interval']
if nxt > MAX_MIN_INTERVAL:
nxt = MAX_MIN_INTERVAL
r['next-check'] = now+nxt
tracker.update(t, r)
if 'error' in r:
info("Update failed for %s: %s" % (t, r['error']))
else:
ti = tracker.allinfo() or {}
for t in ti:
if 'next-check' not in ti[t] or ti[t]['next-check'] < now:
# Gross hack: 1% of the time we try over https
if ti[t].get('ssl', True) or (now%100 == 0):
t = t.replace('http://', 'https://')
tracker.schedule_update(t)
if __name__ == '__main__':
main()
<commit_msg>Reduce frequency of 'random' ssl tests.<commit_after>from cgi import FieldStorage
from logging import debug, error, info
from time import time
from trackon import tracker
MAX_MIN_INTERVAL = 60*60*5
DEFAULT_CHECK_INTERVAL = 60*15
def main():
args = FieldStorage()
now = int(time())
if 'tracker-address' in args:
t = args['tracker-address'].value
r = tracker.check(t)
nxt = DEFAULT_CHECK_INTERVAL
if 'response' in r and 'min interval' in r['response']:
nxt = r['response']['min interval']
if nxt > MAX_MIN_INTERVAL:
nxt = MAX_MIN_INTERVAL
r['next-check'] = now+nxt
tracker.update(t, r)
if 'error' in r:
info("Update failed for %s: %s" % (t, r['error']))
else:
ti = tracker.allinfo() or {}
for t in ti:
if 'next-check' not in ti[t] or ti[t]['next-check'] < now:
# Gross hack: 0.2% of the time we try over https
if ti[t].get('ssl', True) or (now%500 == 0):
t = t.replace('http://', 'https://')
tracker.schedule_update(t)
if __name__ == '__main__':
main()
|
9130a94153b7d9f70883da737fb60d41db73e09a
|
try_telethon.py
|
try_telethon.py
|
#!/usr/bin/env python3
import traceback
from telethon_examples.interactive_telegram_client \
import InteractiveTelegramClient
def load_settings(path='api/settings'):
"""Loads the user settings located under `api/`"""
result = {}
with open(path, 'r', encoding='utf-8') as file:
for line in file:
value_pair = line.split('=')
left = value_pair[0].strip()
right = value_pair[1].strip()
if right.isnumeric():
result[left] = int(right)
else:
result[left] = right
return result
if __name__ == '__main__':
# Load the settings and initialize the client
settings = load_settings()
client = InteractiveTelegramClient(
session_user_id=str(settings.get('session_name', 'anonymous')),
user_phone=str(settings['user_phone']),
api_id=settings['api_id'],
api_hash=str(settings['api_hash']))
print('Initialization done!')
try:
client.run()
except Exception as e:
print('Unexpected error ({}): {} at\n{}'.format(
type(e), e, traceback.format_exc()))
finally:
client.disconnect()
print('Thanks for trying the interactive example! Exiting...')
|
#!/usr/bin/env python3
import traceback
from telethon_examples.interactive_telegram_client \
import InteractiveTelegramClient
def load_settings(path='api/settings'):
"""Loads the user settings located under `api/`"""
result = {}
with open(path, 'r', encoding='utf-8') as file:
for line in file:
value_pair = line.split('=')
left = value_pair[0].strip()
right = value_pair[1].strip()
if right.isnumeric():
result[left] = int(right)
else:
result[left] = right
return result
if __name__ == '__main__':
# Load the settings and initialize the client
settings = load_settings()
kwargs = {}
if settings.get('socks_proxy'):
import socks # $ pip install pysocks
host, port = settings['socks_proxy'].split(':')
kwargs = dict(proxy=(socks.SOCKS5, host, int(port)))
client = InteractiveTelegramClient(
session_user_id=str(settings.get('session_name', 'anonymous')),
user_phone=str(settings['user_phone']),
api_id=settings['api_id'],
api_hash=str(settings['api_hash']),
**kwargs)
print('Initialization done!')
try:
client.run()
except Exception as e:
print('Unexpected error ({}): {} at\n{}'.format(
type(e), e, traceback.format_exc()))
finally:
client.disconnect()
print('Thanks for trying the interactive example! Exiting...')
|
Support configuring SOCKS proxy in the example
|
Support configuring SOCKS proxy in the example
|
Python
|
mit
|
LonamiWebs/Telethon,LonamiWebs/Telethon,kyasabu/Telethon,andr-04/Telethon,LonamiWebs/Telethon,expectocode/Telethon,LonamiWebs/Telethon
|
#!/usr/bin/env python3
import traceback
from telethon_examples.interactive_telegram_client \
import InteractiveTelegramClient
def load_settings(path='api/settings'):
"""Loads the user settings located under `api/`"""
result = {}
with open(path, 'r', encoding='utf-8') as file:
for line in file:
value_pair = line.split('=')
left = value_pair[0].strip()
right = value_pair[1].strip()
if right.isnumeric():
result[left] = int(right)
else:
result[left] = right
return result
if __name__ == '__main__':
# Load the settings and initialize the client
settings = load_settings()
client = InteractiveTelegramClient(
session_user_id=str(settings.get('session_name', 'anonymous')),
user_phone=str(settings['user_phone']),
api_id=settings['api_id'],
api_hash=str(settings['api_hash']))
print('Initialization done!')
try:
client.run()
except Exception as e:
print('Unexpected error ({}): {} at\n{}'.format(
type(e), e, traceback.format_exc()))
finally:
client.disconnect()
print('Thanks for trying the interactive example! Exiting...')
Support configuring SOCKS proxy in the example
|
#!/usr/bin/env python3
import traceback
from telethon_examples.interactive_telegram_client \
import InteractiveTelegramClient
def load_settings(path='api/settings'):
"""Loads the user settings located under `api/`"""
result = {}
with open(path, 'r', encoding='utf-8') as file:
for line in file:
value_pair = line.split('=')
left = value_pair[0].strip()
right = value_pair[1].strip()
if right.isnumeric():
result[left] = int(right)
else:
result[left] = right
return result
if __name__ == '__main__':
# Load the settings and initialize the client
settings = load_settings()
kwargs = {}
if settings.get('socks_proxy'):
import socks # $ pip install pysocks
host, port = settings['socks_proxy'].split(':')
kwargs = dict(proxy=(socks.SOCKS5, host, int(port)))
client = InteractiveTelegramClient(
session_user_id=str(settings.get('session_name', 'anonymous')),
user_phone=str(settings['user_phone']),
api_id=settings['api_id'],
api_hash=str(settings['api_hash']),
**kwargs)
print('Initialization done!')
try:
client.run()
except Exception as e:
print('Unexpected error ({}): {} at\n{}'.format(
type(e), e, traceback.format_exc()))
finally:
client.disconnect()
print('Thanks for trying the interactive example! Exiting...')
|
<commit_before>#!/usr/bin/env python3
import traceback
from telethon_examples.interactive_telegram_client \
import InteractiveTelegramClient
def load_settings(path='api/settings'):
"""Loads the user settings located under `api/`"""
result = {}
with open(path, 'r', encoding='utf-8') as file:
for line in file:
value_pair = line.split('=')
left = value_pair[0].strip()
right = value_pair[1].strip()
if right.isnumeric():
result[left] = int(right)
else:
result[left] = right
return result
if __name__ == '__main__':
# Load the settings and initialize the client
settings = load_settings()
client = InteractiveTelegramClient(
session_user_id=str(settings.get('session_name', 'anonymous')),
user_phone=str(settings['user_phone']),
api_id=settings['api_id'],
api_hash=str(settings['api_hash']))
print('Initialization done!')
try:
client.run()
except Exception as e:
print('Unexpected error ({}): {} at\n{}'.format(
type(e), e, traceback.format_exc()))
finally:
client.disconnect()
print('Thanks for trying the interactive example! Exiting...')
<commit_msg>Support configuring SOCKS proxy in the example<commit_after>
|
#!/usr/bin/env python3
import traceback
from telethon_examples.interactive_telegram_client \
import InteractiveTelegramClient
def load_settings(path='api/settings'):
"""Loads the user settings located under `api/`"""
result = {}
with open(path, 'r', encoding='utf-8') as file:
for line in file:
value_pair = line.split('=')
left = value_pair[0].strip()
right = value_pair[1].strip()
if right.isnumeric():
result[left] = int(right)
else:
result[left] = right
return result
if __name__ == '__main__':
# Load the settings and initialize the client
settings = load_settings()
kwargs = {}
if settings.get('socks_proxy'):
import socks # $ pip install pysocks
host, port = settings['socks_proxy'].split(':')
kwargs = dict(proxy=(socks.SOCKS5, host, int(port)))
client = InteractiveTelegramClient(
session_user_id=str(settings.get('session_name', 'anonymous')),
user_phone=str(settings['user_phone']),
api_id=settings['api_id'],
api_hash=str(settings['api_hash']),
**kwargs)
print('Initialization done!')
try:
client.run()
except Exception as e:
print('Unexpected error ({}): {} at\n{}'.format(
type(e), e, traceback.format_exc()))
finally:
client.disconnect()
print('Thanks for trying the interactive example! Exiting...')
|
#!/usr/bin/env python3
import traceback
from telethon_examples.interactive_telegram_client \
import InteractiveTelegramClient
def load_settings(path='api/settings'):
"""Loads the user settings located under `api/`"""
result = {}
with open(path, 'r', encoding='utf-8') as file:
for line in file:
value_pair = line.split('=')
left = value_pair[0].strip()
right = value_pair[1].strip()
if right.isnumeric():
result[left] = int(right)
else:
result[left] = right
return result
if __name__ == '__main__':
# Load the settings and initialize the client
settings = load_settings()
client = InteractiveTelegramClient(
session_user_id=str(settings.get('session_name', 'anonymous')),
user_phone=str(settings['user_phone']),
api_id=settings['api_id'],
api_hash=str(settings['api_hash']))
print('Initialization done!')
try:
client.run()
except Exception as e:
print('Unexpected error ({}): {} at\n{}'.format(
type(e), e, traceback.format_exc()))
finally:
client.disconnect()
print('Thanks for trying the interactive example! Exiting...')
Support configuring SOCKS proxy in the example#!/usr/bin/env python3
import traceback
from telethon_examples.interactive_telegram_client \
import InteractiveTelegramClient
def load_settings(path='api/settings'):
"""Loads the user settings located under `api/`"""
result = {}
with open(path, 'r', encoding='utf-8') as file:
for line in file:
value_pair = line.split('=')
left = value_pair[0].strip()
right = value_pair[1].strip()
if right.isnumeric():
result[left] = int(right)
else:
result[left] = right
return result
if __name__ == '__main__':
# Load the settings and initialize the client
settings = load_settings()
kwargs = {}
if settings.get('socks_proxy'):
import socks # $ pip install pysocks
host, port = settings['socks_proxy'].split(':')
kwargs = dict(proxy=(socks.SOCKS5, host, int(port)))
client = InteractiveTelegramClient(
session_user_id=str(settings.get('session_name', 'anonymous')),
user_phone=str(settings['user_phone']),
api_id=settings['api_id'],
api_hash=str(settings['api_hash']),
**kwargs)
print('Initialization done!')
try:
client.run()
except Exception as e:
print('Unexpected error ({}): {} at\n{}'.format(
type(e), e, traceback.format_exc()))
finally:
client.disconnect()
print('Thanks for trying the interactive example! Exiting...')
|
<commit_before>#!/usr/bin/env python3
import traceback
from telethon_examples.interactive_telegram_client \
import InteractiveTelegramClient
def load_settings(path='api/settings'):
"""Loads the user settings located under `api/`"""
result = {}
with open(path, 'r', encoding='utf-8') as file:
for line in file:
value_pair = line.split('=')
left = value_pair[0].strip()
right = value_pair[1].strip()
if right.isnumeric():
result[left] = int(right)
else:
result[left] = right
return result
if __name__ == '__main__':
# Load the settings and initialize the client
settings = load_settings()
client = InteractiveTelegramClient(
session_user_id=str(settings.get('session_name', 'anonymous')),
user_phone=str(settings['user_phone']),
api_id=settings['api_id'],
api_hash=str(settings['api_hash']))
print('Initialization done!')
try:
client.run()
except Exception as e:
print('Unexpected error ({}): {} at\n{}'.format(
type(e), e, traceback.format_exc()))
finally:
client.disconnect()
print('Thanks for trying the interactive example! Exiting...')
<commit_msg>Support configuring SOCKS proxy in the example<commit_after>#!/usr/bin/env python3
import traceback
from telethon_examples.interactive_telegram_client \
import InteractiveTelegramClient
def load_settings(path='api/settings'):
"""Loads the user settings located under `api/`"""
result = {}
with open(path, 'r', encoding='utf-8') as file:
for line in file:
value_pair = line.split('=')
left = value_pair[0].strip()
right = value_pair[1].strip()
if right.isnumeric():
result[left] = int(right)
else:
result[left] = right
return result
if __name__ == '__main__':
# Load the settings and initialize the client
settings = load_settings()
kwargs = {}
if settings.get('socks_proxy'):
import socks # $ pip install pysocks
host, port = settings['socks_proxy'].split(':')
kwargs = dict(proxy=(socks.SOCKS5, host, int(port)))
client = InteractiveTelegramClient(
session_user_id=str(settings.get('session_name', 'anonymous')),
user_phone=str(settings['user_phone']),
api_id=settings['api_id'],
api_hash=str(settings['api_hash']),
**kwargs)
print('Initialization done!')
try:
client.run()
except Exception as e:
print('Unexpected error ({}): {} at\n{}'.format(
type(e), e, traceback.format_exc()))
finally:
client.disconnect()
print('Thanks for trying the interactive example! Exiting...')
|
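A note on the proxy field the new code expects: the settings file is plain key=value lines, so enabling the proxy is one extra line such as socks_proxy=127.0.0.1:9050 (hypothetical value). A sketch of the parsing step in isolation; rpartition is used here instead of split so a host containing extra colons keeps its host part intact:

import socks  # pip install pysocks

def parse_socks_proxy(value):
    # value is a 'host:port' string, e.g. '127.0.0.1:9050'
    host, _, port = value.rpartition(':')
    return (socks.SOCKS5, host, int(port))

# parse_socks_proxy('127.0.0.1:9050') -> (socks.SOCKS5, '127.0.0.1', 9050)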
ee259a377bc574c113640601064ceb259707d35f
|
akhet/__init__.py
|
akhet/__init__.py
|
from akhet.static import add_static_route
def includeme(config):
"""Add certain useful methods to a Pyramid ``Configurator`` instance.
Currently this adds the ``.add_static_route()`` method. (See
``pyramid_sqla.static.add_static_route()``.)
"""
config.add_directive('add_static_route', add_static_route)
|
Add 'includeme' function. (Accidentally put in SQLAHelper.)
|
Add 'includeme' function. (Accidentally put in SQLAHelper.)
|
Python
|
mit
|
koansys/akhet,koansys/akhet
|
Add 'includeme' function. (Accidentally put in SQLAHelper.)
|
from akhet.static import add_static_route
def includeme(config):
"""Add certain useful methods to a Pyramid ``Configurator`` instance.
Currently this adds the ``.add_static_route()`` method. (See
``pyramid_sqla.static.add_static_route()``.)
"""
config.add_directive('add_static_route', add_static_route)
|
<commit_before><commit_msg>Add 'includeme' function. (Accidentally put in SQLAHelper.)<commit_after>
|
from akhet.static import add_static_route
def includeme(config):
"""Add certain useful methods to a Pyramid ``Configurator`` instance.
Currently this adds the ``.add_static_route()`` method. (See
``pyramid_sqla.static.add_static_route()``.)
"""
config.add_directive('add_static_route', add_static_route)
|
Add 'includeme' function. (Accidentally put in SQLAHelper.)from akhet.static import add_static_route
def includeme(config):
"""Add certain useful methods to a Pyramid ``Configurator`` instance.
Currently this adds the ``.add_static_route()`` method. (See
``pyramid_sqla.static.add_static_route()``.)
"""
config.add_directive('add_static_route', add_static_route)
|
<commit_before><commit_msg>Add 'includeme' function. (Accidentally put in SQLAHelper.)<commit_after>from akhet.static import add_static_route
def includeme(config):
"""Add certain useful methods to a Pyramid ``Configurator`` instance.
Currently this adds the ``.add_static_route()`` method. (See
``pyramid_sqla.static.add_static_route()``.)
"""
config.add_directive('add_static_route', add_static_route)
|
|
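For context, add_directive is what lets a Pyramid application call the new method directly on its Configurator after including the package. A hypothetical application entry point; the add_static_route arguments are illustrative, not taken from the record:

from pyramid.config import Configurator

def main(global_config, **settings):
    config = Configurator(settings=settings)
    config.include('akhet')  # invokes akhet.includeme(config)
    config.add_static_route('myapp', 'static')  # directive registered above
    return config.make_wsgi_app()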
52b022869b7092fc519accc2132c3f842502aeae
|
create_input_files.py
|
create_input_files.py
|
from utils import write_csv_rows, read_csv_rows
class input_table:
def __init__(self, filename, content):
self.filename = filename
self.content = content
connect_tbl=input_table('connectivity.csv',
[['Connectivity Table'],
['x1','y1','x2','y2','E','A']])
force_tbl=input_table('forces.csv',
[['Force Table'],
['x','y','Fx','Fy']])
bc_tbl=input_table('boundary_conditions.csv',
[['Boundary Conditions'],
['x','y','Constrained Dimension','Displacement']])
sim_tbl=input_table('simulation_parameters.csv',
[['Simulation Parameters'],
['Numerical Soln Multiplier','Degrees of Freedom'],
['1e9']])
input_files=[connect_tbl,force_tbl,bc_tbl,sim_tbl]
for i in range(0,len(input_files)):
write_csv_rows(input_files[i].filename,input_files[i].content)
print(input_files[i].content[0][0] + ' written to ' + input_files[i].filename)
|
from utils import write_csv_rows, read_csv_rows
class input_table:
def __init__(self, filename, name, headers, content=[]):
self.filename = filename
self.name = name
self.headers = headers
self.content = content
connect_filename = 'connectivity.csv'
connect_name = ['Connectivity Table']
connect_headers = ['x1','y1','x2','y2','E','A']
connect_tbl = input_table(connect_filename,
connect_name,
connect_headers)
force_filename = 'forces.csv'
force_name = ['Force Table']
force_headers = ['x','y','Fx','Fy']
force_tbl = input_table(force_filename,
force_name,
force_headers)
bc_filename = 'boundary_conditions.csv'
bc_name = ['Boundary Conditions']
bc_headers = ['x','y','Constrained Dimension','Displacement']
bc_tbl = input_table(bc_filename,
bc_name,
bc_headers)
sim_filename = 'simulation_parameters.csv'
sim_name = ['Simulation Parameters']
sim_headers = ['Numerical Soln Multiplier','Degrees of Freedom']
sim_content = ['1e9']
sim_tbl = input_table(sim_filename,
sim_name,
sim_headers,
sim_content)
input_files = [connect_tbl,force_tbl,bc_tbl,sim_tbl]
for i in range(0,len(input_files)):
tbl_list = [input_files[i].name,
input_files[i].headers,
input_files[i].content]
write_csv_rows(input_files[i].filename,tbl_list)
print(input_files[i].name[0] + ' written to ' +\
input_files[i].filename)
|
Clean up input tables class and instance declarations
|
Clean up input tables class and instance declarations
|
Python
|
mit
|
ndebuhr/openfea,ndebuhr/openfea
|
from utils import write_csv_rows, read_csv_rows
class input_table:
def __init__(self, filename, content):
self.filename = filename
self.content = content
connect_tbl=input_table('connectivity.csv',
[['Connectivity Table'],
['x1','y1','x2','y2','E','A']])
force_tbl=input_table('forces.csv',
[['Force Table'],
['x','y','Fx','Fy']])
bc_tbl=input_table('boundary_conditions.csv',
[['Boundary Conditions'],
['x','y','Constrained Dimension','Displacement']])
sim_tbl=input_table('simulation_parameters.csv',
[['Simulation Parameters'],
['Numerical Soln Multiplier','Degrees of Freedom'],
['1e9']])
input_files=[connect_tbl,force_tbl,bc_tbl,sim_tbl]
for i in range(0,len(input_files)):
write_csv_rows(input_files[i].filename,input_files[i].content)
print(input_files[i].content[0][0] + ' written to ' + input_files[i].filename)
Clean up input tables class and instance declarations
|
from utils import write_csv_rows, read_csv_rows
class input_table:
def __init__(self, filename, name, headers, content=[]):
self.filename = filename
self.name = name
self.headers = headers
self.content = content
connect_filename = 'connectivity.csv'
connect_name = ['Connectivity Table']
connect_headers = ['x1','y1','x2','y2','E','A']
connect_tbl = input_table(connect_filename,
connect_name,
connect_headers)
force_filename = 'forces.csv'
force_name = ['Force Table']
force_headers = ['x','y','Fx','Fy']
force_tbl = input_table(force_filename,
force_name,
force_headers)
bc_filename = 'boundary_conditions.csv'
bc_name = ['Boundary Conditions']
bc_headers = ['x','y','Constrained Dimension','Displacement']
bc_tbl = input_table(bc_filename,
bc_name,
bc_headers)
sim_filename = 'simulation_parameters.csv'
sim_name = ['Simulation Parameters']
sim_headers = ['Numerical Soln Multiplier','Degrees of Freedom']
sim_content = ['1e9']
sim_tbl = input_table(sim_filename,
sim_name,
sim_headers,
sim_content)
input_files = [connect_tbl,force_tbl,bc_tbl,sim_tbl]
for i in range(0,len(input_files)):
tbl_list = [input_files[i].name,
input_files[i].headers,
input_files[i].content]
write_csv_rows(input_files[i].filename,tbl_list)
print(input_files[i].name[0] + ' written to ' +\
input_files[i].filename)
|
<commit_before>from utils import write_csv_rows, read_csv_rows
class input_table:
def __init__(self, filename, content):
self.filename = filename
self.content = content
connect_tbl=input_table('connectivity.csv',
[['Connectivity Table'],
['x1','y1','x2','y2','E','A']])
force_tbl=input_table('forces.csv',
[['Force Table'],
['x','y','Fx','Fy']])
bc_tbl=input_table('boundary_conditions.csv',
[['Boundary Conditions'],
['x','y','Constrained Dimension','Displacement']])
sim_tbl=input_table('simulation_parameters.csv',
[['Simulation Parameters'],
['Numerical Soln Multiplier','Degrees of Freedom'],
['1e9']])
input_files=[connect_tbl,force_tbl,bc_tbl,sim_tbl]
for i in range(0,len(input_files)):
write_csv_rows(input_files[i].filename,input_files[i].content)
print(input_files[i].content[0][0] + ' written to ' + input_files[i].filename)
<commit_msg>Clean up input tables class and instance declarations<commit_after>
|
from utils import write_csv_rows, read_csv_rows
class input_table:
def __init__(self, filename, name, headers, content=[]):
self.filename = filename
self.name = name
self.headers = headers
self.content = content
connect_filename = 'connectivity.csv'
connect_name = ['Connectivity Table']
connect_headers = ['x1','y1','x2','y2','E','A']
connect_tbl = input_table(connect_filename,
connect_name,
connect_headers)
force_filename = 'forces.csv'
force_name = ['Force Table']
force_headers = ['x','y','Fx','Fy']
force_tbl = input_table(force_filename,
force_name,
force_headers)
bc_filename = 'boundary_conditions.csv'
bc_name = ['Boundary Conditions']
bc_headers = ['x','y','Constrained Dimension','Displacement']
bc_tbl = input_table(bc_filename,
bc_name,
bc_headers)
sim_filename = 'simulation_parameters.csv'
sim_name = ['Simulation Parameters']
sim_headers = ['Numerical Soln Multiplier','Degrees of Freedom']
sim_content = ['1e9']
sim_tbl = input_table(sim_filename,
sim_name,
sim_headers,
sim_content)
input_files = [connect_tbl,force_tbl,bc_tbl,sim_tbl]
for i in range(0,len(input_files)):
tbl_list = [input_files[i].name,
input_files[i].headers,
input_files[i].content]
write_csv_rows(input_files[i].filename,tbl_list)
print(input_files[i].name[0] + ' written to ' +\
input_files[i].filename)
|
from utils import write_csv_rows, read_csv_rows
class input_table:
def __init__(self, filename, content):
self.filename = filename
self.content = content
connect_tbl=input_table('connectivity.csv',
[['Connectivity Table'],
['x1','y1','x2','y2','E','A']])
force_tbl=input_table('forces.csv',
[['Force Table'],
['x','y','Fx','Fy']])
bc_tbl=input_table('boundary_conditions.csv',
[['Boundary Conditions'],
['x','y','Constrained Dimension','Displacement']])
sim_tbl=input_table('simulation_parameters.csv',
[['Simulation Parameters'],
['Numerical Soln Multiplier','Degrees of Freedom'],
['1e9']])
input_files=[connect_tbl,force_tbl,bc_tbl,sim_tbl]
for i in range(0,len(input_files)):
write_csv_rows(input_files[i].filename,input_files[i].content)
print(input_files[i].content[0][0] + ' written to ' + input_files[i].filename)
Clean up input tables class and instance declarationsfrom utils import write_csv_rows, read_csv_rows
class input_table:
def __init__(self, filename, name, headers, content=[]):
self.filename = filename
self.name = name
self.headers = headers
self.content = content
connect_filename = 'connectivity.csv'
connect_name = ['Connectivity Table']
connect_headers = ['x1','y1','x2','y2','E','A']
connect_tbl = input_table(connect_filename,
connect_name,
connect_headers)
force_filename = 'forces.csv'
force_name = ['Force Table']
force_headers = ['x','y','Fx','Fy']
force_tbl = input_table(force_filename,
force_name,
force_headers)
bc_filename = 'boundary_conditions.csv'
bc_name = ['Boundary Conditions']
bc_headers = ['x','y','Constrained Dimension','Displacement']
bc_tbl = input_table(bc_filename,
bc_name,
bc_headers)
sim_filename = 'simulation_parameters.csv'
sim_name = ['Simulation Parameters']
sim_headers = ['Numerical Soln Multiplier','Degrees of Freedom']
sim_content = ['1e9']
sim_tbl = input_table(sim_filename,
sim_name,
sim_headers,
sim_content)
input_files = [connect_tbl,force_tbl,bc_tbl,sim_tbl]
for i in range(0,len(input_files)):
tbl_list = [input_files[i].name,
input_files[i].headers,
input_files[i].content]
write_csv_rows(input_files[i].filename,tbl_list)
print(input_files[i].name[0] + ' written to ' +\
input_files[i].filename)
|
<commit_before>from utils import write_csv_rows, read_csv_rows
class input_table:
def __init__(self, filename, content):
self.filename = filename
self.content = content
connect_tbl=input_table('connectivity.csv',
[['Connectivity Table'],
['x1','y1','x2','y2','E','A']])
force_tbl=input_table('forces.csv',
[['Force Table'],
['x','y','Fx','Fy']])
bc_tbl=input_table('boundary_conditions.csv',
[['Boundary Conditions'],
['x','y','Constrained Dimension','Displacement']])
sim_tbl=input_table('simulation_parameters.csv',
[['Simulation Parameters'],
['Numerical Soln Multiplier','Degrees of Freedom'],
['1e9']])
input_files=[connect_tbl,force_tbl,bc_tbl,sim_tbl]
for i in range(0,len(input_files)):
write_csv_rows(input_files[i].filename,input_files[i].content)
print(input_files[i].content[0][0] + ' written to ' + input_files[i].filename)
<commit_msg>Clean up input tables class and instance declarations<commit_after>from utils import write_csv_rows, read_csv_rows
class input_table:
def __init__(self, filename, name, headers, content=[]):
self.filename = filename
self.name = name
self.headers = headers
self.content = content
connect_filename = 'connectivity.csv'
connect_name = ['Connectivity Table']
connect_headers = ['x1','y1','x2','y2','E','A']
connect_tbl = input_table(connect_filename,
connect_name,
connect_headers)
force_filename = 'forces.csv'
force_name = ['Force Table']
force_headers = ['x','y','Fx','Fy']
force_tbl = input_table(force_filename,
force_name,
force_headers)
bc_filename = 'boundary_conditions.csv'
bc_name = ['Boundary Conditions']
bc_headers = ['x','y','Constrained Dimension','Displacement']
bc_tbl = input_table(bc_filename,
bc_name,
bc_headers)
sim_filename = 'simulation_parameters.csv'
sim_name = ['Simulation Parameters']
sim_headers = ['Numerical Soln Multiplier','Degrees of Freedom']
sim_content = ['1e9']
sim_tbl = input_table(sim_filename,
sim_name,
sim_headers,
sim_content)
input_files = [connect_tbl,force_tbl,bc_tbl,sim_tbl]
for i in range(0,len(input_files)):
tbl_list = [input_files[i].name,
input_files[i].headers,
input_files[i].content]
write_csv_rows(input_files[i].filename,tbl_list)
print(input_files[i].name[0] + ' written to ' +\
input_files[i].filename)
|
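One caveat in the refactored class above: content=[] is a mutable default, evaluated once at function definition time, so every input_table built without an explicit content shares the same list object. The bug is latent here because content is only ever read, but the conventional guard is a None sentinel; a sketch:

class InputTable:
    def __init__(self, filename, name, headers, content=None):
        self.filename = filename
        self.name = name
        self.headers = headers
        # Fresh list per instance instead of one shared default list.
        self.content = [] if content is None else content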
33620c30a7e79243a9a1f32cdad7e2af8e1fa278
|
auditlog/admin.py
|
auditlog/admin.py
|
from django.contrib import admin
from auditlog.filters import ResourceTypeFilter
from auditlog.mixins import LogEntryAdminMixin
from auditlog.models import LogEntry
class LogEntryAdmin(admin.ModelAdmin, LogEntryAdminMixin):
list_display = ["created", "resource_url", "action", "msg_short", "user_url"]
search_fields = [
"timestamp",
"object_repr",
"changes",
"actor__first_name",
"actor__last_name",
]
list_filter = ["action", ResourceTypeFilter]
readonly_fields = ["created", "resource_url", "action", "user_url", "msg"]
fieldsets = [
(None, {"fields": ["created", "user_url", "resource_url"]}),
("Changes", {"fields": ["action", "msg"]}),
]
admin.site.register(LogEntry, LogEntryAdmin)
|
from django.contrib import admin
from auditlog.filters import ResourceTypeFilter
from auditlog.mixins import LogEntryAdminMixin
from auditlog.models import LogEntry
class LogEntryAdmin(admin.ModelAdmin, LogEntryAdminMixin):
list_display = ["created", "resource_url", "action", "msg_short", "user_url"]
search_fields = [
"timestamp",
"object_repr",
"changes",
"actor__first_name",
"actor__last_name",
"actor__username",
]
list_filter = ["action", ResourceTypeFilter]
readonly_fields = ["created", "resource_url", "action", "user_url", "msg"]
fieldsets = [
(None, {"fields": ["created", "user_url", "resource_url"]}),
("Changes", {"fields": ["action", "msg"]}),
]
admin.site.register(LogEntry, LogEntryAdmin)
|
Add actor username to search fields
|
Add actor username to search fields
|
Python
|
mit
|
jjkester/django-auditlog
|
from django.contrib import admin
from auditlog.filters import ResourceTypeFilter
from auditlog.mixins import LogEntryAdminMixin
from auditlog.models import LogEntry
class LogEntryAdmin(admin.ModelAdmin, LogEntryAdminMixin):
list_display = ["created", "resource_url", "action", "msg_short", "user_url"]
search_fields = [
"timestamp",
"object_repr",
"changes",
"actor__first_name",
"actor__last_name",
]
list_filter = ["action", ResourceTypeFilter]
readonly_fields = ["created", "resource_url", "action", "user_url", "msg"]
fieldsets = [
(None, {"fields": ["created", "user_url", "resource_url"]}),
("Changes", {"fields": ["action", "msg"]}),
]
admin.site.register(LogEntry, LogEntryAdmin)
Add actor username to search fields
|
from django.contrib import admin
from auditlog.filters import ResourceTypeFilter
from auditlog.mixins import LogEntryAdminMixin
from auditlog.models import LogEntry
class LogEntryAdmin(admin.ModelAdmin, LogEntryAdminMixin):
list_display = ["created", "resource_url", "action", "msg_short", "user_url"]
search_fields = [
"timestamp",
"object_repr",
"changes",
"actor__first_name",
"actor__last_name",
"actor__username",
]
list_filter = ["action", ResourceTypeFilter]
readonly_fields = ["created", "resource_url", "action", "user_url", "msg"]
fieldsets = [
(None, {"fields": ["created", "user_url", "resource_url"]}),
("Changes", {"fields": ["action", "msg"]}),
]
admin.site.register(LogEntry, LogEntryAdmin)
|
<commit_before>from django.contrib import admin
from auditlog.filters import ResourceTypeFilter
from auditlog.mixins import LogEntryAdminMixin
from auditlog.models import LogEntry
class LogEntryAdmin(admin.ModelAdmin, LogEntryAdminMixin):
list_display = ["created", "resource_url", "action", "msg_short", "user_url"]
search_fields = [
"timestamp",
"object_repr",
"changes",
"actor__first_name",
"actor__last_name",
]
list_filter = ["action", ResourceTypeFilter]
readonly_fields = ["created", "resource_url", "action", "user_url", "msg"]
fieldsets = [
(None, {"fields": ["created", "user_url", "resource_url"]}),
("Changes", {"fields": ["action", "msg"]}),
]
admin.site.register(LogEntry, LogEntryAdmin)
<commit_msg>Add actor username to search fields<commit_after>
|
from django.contrib import admin
from auditlog.filters import ResourceTypeFilter
from auditlog.mixins import LogEntryAdminMixin
from auditlog.models import LogEntry
class LogEntryAdmin(admin.ModelAdmin, LogEntryAdminMixin):
list_display = ["created", "resource_url", "action", "msg_short", "user_url"]
search_fields = [
"timestamp",
"object_repr",
"changes",
"actor__first_name",
"actor__last_name",
"actor__username",
]
list_filter = ["action", ResourceTypeFilter]
readonly_fields = ["created", "resource_url", "action", "user_url", "msg"]
fieldsets = [
(None, {"fields": ["created", "user_url", "resource_url"]}),
("Changes", {"fields": ["action", "msg"]}),
]
admin.site.register(LogEntry, LogEntryAdmin)
|
from django.contrib import admin
from auditlog.filters import ResourceTypeFilter
from auditlog.mixins import LogEntryAdminMixin
from auditlog.models import LogEntry
class LogEntryAdmin(admin.ModelAdmin, LogEntryAdminMixin):
list_display = ["created", "resource_url", "action", "msg_short", "user_url"]
search_fields = [
"timestamp",
"object_repr",
"changes",
"actor__first_name",
"actor__last_name",
]
list_filter = ["action", ResourceTypeFilter]
readonly_fields = ["created", "resource_url", "action", "user_url", "msg"]
fieldsets = [
(None, {"fields": ["created", "user_url", "resource_url"]}),
("Changes", {"fields": ["action", "msg"]}),
]
admin.site.register(LogEntry, LogEntryAdmin)
Add actor username to search fieldsfrom django.contrib import admin
from auditlog.filters import ResourceTypeFilter
from auditlog.mixins import LogEntryAdminMixin
from auditlog.models import LogEntry
class LogEntryAdmin(admin.ModelAdmin, LogEntryAdminMixin):
list_display = ["created", "resource_url", "action", "msg_short", "user_url"]
search_fields = [
"timestamp",
"object_repr",
"changes",
"actor__first_name",
"actor__last_name",
"actor__username",
]
list_filter = ["action", ResourceTypeFilter]
readonly_fields = ["created", "resource_url", "action", "user_url", "msg"]
fieldsets = [
(None, {"fields": ["created", "user_url", "resource_url"]}),
("Changes", {"fields": ["action", "msg"]}),
]
admin.site.register(LogEntry, LogEntryAdmin)
|
<commit_before>from django.contrib import admin
from auditlog.filters import ResourceTypeFilter
from auditlog.mixins import LogEntryAdminMixin
from auditlog.models import LogEntry
class LogEntryAdmin(admin.ModelAdmin, LogEntryAdminMixin):
list_display = ["created", "resource_url", "action", "msg_short", "user_url"]
search_fields = [
"timestamp",
"object_repr",
"changes",
"actor__first_name",
"actor__last_name",
]
list_filter = ["action", ResourceTypeFilter]
readonly_fields = ["created", "resource_url", "action", "user_url", "msg"]
fieldsets = [
(None, {"fields": ["created", "user_url", "resource_url"]}),
("Changes", {"fields": ["action", "msg"]}),
]
admin.site.register(LogEntry, LogEntryAdmin)
<commit_msg>Add actor username to search fields<commit_after>from django.contrib import admin
from auditlog.filters import ResourceTypeFilter
from auditlog.mixins import LogEntryAdminMixin
from auditlog.models import LogEntry
class LogEntryAdmin(admin.ModelAdmin, LogEntryAdminMixin):
list_display = ["created", "resource_url", "action", "msg_short", "user_url"]
search_fields = [
"timestamp",
"object_repr",
"changes",
"actor__first_name",
"actor__last_name",
"actor__username",
]
list_filter = ["action", ResourceTypeFilter]
readonly_fields = ["created", "resource_url", "action", "user_url", "msg"]
fieldsets = [
(None, {"fields": ["created", "user_url", "resource_url"]}),
("Changes", {"fields": ["action", "msg"]}),
]
admin.site.register(LogEntry, LogEntryAdmin)
|
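As background on why actor__username works at all: entries in ModelAdmin.search_fields may traverse foreign keys with '__', and for each search term the admin ORs case-insensitive containment lookups across the listed fields (ANDing across terms). Roughly equivalent, for a single term:

from django.db.models import Q

term = "alice"
q = (Q(object_repr__icontains=term)
     | Q(changes__icontains=term)
     | Q(actor__first_name__icontains=term)
     | Q(actor__last_name__icontains=term)
     | Q(actor__username__icontains=term))
# LogEntry.objects.filter(q)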
f25c0074a013255141371b46cff0a506ad0b2ab5
|
axiom/__init__.py
|
axiom/__init__.py
|
# -*- test-case-name: axiom.test -*-
from axiom._version import __version__
from epsilon import asTwistedVersion
version = asTwistedVersion("axiom", __version__)
|
# -*- test-case-name: axiom.test -*-
from axiom._version import __version__
from twisted.python import versions
def asTwistedVersion(packageName, versionString):
return versions.Version(packageName, *map(int, versionString.split(".")))
version = asTwistedVersion("axiom", __version__)
|
Add a local asTwistedVersion implementation to Axiom, so as to not add another setup-time dependency on Epsilon
|
Add a local asTwistedVersion implementation to Axiom, so as to not add another setup-time dependency on Epsilon
|
Python
|
mit
|
hawkowl/axiom,twisted/axiom
|
# -*- test-case-name: axiom.test -*-
from axiom._version import __version__
from epsilon import asTwistedVersion
version = asTwistedVersion("axiom", __version__)
Add a local asTwistedVersion implementation to Axiom, so as to not add another setup-time dependency on Epsilon
|
# -*- test-case-name: axiom.test -*-
from axiom._version import __version__
from twisted.python import versions
def asTwistedVersion(packageName, versionString):
return versions.Version(packageName, *map(int, versionString.split(".")))
version = asTwistedVersion("axiom", __version__)
|
<commit_before># -*- test-case-name: axiom.test -*-
from axiom._version import __version__
from epsilon import asTwistedVersion
version = asTwistedVersion("axiom", __version__)
<commit_msg>Add a local asTwistedVersion implementation to Axiom, so as to not add another setup-time dependency on Epsilon<commit_after>
|
# -*- test-case-name: axiom.test -*-
from axiom._version import __version__
from twisted.python import versions
def asTwistedVersion(packageName, versionString):
return versions.Version(packageName, *map(int, versionString.split(".")))
version = asTwistedVersion("axiom", __version__)
|
# -*- test-case-name: axiom.test -*-
from axiom._version import __version__
from epsilon import asTwistedVersion
version = asTwistedVersion("axiom", __version__)
Add a local asTwistedVersion implementation to Axiom, so as to not add another setup-time dependency on Epsilon# -*- test-case-name: axiom.test -*-
from axiom._version import __version__
from twisted.python import versions
def asTwistedVersion(packageName, versionString):
return versions.Version(packageName, *map(int, versionString.split(".")))
version = asTwistedVersion("axiom", __version__)
|
<commit_before># -*- test-case-name: axiom.test -*-
from axiom._version import __version__
from epsilon import asTwistedVersion
version = asTwistedVersion("axiom", __version__)
<commit_msg>Add a local asTwistedVersion implementation to Axiom, so as to not add another setup-time dependency on Epsilon<commit_after># -*- test-case-name: axiom.test -*-
from axiom._version import __version__
from twisted.python import versions
def asTwistedVersion(packageName, versionString):
return versions.Version(packageName, *map(int, versionString.split(".")))
version = asTwistedVersion("axiom", __version__)
|
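A quick check of what the local helper produces, on the twisted.python.versions API the record targets; note that map(int, ...) means a pre-release string such as '0.7.5rc1' would raise ValueError, so this helper assumes plain numeric version strings:

from twisted.python import versions

def asTwistedVersion(packageName, versionString):
    return versions.Version(packageName, *map(int, versionString.split(".")))

v = asTwistedVersion("axiom", "0.7.5")
# v.package == 'axiom'; (v.major, v.minor, v.micro) == (0, 7, 5)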
0c3a0fd8eee8ca4ced29dbb69570aa1605ea0d5d
|
PEATSA/Database/Scripts/JobMailer.py
|
PEATSA/Database/Scripts/JobMailer.py
|
#! /usr/bin/python
import sys, time, optparse, os
import PEATSA.Core as Core
import PEATSA.WebApp as WebApp
import ConstructJob
import MySQLdb
usage = "usage: %prog [options]"
parser = optparse.OptionParser(usage=usage, version="% 0.1", description=__doc__)
parser.add_option("-c", "--configurationFile", dest="configurationFile",
help="A PEATSA configuration file.", metavar="CONF")
(options, args) = parser.parse_args()
if options.configurationFile is None:
print 'Configuration file must be provided'
sys.exit(1)
configuration = Core.Environment.Configuration(filename=options.configurationFile)
jobTable = configuration.get('DATABASE', 'jobTable')
while(1):
connection = WebApp.UtilityFunctions.ConnectionFromConfiguration(configuration)
jobManager = WebApp.Data.JobManager(connection=connection, jobTable=jobTable)
selectQuery = """SELECT JobId FROM %s WHERE SentMail='0' AND State='Finished' AND NOT Email='Unknown'""" % (jobTable)
cursor = connection.cursor()
cursor.execute(selectQuery)
ids = [el[0] for el in cursor.fetchall()]
for id in ids:
job = WebApp.Data.Job(id, connection)
print 'Sending mail for job %s to %s' % (job.identification, job.email())
WebApp.UtilityFunctions.SendNotificationEmail(job)
connection.close()
time.sleep(30)
|
#! /usr/bin/python
import sys, time, optparse, os
import PEATSA.Core as Core
import PEATSA.WebApp as WebApp
import ConstructJob
import MySQLdb
usage = "usage: %prog [options]"
parser = optparse.OptionParser(usage=usage, version="% 0.1", description=__doc__)
parser.add_option("-c", "--configurationFile", dest="configurationFile",
help="A PEATSA configuration file.", metavar="CONF")
(options, args) = parser.parse_args()
if options.configurationFile is None:
print 'Configuration file must be provided'
sys.exit(1)
configuration = Core.Environment.Configuration(filename=options.configurationFile)
jobTable = configuration.get('DATABASE', 'jobTable')
while(1):
connection = WebApp.UtilityFunctions.ConnectionFromConfiguration(configuration)
jobManager = WebApp.Data.JobManager(connection=connection, jobTable=jobTable)
selectQuery = """SELECT JobId FROM %s WHERE SentMail='0' AND State='Finished' AND NOT Email='Unknown'""" % (jobTable)
cursor = connection.cursor()
cursor.execute(selectQuery)
ids = [el[0] for el in cursor.fetchall()]
for id in ids:
job = WebApp.Data.Job(id, connection)
print 'Sending mail for job %s to %s' % (job.identification, job.email())
print job.error()
WebApp.UtilityFunctions.SendNotificationEmail(job)
connection.close()
time.sleep(30)
|
Print the job error when mailing a failed-job notification
|
Print the job error when mailing a failed-job notification
|
Python
|
mit
|
dmnfarrell/peat,dmnfarrell/peat,dmnfarrell/peat,dmnfarrell/peat,dmnfarrell/peat,dmnfarrell/peat,dmnfarrell/peat
|
#! /usr/bin/python
import sys, time, optparse, os
import PEATSA.Core as Core
import PEATSA.WebApp as WebApp
import ConstructJob
import MySQLdb
usage = "usage: %prog [options]"
parser = optparse.OptionParser(usage=usage, version="% 0.1", description=__doc__)
parser.add_option("-c", "--configurationFile", dest="configurationFile",
help="A PEATSA configuration file.", metavar="CONF")
(options, args) = parser.parse_args()
if options.configurationFile is None:
print 'Configuration file must be provided'
sys.exit(1)
configuration = Core.Environment.Configuration(filename=options.configurationFile)
jobTable = configuration.get('DATABASE', 'jobTable')
while(1):
connection = WebApp.UtilityFunctions.ConnectionFromConfiguration(configuration)
jobManager = WebApp.Data.JobManager(connection=connection, jobTable=jobTable)
selectQuery = """SELECT JobId FROM %s WHERE SentMail='0' AND State='Finished' AND NOT Email='Unknown'""" % (jobTable)
cursor = connection.cursor()
cursor.execute(selectQuery)
ids = [el[0] for el in cursor.fetchall()]
for id in ids:
job = WebApp.Data.Job(id, connection)
print 'Sending mail for job %s to %s' % (job.identification, job.email())
WebApp.UtilityFunctions.SendNotificationEmail(job)
connection.close()
time.sleep(30)
Print the job error when mailing a failed-job notification
|
#! /usr/bin/python
import sys, time, optparse, os
import PEATSA.Core as Core
import PEATSA.WebApp as WebApp
import ConstructJob
import MySQLdb
usage = "usage: %prog [options]"
parser = optparse.OptionParser(usage=usage, version="% 0.1", description=__doc__)
parser.add_option("-c", "--configurationFile", dest="configurationFile",
help="A PEATSA configuration file.", metavar="CONF")
(options, args) = parser.parse_args()
if options.configurationFile is None:
print 'Configuration file must be provided'
sys.exit(1)
configuration = Core.Environment.Configuration(filename=options.configurationFile)
jobTable = configuration.get('DATABASE', 'jobTable')
while(1):
connection = WebApp.UtilityFunctions.ConnectionFromConfiguration(configuration)
jobManager = WebApp.Data.JobManager(connection=connection, jobTable=jobTable)
selectQuery = """SELECT JobId FROM %s WHERE SentMail='0' AND State='Finished' AND NOT Email='Unknown'""" % (jobTable)
cursor = connection.cursor()
cursor.execute(selectQuery)
ids = [el[0] for el in cursor.fetchall()]
for id in ids:
job = WebApp.Data.Job(id, connection)
print 'Sending mail for job %s to %s' % (job.identification, job.email())
print job.error()
WebApp.UtilityFunctions.SendNotificationEmail(job)
connection.close()
time.sleep(30)
|
<commit_before>#! /usr/bin/python
import sys, time, optparse, os
import PEATSA.Core as Core
import PEATSA.WebApp as WebApp
import ConstructJob
import MySQLdb
usage = "usage: %prog [options]"
parser = optparse.OptionParser(usage=usage, version="% 0.1", description=__doc__)
parser.add_option("-c", "--configurationFile", dest="configurationFile",
help="A PEATSA configuration file.", metavar="CONF")
(options, args) = parser.parse_args()
if options.configurationFile is None:
print 'Configuration file must be provided'
sys.exit(1)
configuration = Core.Environment.Configuration(filename=options.configurationFile)
jobTable = configuration.get('DATABASE', 'jobTable')
while(1):
connection = WebApp.UtilityFunctions.ConnectionFromConfiguration(configuration)
jobManager = WebApp.Data.JobManager(connection=connection, jobTable=jobTable)
selectQuery = """SELECT JobId FROM %s WHERE SentMail='0' AND State='Finished' AND NOT Email='Unknown'""" % (jobTable)
cursor = connection.cursor()
cursor.execute(selectQuery)
ids = [el[0] for el in cursor.fetchall()]
for id in ids:
job = WebApp.Data.Job(id, connection)
print 'Sending mail for job %s to %s' % (job.identification, job.email())
WebApp.UtilityFunctions.SendNotificationEmail(job)
connection.close()
time.sleep(30)
<commit_msg>Print the job error when mailing a failed-job notification<commit_after>
|
#! /usr/bin/python
import sys, time, optparse, os
import PEATSA.Core as Core
import PEATSA.WebApp as WebApp
import ConstructJob
import MySQLdb
usage = "usage: %prog [options]"
parser = optparse.OptionParser(usage=usage, version="% 0.1", description=__doc__)
parser.add_option("-c", "--configurationFile", dest="configurationFile",
help="A PEATSA configuration file.", metavar="CONF")
(options, args) = parser.parse_args()
if options.configurationFile is None:
print 'Configuration file must be provided'
sys.exit(1)
configuration = Core.Environment.Configuration(filename=options.configurationFile)
jobTable = configuration.get('DATABASE', 'jobTable')
while(1):
connection = WebApp.UtilityFunctions.ConnectionFromConfiguration(configuration)
jobManager = WebApp.Data.JobManager(connection=connection, jobTable=jobTable)
selectQuery = """SELECT JobId FROM %s WHERE SentMail='0' AND State='Finished' AND NOT Email='Unknown'""" % (jobTable)
cursor = connection.cursor()
cursor.execute(selectQuery)
ids = [el[0] for el in cursor.fetchall()]
for id in ids:
job = WebApp.Data.Job(id, connection)
print 'Sending mail for job %s to %s' % (job.identification, job.email())
print job.error()
WebApp.UtilityFunctions.SendNotificationEmail(job)
connection.close()
time.sleep(30)
|
#! /usr/bin/python
import sys, time, optparse, os
import PEATSA.Core as Core
import PEATSA.WebApp as WebApp
import ConstructJob
import MySQLdb
usage = "usage: %prog [options]"
parser = optparse.OptionParser(usage=usage, version="% 0.1", description=__doc__)
parser.add_option("-c", "--configurationFile", dest="configurationFile",
help="A PEATSA configuration file.", metavar="CONF")
(options, args) = parser.parse_args()
if options.configurationFile is None:
print 'Configuration file must be provided'
sys.exit(1)
configuration = Core.Environment.Configuration(filename=options.configurationFile)
jobTable = configuration.get('DATABASE', 'jobTable')
while(1):
connection = WebApp.UtilityFunctions.ConnectionFromConfiguration(configuration)
jobManager = WebApp.Data.JobManager(connection=connection, jobTable=jobTable)
selectQuery = """SELECT JobId FROM %s WHERE SentMail='0' AND State='Finished' AND NOT Email='Unknown'""" % (jobTable)
cursor = connection.cursor()
cursor.execute(selectQuery)
ids = [el[0] for el in cursor.fetchall()]
for id in ids:
job = WebApp.Data.Job(id, connection)
print 'Sending mail for job %s to %s' % (job.identification, job.email())
WebApp.UtilityFunctions.SendNotificationEmail(job)
connection.close()
time.sleep(30)
Print the job error when mailing a failed-job notification#! /usr/bin/python
import sys, time, optparse, os
import PEATSA.Core as Core
import PEATSA.WebApp as WebApp
import ConstructJob
import MySQLdb
usage = "usage: %prog [options]"
parser = optparse.OptionParser(usage=usage, version="% 0.1", description=__doc__)
parser.add_option("-c", "--configurationFile", dest="configurationFile",
help="A PEATSA configuration file.", metavar="CONF")
(options, args) = parser.parse_args()
if options.configurationFile is None:
print 'Configuration file must be provided'
sys.exit(1)
configuration = Core.Environment.Configuration(filename=options.configurationFile)
jobTable = configuration.get('DATABASE', 'jobTable')
while(1):
connection = WebApp.UtilityFunctions.ConnectionFromConfiguration(configuration)
jobManager = WebApp.Data.JobManager(connection=connection, jobTable=jobTable)
selectQuery = """SELECT JobId FROM %s WHERE SentMail='0' AND State='Finished' AND NOT Email='Unknown'""" % (jobTable)
cursor = connection.cursor()
cursor.execute(selectQuery)
ids = [el[0] for el in cursor.fetchall()]
for id in ids:
job = WebApp.Data.Job(id, connection)
print 'Sending mail for job %s to %s' % (job.identification, job.email())
print job.error()
WebApp.UtilityFunctions.SendNotificationEmail(job)
connection.close()
time.sleep(30)
|
<commit_before>#! /usr/bin/python
import sys, time, optparse, os
import PEATSA.Core as Core
import PEATSA.WebApp as WebApp
import ConstructJob
import MySQLdb
usage = "usage: %prog [options]"
parser = optparse.OptionParser(usage=usage, version="% 0.1", description=__doc__)
parser.add_option("-c", "--configurationFile", dest="configurationFile",
help="A PEATSA configuration file.", metavar="CONF")
(options, args) = parser.parse_args()
if options.configurationFile is None:
print 'Configuration file must be provided'
sys.exit(1)
configuration = Core.Environment.Configuration(filename=options.configurationFile)
jobTable = configuration.get('DATABASE', 'jobTable')
while(1):
connection = WebApp.UtilityFunctions.ConnectionFromConfiguration(configuration)
jobManager = WebApp.Data.JobManager(connection=connection, jobTable=jobTable)
selectQuery = """SELECT JobId FROM %s WHERE SentMail='0' AND State='Finished' AND NOT Email='Unknown'""" % (jobTable)
cursor = connection.cursor()
cursor.execute(selectQuery)
ids = [el[0] for el in cursor.fetchall()]
for id in ids:
job = WebApp.Data.Job(id, connection)
print 'Sending mail for job %s to %s' % (job.identification, job.email())
WebApp.UtilityFunctions.SendNotificationEmail(job)
connection.close()
time.sleep(30)
<commit_msg>Print the job error when mailing a failed-job notification<commit_after>#! /usr/bin/python
import sys, time, optparse, os
import PEATSA.Core as Core
import PEATSA.WebApp as WebApp
import ConstructJob
import MySQLdb
usage = "usage: %prog [options]"
parser = optparse.OptionParser(usage=usage, version="% 0.1", description=__doc__)
parser.add_option("-c", "--configurationFile", dest="configurationFile",
help="A PEATSA configuration file.", metavar="CONF")
(options, args) = parser.parse_args()
if options.configurationFile is None:
print 'Configuration file must be provided'
sys.exit(1)
configuration = Core.Environment.Configuration(filename=options.configurationFile)
jobTable = configuration.get('DATABASE', 'jobTable')
while(1):
connection = WebApp.UtilityFunctions.ConnectionFromConfiguration(configuration)
jobManager = WebApp.Data.JobManager(connection=connection, jobTable=jobTable)
selectQuery = """SELECT JobId FROM %s WHERE SentMail='0' AND State='Finished' AND NOT Email='Unknown'""" % (jobTable)
cursor = connection.cursor()
cursor.execute(selectQuery)
ids = [el[0] for el in cursor.fetchall()]
for id in ids:
job = WebApp.Data.Job(id, connection)
print 'Sending mail for job %s to %s' % (job.identification, job.email())
print job.error()
WebApp.UtilityFunctions.SendNotificationEmail(job)
connection.close()
time.sleep(30)
|
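The SELECT above only picks jobs with SentMail='0', so something must flip that flag after a successful send or the same job is re-mailed every 30 seconds; presumably SendNotificationEmail does so. A sketch of that update in isolation, assuming the JobId and SentMail columns from the record's query (table names cannot be bound parameters, so the table is interpolated as in the record, while the id is bound normally):

def mark_mailed(connection, job_table, job_id):
    cursor = connection.cursor()
    # %s for the table is filled by string formatting; %%s survives
    # formatting as the MySQLdb placeholder for the bound job id.
    cursor.execute("UPDATE %s SET SentMail='1' WHERE JobId=%%s" % job_table,
                   (job_id,))
    connection.commit()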
ddcb5b11a2f050e1eb8ae185888dde2ef1c66d72
|
dedupe/convenience.py
|
dedupe/convenience.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Convenience functions for in memory deduplication
"""
import collections
import dedupe.core
def dataSample(data, sample_size):
'''Randomly sample pairs of records from a data dictionary'''
data_list = data.values()
random_pairs = dedupe.core.randomPairs(len(data_list), sample_size)
return tuple((data_list[k1], data_list[k2]) for k1, k2 in random_pairs)
def blockData(data_d, blocker):
blocks = dedupe.core.OrderedDict({})
record_blocks = dedupe.core.OrderedDict({})
key_blocks = dedupe.core.OrderedDict({})
blocker.tfIdfBlocks(data_d.iteritems())
for (record_id, record) in data_d.iteritems():
for key in blocker((record_id, record)):
blocks.setdefault(key, {}).update({record_id : record})
blocked_records = tuple(block for block in blocks.values())
return blocked_records
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Convenience functions for in memory deduplication
"""
import collections
import dedupe.core
def dataSample(data, sample_size):
'''Randomly sample pairs of records from a data dictionary'''
data_list = data.values()
random_pairs = dedupe.core.randomPairs(len(data_list), sample_size)
return tuple((data_list[int(k1)], data_list[int(k2)]) for k1, k2 in random_pairs)
def blockData(data_d, blocker):
blocks = dedupe.core.OrderedDict({})
record_blocks = dedupe.core.OrderedDict({})
key_blocks = dedupe.core.OrderedDict({})
blocker.tfIdfBlocks(data_d.iteritems())
for (record_id, record) in data_d.iteritems():
for key in blocker((record_id, record)):
blocks.setdefault(key, {}).update({record_id : record})
blocked_records = tuple(block for block in blocks.values())
return blocked_records
|
Index of the list should be an int
|
Index of the list should be an int
|
Python
|
mit
|
dedupeio/dedupe-examples,dedupeio/dedupe,neozhangthe1/dedupe,tfmorris/dedupe,datamade/dedupe,pombredanne/dedupe,datamade/dedupe,nmiranda/dedupe,pombredanne/dedupe,neozhangthe1/dedupe,dedupeio/dedupe,davidkunio/dedupe,01-/dedupe,01-/dedupe,nmiranda/dedupe,tfmorris/dedupe,davidkunio/dedupe
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Convenience functions for in memory deduplication
"""
import collections
import dedupe.core
def dataSample(data, sample_size):
'''Randomly sample pairs of records from a data dictionary'''
data_list = data.values()
random_pairs = dedupe.core.randomPairs(len(data_list), sample_size)
return tuple((data_list[k1], data_list[k2]) for k1, k2 in random_pairs)
def blockData(data_d, blocker):
blocks = dedupe.core.OrderedDict({})
record_blocks = dedupe.core.OrderedDict({})
key_blocks = dedupe.core.OrderedDict({})
blocker.tfIdfBlocks(data_d.iteritems())
for (record_id, record) in data_d.iteritems():
for key in blocker((record_id, record)):
blocks.setdefault(key, {}).update({record_id : record})
blocked_records = tuple(block for block in blocks.values())
return blocked_records
Index of the list should be an int
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Convenience functions for in memory deduplication
"""
import collections
import dedupe.core
def dataSample(data, sample_size):
'''Randomly sample pairs of records from a data dictionary'''
data_list = data.values()
random_pairs = dedupe.core.randomPairs(len(data_list), sample_size)
return tuple((data_list[int(k1)], data_list[int(k2)]) for k1, k2 in random_pairs)
def blockData(data_d, blocker):
blocks = dedupe.core.OrderedDict({})
record_blocks = dedupe.core.OrderedDict({})
key_blocks = dedupe.core.OrderedDict({})
blocker.tfIdfBlocks(data_d.iteritems())
for (record_id, record) in data_d.iteritems():
for key in blocker((record_id, record)):
blocks.setdefault(key, {}).update({record_id : record})
blocked_records = tuple(block for block in blocks.values())
return blocked_records
|
<commit_before>#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Convenience functions for in memory deduplication
"""
import collections
import dedupe.core
def dataSample(data, sample_size):
'''Randomly sample pairs of records from a data dictionary'''
data_list = data.values()
random_pairs = dedupe.core.randomPairs(len(data_list), sample_size)
return tuple((data_list[k1], data_list[k2]) for k1, k2 in random_pairs)
def blockData(data_d, blocker):
blocks = dedupe.core.OrderedDict({})
record_blocks = dedupe.core.OrderedDict({})
key_blocks = dedupe.core.OrderedDict({})
blocker.tfIdfBlocks(data_d.iteritems())
for (record_id, record) in data_d.iteritems():
for key in blocker((record_id, record)):
blocks.setdefault(key, {}).update({record_id : record})
blocked_records = tuple(block for block in blocks.values())
return blocked_records
<commit_msg>Index of the list should be an int<commit_after>
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Convenience functions for in memory deduplication
"""
import collections
import dedupe.core
def dataSample(data, sample_size):
'''Randomly sample pairs of records from a data dictionary'''
data_list = data.values()
random_pairs = dedupe.core.randomPairs(len(data_list), sample_size)
return tuple((data_list[int(k1)], data_list[int(k2)]) for k1, k2 in random_pairs)
def blockData(data_d, blocker):
blocks = dedupe.core.OrderedDict({})
record_blocks = dedupe.core.OrderedDict({})
key_blocks = dedupe.core.OrderedDict({})
blocker.tfIdfBlocks(data_d.iteritems())
for (record_id, record) in data_d.iteritems():
for key in blocker((record_id, record)):
blocks.setdefault(key, {}).update({record_id : record})
blocked_records = tuple(block for block in blocks.values())
return blocked_records
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Convenience functions for in memory deduplication
"""
import collections
import dedupe.core
def dataSample(data, sample_size):
'''Randomly sample pairs of records from a data dictionary'''
data_list = data.values()
random_pairs = dedupe.core.randomPairs(len(data_list), sample_size)
return tuple((data_list[k1], data_list[k2]) for k1, k2 in random_pairs)
def blockData(data_d, blocker):
blocks = dedupe.core.OrderedDict({})
record_blocks = dedupe.core.OrderedDict({})
key_blocks = dedupe.core.OrderedDict({})
blocker.tfIdfBlocks(data_d.iteritems())
for (record_id, record) in data_d.iteritems():
for key in blocker((record_id, record)):
blocks.setdefault(key, {}).update({record_id : record})
blocked_records = tuple(block for block in blocks.values())
return blocked_records
Index of the list should be an int#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Convenience functions for in memory deduplication
"""
import collections
import dedupe.core
def dataSample(data, sample_size):
'''Randomly sample pairs of records from a data dictionary'''
data_list = data.values()
random_pairs = dedupe.core.randomPairs(len(data_list), sample_size)
return tuple((data_list[int(k1)], data_list[int(k2)]) for k1, k2 in random_pairs)
def blockData(data_d, blocker):
blocks = dedupe.core.OrderedDict({})
record_blocks = dedupe.core.OrderedDict({})
key_blocks = dedupe.core.OrderedDict({})
blocker.tfIdfBlocks(data_d.iteritems())
for (record_id, record) in data_d.iteritems():
for key in blocker((record_id, record)):
blocks.setdefault(key, {}).update({record_id : record})
blocked_records = tuple(block for block in blocks.values())
return blocked_records
|
<commit_before>#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Convenience functions for in memory deduplication
"""
import collections
import dedupe.core
def dataSample(data, sample_size):
'''Randomly sample pairs of records from a data dictionary'''
data_list = data.values()
random_pairs = dedupe.core.randomPairs(len(data_list), sample_size)
return tuple((data_list[k1], data_list[k2]) for k1, k2 in random_pairs)
def blockData(data_d, blocker):
blocks = dedupe.core.OrderedDict({})
record_blocks = dedupe.core.OrderedDict({})
key_blocks = dedupe.core.OrderedDict({})
blocker.tfIdfBlocks(data_d.iteritems())
for (record_id, record) in data_d.iteritems():
for key in blocker((record_id, record)):
blocks.setdefault(key, {}).update({record_id : record})
blocked_records = tuple(block for block in blocks.values())
return blocked_records
<commit_msg>Index of the list should be an int<commit_after>#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Convenience functions for in memory deduplication
"""
import collections
import dedupe.core
def dataSample(data, sample_size):
'''Randomly sample pairs of records from a data dictionary'''
data_list = data.values()
random_pairs = dedupe.core.randomPairs(len(data_list), sample_size)
return tuple((data_list[int(k1)], data_list[int(k2)]) for k1, k2 in random_pairs)
def blockData(data_d, blocker):
blocks = dedupe.core.OrderedDict({})
record_blocks = dedupe.core.OrderedDict({})
key_blocks = dedupe.core.OrderedDict({})
blocker.tfIdfBlocks(data_d.iteritems())
for (record_id, record) in data_d.iteritems():
for key in blocker((record_id, record)):
blocks.setdefault(key, {}).update({record_id : record})
blocked_records = tuple(block for block in blocks.values())
return blocked_records
|
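The cast is needed because randomPairs evidently yields numeric types that are not plain ints (numpy values, most likely); Python list indexing rejects those. A two-line reproduction, with numpy assumed only for illustration:

import numpy

data_list = ['a', 'b', 'c']
k = numpy.float64(2)
# data_list[k] raises TypeError: list indices must be integers
data_list[int(k)]  # -> 'c'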
c8b28cc0afc45c2e7b7ca83a41bc67804c7e9506
|
src/samples/showvideo.py
|
src/samples/showvideo.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from libavg import avg, app
import sys
class VideoPlayer(app.MainDiv):
def init(self):
self.videoNode = avg.VideoNode(
href=sys.argv[1],
parent=self)
self.videoNode.play()
app.App().run(VideoPlayer(), app_resolution='1920x1080', app_window_size='720x450')
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from libavg import avg, app
import sys
class VideoPlayer(app.MainDiv):
def onArgvParserCreated(self, parser):
parser.set_usage("%prog <video>")
def onArgvParsed(self, options, args, parser):
if len(args) != 1:
parser.print_help()
sys.exit(1)
self.__dir=args[0]
def onInit(self):
self.videoNode = avg.VideoNode(
href=self.__dir,
size=(500, 500),
parent=self)
self.videoNode.play()
app.App().run(VideoPlayer(), app_resolution='1920x1080', app_window_size='720x450')
|
Add checks for script parameters and correct onInit name
|
Add checks for script parameters and correct onInit name
|
Python
|
lgpl-2.1
|
libavg/libavg,libavg/libavg,libavg/libavg,libavg/libavg
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from libavg import avg, app
import sys
class VideoPlayer(app.MainDiv):
def init(self):
self.videoNode = avg.VideoNode(
href=sys.argv[1],
parent=self)
self.videoNode.play()
app.App().run(VideoPlayer(), app_resolution='1920x1080', app_window_size='720x450')
Add checks for script parameters and correct onInit name
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from libavg import avg, app
import sys
class VideoPlayer(app.MainDiv):
def onArgvParserCreated(self, parser):
parser.set_usage("%prog <video>")
def onArgvParsed(self, options, args, parser):
if len(args) != 1:
parser.print_help()
sys.exit(1)
self.__dir=args[0]
def onInit(self):
self.videoNode = avg.VideoNode(
href=self.__dir,
size=(500, 500),
parent=self)
self.videoNode.play()
app.App().run(VideoPlayer(), app_resolution='1920x1080', app_window_size='720x450')
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from libavg import avg, app
import sys
class VideoPlayer(app.MainDiv):
def init(self):
self.videoNode = avg.VideoNode(
href=sys.argv[1],
parent=self)
self.videoNode.play()
app.App().run(VideoPlayer(), app_resolution='1920x1080', app_window_size='720x450')
<commit_msg>Add checks for script parameters and correct onInit name<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from libavg import avg, app
import sys
class VideoPlayer(app.MainDiv):
def onArgvParserCreated(self, parser):
parser.set_usage("%prog <video>")
def onArgvParsed(self, options, args, parser):
if len(args) != 1:
parser.print_help()
sys.exit(1)
self.__dir=args[0]
def onInit(self):
self.videoNode = avg.VideoNode(
href=self.__dir,
size=(500, 500),
parent=self)
self.videoNode.play()
app.App().run(VideoPlayer(), app_resolution='1920x1080', app_window_size='720x450')
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from libavg import avg, app
import sys
class VideoPlayer(app.MainDiv):
def init(self):
self.videoNode = avg.VideoNode(
href=sys.argv[1],
parent=self)
self.videoNode.play()
app.App().run(VideoPlayer(), app_resolution='1920x1080', app_window_size='720x450')
Add checks for script parameters and correct onInit name#!/usr/bin/env python
# -*- coding: utf-8 -*-
from libavg import avg, app
import sys
class VideoPlayer(app.MainDiv):
def onArgvParserCreated(self, parser):
parser.set_usage("%prog <video>")
def onArgvParsed(self, options, args, parser):
if len(args) != 1:
parser.print_help()
sys.exit(1)
self.__dir=args[0]
def onInit(self):
self.videoNode = avg.VideoNode(
href=self.__dir,
size=(500, 500),
parent=self)
self.videoNode.play()
app.App().run(VideoPlayer(), app_resolution='1920x1080', app_window_size='720x450')
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from libavg import avg, app
import sys
class VideoPlayer(app.MainDiv):
def init(self):
self.videoNode = avg.VideoNode(
href=sys.argv[1],
parent=self)
self.videoNode.play()
app.App().run(VideoPlayer(), app_resolution='1920x1080', app_window_size='720x450')
<commit_msg>Add checks for script parameters and correct onInit name<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from libavg import avg, app
import sys
class VideoPlayer(app.MainDiv):
def onArgvParserCreated(self, parser):
parser.set_usage("%prog <video>")
def onArgvParsed(self, options, args, parser):
if len(args) != 1:
parser.print_help()
sys.exit(1)
self.__dir=args[0]
def onInit(self):
self.videoNode = avg.VideoNode(
href=self.__dir,
size=(500, 500),
parent=self)
self.videoNode.play()
app.App().run(VideoPlayer(), app_resolution='1920x1080', app_window_size='720x450')
|
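Editor's note: the commit above swaps raw sys.argv access for libavg's parser hooks, whose parser exposes optparse-style methods (set_usage, print_help). A standalone sketch of the same validate-or-exit pattern using only stdlib optparse, with a hypothetical usage string:

import sys
from optparse import OptionParser

parser = OptionParser(usage="%prog <video>")
options, args = parser.parse_args()
if len(args) != 1:
    # wrong number of positional arguments: show help and bail out
    parser.print_help()
    sys.exit(1)
video_path = args[0]
print("would play: %s" % video_path)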
97919d06b252af37e7c955ff800b309599e2debc
|
usingnamespace/forms/user.py
|
usingnamespace/forms/user.py
|
import colander
import deform
from csrf import CSRFSchema
class LoginForm(CSRFSchema):
"""The user login form."""
username = colander.SchemaNode(colander.String(),
title="Username",
widget=deform.widget.TextInputWidget(css_class='form-control'),
)
password = colander.SchemaNode(colander.String(),
title="Password",
validator=colander.Length(min=5),
widget=deform.widget.PasswordWidget(size=20,
css_class='form-control'),
)
|
import colander
import deform
from schemaform import SchemaFormMixin
from csrf import CSRFSchema
class LoginForm(CSRFSchema, SchemaFormMixin):
"""The user login form."""
    __buttons__ = (deform.form.Button(name=_("Submit"), css_class='btn btn-primary'),)
    username = colander.SchemaNode(colander.String(),
            title="Username",
            widget=deform.widget.TextInputWidget(css_class='form-control'),
)
password = colander.SchemaNode(colander.String(),
title="Password",
validator=colander.Length(min=5),
widget=deform.widget.PasswordWidget(size=20,
css_class='form-control'),
)
|
Add SchemaFormMixin to the LoginForm
|
Add SchemaFormMixin to the LoginForm
|
Python
|
isc
|
usingnamespace/usingnamespace
|
import colander
import deform
from csrf import CSRFSchema
class LoginForm(CSRFSchema):
"""The user login form."""
username = colander.SchemaNode(colander.String(),
title="Username",
widget=deform.widget.TextInputWidget(css_class='form-control'),
)
password = colander.SchemaNode(colander.String(),
title="Password",
validator=colander.Length(min=5),
widget=deform.widget.PasswordWidget(size=20,
css_class='form-control'),
)
Add SchemaFormMixin to the LoginForm
|
import colander
import deform
from schemaform import SchemaFormMixin
from csrf import CSRFSchema
class LoginForm(CSRFSchema, SchemaFormMixin):
"""The user login form."""
    __buttons__ = (deform.form.Button(name=_("Submit"), css_class='btn btn-primary'),)
    username = colander.SchemaNode(colander.String(),
            title="Username",
            widget=deform.widget.TextInputWidget(css_class='form-control'),
)
password = colander.SchemaNode(colander.String(),
title="Password",
validator=colander.Length(min=5),
widget=deform.widget.PasswordWidget(size=20,
css_class='form-control'),
)
|
<commit_before>import colander
import deform
from csrf import CSRFSchema
class LoginForm(CSRFSchema):
"""The user login form."""
username = colander.SchemaNode(colander.String(),
title="Username",
widget=deform.widget.TextInputWidget(css_class='form-control'),
)
password = colander.SchemaNode(colander.String(),
title="Password",
validator=colander.Length(min=5),
widget=deform.widget.PasswordWidget(size=20,
css_class='form-control'),
)
<commit_msg>Add SchemaFormMixin to the LoginForm<commit_after>
|
import colander
import deform
from schemaform import SchemaFormMixin
from csrf import CSRFSchema
class LoginForm(CSRFSchema, SchemaFormMixin):
"""The user login form."""
    __buttons__ = (deform.form.Button(name=_("Submit"), css_class='btn btn-primary'),)
    username = colander.SchemaNode(colander.String(),
            title="Username",
            widget=deform.widget.TextInputWidget(css_class='form-control'),
)
password = colander.SchemaNode(colander.String(),
title="Password",
validator=colander.Length(min=5),
widget=deform.widget.PasswordWidget(size=20,
css_class='form-control'),
)
|
import colander
import deform
from csrf import CSRFSchema
class LoginForm(CSRFSchema):
"""The user login form."""
username = colander.SchemaNode(colander.String(),
title="Username",
widget=deform.widget.TextInputWidget(css_class='form-control'),
)
password = colander.SchemaNode(colander.String(),
title="Password",
validator=colander.Length(min=5),
widget=deform.widget.PasswordWidget(size=20,
css_class='form-control'),
)
Add SchemaFormMixin to the LoginFormimport colander
import deform
from schemaform import SchemaFormMixin
from csrf import CSRFSchema
class LoginForm(CSRFSchema, SchemaFormMixin):
"""The user login form."""
    __buttons__ = (deform.form.Button(name=_("Submit"), css_class='btn btn-primary'),)
    username = colander.SchemaNode(colander.String(),
            title="Username",
            widget=deform.widget.TextInputWidget(css_class='form-control'),
)
password = colander.SchemaNode(colander.String(),
title="Password",
validator=colander.Length(min=5),
widget=deform.widget.PasswordWidget(size=20,
css_class='form-control'),
)
|
<commit_before>import colander
import deform
from csrf import CSRFSchema
class LoginForm(CSRFSchema):
"""The user login form."""
username = colander.SchemaNode(colander.String(),
title="Username",
widget=deform.widget.TextInputWidget(css_class='form-control'),
)
password = colander.SchemaNode(colander.String(),
title="Password",
validator=colander.Length(min=5),
widget=deform.widget.PasswordWidget(size=20,
css_class='form-control'),
)
<commit_msg>Add SchemaFormMixin to the LoginForm<commit_after>import colander
import deform
from schemaform import SchemaFormMixin
from csrf import CSRFSchema
class LoginForm(CSRFSchema, SchemaFormMixin):
"""The user login form."""
    __buttons__ = (deform.form.Button(name=_("Submit"), css_class='btn btn-primary'),)
    username = colander.SchemaNode(colander.String(),
            title="Username",
            widget=deform.widget.TextInputWidget(css_class='form-control'),
)
password = colander.SchemaNode(colander.String(),
title="Password",
validator=colander.Length(min=5),
widget=deform.widget.PasswordWidget(size=20,
css_class='form-control'),
)
|
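Editor's note: the new_contents in this record only parse once __buttons__ is a class-level attribute; the dump had the line spliced into the middle of the SchemaNode call, which is a syntax error, and it has been moved in place above. Note also that _ is assumed to be a gettext-style translator provided elsewhere in the package. A colander-free sketch of the class-attribute distinction:

class Form(object):
    __buttons__ = ("submit",)       # class attribute, read by a form mixin
    field = {"title": "Username"}   # field definitions sit alongside it

print(Form.__buttons__)  # ('submit',)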
85458434391144aa40101ba9f97c4ec47c975438
|
zsh/scripts/pyjson_helper.py
|
zsh/scripts/pyjson_helper.py
|
import argparse
import json
parser = argparse.ArgumentParser()
parser.add_argument('json_file')
args = parser.parse_args()
if __name__ == '__main__':
with open(args.json_file) as f:
data = json.load(f)
print '\n###'
print 'Loaded JSON in variable `data`.'
print '###'
|
import argparse
import json
import inspect
parser = argparse.ArgumentParser()
parser.add_argument('json_file')
args = parser.parse_args()
def load_json(json_file):
with open(json_file) as f:
return json.load(f)
if __name__ == '__main__':
data = load_json(args.json_file)
print '\n'
print inspect.getsource(load_json)
print '\n###'
print 'Loaded JSON in variable `data`.'
print '###'
|
Print code used to load json in python helper
|
zsh: Print code used to load json in python helper
|
Python
|
mit
|
achalddave/dotfiles,achalddave/dotfiles
|
import argparse
import json
parser = argparse.ArgumentParser()
parser.add_argument('json_file')
args = parser.parse_args()
if __name__ == '__main__':
with open(args.json_file) as f:
data = json.load(f)
print '\n###'
print 'Loaded JSON in variable `data`.'
print '###'
zsh: Print code used to load json in python helper
|
import argparse
import json
import inspect
parser = argparse.ArgumentParser()
parser.add_argument('json_file')
args = parser.parse_args()
def load_json(json_file):
with open(json_file) as f:
return json.load(f)
if __name__ == '__main__':
data = load_json(args.json_file)
print '\n'
print inspect.getsource(load_json)
print '\n###'
print 'Loaded JSON in variable `data`.'
print '###'
|
<commit_before>import argparse
import json
parser = argparse.ArgumentParser()
parser.add_argument('json_file')
args = parser.parse_args()
if __name__ == '__main__':
with open(args.json_file) as f:
data = json.load(f)
print '\n###'
print 'Loaded JSON in variable `data`.'
print '###'
<commit_msg>zsh: Print code used to load json in python helper<commit_after>
|
import argparse
import json
import inspect
parser = argparse.ArgumentParser()
parser.add_argument('json_file')
args = parser.parse_args()
def load_json(json_file):
with open(json_file) as f:
return json.load(f)
if __name__ == '__main__':
data = load_json(args.json_file)
print '\n'
print inspect.getsource(load_json)
print '\n###'
print 'Loaded JSON in variable `data`.'
print '###'
|
import argparse
import json
parser = argparse.ArgumentParser()
parser.add_argument('json_file')
args = parser.parse_args()
if __name__ == '__main__':
with open(args.json_file) as f:
data = json.load(f)
print '\n###'
print 'Loaded JSON in variable `data`.'
print '###'
zsh: Print code used to load json in python helperimport argparse
import json
import inspect
parser = argparse.ArgumentParser()
parser.add_argument('json_file')
args = parser.parse_args()
def load_json(json_file):
with open(json_file) as f:
return json.load(f)
if __name__ == '__main__':
data = load_json(args.json_file)
print '\n'
print inspect.getsource(load_json)
print '\n###'
print 'Loaded JSON in variable `data`.'
print '###'
|
<commit_before>import argparse
import json
parser = argparse.ArgumentParser()
parser.add_argument('json_file')
args = parser.parse_args()
if __name__ == '__main__':
with open(args.json_file) as f:
data = json.load(f)
print '\n###'
print 'Loaded JSON in variable `data`.'
print '###'
<commit_msg>zsh: Print code used to load json in python helper<commit_after>import argparse
import json
import inspect
parser = argparse.ArgumentParser()
parser.add_argument('json_file')
args = parser.parse_args()
def load_json(json_file):
with open(json_file) as f:
return json.load(f)
if __name__ == '__main__':
data = load_json(args.json_file)
print '\n'
print inspect.getsource(load_json)
print '\n###'
print 'Loaded JSON in variable `data`.'
print '###'
|
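Editor's note: a minimal demo of the inspect.getsource trick the helper above uses to echo its own loading code. The stub function is hypothetical; run this from a file, not a REPL, so the source text is available to inspect.

import inspect

def load_json_stub(path):
    # stand-in for the helper's load_json; only here so getsource has a target
    return {"path": path}

print(inspect.getsource(load_json_stub))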
4102320e908dfa6e2fc320d73f118670ad5b1501
|
tests/basics/fun_name.py
|
tests/basics/fun_name.py
|
def Fun():
pass
class A:
def __init__(self):
pass
def Fun(self):
pass
try:
print(Fun.__name__)
print(A.__init__.__name__)
print(A.Fun.__name__)
print(A().Fun.__name__)
except AttributeError:
print('SKIP')
raise SystemExit
# __name__ of a bound native method is not implemented in uPy
# the test here is to make sure it doesn't crash
try:
str((1).to_bytes.__name__)
except AttributeError:
pass
|
def Fun():
pass
class A:
def __init__(self):
pass
def Fun(self):
pass
try:
print(Fun.__name__)
print(A.__init__.__name__)
print(A.Fun.__name__)
print(A().Fun.__name__)
except AttributeError:
print('SKIP')
raise SystemExit
# __name__ of a bound native method is not implemented in uPy
# the test here is to make sure it doesn't crash
try:
str((1).to_bytes.__name__)
except AttributeError:
pass
# name of a function that has closed over variables
def outer():
x = 1
def inner():
return x
return inner
print(outer.__name__)
|
Add test for getting name of func with closed over locals.
|
tests/basics: Add test for getting name of func with closed over locals.
Tests correct decoding of the prelude to get the function name.
|
Python
|
mit
|
pozetroninc/micropython,adafruit/circuitpython,MrSurly/micropython,henriknelson/micropython,selste/micropython,kerneltask/micropython,trezor/micropython,pozetroninc/micropython,selste/micropython,pozetroninc/micropython,pramasoul/micropython,kerneltask/micropython,henriknelson/micropython,henriknelson/micropython,tobbad/micropython,trezor/micropython,kerneltask/micropython,pramasoul/micropython,pramasoul/micropython,tobbad/micropython,henriknelson/micropython,trezor/micropython,bvernoux/micropython,pramasoul/micropython,MrSurly/micropython,pozetroninc/micropython,bvernoux/micropython,selste/micropython,tobbad/micropython,selste/micropython,adafruit/circuitpython,pozetroninc/micropython,kerneltask/micropython,tobbad/micropython,bvernoux/micropython,selste/micropython,bvernoux/micropython,MrSurly/micropython,trezor/micropython,kerneltask/micropython,adafruit/circuitpython,adafruit/circuitpython,bvernoux/micropython,MrSurly/micropython,trezor/micropython,pramasoul/micropython,tobbad/micropython,MrSurly/micropython,henriknelson/micropython,adafruit/circuitpython,adafruit/circuitpython
|
def Fun():
pass
class A:
def __init__(self):
pass
def Fun(self):
pass
try:
print(Fun.__name__)
print(A.__init__.__name__)
print(A.Fun.__name__)
print(A().Fun.__name__)
except AttributeError:
print('SKIP')
raise SystemExit
# __name__ of a bound native method is not implemented in uPy
# the test here is to make sure it doesn't crash
try:
str((1).to_bytes.__name__)
except AttributeError:
pass
tests/basics: Add test for getting name of func with closed over locals.
Tests correct decoding of the prelude to get the function name.
|
def Fun():
pass
class A:
def __init__(self):
pass
def Fun(self):
pass
try:
print(Fun.__name__)
print(A.__init__.__name__)
print(A.Fun.__name__)
print(A().Fun.__name__)
except AttributeError:
print('SKIP')
raise SystemExit
# __name__ of a bound native method is not implemented in uPy
# the test here is to make sure it doesn't crash
try:
str((1).to_bytes.__name__)
except AttributeError:
pass
# name of a function that has closed over variables
def outer():
x = 1
def inner():
return x
return inner
print(outer.__name__)
|
<commit_before>def Fun():
pass
class A:
def __init__(self):
pass
def Fun(self):
pass
try:
print(Fun.__name__)
print(A.__init__.__name__)
print(A.Fun.__name__)
print(A().Fun.__name__)
except AttributeError:
print('SKIP')
raise SystemExit
# __name__ of a bound native method is not implemented in uPy
# the test here is to make sure it doesn't crash
try:
str((1).to_bytes.__name__)
except AttributeError:
pass
<commit_msg>tests/basics: Add test for getting name of func with closed over locals.
Tests correct decoding of the prelude to get the function name.<commit_after>
|
def Fun():
pass
class A:
def __init__(self):
pass
def Fun(self):
pass
try:
print(Fun.__name__)
print(A.__init__.__name__)
print(A.Fun.__name__)
print(A().Fun.__name__)
except AttributeError:
print('SKIP')
raise SystemExit
# __name__ of a bound native method is not implemented in uPy
# the test here is to make sure it doesn't crash
try:
str((1).to_bytes.__name__)
except AttributeError:
pass
# name of a function that has closed over variables
def outer():
x = 1
def inner():
return x
return inner
print(outer.__name__)
|
def Fun():
pass
class A:
def __init__(self):
pass
def Fun(self):
pass
try:
print(Fun.__name__)
print(A.__init__.__name__)
print(A.Fun.__name__)
print(A().Fun.__name__)
except AttributeError:
print('SKIP')
raise SystemExit
# __name__ of a bound native method is not implemented in uPy
# the test here is to make sure it doesn't crash
try:
str((1).to_bytes.__name__)
except AttributeError:
pass
tests/basics: Add test for getting name of func with closed over locals.
Tests correct decoding of the prelude to get the function name.def Fun():
pass
class A:
def __init__(self):
pass
def Fun(self):
pass
try:
print(Fun.__name__)
print(A.__init__.__name__)
print(A.Fun.__name__)
print(A().Fun.__name__)
except AttributeError:
print('SKIP')
raise SystemExit
# __name__ of a bound native method is not implemented in uPy
# the test here is to make sure it doesn't crash
try:
str((1).to_bytes.__name__)
except AttributeError:
pass
# name of a function that has closed over variables
def outer():
x = 1
def inner():
return x
return inner
print(outer.__name__)
|
<commit_before>def Fun():
pass
class A:
def __init__(self):
pass
def Fun(self):
pass
try:
print(Fun.__name__)
print(A.__init__.__name__)
print(A.Fun.__name__)
print(A().Fun.__name__)
except AttributeError:
print('SKIP')
raise SystemExit
# __name__ of a bound native method is not implemented in uPy
# the test here is to make sure it doesn't crash
try:
str((1).to_bytes.__name__)
except AttributeError:
pass
<commit_msg>tests/basics: Add test for getting name of func with closed over locals.
Tests correct decoding of the prelude to get the function name.<commit_after>def Fun():
pass
class A:
def __init__(self):
pass
def Fun(self):
pass
try:
print(Fun.__name__)
print(A.__init__.__name__)
print(A.Fun.__name__)
print(A().Fun.__name__)
except AttributeError:
print('SKIP')
raise SystemExit
# __name__ of a bound native method is not implemented in uPy
# the test here is to make sure it doesn't crash
try:
str((1).to_bytes.__name__)
except AttributeError:
pass
# name of a function that has closed over variables
def outer():
x = 1
def inner():
return x
return inner
print(outer.__name__)
|
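Editor's note: the CPython behaviour the new test pins down, runnable on its own. Closing over a variable does not change a function's __name__, which still comes from the def statement.

def outer():
    x = 1
    def inner():
        return x
    return inner

f = outer()
print(outer.__name__, f.__name__)  # outer inner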
8fa89c8642721896b7b97ff928bc66e65470691a
|
pinax/stripe/tests/test_utils.py
|
pinax/stripe/tests/test_utils.py
|
import datetime
from django.test import TestCase
from django.utils import timezone
from ..utils import convert_tstamp, plan_from_stripe_id
class TestTimestampConversion(TestCase):
def test_conversion_without_field_name(self):
stamp = convert_tstamp(1365567407)
self.assertEquals(
stamp,
datetime.datetime(2013, 4, 10, 4, 16, 47, tzinfo=timezone.utc)
)
def test_conversion_with_field_name(self):
stamp = convert_tstamp({"my_date": 1365567407}, "my_date")
self.assertEquals(
stamp,
datetime.datetime(2013, 4, 10, 4, 16, 47, tzinfo=timezone.utc)
)
def test_conversion_with_invalid_field_name(self):
stamp = convert_tstamp({"my_date": 1365567407}, "foo")
self.assertEquals(
stamp,
None
)
def test_conversion_with_field_name_but_none(self):
stamp = convert_tstamp({"my_date": None}, "my_date")
self.assertEquals(
stamp,
None
)
class TestPlanFromStripeId(TestCase):
def test_plan_from_stripe_id_valid(self):
self.assertEquals(
plan_from_stripe_id("pro-monthly"),
"pro"
)
def test_plan_from_stripe_id_invalid(self):
self.assertIsNone(plan_from_stripe_id("invalide"))
|
import datetime
from django.test import TestCase
from django.utils import timezone
from ..utils import convert_tstamp
class TestTimestampConversion(TestCase):
def test_conversion_without_field_name(self):
stamp = convert_tstamp(1365567407)
self.assertEquals(
stamp,
datetime.datetime(2013, 4, 10, 4, 16, 47, tzinfo=timezone.utc)
)
def test_conversion_with_field_name(self):
stamp = convert_tstamp({"my_date": 1365567407}, "my_date")
self.assertEquals(
stamp,
datetime.datetime(2013, 4, 10, 4, 16, 47, tzinfo=timezone.utc)
)
def test_conversion_with_invalid_field_name(self):
stamp = convert_tstamp({"my_date": 1365567407}, "foo")
self.assertEquals(
stamp,
None
)
def test_conversion_with_field_name_but_none(self):
stamp = convert_tstamp({"my_date": None}, "my_date")
self.assertEquals(
stamp,
None
)
|
Remove test for function that no longer exists
|
Remove test for function that no longer exists
|
Python
|
mit
|
pinax/django-stripe-payments
|
import datetime
from django.test import TestCase
from django.utils import timezone
from ..utils import convert_tstamp, plan_from_stripe_id
class TestTimestampConversion(TestCase):
def test_conversion_without_field_name(self):
stamp = convert_tstamp(1365567407)
self.assertEquals(
stamp,
datetime.datetime(2013, 4, 10, 4, 16, 47, tzinfo=timezone.utc)
)
def test_conversion_with_field_name(self):
stamp = convert_tstamp({"my_date": 1365567407}, "my_date")
self.assertEquals(
stamp,
datetime.datetime(2013, 4, 10, 4, 16, 47, tzinfo=timezone.utc)
)
def test_conversion_with_invalid_field_name(self):
stamp = convert_tstamp({"my_date": 1365567407}, "foo")
self.assertEquals(
stamp,
None
)
def test_conversion_with_field_name_but_none(self):
stamp = convert_tstamp({"my_date": None}, "my_date")
self.assertEquals(
stamp,
None
)
class TestPlanFromStripeId(TestCase):
def test_plan_from_stripe_id_valid(self):
self.assertEquals(
plan_from_stripe_id("pro-monthly"),
"pro"
)
def test_plan_from_stripe_id_invalid(self):
self.assertIsNone(plan_from_stripe_id("invalide"))
Remove test for function that no longer exists
|
import datetime
from django.test import TestCase
from django.utils import timezone
from ..utils import convert_tstamp
class TestTimestampConversion(TestCase):
def test_conversion_without_field_name(self):
stamp = convert_tstamp(1365567407)
self.assertEquals(
stamp,
datetime.datetime(2013, 4, 10, 4, 16, 47, tzinfo=timezone.utc)
)
def test_conversion_with_field_name(self):
stamp = convert_tstamp({"my_date": 1365567407}, "my_date")
self.assertEquals(
stamp,
datetime.datetime(2013, 4, 10, 4, 16, 47, tzinfo=timezone.utc)
)
def test_conversion_with_invalid_field_name(self):
stamp = convert_tstamp({"my_date": 1365567407}, "foo")
self.assertEquals(
stamp,
None
)
def test_conversion_with_field_name_but_none(self):
stamp = convert_tstamp({"my_date": None}, "my_date")
self.assertEquals(
stamp,
None
)
|
<commit_before>import datetime
from django.test import TestCase
from django.utils import timezone
from ..utils import convert_tstamp, plan_from_stripe_id
class TestTimestampConversion(TestCase):
def test_conversion_without_field_name(self):
stamp = convert_tstamp(1365567407)
self.assertEquals(
stamp,
datetime.datetime(2013, 4, 10, 4, 16, 47, tzinfo=timezone.utc)
)
def test_conversion_with_field_name(self):
stamp = convert_tstamp({"my_date": 1365567407}, "my_date")
self.assertEquals(
stamp,
datetime.datetime(2013, 4, 10, 4, 16, 47, tzinfo=timezone.utc)
)
def test_conversion_with_invalid_field_name(self):
stamp = convert_tstamp({"my_date": 1365567407}, "foo")
self.assertEquals(
stamp,
None
)
def test_conversion_with_field_name_but_none(self):
stamp = convert_tstamp({"my_date": None}, "my_date")
self.assertEquals(
stamp,
None
)
class TestPlanFromStripeId(TestCase):
def test_plan_from_stripe_id_valid(self):
self.assertEquals(
plan_from_stripe_id("pro-monthly"),
"pro"
)
def test_plan_from_stripe_id_invalid(self):
self.assertIsNone(plan_from_stripe_id("invalide"))
<commit_msg>Remove test for function that no longer exists<commit_after>
|
import datetime
from django.test import TestCase
from django.utils import timezone
from ..utils import convert_tstamp
class TestTimestampConversion(TestCase):
def test_conversion_without_field_name(self):
stamp = convert_tstamp(1365567407)
self.assertEquals(
stamp,
datetime.datetime(2013, 4, 10, 4, 16, 47, tzinfo=timezone.utc)
)
def test_conversion_with_field_name(self):
stamp = convert_tstamp({"my_date": 1365567407}, "my_date")
self.assertEquals(
stamp,
datetime.datetime(2013, 4, 10, 4, 16, 47, tzinfo=timezone.utc)
)
def test_conversion_with_invalid_field_name(self):
stamp = convert_tstamp({"my_date": 1365567407}, "foo")
self.assertEquals(
stamp,
None
)
def test_conversion_with_field_name_but_none(self):
stamp = convert_tstamp({"my_date": None}, "my_date")
self.assertEquals(
stamp,
None
)
|
import datetime
from django.test import TestCase
from django.utils import timezone
from ..utils import convert_tstamp, plan_from_stripe_id
class TestTimestampConversion(TestCase):
def test_conversion_without_field_name(self):
stamp = convert_tstamp(1365567407)
self.assertEquals(
stamp,
datetime.datetime(2013, 4, 10, 4, 16, 47, tzinfo=timezone.utc)
)
def test_conversion_with_field_name(self):
stamp = convert_tstamp({"my_date": 1365567407}, "my_date")
self.assertEquals(
stamp,
datetime.datetime(2013, 4, 10, 4, 16, 47, tzinfo=timezone.utc)
)
def test_conversion_with_invalid_field_name(self):
stamp = convert_tstamp({"my_date": 1365567407}, "foo")
self.assertEquals(
stamp,
None
)
def test_conversion_with_field_name_but_none(self):
stamp = convert_tstamp({"my_date": None}, "my_date")
self.assertEquals(
stamp,
None
)
class TestPlanFromStripeId(TestCase):
def test_plan_from_stripe_id_valid(self):
self.assertEquals(
plan_from_stripe_id("pro-monthly"),
"pro"
)
def test_plan_from_stripe_id_invalid(self):
self.assertIsNone(plan_from_stripe_id("invalide"))
Remove test for function that no longer existsimport datetime
from django.test import TestCase
from django.utils import timezone
from ..utils import convert_tstamp
class TestTimestampConversion(TestCase):
def test_conversion_without_field_name(self):
stamp = convert_tstamp(1365567407)
self.assertEquals(
stamp,
datetime.datetime(2013, 4, 10, 4, 16, 47, tzinfo=timezone.utc)
)
def test_conversion_with_field_name(self):
stamp = convert_tstamp({"my_date": 1365567407}, "my_date")
self.assertEquals(
stamp,
datetime.datetime(2013, 4, 10, 4, 16, 47, tzinfo=timezone.utc)
)
def test_conversion_with_invalid_field_name(self):
stamp = convert_tstamp({"my_date": 1365567407}, "foo")
self.assertEquals(
stamp,
None
)
def test_conversion_with_field_name_but_none(self):
stamp = convert_tstamp({"my_date": None}, "my_date")
self.assertEquals(
stamp,
None
)
|
<commit_before>import datetime
from django.test import TestCase
from django.utils import timezone
from ..utils import convert_tstamp, plan_from_stripe_id
class TestTimestampConversion(TestCase):
def test_conversion_without_field_name(self):
stamp = convert_tstamp(1365567407)
self.assertEquals(
stamp,
datetime.datetime(2013, 4, 10, 4, 16, 47, tzinfo=timezone.utc)
)
def test_conversion_with_field_name(self):
stamp = convert_tstamp({"my_date": 1365567407}, "my_date")
self.assertEquals(
stamp,
datetime.datetime(2013, 4, 10, 4, 16, 47, tzinfo=timezone.utc)
)
def test_conversion_with_invalid_field_name(self):
stamp = convert_tstamp({"my_date": 1365567407}, "foo")
self.assertEquals(
stamp,
None
)
def test_conversion_with_field_name_but_none(self):
stamp = convert_tstamp({"my_date": None}, "my_date")
self.assertEquals(
stamp,
None
)
class TestPlanFromStripeId(TestCase):
def test_plan_from_stripe_id_valid(self):
self.assertEquals(
plan_from_stripe_id("pro-monthly"),
"pro"
)
def test_plan_from_stripe_id_invalid(self):
self.assertIsNone(plan_from_stripe_id("invalide"))
<commit_msg>Remove test for function that no longer exists<commit_after>import datetime
from django.test import TestCase
from django.utils import timezone
from ..utils import convert_tstamp
class TestTimestampConversion(TestCase):
def test_conversion_without_field_name(self):
stamp = convert_tstamp(1365567407)
self.assertEquals(
stamp,
datetime.datetime(2013, 4, 10, 4, 16, 47, tzinfo=timezone.utc)
)
def test_conversion_with_field_name(self):
stamp = convert_tstamp({"my_date": 1365567407}, "my_date")
self.assertEquals(
stamp,
datetime.datetime(2013, 4, 10, 4, 16, 47, tzinfo=timezone.utc)
)
def test_conversion_with_invalid_field_name(self):
stamp = convert_tstamp({"my_date": 1365567407}, "foo")
self.assertEquals(
stamp,
None
)
def test_conversion_with_field_name_but_none(self):
stamp = convert_tstamp({"my_date": None}, "my_date")
self.assertEquals(
stamp,
None
)
|
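Editor's note: the surviving tests all target convert_tstamp. Below is a hedged reimplementation that mirrors only the behaviour those tests assert; the real helper lives in pinax.stripe.utils and may differ in detail.

import datetime

def convert_tstamp(response, field_name=None):
    # with no field name, response is the timestamp itself
    stamp = response if field_name is None else response.get(field_name)
    if stamp is None:
        return None
    return datetime.datetime.fromtimestamp(stamp, tz=datetime.timezone.utc)

print(convert_tstamp(1365567407))                    # 2013-04-10 04:16:47+00:00
print(convert_tstamp({"my_date": None}, "my_date"))  # None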
27b1d403540503f6e9d0ccd679918e3efe63ecf7
|
tests/test_navigation.py
|
tests/test_navigation.py
|
def get_menu_titles(page) -> list:
page.wait_for_load_state()
menu_list = page.query_selector_all("//*[@class='toctree-wrapper compound']/ul/li/a")
return [title.as_element().inner_text() for title in menu_list]
flag = True
def test_check_titles(page):
global flag
if(flag):
page.goto("index.html")
page.set_viewport_size({"width": 1050, "height": 600})
menu_list = get_menu_titles(page)
page.wait_for_load_state()
for menu_item in menu_list:
right_arrow = page.query_selector("//*[@id='relations-next']/a")
if(right_arrow):
page.click("//*[@id='relations-next']/a")
page.wait_for_load_state()
page_title = page.title().split(" — ")[0]
assert page_title == menu_item
if("toctree" in page.url):
# check titles for all sub-toctree content
# list_url = page.split("/")[3::]
# new_url = "/".join(list_url)
# test_check_titles(new_url)
flag = False
test_check_titles(page)
else:
break
|
def get_menu_titles(page) -> list:
page.wait_for_load_state()
menu_list = page.query_selector_all("//*[@class='toctree-wrapper compound']/ul/li/a")
return [title.as_element().inner_text() for title in menu_list]
flag = True
def test_check_titles(page):
global flag
if(flag):
page.goto("index.html")
menu_list = get_menu_titles(page)
page.wait_for_load_state()
for menu_item in menu_list:
right_arrow = page.query_selector("//*[@id='relations-next']/a")
if(right_arrow):
page.click("//*[@id='relations-next']/a")
page.wait_for_load_state()
page_title = page.title().split(" — ")[0]
assert page_title == menu_item
if("toctree" in page.url):
flag = False
test_check_titles(page)
else:
break
|
Delete debug comments and tool
|
Delete debug comments and tool
|
Python
|
agpl-3.0
|
PyAr/PyZombis,PyAr/PyZombis,PyAr/PyZombis
|
def get_menu_titles(page) -> list:
page.wait_for_load_state()
menu_list = page.query_selector_all("//*[@class='toctree-wrapper compound']/ul/li/a")
return [title.as_element().inner_text() for title in menu_list]
flag = True
def test_check_titles(page):
global flag
if(flag):
page.goto("index.html")
page.set_viewport_size({"width": 1050, "height": 600})
menu_list = get_menu_titles(page)
page.wait_for_load_state()
for menu_item in menu_list:
right_arrow = page.query_selector("//*[@id='relations-next']/a")
if(right_arrow):
page.click("//*[@id='relations-next']/a")
page.wait_for_load_state()
page_title = page.title().split(" — ")[0]
assert page_title == menu_item
if("toctree" in page.url):
# check titles for all sub-toctree content
# list_url = page.split("/")[3::]
# new_url = "/".join(list_url)
# test_check_titles(new_url)
flag = False
test_check_titles(page)
else:
break
Delete debug comments and tool
|
def get_menu_titles(page) -> list:
page.wait_for_load_state()
menu_list = page.query_selector_all("//*[@class='toctree-wrapper compound']/ul/li/a")
return [title.as_element().inner_text() for title in menu_list]
flag = True
def test_check_titles(page):
global flag
if(flag):
page.goto("index.html")
menu_list = get_menu_titles(page)
page.wait_for_load_state()
for menu_item in menu_list:
right_arrow = page.query_selector("//*[@id='relations-next']/a")
if(right_arrow):
page.click("//*[@id='relations-next']/a")
page.wait_for_load_state()
page_title = page.title().split(" — ")[0]
assert page_title == menu_item
if("toctree" in page.url):
flag = False
test_check_titles(page)
else:
break
|
<commit_before>def get_menu_titles(page) -> list:
page.wait_for_load_state()
menu_list = page.query_selector_all("//*[@class='toctree-wrapper compound']/ul/li/a")
return [title.as_element().inner_text() for title in menu_list]
flag = True
def test_check_titles(page):
global flag
if(flag):
page.goto("index.html")
page.set_viewport_size({"width": 1050, "height": 600})
menu_list = get_menu_titles(page)
page.wait_for_load_state()
for menu_item in menu_list:
right_arrow = page.query_selector("//*[@id='relations-next']/a")
if(right_arrow):
page.click("//*[@id='relations-next']/a")
page.wait_for_load_state()
page_title = page.title().split(" — ")[0]
assert page_title == menu_item
if("toctree" in page.url):
# check titles for all sub-toctree content
# list_url = page.split("/")[3::]
# new_url = "/".join(list_url)
# test_check_titles(new_url)
flag = False
test_check_titles(page)
else:
break
<commit_msg>Delete debug comments and tool<commit_after>
|
def get_menu_titles(page) -> list:
page.wait_for_load_state()
menu_list = page.query_selector_all("//*[@class='toctree-wrapper compound']/ul/li/a")
return [title.as_element().inner_text() for title in menu_list]
flag = True
def test_check_titles(page):
global flag
if(flag):
page.goto("index.html")
menu_list = get_menu_titles(page)
page.wait_for_load_state()
for menu_item in menu_list:
right_arrow = page.query_selector("//*[@id='relations-next']/a")
if(right_arrow):
page.click("//*[@id='relations-next']/a")
page.wait_for_load_state()
page_title = page.title().split(" — ")[0]
assert page_title == menu_item
if("toctree" in page.url):
flag = False
test_check_titles(page)
else:
break
|
def get_menu_titles(page) -> list:
page.wait_for_load_state()
menu_list = page.query_selector_all("//*[@class='toctree-wrapper compound']/ul/li/a")
return [title.as_element().inner_text() for title in menu_list]
flag = True
def test_check_titles(page):
global flag
if(flag):
page.goto("index.html")
page.set_viewport_size({"width": 1050, "height": 600})
menu_list = get_menu_titles(page)
page.wait_for_load_state()
for menu_item in menu_list:
right_arrow = page.query_selector("//*[@id='relations-next']/a")
if(right_arrow):
page.click("//*[@id='relations-next']/a")
page.wait_for_load_state()
page_title = page.title().split(" — ")[0]
assert page_title == menu_item
if("toctree" in page.url):
# check titles for all sub-toctree content
# list_url = page.split("/")[3::]
# new_url = "/".join(list_url)
# test_check_titles(new_url)
flag = False
test_check_titles(page)
else:
break
Delete debug comments and tooldef get_menu_titles(page) -> list:
page.wait_for_load_state()
menu_list = page.query_selector_all("//*[@class='toctree-wrapper compound']/ul/li/a")
return [title.as_element().inner_text() for title in menu_list]
flag = True
def test_check_titles(page):
global flag
if(flag):
page.goto("index.html")
menu_list = get_menu_titles(page)
page.wait_for_load_state()
for menu_item in menu_list:
right_arrow = page.query_selector("//*[@id='relations-next']/a")
if(right_arrow):
page.click("//*[@id='relations-next']/a")
page.wait_for_load_state()
page_title = page.title().split(" — ")[0]
assert page_title == menu_item
if("toctree" in page.url):
flag = False
test_check_titles(page)
else:
break
|
<commit_before>def get_menu_titles(page) -> list:
page.wait_for_load_state()
menu_list = page.query_selector_all("//*[@class='toctree-wrapper compound']/ul/li/a")
return [title.as_element().inner_text() for title in menu_list]
flag = True
def test_check_titles(page):
global flag
if(flag):
page.goto("index.html")
page.set_viewport_size({"width": 1050, "height": 600})
menu_list = get_menu_titles(page)
page.wait_for_load_state()
for menu_item in menu_list:
right_arrow = page.query_selector("//*[@id='relations-next']/a")
if(right_arrow):
page.click("//*[@id='relations-next']/a")
page.wait_for_load_state()
page_title = page.title().split(" — ")[0]
assert page_title == menu_item
if("toctree" in page.url):
# check titles for all sub-toctree content
# list_url = page.split("/")[3::]
# new_url = "/".join(list_url)
# test_check_titles(new_url)
flag = False
test_check_titles(page)
else:
break
<commit_msg>Delete debug comments and tool<commit_after>def get_menu_titles(page) -> list:
page.wait_for_load_state()
menu_list = page.query_selector_all("//*[@class='toctree-wrapper compound']/ul/li/a")
return [title.as_element().inner_text() for title in menu_list]
flag = True
def test_check_titles(page):
global flag
if(flag):
page.goto("index.html")
menu_list = get_menu_titles(page)
page.wait_for_load_state()
for menu_item in menu_list:
right_arrow = page.query_selector("//*[@id='relations-next']/a")
if(right_arrow):
page.click("//*[@id='relations-next']/a")
page.wait_for_load_state()
page_title = page.title().split(" — ")[0]
assert page_title == menu_item
if("toctree" in page.url):
flag = False
test_check_titles(page)
else:
break
|
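Editor's note: the assertion in the test above relies on Sphinx-style page titles of the form "Section — Project"; the split is easy to check in isolation. The title string here is hypothetical.

page_title_raw = "Lists — PyZombis documentation"
page_title = page_title_raw.split(" — ")[0]  # keep only the section name
assert page_title == "Lists"
print(page_title)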
508c9ef5f7dfd974fdad650cf1a211dad9d41db5
|
skipper/config.py
|
skipper/config.py
|
from string import Template
from collections import defaultdict
import os
import yaml
def load_defaults():
skipper_conf = 'skipper.yaml'
defaults = {}
if os.path.exists(skipper_conf):
with open(skipper_conf) as confile:
config = yaml.load(confile)
containers = config.pop('containers', None)
_normalize_config(config, defaults)
if containers is not None:
defaults['containers'] = containers
return defaults
def _normalize_config(config, normalized_config):
for key, value in config.iteritems():
if isinstance(value, dict):
normalized_config[key] = {}
_normalize_config(value, normalized_config[key])
elif isinstance(value, list):
normalized_config[key] = value
else:
normalized_key = key.replace('-', '_')
normalized_config[normalized_key] = _interpolate_env_vars(value)
def _interpolate_env_vars(key):
return Template(key).substitute(defaultdict(lambda: "", os.environ))
|
from string import Template
from collections import defaultdict
import os
import yaml
def load_defaults():
skipper_conf = 'skipper.yaml'
defaults = {}
if os.path.exists(skipper_conf):
with open(skipper_conf) as confile:
config = yaml.load(confile)
containers = config.pop('containers', None)
_normalize_config(config, defaults)
if containers is not None:
defaults['containers'] = containers
return defaults
def _normalize_config(config, normalized_config):
for key, value in config.iteritems():
if isinstance(value, dict):
normalized_config[key] = {}
_normalize_config(value, normalized_config[key])
elif isinstance(value, list):
normalized_config[key] = [_interpolate_env_vars(x) for x in value]
else:
normalized_key = key.replace('-', '_')
normalized_config[normalized_key] = _interpolate_env_vars(value)
def _interpolate_env_vars(key):
return Template(key).substitute(defaultdict(lambda: "", os.environ))
|
Handle env vars in volumes
|
Handle env vars in volumes
|
Python
|
apache-2.0
|
Stratoscale/skipper,Stratoscale/skipper
|
from string import Template
from collections import defaultdict
import os
import yaml
def load_defaults():
skipper_conf = 'skipper.yaml'
defaults = {}
if os.path.exists(skipper_conf):
with open(skipper_conf) as confile:
config = yaml.load(confile)
containers = config.pop('containers', None)
_normalize_config(config, defaults)
if containers is not None:
defaults['containers'] = containers
return defaults
def _normalize_config(config, normalized_config):
for key, value in config.iteritems():
if isinstance(value, dict):
normalized_config[key] = {}
_normalize_config(value, normalized_config[key])
elif isinstance(value, list):
normalized_config[key] = value
else:
normalized_key = key.replace('-', '_')
normalized_config[normalized_key] = _interpolate_env_vars(value)
def _interpolate_env_vars(key):
return Template(key).substitute(defaultdict(lambda: "", os.environ))
Handle env vars in volumes
|
from string import Template
from collections import defaultdict
import os
import yaml
def load_defaults():
skipper_conf = 'skipper.yaml'
defaults = {}
if os.path.exists(skipper_conf):
with open(skipper_conf) as confile:
config = yaml.load(confile)
containers = config.pop('containers', None)
_normalize_config(config, defaults)
if containers is not None:
defaults['containers'] = containers
return defaults
def _normalize_config(config, normalized_config):
for key, value in config.iteritems():
if isinstance(value, dict):
normalized_config[key] = {}
_normalize_config(value, normalized_config[key])
elif isinstance(value, list):
normalized_config[key] = [_interpolate_env_vars(x) for x in value]
else:
normalized_key = key.replace('-', '_')
normalized_config[normalized_key] = _interpolate_env_vars(value)
def _interpolate_env_vars(key):
return Template(key).substitute(defaultdict(lambda: "", os.environ))
|
<commit_before>from string import Template
from collections import defaultdict
import os
import yaml
def load_defaults():
skipper_conf = 'skipper.yaml'
defaults = {}
if os.path.exists(skipper_conf):
with open(skipper_conf) as confile:
config = yaml.load(confile)
containers = config.pop('containers', None)
_normalize_config(config, defaults)
if containers is not None:
defaults['containers'] = containers
return defaults
def _normalize_config(config, normalized_config):
for key, value in config.iteritems():
if isinstance(value, dict):
normalized_config[key] = {}
_normalize_config(value, normalized_config[key])
elif isinstance(value, list):
normalized_config[key] = value
else:
normalized_key = key.replace('-', '_')
normalized_config[normalized_key] = _interpolate_env_vars(value)
def _interpolate_env_vars(key):
return Template(key).substitute(defaultdict(lambda: "", os.environ))
<commit_msg>Handle env vars in volumes<commit_after>
|
from string import Template
from collections import defaultdict
import os
import yaml
def load_defaults():
skipper_conf = 'skipper.yaml'
defaults = {}
if os.path.exists(skipper_conf):
with open(skipper_conf) as confile:
config = yaml.load(confile)
containers = config.pop('containers', None)
_normalize_config(config, defaults)
if containers is not None:
defaults['containers'] = containers
return defaults
def _normalize_config(config, normalized_config):
for key, value in config.iteritems():
if isinstance(value, dict):
normalized_config[key] = {}
_normalize_config(value, normalized_config[key])
elif isinstance(value, list):
normalized_config[key] = [_interpolate_env_vars(x) for x in value]
else:
normalized_key = key.replace('-', '_')
normalized_config[normalized_key] = _interpolate_env_vars(value)
def _interpolate_env_vars(key):
return Template(key).substitute(defaultdict(lambda: "", os.environ))
|
from string import Template
from collections import defaultdict
import os
import yaml
def load_defaults():
skipper_conf = 'skipper.yaml'
defaults = {}
if os.path.exists(skipper_conf):
with open(skipper_conf) as confile:
config = yaml.load(confile)
containers = config.pop('containers', None)
_normalize_config(config, defaults)
if containers is not None:
defaults['containers'] = containers
return defaults
def _normalize_config(config, normalized_config):
for key, value in config.iteritems():
if isinstance(value, dict):
normalized_config[key] = {}
_normalize_config(value, normalized_config[key])
elif isinstance(value, list):
normalized_config[key] = value
else:
normalized_key = key.replace('-', '_')
normalized_config[normalized_key] = _interpolate_env_vars(value)
def _interpolate_env_vars(key):
return Template(key).substitute(defaultdict(lambda: "", os.environ))
Handle env vars in volumesfrom string import Template
from collections import defaultdict
import os
import yaml
def load_defaults():
skipper_conf = 'skipper.yaml'
defaults = {}
if os.path.exists(skipper_conf):
with open(skipper_conf) as confile:
config = yaml.load(confile)
containers = config.pop('containers', None)
_normalize_config(config, defaults)
if containers is not None:
defaults['containers'] = containers
return defaults
def _normalize_config(config, normalized_config):
for key, value in config.iteritems():
if isinstance(value, dict):
normalized_config[key] = {}
_normalize_config(value, normalized_config[key])
elif isinstance(value, list):
normalized_config[key] = [_interpolate_env_vars(x) for x in value]
else:
normalized_key = key.replace('-', '_')
normalized_config[normalized_key] = _interpolate_env_vars(value)
def _interpolate_env_vars(key):
return Template(key).substitute(defaultdict(lambda: "", os.environ))
|
<commit_before>from string import Template
from collections import defaultdict
import os
import yaml
def load_defaults():
skipper_conf = 'skipper.yaml'
defaults = {}
if os.path.exists(skipper_conf):
with open(skipper_conf) as confile:
config = yaml.load(confile)
containers = config.pop('containers', None)
_normalize_config(config, defaults)
if containers is not None:
defaults['containers'] = containers
return defaults
def _normalize_config(config, normalized_config):
for key, value in config.iteritems():
if isinstance(value, dict):
normalized_config[key] = {}
_normalize_config(value, normalized_config[key])
elif isinstance(value, list):
normalized_config[key] = value
else:
normalized_key = key.replace('-', '_')
normalized_config[normalized_key] = _interpolate_env_vars(value)
def _interpolate_env_vars(key):
return Template(key).substitute(defaultdict(lambda: "", os.environ))
<commit_msg>Handle env vars in volumes<commit_after>from string import Template
from collections import defaultdict
import os
import yaml
def load_defaults():
skipper_conf = 'skipper.yaml'
defaults = {}
if os.path.exists(skipper_conf):
with open(skipper_conf) as confile:
config = yaml.load(confile)
containers = config.pop('containers', None)
_normalize_config(config, defaults)
if containers is not None:
defaults['containers'] = containers
return defaults
def _normalize_config(config, normalized_config):
for key, value in config.iteritems():
if isinstance(value, dict):
normalized_config[key] = {}
_normalize_config(value, normalized_config[key])
elif isinstance(value, list):
normalized_config[key] = [_interpolate_env_vars(x) for x in value]
else:
normalized_key = key.replace('-', '_')
normalized_config[normalized_key] = _interpolate_env_vars(value)
def _interpolate_env_vars(key):
return Template(key).substitute(defaultdict(lambda: "", os.environ))
|
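Editor's note: a self-contained sketch of the interpolation trick extended by this commit. Template.substitute with a defaultdict never raises KeyError; unset variables silently become "". The HOME_DIR variable is set here only for the demo.

import os
from collections import defaultdict
from string import Template

os.environ["HOME_DIR"] = "/home/demo"
env = defaultdict(lambda: "", os.environ)
print(Template("$HOME_DIR/cache").substitute(env))   # /home/demo/cache
print(Template("$UNSET_VAR/cache").substitute(env))  # /cache  (missing var -> "")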
1e150c4d5797f17ba8bea53d328cb613adc6bc0f
|
self_play.py
|
self_play.py
|
import random
from game import Game, DiscState
class SelfPlay:
def __init__(self, game):
self.game = game
def play(self):
while self.game.winner is None:
col_index = self.calc_move(self.game.current_player)
if self.game.can_add_disc(col_index):
success = self.game.try_turn(self.game.current_player, col_index)
assert success
self.render_board()
print("Winner is: %s" % self.disc_state_to_player_name(self.game.winner))
def calc_move(self, current_player):
return random.randint(0, self.game.grid.width)
def render_board(self):
str_repr = [" %i " % col_index for col_index in range(self.game.grid.width)] + ["\n"]
for row in reversed(self.game.grid):
row_repr = []
for disc_value in row:
if disc_value is DiscState.empty:
row_repr.append("| |")
elif disc_value is DiscState.red:
row_repr.append("|O|")
else: # disc_value is black
row_repr.append("|X|")
row_repr.append("\n")
str_repr += row_repr
print("".join(str_repr))
def disc_state_to_player_name(self, disc_state):
if disc_state is DiscState.red:
return "O"
else:
return "X"
|
import random
from game import Game, DiscState
class SelfPlay:
def __init__(self, game):
self.game = game
self.log = []
def play(self):
while self.game.winner is None:
col_index = self.calc_move(self.game.current_player)
if self.game.can_add_disc(col_index):
success = self.game.try_turn(self.game.current_player, col_index)
assert success
self.log.append(col_index)
self.render_board()
print("Winner is: %s" % self.disc_state_to_player_name(self.game.winner))
def calc_move(self, current_player):
return random.randint(0, self.game.grid.width)
def render_board(self):
str_repr = [" %i " % col_index for col_index in range(self.game.grid.width)] + ["\n"]
for row in reversed(self.game.grid):
row_repr = []
for disc_value in row:
if disc_value is DiscState.empty:
row_repr.append("| |")
elif disc_value is DiscState.red:
row_repr.append("|O|")
else: # disc_value is black
row_repr.append("|X|")
row_repr.append("\n")
str_repr += row_repr
print("".join(str_repr))
def disc_state_to_player_name(self, disc_state):
if disc_state is DiscState.red:
return "O"
else:
return "X"
|
Add log to self play
|
Add log to self play
|
Python
|
mit
|
misterwilliam/connect-four
|
import random
from game import Game, DiscState
class SelfPlay:
def __init__(self, game):
self.game = game
def play(self):
while self.game.winner is None:
col_index = self.calc_move(self.game.current_player)
if self.game.can_add_disc(col_index):
success = self.game.try_turn(self.game.current_player, col_index)
assert success
self.render_board()
print("Winner is: %s" % self.disc_state_to_player_name(self.game.winner))
def calc_move(self, current_player):
return random.randint(0, self.game.grid.width)
def render_board(self):
str_repr = [" %i " % col_index for col_index in range(self.game.grid.width)] + ["\n"]
for row in reversed(self.game.grid):
row_repr = []
for disc_value in row:
if disc_value is DiscState.empty:
row_repr.append("| |")
elif disc_value is DiscState.red:
row_repr.append("|O|")
else: # disc_value is black
row_repr.append("|X|")
row_repr.append("\n")
str_repr += row_repr
print("".join(str_repr))
def disc_state_to_player_name(self, disc_state):
if disc_state is DiscState.red:
return "O"
else:
return "X"Add log to self play
|
import random
from game import Game, DiscState
class SelfPlay:
def __init__(self, game):
self.game = game
self.log = []
def play(self):
while self.game.winner is None:
col_index = self.calc_move(self.game.current_player)
if self.game.can_add_disc(col_index):
success = self.game.try_turn(self.game.current_player, col_index)
assert success
self.log.append(col_index)
self.render_board()
print("Winner is: %s" % self.disc_state_to_player_name(self.game.winner))
def calc_move(self, current_player):
return random.randint(0, self.game.grid.width)
def render_board(self):
str_repr = [" %i " % col_index for col_index in range(self.game.grid.width)] + ["\n"]
for row in reversed(self.game.grid):
row_repr = []
for disc_value in row:
if disc_value is DiscState.empty:
row_repr.append("| |")
elif disc_value is DiscState.red:
row_repr.append("|O|")
else: # disc_value is black
row_repr.append("|X|")
row_repr.append("\n")
str_repr += row_repr
print("".join(str_repr))
def disc_state_to_player_name(self, disc_state):
if disc_state is DiscState.red:
return "O"
else:
return "X"
|
<commit_before>import random
from game import Game, DiscState
class SelfPlay:
def __init__(self, game):
self.game = game
def play(self):
while self.game.winner is None:
col_index = self.calc_move(self.game.current_player)
if self.game.can_add_disc(col_index):
success = self.game.try_turn(self.game.current_player, col_index)
assert success
self.render_board()
print("Winner is: %s" % self.disc_state_to_player_name(self.game.winner))
def calc_move(self, current_player):
return random.randint(0, self.game.grid.width)
def render_board(self):
str_repr = [" %i " % col_index for col_index in range(self.game.grid.width)] + ["\n"]
for row in reversed(self.game.grid):
row_repr = []
for disc_value in row:
if disc_value is DiscState.empty:
row_repr.append("| |")
elif disc_value is DiscState.red:
row_repr.append("|O|")
else: # disc_value is black
row_repr.append("|X|")
row_repr.append("\n")
str_repr += row_repr
print("".join(str_repr))
def disc_state_to_player_name(self, disc_state):
if disc_state is DiscState.red:
return "O"
else:
return "X"<commit_msg>Add log to self play<commit_after>
|
import random
from game import Game, DiscState
class SelfPlay:
def __init__(self, game):
self.game = game
self.log = []
def play(self):
while self.game.winner is None:
col_index = self.calc_move(self.game.current_player)
if self.game.can_add_disc(col_index):
success = self.game.try_turn(self.game.current_player, col_index)
assert success
self.log.append(col_index)
self.render_board()
print("Winner is: %s" % self.disc_state_to_player_name(self.game.winner))
def calc_move(self, current_player):
return random.randint(0, self.game.grid.width)
def render_board(self):
str_repr = [" %i " % col_index for col_index in range(self.game.grid.width)] + ["\n"]
for row in reversed(self.game.grid):
row_repr = []
for disc_value in row:
if disc_value is DiscState.empty:
row_repr.append("| |")
elif disc_value is DiscState.red:
row_repr.append("|O|")
else: # disc_value is black
row_repr.append("|X|")
row_repr.append("\n")
str_repr += row_repr
print("".join(str_repr))
def disc_state_to_player_name(self, disc_state):
if disc_state is DiscState.red:
return "O"
else:
return "X"
|
import random
from game import Game, DiscState
class SelfPlay:
def __init__(self, game):
self.game = game
def play(self):
while self.game.winner is None:
col_index = self.calc_move(self.game.current_player)
if self.game.can_add_disc(col_index):
success = self.game.try_turn(self.game.current_player, col_index)
assert success
self.render_board()
print("Winner is: %s" % self.disc_state_to_player_name(self.game.winner))
def calc_move(self, current_player):
return random.randint(0, self.game.grid.width)
def render_board(self):
str_repr = [" %i " % col_index for col_index in range(self.game.grid.width)] + ["\n"]
for row in reversed(self.game.grid):
row_repr = []
for disc_value in row:
if disc_value is DiscState.empty:
row_repr.append("| |")
elif disc_value is DiscState.red:
row_repr.append("|O|")
else: # disc_value is black
row_repr.append("|X|")
row_repr.append("\n")
str_repr += row_repr
print("".join(str_repr))
def disc_state_to_player_name(self, disc_state):
if disc_state is DiscState.red:
return "O"
else:
return "X"Add log to self playimport random
from game import Game, DiscState
class SelfPlay:
def __init__(self, game):
self.game = game
self.log = []
def play(self):
while self.game.winner is None:
col_index = self.calc_move(self.game.current_player)
if self.game.can_add_disc(col_index):
success = self.game.try_turn(self.game.current_player, col_index)
assert success
self.log.append(col_index)
self.render_board()
print("Winner is: %s" % self.disc_state_to_player_name(self.game.winner))
def calc_move(self, current_player):
return random.randint(0, self.game.grid.width)
def render_board(self):
str_repr = [" %i " % col_index for col_index in range(self.game.grid.width)] + ["\n"]
for row in reversed(self.game.grid):
row_repr = []
for disc_value in row:
if disc_value is DiscState.empty:
row_repr.append("| |")
elif disc_value is DiscState.red:
row_repr.append("|O|")
else: # disc_value is black
row_repr.append("|X|")
row_repr.append("\n")
str_repr += row_repr
print("".join(str_repr))
def disc_state_to_player_name(self, disc_state):
if disc_state is DiscState.red:
return "O"
else:
return "X"
|
<commit_before>import random
from game import Game, DiscState
class SelfPlay:
def __init__(self, game):
self.game = game
def play(self):
while self.game.winner is None:
col_index = self.calc_move(self.game.current_player)
if self.game.can_add_disc(col_index):
success = self.game.try_turn(self.game.current_player, col_index)
assert success
self.render_board()
print("Winner is: %s" % self.disc_state_to_player_name(self.game.winner))
def calc_move(self, current_player):
return random.randint(0, self.game.grid.width)
def render_board(self):
str_repr = [" %i " % col_index for col_index in range(self.game.grid.width)] + ["\n"]
for row in reversed(self.game.grid):
row_repr = []
for disc_value in row:
if disc_value is DiscState.empty:
row_repr.append("| |")
elif disc_value is DiscState.red:
row_repr.append("|O|")
else: # disc_value is black
row_repr.append("|X|")
row_repr.append("\n")
str_repr += row_repr
print("".join(str_repr))
def disc_state_to_player_name(self, disc_state):
if disc_state is DiscState.red:
return "O"
else:
return "X"<commit_msg>Add log to self play<commit_after>import random
from game import Game, DiscState
class SelfPlay:
def __init__(self, game):
self.game = game
self.log = []
def play(self):
while self.game.winner is None:
col_index = self.calc_move(self.game.current_player)
if self.game.can_add_disc(col_index):
success = self.game.try_turn(self.game.current_player, col_index)
assert success
self.log.append(col_index)
self.render_board()
print("Winner is: %s" % self.disc_state_to_player_name(self.game.winner))
def calc_move(self, current_player):
return random.randint(0, self.game.grid.width)
def render_board(self):
str_repr = [" %i " % col_index for col_index in range(self.game.grid.width)] + ["\n"]
for row in reversed(self.game.grid):
row_repr = []
for disc_value in row:
if disc_value is DiscState.empty:
row_repr.append("| |")
elif disc_value is DiscState.red:
row_repr.append("|O|")
else: # disc_value is black
row_repr.append("|X|")
row_repr.append("\n")
str_repr += row_repr
print("".join(str_repr))
def disc_state_to_player_name(self, disc_state):
if disc_state is DiscState.red:
return "O"
else:
return "X"
|
4dabc48455ebb8f22d37cd964ceb16373f784362
|
mothermayi/colors.py
|
mothermayi/colors.py
|
BLUE = '\033[94m'
GREEN = '\033[92m'
YELLOW = '\033[93m'
RED = '\033[91m'
ENDC = '\033[0m'
BOLD = '\033[1m'
UNDERLINE = '\033[4m'
def green(text):
return GREEN + text + ENDC
def red(text):
return RED + text + ENDC
|
BLUE = '\033[94m'
GREEN = '\033[92m'
YELLOW = '\033[93m'
RED = '\033[91m'
ENDC = '\033[0m'
BOLD = '\033[1m'
UNDERLINE = '\033[4m'
def green(text):
return GREEN + text + ENDC
def red(text):
return RED + text + ENDC
def yellow(text):
return YELLOW + text + ENDC
|
Add a function for getting yellow
|
Add a function for getting yellow
|
Python
|
mit
|
EliRibble/mothermayi
|
BLUE = '\033[94m'
GREEN = '\033[92m'
YELLOW = '\033[93m'
RED = '\033[91m'
ENDC = '\033[0m'
BOLD = '\033[1m'
UNDERLINE = '\033[4m'
def green(text):
return GREEN + text + ENDC
def red(text):
return RED + text + ENDC
Add a function for getting yellow
|
BLUE = '\033[94m'
GREEN = '\033[92m'
YELLOW = '\033[93m'
RED = '\033[91m'
ENDC = '\033[0m'
BOLD = '\033[1m'
UNDERLINE = '\033[4m'
def green(text):
return GREEN + text + ENDC
def red(text):
return RED + text + ENDC
def yellow(text):
return YELLOW + text + ENDC
|
<commit_before>BLUE = '\033[94m'
GREEN = '\033[92m'
YELLOW = '\033[93m'
RED = '\033[91m'
ENDC = '\033[0m'
BOLD = '\033[1m'
UNDERLINE = '\033[4m'
def green(text):
return GREEN + text + ENDC
def red(text):
return RED + text + ENDC
<commit_msg>Add a function for getting yellow<commit_after>
|
BLUE = '\033[94m'
GREEN = '\033[92m'
YELLOW = '\033[93m'
RED = '\033[91m'
ENDC = '\033[0m'
BOLD = '\033[1m'
UNDERLINE = '\033[4m'
def green(text):
return GREEN + text + ENDC
def red(text):
return RED + text + ENDC
def yellow(text):
return YELLOW + text + ENDC
|
BLUE = '\033[94m'
GREEN = '\033[92m'
YELLOW = '\033[93m'
RED = '\033[91m'
ENDC = '\033[0m'
BOLD = '\033[1m'
UNDERLINE = '\033[4m'
def green(text):
return GREEN + text + ENDC
def red(text):
return RED + text + ENDC
Add a function for getting yellowBLUE = '\033[94m'
GREEN = '\033[92m'
YELLOW = '\033[93m'
RED = '\033[91m'
ENDC = '\033[0m'
BOLD = '\033[1m'
UNDERLINE = '\033[4m'
def green(text):
return GREEN + text + ENDC
def red(text):
return RED + text + ENDC
def yellow(text):
return YELLOW + text + ENDC
|
<commit_before>BLUE = '\033[94m'
GREEN = '\033[92m'
YELLOW = '\033[93m'
RED = '\033[91m'
ENDC = '\033[0m'
BOLD = '\033[1m'
UNDERLINE = '\033[4m'
def green(text):
return GREEN + text + ENDC
def red(text):
return RED + text + ENDC
<commit_msg>Add a function for getting yellow<commit_after>BLUE = '\033[94m'
GREEN = '\033[92m'
YELLOW = '\033[93m'
RED = '\033[91m'
ENDC = '\033[0m'
BOLD = '\033[1m'
UNDERLINE = '\033[4m'
def green(text):
return GREEN + text + ENDC
def red(text):
return RED + text + ENDC
def yellow(text):
return YELLOW + text + ENDC
|
70cc77a9146f9d4afd78df9a2f8da8673f0320de
|
extractor.py
|
extractor.py
|
import extractor.ui.mainapplication as ui
import Tkinter as tk
def main():
## root = tk.Tk()
## root.title('SNES Wolfenstein 3D Extractor')
## root.minsize(400, 100)
## ui.MainApplication(root).pack(side="top", fill="both", expand=True)
## root.mainloop()
ui.MainApplication().mainloop()
main()
|
import extractor.ui.mainapplication as ui
import Tkinter as tk
def main():
## root = tk.Tk()
## root.title('SNES Wolfenstein 3D Extractor')
## root.minsize(400, 100)
## ui.MainApplication(root).pack(side="top", fill="both", expand=True)
root = ui.MainApplication()
root.mainloop()
main()
|
Split program call into two lines.
|
Split program call into two lines.
|
Python
|
mit
|
adambiser/snes-wolf3d-extractor
|
import extractor.ui.mainapplication as ui
import Tkinter as tk
def main():
## root = tk.Tk()
## root.title('SNES Wolfenstein 3D Extractor')
## root.minsize(400, 100)
## ui.MainApplication(root).pack(side="top", fill="both", expand=True)
## root.mainloop()
ui.MainApplication().mainloop()
main()
Split program call into two lines.
|
import extractor.ui.mainapplication as ui
import Tkinter as tk
def main():
## root = tk.Tk()
## root.title('SNES Wolfenstein 3D Extractor')
## root.minsize(400, 100)
## ui.MainApplication(root).pack(side="top", fill="both", expand=True)
root = ui.MainApplication()
root.mainloop()
main()
|
<commit_before>import extractor.ui.mainapplication as ui
import Tkinter as tk
def main():
## root = tk.Tk()
## root.title('SNES Wolfenstein 3D Extractor')
## root.minsize(400, 100)
## ui.MainApplication(root).pack(side="top", fill="both", expand=True)
## root.mainloop()
ui.MainApplication().mainloop()
main()
<commit_msg>Split program call into two lines.<commit_after>
|
import extractor.ui.mainapplication as ui
import Tkinter as tk
def main():
## root = tk.Tk()
## root.title('SNES Wolfenstein 3D Extractor')
## root.minsize(400, 100)
## ui.MainApplication(root).pack(side="top", fill="both", expand=True)
root = ui.MainApplication()
root.mainloop()
main()
|
import extractor.ui.mainapplication as ui
import Tkinter as tk
def main():
## root = tk.Tk()
## root.title('SNES Wolfenstein 3D Extractor')
## root.minsize(400, 100)
## ui.MainApplication(root).pack(side="top", fill="both", expand=True)
## root.mainloop()
ui.MainApplication().mainloop()
main()
Split program call into two lines.import extractor.ui.mainapplication as ui
import Tkinter as tk
def main():
## root = tk.Tk()
## root.title('SNES Wolfenstein 3D Extractor')
## root.minsize(400, 100)
## ui.MainApplication(root).pack(side="top", fill="both", expand=True)
root = ui.MainApplication()
root.mainloop()
main()
|
<commit_before>import extractor.ui.mainapplication as ui
import Tkinter as tk
def main():
## root = tk.Tk()
## root.title('SNES Wolfenstein 3D Extractor')
## root.minsize(400, 100)
## ui.MainApplication(root).pack(side="top", fill="both", expand=True)
## root.mainloop()
ui.MainApplication().mainloop()
main()
<commit_msg>Split program call into two lines.<commit_after>import extractor.ui.mainapplication as ui
import Tkinter as tk
def main():
## root = tk.Tk()
## root.title('SNES Wolfenstein 3D Extractor')
## root.minsize(400, 100)
## ui.MainApplication(root).pack(side="top", fill="both", expand=True)
root = ui.MainApplication()
root.mainloop()
main()
|
316a6583036ca18cfdf1a95a122aa2367237fa2c
|
get/views.py
|
get/views.py
|
from django.shortcuts import render_to_response
from django.shortcuts import redirect
from models import DownloadLink
def index(request):
links = DownloadLink.objects.all()
context = {'links': list(links)}
return render_to_response('get/listing.tpl', context)
|
from django.shortcuts import render_to_response
from django.shortcuts import redirect
from models import DownloadLink
def index(request):
links = DownloadLink.objects.order_by('-pk')
context = {'links': list(links)}
return render_to_response('get/listing.tpl', context)
|
Order download links by pk.
|
get: Order download links by pk.
|
Python
|
bsd-3-clause
|
ProgVal/Supybot-website
|
from django.shortcuts import render_to_response
from django.shortcuts import redirect
from models import DownloadLink
def index(request):
links = DownloadLink.objects.all()
context = {'links': list(links)}
return render_to_response('get/listing.tpl', context)
get: Order download links by pk.
|
from django.shortcuts import render_to_response
from django.shortcuts import redirect
from models import DownloadLink
def index(request):
links = DownloadLink.objects.order_by('-pk')
context = {'links': list(links)}
return render_to_response('get/listing.tpl', context)
|
<commit_before>from django.shortcuts import render_to_response
from django.shortcuts import redirect
from models import DownloadLink
def index(request):
links = DownloadLink.objects.all()
context = {'links': list(links)}
return render_to_response('get/listing.tpl', context)
<commit_msg>get: Order download links by pk.<commit_after>
|
from django.shortcuts import render_to_response
from django.shortcuts import redirect
from models import DownloadLink
def index(request):
links = DownloadLink.objects.order_by('-pk')
context = {'links': list(links)}
return render_to_response('get/listing.tpl', context)
|
from django.shortcuts import render_to_response
from django.shortcuts import redirect
from models import DownloadLink
def index(request):
links = DownloadLink.objects.all()
context = {'links': list(links)}
return render_to_response('get/listing.tpl', context)
get: Order download links by pk.from django.shortcuts import render_to_response
from django.shortcuts import redirect
from models import DownloadLink
def index(request):
links = DownloadLink.objects.order_by('-pk')
context = {'links': list(links)}
return render_to_response('get/listing.tpl', context)
|
<commit_before>from django.shortcuts import render_to_response
from django.shortcuts import redirect
from models import DownloadLink
def index(request):
links = DownloadLink.objects.all()
context = {'links': list(links)}
return render_to_response('get/listing.tpl', context)
<commit_msg>get: Order download links by pk.<commit_after>from django.shortcuts import render_to_response
from django.shortcuts import redirect
from models import DownloadLink
def index(request):
links = DownloadLink.objects.order_by('-pk')
context = {'links': list(links)}
return render_to_response('get/listing.tpl', context)
|
1e50bdf90756a79d45b0c35353d007c5dad2abfc
|
hand_data.py
|
hand_data.py
|
import time
from lib import Leap
from lib.Leap import Bone
'''
gets the current frame from controller
for each finger, stores the topmost end of each bone (4 points)
adjusts bone location relativity by subtracting the center of the palm
returns the adjusted bone locations in the form:
[(finger1bone1x, finger1bone1y, finger1bone1z), ... finger5bone4z)]
'''
def get_hand_position(controller):
print "NEW FRAME"
fingers = controller.frame().fingers
finger_bones = []
for finger in fingers:
finger_bones.append(finger.bone(Bone.TYPE_METACARPAL).next_joint)
finger_bones.append(finger.bone(Bone.TYPE_PROXIMAL).next_joint)
finger_bones.append(finger.bone(Bone.TYPE_INTERMEDIATE).next_joint)
finger_bones.append(finger.bone(Bone.TYPE_DISTAL).next_joint)
# possible issue when more than one hand
hands = controller.frame().hands
hand_center = 0
for hand in hands:
hand_center = hand.palm_position
calibrated_finger_bones = []
for joint in finger_bones:
calibrated_finger_bones.append(joint - hand_center)
return calibrated_finger_bones
if __name__ == "__main__":
controller = Leap.Controller()
while True:
get_hand_position(controller)
time.sleep(1)
|
import time
from lib import Leap
from lib.Leap import Bone
'''
gets the current frame from controller
for each finger, stores the topmost end of each bone (4 points)
adjusts bone location relativity by subtracting the center of the palm
returns the adjusted bone locations in the form:
{feat0=some_float, feat1=some_float, ... feat59=some_float}
'''
def get_hand_position(controller):
print "NEW FRAME"
fingers = controller.frame().fingers
finger_bones = []
for finger in fingers:
finger_bones.append(finger.bone(Bone.TYPE_METACARPAL).next_joint)
finger_bones.append(finger.bone(Bone.TYPE_PROXIMAL).next_joint)
finger_bones.append(finger.bone(Bone.TYPE_INTERMEDIATE).next_joint)
finger_bones.append(finger.bone(Bone.TYPE_DISTAL).next_joint)
# possible issue when more than one hand
hands = controller.frame().hands
hand_center = 0
for hand in hands:
hand_center = hand.palm_position
calibrated_finger_bones = {}
for i in range(len(finger_bones)):
normalized_joint = (finger_bones[i] - hand_center).to_tuple()
for j in range(3):
calibrated_finger_bones["feat" + str(i*3+j)] = normalized_joint[j]
return calibrated_finger_bones
if __name__ == "__main__":
controller = Leap.Controller()
while True:
get_hand_position(controller)
time.sleep(1)
|
Return hand data as dictionary
|
Return hand data as dictionary
|
Python
|
mit
|
ssaamm/sign-language-tutor,ssaamm/sign-language-translator,ssaamm/sign-language-translator,ssaamm/sign-language-tutor
|
import time
from lib import Leap
from lib.Leap import Bone
'''
gets the current frame from controller
for each finger, stores the topmost end of each bone (4 points)
adjusts bone location relativity by subtracting the center of the palm
returns the adjusted bone locations in the form:
[(finger1bone1x, finger1bone1y, finger1bone1z), ... finger5bone4z)]
'''
def get_hand_position(controller):
print "NEW FRAME"
fingers = controller.frame().fingers
finger_bones = []
for finger in fingers:
finger_bones.append(finger.bone(Bone.TYPE_METACARPAL).next_joint)
finger_bones.append(finger.bone(Bone.TYPE_PROXIMAL).next_joint)
finger_bones.append(finger.bone(Bone.TYPE_INTERMEDIATE).next_joint)
finger_bones.append(finger.bone(Bone.TYPE_DISTAL).next_joint)
# possible issue when more than one hand
hands = controller.frame().hands
hand_center = 0
for hand in hands:
hand_center = hand.palm_position
calibrated_finger_bones = []
for joint in finger_bones:
calibrated_finger_bones.append(joint - hand_center)
return calibrated_finger_bones
if __name__ == "__main__":
controller = Leap.Controller()
while True:
get_hand_position(controller)
time.sleep(1)
Return hand data as dictionary
|
import time
from lib import Leap
from lib.Leap import Bone
'''
gets the current frame from controller
for each finger, stores the topmost end of each bone (4 points)
adjusts bone location relativity by subtracting the center of the palm
returns the adjusted bone locations in the form:
{feat0=some_float, feat1=some_float, ... feat59=some_float}
'''
def get_hand_position(controller):
print "NEW FRAME"
fingers = controller.frame().fingers
finger_bones = []
for finger in fingers:
finger_bones.append(finger.bone(Bone.TYPE_METACARPAL).next_joint)
finger_bones.append(finger.bone(Bone.TYPE_PROXIMAL).next_joint)
finger_bones.append(finger.bone(Bone.TYPE_INTERMEDIATE).next_joint)
finger_bones.append(finger.bone(Bone.TYPE_DISTAL).next_joint)
# possible issue when more than one hand
hands = controller.frame().hands
hand_center = 0
for hand in hands:
hand_center = hand.palm_position
calibrated_finger_bones = {}
for i in range(len(finger_bones)):
normalized_joint = (finger_bones[i] - hand_center).to_tuple()
for j in range(3):
calibrated_finger_bones["feat" + str(i*3+j)] = normalized_joint[j]
return calibrated_finger_bones
if __name__ == "__main__":
controller = Leap.Controller()
while True:
get_hand_position(controller)
time.sleep(1)
|
<commit_before>import time
from lib import Leap
from lib.Leap import Bone
'''
gets the current frame from controller
for each finger, stores the topmost end of each bone (4 points)
adjusts bone location relativity by subtracting the center of the palm
returns the adjusted bone locations in the form:
[(finger1bone1x, finger1bone1y, finger1bone1z), ... finger5bone4z)]
'''
def get_hand_position(controller):
print "NEW FRAME"
fingers = controller.frame().fingers
finger_bones = []
for finger in fingers:
finger_bones.append(finger.bone(Bone.TYPE_METACARPAL).next_joint)
finger_bones.append(finger.bone(Bone.TYPE_PROXIMAL).next_joint)
finger_bones.append(finger.bone(Bone.TYPE_INTERMEDIATE).next_joint)
finger_bones.append(finger.bone(Bone.TYPE_DISTAL).next_joint)
# possible issue when more than one hand
hands = controller.frame().hands
hand_center = 0
for hand in hands:
hand_center = hand.palm_position
calibrated_finger_bones = []
for joint in finger_bones:
calibrated_finger_bones.append(joint - hand_center)
return calibrated_finger_bones
if __name__ == "__main__":
controller = Leap.Controller()
while True:
get_hand_position(controller)
time.sleep(1)
<commit_msg>Return hand data as dictionary<commit_after>
|
import time
from lib import Leap
from lib.Leap import Bone
'''
gets the current frame from controller
for each finger, stores the topmost end of each bone (4 points)
adjusts bone location relativity by subtracting the center of the palm
returns the adjusted bone locations in the form:
{feat0=some_float, feat1=some_float, ... feat59=some_float}
'''
def get_hand_position(controller):
print "NEW FRAME"
fingers = controller.frame().fingers
finger_bones = []
for finger in fingers:
finger_bones.append(finger.bone(Bone.TYPE_METACARPAL).next_joint)
finger_bones.append(finger.bone(Bone.TYPE_PROXIMAL).next_joint)
finger_bones.append(finger.bone(Bone.TYPE_INTERMEDIATE).next_joint)
finger_bones.append(finger.bone(Bone.TYPE_DISTAL).next_joint)
# possible issue when more than one hand
hands = controller.frame().hands
hand_center = 0
for hand in hands:
hand_center = hand.palm_position
calibrated_finger_bones = {}
for i in range(len(finger_bones)):
normalized_joint = (finger_bones[i] - hand_center).to_tuple()
for j in range(3):
calibrated_finger_bones["feat" + str(i*3+j)] = normalized_joint[j]
return calibrated_finger_bones
if __name__ == "__main__":
controller = Leap.Controller()
while True:
get_hand_position(controller)
time.sleep(1)
|
import time
from lib import Leap
from lib.Leap import Bone
'''
gets the current frame from controller
for each finger, stores the topmost end of each bone (4 points)
adjusts bone location relativity by subtracting the center of the palm
returns the adjusted bone locations in the form:
[(finger1bone1x, finger1bone1y, finger1bone1z), ... finger5bone4z)]
'''
def get_hand_position(controller):
print "NEW FRAME"
fingers = controller.frame().fingers
finger_bones = []
for finger in fingers:
finger_bones.append(finger.bone(Bone.TYPE_METACARPAL).next_joint)
finger_bones.append(finger.bone(Bone.TYPE_PROXIMAL).next_joint)
finger_bones.append(finger.bone(Bone.TYPE_INTERMEDIATE).next_joint)
finger_bones.append(finger.bone(Bone.TYPE_DISTAL).next_joint)
# possible issue when more than one hand
hands = controller.frame().hands
hand_center = 0
for hand in hands:
hand_center = hand.palm_position
calibrated_finger_bones = []
for joint in finger_bones:
calibrated_finger_bones.append(joint - hand_center)
return calibrated_finger_bones
if __name__ == "__main__":
controller = Leap.Controller()
while True:
get_hand_position(controller)
time.sleep(1)
Return hand data as dictionaryimport time
from lib import Leap
from lib.Leap import Bone
'''
gets the current frame from controller
for each finger, stores the topmost end of each bone (4 points)
adjusts bone location relativity by subtracting the center of the palm
returns the adjusted bone locations in the form:
{feat0=some_float, feat1=some_float, ... feat59=some_float}
'''
def get_hand_position(controller):
print "NEW FRAME"
fingers = controller.frame().fingers
finger_bones = []
for finger in fingers:
finger_bones.append(finger.bone(Bone.TYPE_METACARPAL).next_joint)
finger_bones.append(finger.bone(Bone.TYPE_PROXIMAL).next_joint)
finger_bones.append(finger.bone(Bone.TYPE_INTERMEDIATE).next_joint)
finger_bones.append(finger.bone(Bone.TYPE_DISTAL).next_joint)
# possible issue when more than one hand
hands = controller.frame().hands
hand_center = 0
for hand in hands:
hand_center = hand.palm_position
calibrated_finger_bones = {}
for i in range(len(finger_bones)):
normalized_joint = (finger_bones[i] - hand_center).to_tuple()
for j in range(3):
calibrated_finger_bones["feat" + str(i*3+j)] = normalized_joint[j]
return calibrated_finger_bones
if __name__ == "__main__":
controller = Leap.Controller()
while True:
get_hand_position(controller)
time.sleep(1)
|
<commit_before>import time
from lib import Leap
from lib.Leap import Bone
'''
gets the current frame from controller
for each finger, stores the topmost end of each bone (4 points)
adjusts bone location relativity by subtracting the center of the palm
returns the adjusted bone locations in the form:
[(finger1bone1x, finger1bone1y, finger1bone1z), ... finger5bone4z)]
'''
def get_hand_position(controller):
print "NEW FRAME"
fingers = controller.frame().fingers
finger_bones = []
for finger in fingers:
finger_bones.append(finger.bone(Bone.TYPE_METACARPAL).next_joint)
finger_bones.append(finger.bone(Bone.TYPE_PROXIMAL).next_joint)
finger_bones.append(finger.bone(Bone.TYPE_INTERMEDIATE).next_joint)
finger_bones.append(finger.bone(Bone.TYPE_DISTAL).next_joint)
# possible issue when more than one hand
hands = controller.frame().hands
hand_center = 0
for hand in hands:
hand_center = hand.palm_position
calibrated_finger_bones = []
for joint in finger_bones:
calibrated_finger_bones.append(joint - hand_center)
return calibrated_finger_bones
if __name__ == "__main__":
controller = Leap.Controller()
while True:
get_hand_position(controller)
time.sleep(1)
<commit_msg>Return hand data as dictionary<commit_after>import time
from lib import Leap
from lib.Leap import Bone
'''
gets the current frame from controller
for each finger, stores the topmost end of each bone (4 points)
adjusts bone location relativity by subtracting the center of the palm
returns the adjusted bone locations in the form:
{feat0=some_float, feat1=some_float, ... feat59=some_float}
'''
def get_hand_position(controller):
print "NEW FRAME"
fingers = controller.frame().fingers
finger_bones = []
for finger in fingers:
finger_bones.append(finger.bone(Bone.TYPE_METACARPAL).next_joint)
finger_bones.append(finger.bone(Bone.TYPE_PROXIMAL).next_joint)
finger_bones.append(finger.bone(Bone.TYPE_INTERMEDIATE).next_joint)
finger_bones.append(finger.bone(Bone.TYPE_DISTAL).next_joint)
# possible issue when more than one hand
hands = controller.frame().hands
hand_center = 0
for hand in hands:
hand_center = hand.palm_position
calibrated_finger_bones = {}
for i in range(len(finger_bones)):
normalized_joint = (finger_bones[i] - hand_center).to_tuple()
for j in range(3):
calibrated_finger_bones["feat" + str(i*3+j)] = normalized_joint[j]
return calibrated_finger_bones
if __name__ == "__main__":
controller = Leap.Controller()
while True:
get_hand_position(controller)
time.sleep(1)
|
33f94c28500c96841b1bf5ce507e418364ea556f
|
StarWebTkeDesenv.py
|
StarWebTkeDesenv.py
|
import urllib.parse
import urllib.request
import http.cookiejar
url = 'http://stwebdv.thyssenkruppelevadores.com.br/scripts/gisdesenv.pl/swfw3.r'
values = {'usuario' : 'admin',
'senha' : 'tke'}
cj = http.cookiejar.CookieJar()
data = urllib.parse.urlencode(values).encode('utf-8')
opener = urllib.request.build_opener(urllib.request.HTTPCookieProcessor(cj))
req = urllib.request.Request(url, data)
response = opener.open(req)
cj.extract_cookies(response, req)
print(cj._cookies)
|
import urllib.parse
import urllib.request
import http.cookiejar
cj = http.cookiejar.CookieJar()
opener = urllib.request.build_opener(urllib.request.HTTPCookieProcessor(cj))
loginUrl = 'http://stwebdv.thyssenkruppelevadores.com.br/scripts/gisdesenv.pl/swfw3.r'
loginData = {'usuario' : 'admin', 'senha' : 'tke'}
postData = urllib.parse.urlencode(loginData).encode('utf-8')
request = urllib.request.Request(loginUrl, postData)
response = opener.open(request)
compilerUrl = 'http://stwebdv.thyssenkruppelevadores.com.br/scripts/gisdesenv.pl/progs/swfw0080'
compilerData = {'Arquivo' : 'c:/sisweb/desenv/ait/ait_aacm1.html,c:/sisweb/desenv/ait/ait_aacm2.html,c:/sisweb/desenv/ait/ait_aacm3.html,'}
postData = urllib.parse.urlencode(compilerData).encode('utf-8')
request = urllib.request.Request(compilerUrl, postData)
response = opener.open(request)
page = response.read()
print(page)
|
Add files to be compiled.
|
Add files to be compiled.
|
Python
|
mit
|
taschetto/sublimeSettings,taschetto/sublimeSettings
|
import urllib.parse
import urllib.request
import http.cookiejar
url = 'http://stwebdv.thyssenkruppelevadores.com.br/scripts/gisdesenv.pl/swfw3.r'
values = {'usuario' : 'admin',
'senha' : 'tke'}
cj = http.cookiejar.CookieJar()
data = urllib.parse.urlencode(values).encode('utf-8')
opener = urllib.request.build_opener(urllib.request.HTTPCookieProcessor(cj))
req = urllib.request.Request(url, data)
response = opener.open(req)
cj.extract_cookies(response, req)
print(cj._cookies)Add files to be compiled.
|
import urllib.parse
import urllib.request
import http.cookiejar
cj = http.cookiejar.CookieJar()
opener = urllib.request.build_opener(urllib.request.HTTPCookieProcessor(cj))
loginUrl = 'http://stwebdv.thyssenkruppelevadores.com.br/scripts/gisdesenv.pl/swfw3.r'
loginData = {'usuario' : 'admin', 'senha' : 'tke'}
postData = urllib.parse.urlencode(loginData).encode('utf-8')
request = urllib.request.Request(loginUrl, postData)
response = opener.open(request)
compilerUrl = 'http://stwebdv.thyssenkruppelevadores.com.br/scripts/gisdesenv.pl/progs/swfw0080'
compilerData = {'Arquivo' : 'c:/sisweb/desenv/ait/ait_aacm1.html,c:/sisweb/desenv/ait/ait_aacm2.html,c:/sisweb/desenv/ait/ait_aacm3.html,'}
postData = urllib.parse.urlencode(compilerData).encode('utf-8')
request = urllib.request.Request(compilerUrl, postData)
response = opener.open(request)
page = response.read()
print(page)
|
<commit_before>import urllib.parse
import urllib.request
import http.cookiejar
url = 'http://stwebdv.thyssenkruppelevadores.com.br/scripts/gisdesenv.pl/swfw3.r'
values = {'usuario' : 'admin',
'senha' : 'tke'}
cj = http.cookiejar.CookieJar()
data = urllib.parse.urlencode(values).encode('utf-8')
opener = urllib.request.build_opener(urllib.request.HTTPCookieProcessor(cj))
req = urllib.request.Request(url, data)
response = opener.open(req)
cj.extract_cookies(response, req)
print(cj._cookies)<commit_msg>Add files to be compiled.<commit_after>
|
import urllib.parse
import urllib.request
import http.cookiejar
cj = http.cookiejar.CookieJar()
opener = urllib.request.build_opener(urllib.request.HTTPCookieProcessor(cj))
loginUrl = 'http://stwebdv.thyssenkruppelevadores.com.br/scripts/gisdesenv.pl/swfw3.r'
loginData = {'usuario' : 'admin', 'senha' : 'tke'}
postData = urllib.parse.urlencode(loginData).encode('utf-8')
request = urllib.request.Request(loginUrl, postData)
response = opener.open(request)
compilerUrl = 'http://stwebdv.thyssenkruppelevadores.com.br/scripts/gisdesenv.pl/progs/swfw0080'
compilerData = {'Arquivo' : 'c:/sisweb/desenv/ait/ait_aacm1.html,c:/sisweb/desenv/ait/ait_aacm2.html,c:/sisweb/desenv/ait/ait_aacm3.html,'}
postData = urllib.parse.urlencode(compilerData).encode('utf-8')
request = urllib.request.Request(compilerUrl, postData)
response = opener.open(request)
page = response.read()
print(page)
|
import urllib.parse
import urllib.request
import http.cookiejar
url = 'http://stwebdv.thyssenkruppelevadores.com.br/scripts/gisdesenv.pl/swfw3.r'
values = {'usuario' : 'admin',
'senha' : 'tke'}
cj = http.cookiejar.CookieJar()
data = urllib.parse.urlencode(values).encode('utf-8')
opener = urllib.request.build_opener(urllib.request.HTTPCookieProcessor(cj))
req = urllib.request.Request(url, data)
response = opener.open(req)
cj.extract_cookies(response, req)
print(cj._cookies)Add files to be compiled.import urllib.parse
import urllib.request
import http.cookiejar
cj = http.cookiejar.CookieJar()
opener = urllib.request.build_opener(urllib.request.HTTPCookieProcessor(cj))
loginUrl = 'http://stwebdv.thyssenkruppelevadores.com.br/scripts/gisdesenv.pl/swfw3.r'
loginData = {'usuario' : 'admin', 'senha' : 'tke'}
postData = urllib.parse.urlencode(loginData).encode('utf-8')
request = urllib.request.Request(loginUrl, postData)
response = opener.open(request)
compilerUrl = 'http://stwebdv.thyssenkruppelevadores.com.br/scripts/gisdesenv.pl/progs/swfw0080'
compilerData = {'Arquivo' : 'c:/sisweb/desenv/ait/ait_aacm1.html,c:/sisweb/desenv/ait/ait_aacm2.html,c:/sisweb/desenv/ait/ait_aacm3.html,'}
postData = urllib.parse.urlencode(compilerData).encode('utf-8')
request = urllib.request.Request(compilerUrl, postData)
response = opener.open(request)
page = response.read()
print(page)
|
<commit_before>import urllib.parse
import urllib.request
import http.cookiejar
url = 'http://stwebdv.thyssenkruppelevadores.com.br/scripts/gisdesenv.pl/swfw3.r'
values = {'usuario' : 'admin',
'senha' : 'tke'}
cj = http.cookiejar.CookieJar()
data = urllib.parse.urlencode(values).encode('utf-8')
opener = urllib.request.build_opener(urllib.request.HTTPCookieProcessor(cj))
req = urllib.request.Request(url, data)
response = opener.open(req)
cj.extract_cookies(response, req)
print(cj._cookies)<commit_msg>Add files to be compiled.<commit_after>import urllib.parse
import urllib.request
import http.cookiejar
cj = http.cookiejar.CookieJar()
opener = urllib.request.build_opener(urllib.request.HTTPCookieProcessor(cj))
loginUrl = 'http://stwebdv.thyssenkruppelevadores.com.br/scripts/gisdesenv.pl/swfw3.r'
loginData = {'usuario' : 'admin', 'senha' : 'tke'}
postData = urllib.parse.urlencode(loginData).encode('utf-8')
request = urllib.request.Request(loginUrl, postData)
response = opener.open(request)
compilerUrl = 'http://stwebdv.thyssenkruppelevadores.com.br/scripts/gisdesenv.pl/progs/swfw0080'
compilerData = {'Arquivo' : 'c:/sisweb/desenv/ait/ait_aacm1.html,c:/sisweb/desenv/ait/ait_aacm2.html,c:/sisweb/desenv/ait/ait_aacm3.html,'}
postData = urllib.parse.urlencode(compilerData).encode('utf-8')
request = urllib.request.Request(compilerUrl, postData)
response = opener.open(request)
page = response.read()
print(page)
|
4bd6ed79562435c3e2ef96472f6990109c482117
|
deen/constants.py
|
deen/constants.py
|
import sys
__version__ = '0.9.1'
ENCODINGS = ['Base64',
'Base64 URL',
'Base32',
'Hex',
'URL',
'HTML',
'Rot13',
'UTF8',
'UTF16']
COMPRESSIONS = ['Gzip',
'Bz2']
HASHS = ['MD5',
'SHA1',
'SHA224',
'SHA256',
'SHA384',
'SHA512',
'RIPEMD160',
'MD4',
'MDC2',
'NTLM',
'Whirlpool']
MISC = ['X509Certificate']
FORMATTERS = ['XML',
'HTML',
'JSON']
# Add features based on Python version
if sys.version_info.major == 3:
if sys.version_info.minor >= 6:
HASHS.append('BLAKE2b')
HASHS.append('BLAKE2s')
if sys.version_info.minor >= 4:
ENCODINGS.insert(3, 'Base85')
|
import sys
__version__ = '0.9.2'
ENCODINGS = ['Base64',
'Base64 URL',
'Base32',
'Hex',
'URL',
'HTML',
'Rot13',
'UTF8',
'UTF16']
COMPRESSIONS = ['Gzip',
'Bz2']
HASHS = ['MD5',
'SHA1',
'SHA224',
'SHA256',
'SHA384',
'SHA512',
'RIPEMD160',
'MD4',
'MDC2',
'NTLM',
'Whirlpool']
MISC = []
try:
import OpenSSL.crypto
except ImportError:
pass
else:
MISC.append('X509Certificate')
FORMATTERS = ['XML',
'HTML',
'JSON']
# Add features based on Python version
if sys.version_info.major == 3:
if sys.version_info.minor >= 6:
HASHS.append('BLAKE2b')
HASHS.append('BLAKE2s')
if sys.version_info.minor >= 4:
ENCODINGS.insert(3, 'Base85')
|
Add X509 support only when pyOpenSSL is installed
|
Add X509 support only when pyOpenSSL is installed
|
Python
|
apache-2.0
|
takeshixx/deen,takeshixx/deen
|
import sys
__version__ = '0.9.1'
ENCODINGS = ['Base64',
'Base64 URL',
'Base32',
'Hex',
'URL',
'HTML',
'Rot13',
'UTF8',
'UTF16']
COMPRESSIONS = ['Gzip',
'Bz2']
HASHS = ['MD5',
'SHA1',
'SHA224',
'SHA256',
'SHA384',
'SHA512',
'RIPEMD160',
'MD4',
'MDC2',
'NTLM',
'Whirlpool']
MISC = ['X509Certificate']
FORMATTERS = ['XML',
'HTML',
'JSON']
# Add features based on Python version
if sys.version_info.major == 3:
if sys.version_info.minor >= 6:
HASHS.append('BLAKE2b')
HASHS.append('BLAKE2s')
if sys.version_info.minor >= 4:
ENCODINGS.insert(3, 'Base85')
Add X509 support only when pyOpenSSL is installed
|
import sys
__version__ = '0.9.2'
ENCODINGS = ['Base64',
'Base64 URL',
'Base32',
'Hex',
'URL',
'HTML',
'Rot13',
'UTF8',
'UTF16']
COMPRESSIONS = ['Gzip',
'Bz2']
HASHS = ['MD5',
'SHA1',
'SHA224',
'SHA256',
'SHA384',
'SHA512',
'RIPEMD160',
'MD4',
'MDC2',
'NTLM',
'Whirlpool']
MISC = []
try:
import OpenSSL.crypto
except ImportError:
pass
else:
MISC.append('X509Certificate')
FORMATTERS = ['XML',
'HTML',
'JSON']
# Add features based on Python version
if sys.version_info.major == 3:
if sys.version_info.minor >= 6:
HASHS.append('BLAKE2b')
HASHS.append('BLAKE2s')
if sys.version_info.minor >= 4:
ENCODINGS.insert(3, 'Base85')
|
<commit_before>import sys
__version__ = '0.9.1'
ENCODINGS = ['Base64',
'Base64 URL',
'Base32',
'Hex',
'URL',
'HTML',
'Rot13',
'UTF8',
'UTF16']
COMPRESSIONS = ['Gzip',
'Bz2']
HASHS = ['MD5',
'SHA1',
'SHA224',
'SHA256',
'SHA384',
'SHA512',
'RIPEMD160',
'MD4',
'MDC2',
'NTLM',
'Whirlpool']
MISC = ['X509Certificate']
FORMATTERS = ['XML',
'HTML',
'JSON']
# Add features based on Python version
if sys.version_info.major == 3:
if sys.version_info.minor >= 6:
HASHS.append('BLAKE2b')
HASHS.append('BLAKE2s')
if sys.version_info.minor >= 4:
ENCODINGS.insert(3, 'Base85')
<commit_msg>Add X509 support only when pyOpenSSL is installed<commit_after>
|
import sys
__version__ = '0.9.2'
ENCODINGS = ['Base64',
'Base64 URL',
'Base32',
'Hex',
'URL',
'HTML',
'Rot13',
'UTF8',
'UTF16']
COMPRESSIONS = ['Gzip',
'Bz2']
HASHS = ['MD5',
'SHA1',
'SHA224',
'SHA256',
'SHA384',
'SHA512',
'RIPEMD160',
'MD4',
'MDC2',
'NTLM',
'Whirlpool']
MISC = []
try:
import OpenSSL.crypto
except ImportError:
pass
else:
MISC.append('X509Certificate')
FORMATTERS = ['XML',
'HTML',
'JSON']
# Add features based on Python version
if sys.version_info.major == 3:
if sys.version_info.minor >= 6:
HASHS.append('BLAKE2b')
HASHS.append('BLAKE2s')
if sys.version_info.minor >= 4:
ENCODINGS.insert(3, 'Base85')
|
import sys
__version__ = '0.9.1'
ENCODINGS = ['Base64',
'Base64 URL',
'Base32',
'Hex',
'URL',
'HTML',
'Rot13',
'UTF8',
'UTF16']
COMPRESSIONS = ['Gzip',
'Bz2']
HASHS = ['MD5',
'SHA1',
'SHA224',
'SHA256',
'SHA384',
'SHA512',
'RIPEMD160',
'MD4',
'MDC2',
'NTLM',
'Whirlpool']
MISC = ['X509Certificate']
FORMATTERS = ['XML',
'HTML',
'JSON']
# Add features based on Python version
if sys.version_info.major == 3:
if sys.version_info.minor >= 6:
HASHS.append('BLAKE2b')
HASHS.append('BLAKE2s')
if sys.version_info.minor >= 4:
ENCODINGS.insert(3, 'Base85')
Add X509 support only when pyOpenSSL is installedimport sys
__version__ = '0.9.2'
ENCODINGS = ['Base64',
'Base64 URL',
'Base32',
'Hex',
'URL',
'HTML',
'Rot13',
'UTF8',
'UTF16']
COMPRESSIONS = ['Gzip',
'Bz2']
HASHS = ['MD5',
'SHA1',
'SHA224',
'SHA256',
'SHA384',
'SHA512',
'RIPEMD160',
'MD4',
'MDC2',
'NTLM',
'Whirlpool']
MISC = []
try:
import OpenSSL.crypto
except ImportError:
pass
else:
MISC.append('X509Certificate')
FORMATTERS = ['XML',
'HTML',
'JSON']
# Add features based on Python version
if sys.version_info.major == 3:
if sys.version_info.minor >= 6:
HASHS.append('BLAKE2b')
HASHS.append('BLAKE2s')
if sys.version_info.minor >= 4:
ENCODINGS.insert(3, 'Base85')
|
<commit_before>import sys
__version__ = '0.9.1'
ENCODINGS = ['Base64',
'Base64 URL',
'Base32',
'Hex',
'URL',
'HTML',
'Rot13',
'UTF8',
'UTF16']
COMPRESSIONS = ['Gzip',
'Bz2']
HASHS = ['MD5',
'SHA1',
'SHA224',
'SHA256',
'SHA384',
'SHA512',
'RIPEMD160',
'MD4',
'MDC2',
'NTLM',
'Whirlpool']
MISC = ['X509Certificate']
FORMATTERS = ['XML',
'HTML',
'JSON']
# Add features based on Python version
if sys.version_info.major == 3:
if sys.version_info.minor >= 6:
HASHS.append('BLAKE2b')
HASHS.append('BLAKE2s')
if sys.version_info.minor >= 4:
ENCODINGS.insert(3, 'Base85')
<commit_msg>Add X509 support only when pyOpenSSL is installed<commit_after>import sys
__version__ = '0.9.2'
ENCODINGS = ['Base64',
'Base64 URL',
'Base32',
'Hex',
'URL',
'HTML',
'Rot13',
'UTF8',
'UTF16']
COMPRESSIONS = ['Gzip',
'Bz2']
HASHS = ['MD5',
'SHA1',
'SHA224',
'SHA256',
'SHA384',
'SHA512',
'RIPEMD160',
'MD4',
'MDC2',
'NTLM',
'Whirlpool']
MISC = []
try:
import OpenSSL.crypto
except ImportError:
pass
else:
MISC.append('X509Certificate')
FORMATTERS = ['XML',
'HTML',
'JSON']
# Add features based on Python version
if sys.version_info.major == 3:
if sys.version_info.minor >= 6:
HASHS.append('BLAKE2b')
HASHS.append('BLAKE2s')
if sys.version_info.minor >= 4:
ENCODINGS.insert(3, 'Base85')
|
ff06ce55d0856cff774bdec5f0e872e093216bce
|
diffs/__init__.py
|
diffs/__init__.py
|
from __future__ import absolute_import, unicode_literals
from django.apps import apps as django_apps
from .signals import connect
__version__ = '0.0.1'
default_app_config = 'diffs.apps.DiffLogConfig'
klasses_to_connect = []
def register(klass):
"""
Decorator function that registers a class to record diffs.
@register
class ExampleModel(models.Model):
...
"""
from .models import DiffLogEntryManager
from dirtyfields import DirtyFieldsMixin
# Hack to add dirtyfieldsmixin automatically
if DirtyFieldsMixin not in klass.__bases__:
klass.__bases__ = (DirtyFieldsMixin,) + klass.__bases__
klass.add_to_class('diffs', DiffLogEntryManager())
if not django_apps.ready:
klasses_to_connect.append(klass)
else:
connect(klass)
return klass
|
from __future__ import absolute_import, unicode_literals
from .signals import connect
__version__ = '0.0.1'
default_app_config = 'diffs.apps.DiffLogConfig'
klasses_to_connect = []
def register(klass):
"""
Decorator function that registers a class to record diffs.
@register
class ExampleModel(models.Model):
...
"""
from django.apps import apps as django_apps
from dirtyfields import DirtyFieldsMixin
from .models import DiffLogEntryManager
# Hack to add dirtyfieldsmixin automatically
if DirtyFieldsMixin not in klass.__bases__:
klass.__bases__ = (DirtyFieldsMixin,) + klass.__bases__
klass.add_to_class('diffs', DiffLogEntryManager())
if not django_apps.ready:
klasses_to_connect.append(klass)
else:
connect(klass)
return klass
|
Reorganize imports to be later
|
Reorganize imports to be later
|
Python
|
mit
|
linuxlewis/django-diffs
|
from __future__ import absolute_import, unicode_literals
from django.apps import apps as django_apps
from .signals import connect
__version__ = '0.0.1'
default_app_config = 'diffs.apps.DiffLogConfig'
klasses_to_connect = []
def register(klass):
"""
Decorator function that registers a class to record diffs.
@register
class ExampleModel(models.Model):
...
"""
from .models import DiffLogEntryManager
from dirtyfields import DirtyFieldsMixin
# Hack to add dirtyfieldsmixin automatically
if DirtyFieldsMixin not in klass.__bases__:
klass.__bases__ = (DirtyFieldsMixin,) + klass.__bases__
klass.add_to_class('diffs', DiffLogEntryManager())
if not django_apps.ready:
klasses_to_connect.append(klass)
else:
connect(klass)
return klass
Reorganize imports to be later
|
from __future__ import absolute_import, unicode_literals
from .signals import connect
__version__ = '0.0.1'
default_app_config = 'diffs.apps.DiffLogConfig'
klasses_to_connect = []
def register(klass):
"""
Decorator function that registers a class to record diffs.
@register
class ExampleModel(models.Model):
...
"""
from django.apps import apps as django_apps
from dirtyfields import DirtyFieldsMixin
from .models import DiffLogEntryManager
# Hack to add dirtyfieldsmixin automatically
if DirtyFieldsMixin not in klass.__bases__:
klass.__bases__ = (DirtyFieldsMixin,) + klass.__bases__
klass.add_to_class('diffs', DiffLogEntryManager())
if not django_apps.ready:
klasses_to_connect.append(klass)
else:
connect(klass)
return klass
|
<commit_before>from __future__ import absolute_import, unicode_literals
from django.apps import apps as django_apps
from .signals import connect
__version__ = '0.0.1'
default_app_config = 'diffs.apps.DiffLogConfig'
klasses_to_connect = []
def register(klass):
"""
Decorator function that registers a class to record diffs.
@register
class ExampleModel(models.Model):
...
"""
from .models import DiffLogEntryManager
from dirtyfields import DirtyFieldsMixin
# Hack to add dirtyfieldsmixin automatically
if DirtyFieldsMixin not in klass.__bases__:
klass.__bases__ = (DirtyFieldsMixin,) + klass.__bases__
klass.add_to_class('diffs', DiffLogEntryManager())
if not django_apps.ready:
klasses_to_connect.append(klass)
else:
connect(klass)
return klass
<commit_msg>Reorganize imports to be later<commit_after>
|
from __future__ import absolute_import, unicode_literals
from .signals import connect
__version__ = '0.0.1'
default_app_config = 'diffs.apps.DiffLogConfig'
klasses_to_connect = []
def register(klass):
"""
Decorator function that registers a class to record diffs.
@register
class ExampleModel(models.Model):
...
"""
from django.apps import apps as django_apps
from dirtyfields import DirtyFieldsMixin
from .models import DiffLogEntryManager
# Hack to add dirtyfieldsmixin automatically
if DirtyFieldsMixin not in klass.__bases__:
klass.__bases__ = (DirtyFieldsMixin,) + klass.__bases__
klass.add_to_class('diffs', DiffLogEntryManager())
if not django_apps.ready:
klasses_to_connect.append(klass)
else:
connect(klass)
return klass
|
from __future__ import absolute_import, unicode_literals
from django.apps import apps as django_apps
from .signals import connect
__version__ = '0.0.1'
default_app_config = 'diffs.apps.DiffLogConfig'
klasses_to_connect = []
def register(klass):
"""
Decorator function that registers a class to record diffs.
@register
class ExampleModel(models.Model):
...
"""
from .models import DiffLogEntryManager
from dirtyfields import DirtyFieldsMixin
# Hack to add dirtyfieldsmixin automatically
if DirtyFieldsMixin not in klass.__bases__:
klass.__bases__ = (DirtyFieldsMixin,) + klass.__bases__
klass.add_to_class('diffs', DiffLogEntryManager())
if not django_apps.ready:
klasses_to_connect.append(klass)
else:
connect(klass)
return klass
Reorganize imports to be laterfrom __future__ import absolute_import, unicode_literals
from .signals import connect
__version__ = '0.0.1'
default_app_config = 'diffs.apps.DiffLogConfig'
klasses_to_connect = []
def register(klass):
"""
Decorator function that registers a class to record diffs.
@register
class ExampleModel(models.Model):
...
"""
from django.apps import apps as django_apps
from dirtyfields import DirtyFieldsMixin
from .models import DiffLogEntryManager
# Hack to add dirtyfieldsmixin automatically
if DirtyFieldsMixin not in klass.__bases__:
klass.__bases__ = (DirtyFieldsMixin,) + klass.__bases__
klass.add_to_class('diffs', DiffLogEntryManager())
if not django_apps.ready:
klasses_to_connect.append(klass)
else:
connect(klass)
return klass
|
<commit_before>from __future__ import absolute_import, unicode_literals
from django.apps import apps as django_apps
from .signals import connect
__version__ = '0.0.1'
default_app_config = 'diffs.apps.DiffLogConfig'
klasses_to_connect = []
def register(klass):
"""
Decorator function that registers a class to record diffs.
@register
class ExampleModel(models.Model):
...
"""
from .models import DiffLogEntryManager
from dirtyfields import DirtyFieldsMixin
# Hack to add dirtyfieldsmixin automatically
if DirtyFieldsMixin not in klass.__bases__:
klass.__bases__ = (DirtyFieldsMixin,) + klass.__bases__
klass.add_to_class('diffs', DiffLogEntryManager())
if not django_apps.ready:
klasses_to_connect.append(klass)
else:
connect(klass)
return klass
<commit_msg>Reorganize imports to be later<commit_after>from __future__ import absolute_import, unicode_literals
from .signals import connect
__version__ = '0.0.1'
default_app_config = 'diffs.apps.DiffLogConfig'
klasses_to_connect = []
def register(klass):
"""
Decorator function that registers a class to record diffs.
@register
class ExampleModel(models.Model):
...
"""
from django.apps import apps as django_apps
from dirtyfields import DirtyFieldsMixin
from .models import DiffLogEntryManager
# Hack to add dirtyfieldsmixin automatically
if DirtyFieldsMixin not in klass.__bases__:
klass.__bases__ = (DirtyFieldsMixin,) + klass.__bases__
klass.add_to_class('diffs', DiffLogEntryManager())
if not django_apps.ready:
klasses_to_connect.append(klass)
else:
connect(klass)
return klass
|
d0728b29514c0731dedee662ce19e73181bc4c34
|
pyfarm/models/gpu.py
|
pyfarm/models/gpu.py
|
# No shebang line, this module is meant to be imported
#
# Copyright 2014 Ambient Entertainment GmbH & Co. KG
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
GPU
-------
Model describing a given make and model of graphics card.
Every agent can have zero or more GPUs associated with it.
"""
from sqlalchemy.schema import UniqueConstraint
from pyfarm.master.application import db
from pyfarm.models.core.mixins import ReprMixin, UtilityMixins
from pyfarm.models.core.types import id_column
from pyfarm.models.core.cfg import TABLE_GPU, MAX_GPUNAME_LENGTH
class GPU(db.Model, UtilityMixins, ReprMixin):
__tablename__ = TABLE_GPU
__table_args__ = (UniqueConstraint("fullname"),)
id = id_column(db.Integer)
fullname = db.Column(db.String(MAX_GPUNAME_LENGTH), nullable=False,
doc="The full name of this graphics card model")
|
# No shebang line, this module is meant to be imported
#
# Copyright 2014 Ambient Entertainment GmbH & Co. KG
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
GPU
---
Model describing a given make and model of graphics card.
Every agent can have zero or more GPUs associated with it.
"""
from sqlalchemy.schema import UniqueConstraint
from pyfarm.master.application import db
from pyfarm.models.core.mixins import ReprMixin, UtilityMixins
from pyfarm.models.core.types import id_column
from pyfarm.models.core.cfg import TABLE_GPU, MAX_GPUNAME_LENGTH
class GPU(db.Model, UtilityMixins, ReprMixin):
__tablename__ = TABLE_GPU
__table_args__ = (UniqueConstraint("fullname"),)
id = id_column(db.Integer)
fullname = db.Column(db.String(MAX_GPUNAME_LENGTH), nullable=False,
doc="The full name of this graphics card model")
|
Fix docstring for module (minor)
|
Fix docstring for module (minor)
|
Python
|
apache-2.0
|
pyfarm/pyfarm-master,pyfarm/pyfarm-master,pyfarm/pyfarm-master
|
# No shebang line, this module is meant to be imported
#
# Copyright 2014 Ambient Entertainment GmbH & Co. KG
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
GPU
-------
Model describing a given make and model of graphics card.
Every agent can have zero or more GPUs associated with it.
"""
from sqlalchemy.schema import UniqueConstraint
from pyfarm.master.application import db
from pyfarm.models.core.mixins import ReprMixin, UtilityMixins
from pyfarm.models.core.types import id_column
from pyfarm.models.core.cfg import TABLE_GPU, MAX_GPUNAME_LENGTH
class GPU(db.Model, UtilityMixins, ReprMixin):
__tablename__ = TABLE_GPU
__table_args__ = (UniqueConstraint("fullname"),)
id = id_column(db.Integer)
fullname = db.Column(db.String(MAX_GPUNAME_LENGTH), nullable=False,
doc="The full name of this graphics card model")
Fix docstring for module (minor)
|
# No shebang line, this module is meant to be imported
#
# Copyright 2014 Ambient Entertainment GmbH & Co. KG
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
GPU
---
Model describing a given make and model of graphics card.
Every agent can have zero or more GPUs associated with it.
"""
from sqlalchemy.schema import UniqueConstraint
from pyfarm.master.application import db
from pyfarm.models.core.mixins import ReprMixin, UtilityMixins
from pyfarm.models.core.types import id_column
from pyfarm.models.core.cfg import TABLE_GPU, MAX_GPUNAME_LENGTH
class GPU(db.Model, UtilityMixins, ReprMixin):
__tablename__ = TABLE_GPU
__table_args__ = (UniqueConstraint("fullname"),)
id = id_column(db.Integer)
fullname = db.Column(db.String(MAX_GPUNAME_LENGTH), nullable=False,
doc="The full name of this graphics card model")
|
<commit_before># No shebang line, this module is meant to be imported
#
# Copyright 2014 Ambient Entertainment GmbH & Co. KG
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
GPU
-------
Model describing a given make and model of graphics card.
Every agent can have zero or more GPUs associated with it.
"""
from sqlalchemy.schema import UniqueConstraint
from pyfarm.master.application import db
from pyfarm.models.core.mixins import ReprMixin, UtilityMixins
from pyfarm.models.core.types import id_column
from pyfarm.models.core.cfg import TABLE_GPU, MAX_GPUNAME_LENGTH
class GPU(db.Model, UtilityMixins, ReprMixin):
__tablename__ = TABLE_GPU
__table_args__ = (UniqueConstraint("fullname"),)
id = id_column(db.Integer)
fullname = db.Column(db.String(MAX_GPUNAME_LENGTH), nullable=False,
doc="The full name of this graphics card model")
<commit_msg>Fix docstring for module (minor)<commit_after>
|
# No shebang line, this module is meant to be imported
#
# Copyright 2014 Ambient Entertainment GmbH & Co. KG
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
GPU
---
Model describing a given make and model of graphics card.
Every agent can have zero or more GPUs associated with it.
"""
from sqlalchemy.schema import UniqueConstraint
from pyfarm.master.application import db
from pyfarm.models.core.mixins import ReprMixin, UtilityMixins
from pyfarm.models.core.types import id_column
from pyfarm.models.core.cfg import TABLE_GPU, MAX_GPUNAME_LENGTH
class GPU(db.Model, UtilityMixins, ReprMixin):
__tablename__ = TABLE_GPU
__table_args__ = (UniqueConstraint("fullname"),)
id = id_column(db.Integer)
fullname = db.Column(db.String(MAX_GPUNAME_LENGTH), nullable=False,
doc="The full name of this graphics card model")
|
# No shebang line, this module is meant to be imported
#
# Copyright 2014 Ambient Entertainment GmbH & Co. KG
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
GPU
-------
Model describing a given make and model of graphics card.
Every agent can have zero or more GPUs associated with it.
"""
from sqlalchemy.schema import UniqueConstraint
from pyfarm.master.application import db
from pyfarm.models.core.mixins import ReprMixin, UtilityMixins
from pyfarm.models.core.types import id_column
from pyfarm.models.core.cfg import TABLE_GPU, MAX_GPUNAME_LENGTH
class GPU(db.Model, UtilityMixins, ReprMixin):
__tablename__ = TABLE_GPU
__table_args__ = (UniqueConstraint("fullname"),)
id = id_column(db.Integer)
fullname = db.Column(db.String(MAX_GPUNAME_LENGTH), nullable=False,
doc="The full name of this graphics card model")
Fix docstring for module (minor)# No shebang line, this module is meant to be imported
#
# Copyright 2014 Ambient Entertainment GmbH & Co. KG
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
GPU
---
Model describing a given make and model of graphics card.
Every agent can have zero or more GPUs associated with it.
"""
from sqlalchemy.schema import UniqueConstraint
from pyfarm.master.application import db
from pyfarm.models.core.mixins import ReprMixin, UtilityMixins
from pyfarm.models.core.types import id_column
from pyfarm.models.core.cfg import TABLE_GPU, MAX_GPUNAME_LENGTH
class GPU(db.Model, UtilityMixins, ReprMixin):
__tablename__ = TABLE_GPU
__table_args__ = (UniqueConstraint("fullname"),)
id = id_column(db.Integer)
fullname = db.Column(db.String(MAX_GPUNAME_LENGTH), nullable=False,
doc="The full name of this graphics card model")
|
<commit_before># No shebang line, this module is meant to be imported
#
# Copyright 2014 Ambient Entertainment GmbH & Co. KG
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
GPU
-------
Model describing a given make and model of graphics card.
Every agent can have zero or more GPUs associated with it.
"""
from sqlalchemy.schema import UniqueConstraint
from pyfarm.master.application import db
from pyfarm.models.core.mixins import ReprMixin, UtilityMixins
from pyfarm.models.core.types import id_column
from pyfarm.models.core.cfg import TABLE_GPU, MAX_GPUNAME_LENGTH
class GPU(db.Model, UtilityMixins, ReprMixin):
__tablename__ = TABLE_GPU
__table_args__ = (UniqueConstraint("fullname"),)
id = id_column(db.Integer)
fullname = db.Column(db.String(MAX_GPUNAME_LENGTH), nullable=False,
doc="The full name of this graphics card model")
<commit_msg>Fix docstring for module (minor)<commit_after># No shebang line, this module is meant to be imported
#
# Copyright 2014 Ambient Entertainment GmbH & Co. KG
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
GPU
---
Model describing a given make and model of graphics card.
Every agent can have zero or more GPUs associated with it.
"""
from sqlalchemy.schema import UniqueConstraint
from pyfarm.master.application import db
from pyfarm.models.core.mixins import ReprMixin, UtilityMixins
from pyfarm.models.core.types import id_column
from pyfarm.models.core.cfg import TABLE_GPU, MAX_GPUNAME_LENGTH
class GPU(db.Model, UtilityMixins, ReprMixin):
__tablename__ = TABLE_GPU
__table_args__ = (UniqueConstraint("fullname"),)
id = id_column(db.Integer)
fullname = db.Column(db.String(MAX_GPUNAME_LENGTH), nullable=False,
doc="The full name of this graphics card model")
|