commit
stringlengths 40
40
| old_file
stringlengths 4
118
| new_file
stringlengths 4
118
| old_contents
stringlengths 0
2.94k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
444
| message
stringlengths 16
3.45k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 5
43.2k
| prompt
stringlengths 17
4.58k
| response
stringlengths 1
4.43k
| prompt_tagged
stringlengths 58
4.62k
| response_tagged
stringlengths 1
4.43k
| text
stringlengths 132
7.29k
| text_tagged
stringlengths 173
7.33k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f92674dfa3e7dc5e1f3b8078b138877047cfe85e
|
feedzilla/compress_settings.py
|
feedzilla/compress_settings.py
|
COMPRESS_CSS = {
'common_styles': {
'source_filenames': ('feedzilla/css/reset.css', 'feedzilla/css/style.css'),
'output_filename': 'css/common_styles.r?.css',
}
}
COMPRESS_JS = {
'common_scripts': {
'source_filenames': ('feedzilla/js/jquery.js', 'feedzilla/js/fix_tags.js',
'feedzilla/js/jquery.highlight-2.js'),
'output_filename': 'js/common_styles.r?.js',
}
}
COMPRESS_VERSION = True
|
COMPRESS_CSS = {
'common_styles': {
'source_filenames': ('feedzilla/css/reset.css', 'feedzilla/css/style.css'),
'output_filename': 'css/common_styles.r?.css',
}
}
COMPRESS_JS = {
'common_scripts': {
'source_filenames': ('feedzilla/js/jquery.js', 'feedzilla/js/fix_tags.js',
'feedzilla/js/jquery.highlight-2.js'),
'output_filename': 'js/common_scripts.r?.js',
}
}
COMPRESS_VERSION = True
|
Fix filename in django-compress settings
|
Fix filename in django-compress settings
|
Python
|
bsd-3-clause
|
feedzilla/feedzilla,feedzilla/feedzilla,feedzilla/feedzilla
|
COMPRESS_CSS = {
'common_styles': {
'source_filenames': ('feedzilla/css/reset.css', 'feedzilla/css/style.css'),
'output_filename': 'css/common_styles.r?.css',
}
}
COMPRESS_JS = {
'common_scripts': {
'source_filenames': ('feedzilla/js/jquery.js', 'feedzilla/js/fix_tags.js',
'feedzilla/js/jquery.highlight-2.js'),
'output_filename': 'js/common_styles.r?.js',
}
}
COMPRESS_VERSION = True
Fix filename in django-compress settings
|
COMPRESS_CSS = {
'common_styles': {
'source_filenames': ('feedzilla/css/reset.css', 'feedzilla/css/style.css'),
'output_filename': 'css/common_styles.r?.css',
}
}
COMPRESS_JS = {
'common_scripts': {
'source_filenames': ('feedzilla/js/jquery.js', 'feedzilla/js/fix_tags.js',
'feedzilla/js/jquery.highlight-2.js'),
'output_filename': 'js/common_scripts.r?.js',
}
}
COMPRESS_VERSION = True
|
<commit_before>COMPRESS_CSS = {
'common_styles': {
'source_filenames': ('feedzilla/css/reset.css', 'feedzilla/css/style.css'),
'output_filename': 'css/common_styles.r?.css',
}
}
COMPRESS_JS = {
'common_scripts': {
'source_filenames': ('feedzilla/js/jquery.js', 'feedzilla/js/fix_tags.js',
'feedzilla/js/jquery.highlight-2.js'),
'output_filename': 'js/common_styles.r?.js',
}
}
COMPRESS_VERSION = True
<commit_msg>Fix filename in django-compress settings<commit_after>
|
COMPRESS_CSS = {
'common_styles': {
'source_filenames': ('feedzilla/css/reset.css', 'feedzilla/css/style.css'),
'output_filename': 'css/common_styles.r?.css',
}
}
COMPRESS_JS = {
'common_scripts': {
'source_filenames': ('feedzilla/js/jquery.js', 'feedzilla/js/fix_tags.js',
'feedzilla/js/jquery.highlight-2.js'),
'output_filename': 'js/common_scripts.r?.js',
}
}
COMPRESS_VERSION = True
|
COMPRESS_CSS = {
'common_styles': {
'source_filenames': ('feedzilla/css/reset.css', 'feedzilla/css/style.css'),
'output_filename': 'css/common_styles.r?.css',
}
}
COMPRESS_JS = {
'common_scripts': {
'source_filenames': ('feedzilla/js/jquery.js', 'feedzilla/js/fix_tags.js',
'feedzilla/js/jquery.highlight-2.js'),
'output_filename': 'js/common_styles.r?.js',
}
}
COMPRESS_VERSION = True
Fix filename in django-compress settingsCOMPRESS_CSS = {
'common_styles': {
'source_filenames': ('feedzilla/css/reset.css', 'feedzilla/css/style.css'),
'output_filename': 'css/common_styles.r?.css',
}
}
COMPRESS_JS = {
'common_scripts': {
'source_filenames': ('feedzilla/js/jquery.js', 'feedzilla/js/fix_tags.js',
'feedzilla/js/jquery.highlight-2.js'),
'output_filename': 'js/common_scripts.r?.js',
}
}
COMPRESS_VERSION = True
|
<commit_before>COMPRESS_CSS = {
'common_styles': {
'source_filenames': ('feedzilla/css/reset.css', 'feedzilla/css/style.css'),
'output_filename': 'css/common_styles.r?.css',
}
}
COMPRESS_JS = {
'common_scripts': {
'source_filenames': ('feedzilla/js/jquery.js', 'feedzilla/js/fix_tags.js',
'feedzilla/js/jquery.highlight-2.js'),
'output_filename': 'js/common_styles.r?.js',
}
}
COMPRESS_VERSION = True
<commit_msg>Fix filename in django-compress settings<commit_after>COMPRESS_CSS = {
'common_styles': {
'source_filenames': ('feedzilla/css/reset.css', 'feedzilla/css/style.css'),
'output_filename': 'css/common_styles.r?.css',
}
}
COMPRESS_JS = {
'common_scripts': {
'source_filenames': ('feedzilla/js/jquery.js', 'feedzilla/js/fix_tags.js',
'feedzilla/js/jquery.highlight-2.js'),
'output_filename': 'js/common_scripts.r?.js',
}
}
COMPRESS_VERSION = True
|
7bb851e54b9cc245259809c828ddbef62239c210
|
sensor_mqtt.py
|
sensor_mqtt.py
|
#!/usr/bin/env python
import mosquitto
import os
import time
import json
import random
import yaml
# Load config
stream = open("config.yml", 'r')
config = yaml.load(stream)
endpoint = os.environ['MQTT_ENDPOINT']
mypid = os.getpid()
client_uniq = "sensor_mqtt_"+str(mypid)
mqttc = mosquitto.Mosquitto(client_uniq)
mqttc.username_pw_set(config['mqtt']['username'])
mqttc.connect(config['mqtt']['broker'], config['mqtt']['port'], 60, True)
def publish(sensor, reading_type, reading):
sensor_config = config['sensors'][sensor][reading_type]
if sensor_config:
data = {
'version':'1.0.0',
'datastreams': [
{
"id" : sensor_config['publish_id'],
"datapoints": [
{
"at": time.ctime(),
"value": reading
}
]
}
]
}
mqttc.publish(sensor_config['mqtt_endpoint'], json.dumps(data))
while mqttc.loop() == 0:
publish("R1", "RIVR", random.randrange(0,255))
print "message published"
time.sleep(1)
pass
def cleanup():
print "Ending and cleaning up"
mqttc.disconnect()
|
#!/usr/bin/env python
import mosquitto
import os
import time
import json
import random
import yaml
# Load config
stream = open("config.yml", 'r')
config = yaml.load(stream)
endpoint = os.environ['MQTT_ENDPOINT']
mypid = os.getpid()
client_uniq = "sensor_mqtt_"+str(mypid)
mqttc = mosquitto.Mosquitto(client_uniq)
mqttc.username_pw_set(config['mqtt']['username'])
mqttc.connect(config['mqtt']['broker'], config['mqtt']['port'], 60, True)
def publish(sensor, reading_type, reading):
try:
sensor_config = config['sensors'][sensor][reading_type]
except KeyError:
print "unknown sensor or reading type: " + sensor + " " + reading_type
else:
if sensor_config:
data = {
'version':'1.0.0',
'datastreams': [
{
"id" : sensor_config['publish_id'],
"datapoints": [
{
"at": time.ctime(),
"value": reading
}
]
}
]
}
mqttc.publish(sensor_config['mqtt_endpoint'], json.dumps(data))
print "message published: " + sensor + " " + reading_type
while mqttc.loop() == 0:
publish("R1", "RIVR", random.randrange(0,255))
time.sleep(1)
pass
def cleanup():
print "Ending and cleaning up"
mqttc.disconnect()
|
Handle sensors / types that aren't in config file
|
Handle sensors / types that aren't in config file
|
Python
|
mit
|
sushack/pi_sensor_mqtt,OxFloodNet/pi_sensor_mqtt
|
#!/usr/bin/env python
import mosquitto
import os
import time
import json
import random
import yaml
# Load config
stream = open("config.yml", 'r')
config = yaml.load(stream)
endpoint = os.environ['MQTT_ENDPOINT']
mypid = os.getpid()
client_uniq = "sensor_mqtt_"+str(mypid)
mqttc = mosquitto.Mosquitto(client_uniq)
mqttc.username_pw_set(config['mqtt']['username'])
mqttc.connect(config['mqtt']['broker'], config['mqtt']['port'], 60, True)
def publish(sensor, reading_type, reading):
sensor_config = config['sensors'][sensor][reading_type]
if sensor_config:
data = {
'version':'1.0.0',
'datastreams': [
{
"id" : sensor_config['publish_id'],
"datapoints": [
{
"at": time.ctime(),
"value": reading
}
]
}
]
}
mqttc.publish(sensor_config['mqtt_endpoint'], json.dumps(data))
while mqttc.loop() == 0:
publish("R1", "RIVR", random.randrange(0,255))
print "message published"
time.sleep(1)
pass
def cleanup():
print "Ending and cleaning up"
mqttc.disconnect()Handle sensors / types that aren't in config file
|
#!/usr/bin/env python
import mosquitto
import os
import time
import json
import random
import yaml
# Load config
stream = open("config.yml", 'r')
config = yaml.load(stream)
endpoint = os.environ['MQTT_ENDPOINT']
mypid = os.getpid()
client_uniq = "sensor_mqtt_"+str(mypid)
mqttc = mosquitto.Mosquitto(client_uniq)
mqttc.username_pw_set(config['mqtt']['username'])
mqttc.connect(config['mqtt']['broker'], config['mqtt']['port'], 60, True)
def publish(sensor, reading_type, reading):
try:
sensor_config = config['sensors'][sensor][reading_type]
except KeyError:
print "unknown sensor or reading type: " + sensor + " " + reading_type
else:
if sensor_config:
data = {
'version':'1.0.0',
'datastreams': [
{
"id" : sensor_config['publish_id'],
"datapoints": [
{
"at": time.ctime(),
"value": reading
}
]
}
]
}
mqttc.publish(sensor_config['mqtt_endpoint'], json.dumps(data))
print "message published: " + sensor + " " + reading_type
while mqttc.loop() == 0:
publish("R1", "RIVR", random.randrange(0,255))
time.sleep(1)
pass
def cleanup():
print "Ending and cleaning up"
mqttc.disconnect()
|
<commit_before>#!/usr/bin/env python
import mosquitto
import os
import time
import json
import random
import yaml
# Load config
stream = open("config.yml", 'r')
config = yaml.load(stream)
endpoint = os.environ['MQTT_ENDPOINT']
mypid = os.getpid()
client_uniq = "sensor_mqtt_"+str(mypid)
mqttc = mosquitto.Mosquitto(client_uniq)
mqttc.username_pw_set(config['mqtt']['username'])
mqttc.connect(config['mqtt']['broker'], config['mqtt']['port'], 60, True)
def publish(sensor, reading_type, reading):
sensor_config = config['sensors'][sensor][reading_type]
if sensor_config:
data = {
'version':'1.0.0',
'datastreams': [
{
"id" : sensor_config['publish_id'],
"datapoints": [
{
"at": time.ctime(),
"value": reading
}
]
}
]
}
mqttc.publish(sensor_config['mqtt_endpoint'], json.dumps(data))
while mqttc.loop() == 0:
publish("R1", "RIVR", random.randrange(0,255))
print "message published"
time.sleep(1)
pass
def cleanup():
print "Ending and cleaning up"
mqttc.disconnect()<commit_msg>Handle sensors / types that aren't in config file<commit_after>
|
#!/usr/bin/env python
import mosquitto
import os
import time
import json
import random
import yaml
# Load config
stream = open("config.yml", 'r')
config = yaml.load(stream)
endpoint = os.environ['MQTT_ENDPOINT']
mypid = os.getpid()
client_uniq = "sensor_mqtt_"+str(mypid)
mqttc = mosquitto.Mosquitto(client_uniq)
mqttc.username_pw_set(config['mqtt']['username'])
mqttc.connect(config['mqtt']['broker'], config['mqtt']['port'], 60, True)
def publish(sensor, reading_type, reading):
try:
sensor_config = config['sensors'][sensor][reading_type]
except KeyError:
print "unknown sensor or reading type: " + sensor + " " + reading_type
else:
if sensor_config:
data = {
'version':'1.0.0',
'datastreams': [
{
"id" : sensor_config['publish_id'],
"datapoints": [
{
"at": time.ctime(),
"value": reading
}
]
}
]
}
mqttc.publish(sensor_config['mqtt_endpoint'], json.dumps(data))
print "message published: " + sensor + " " + reading_type
while mqttc.loop() == 0:
publish("R1", "RIVR", random.randrange(0,255))
time.sleep(1)
pass
def cleanup():
print "Ending and cleaning up"
mqttc.disconnect()
|
#!/usr/bin/env python
import mosquitto
import os
import time
import json
import random
import yaml
# Load config
stream = open("config.yml", 'r')
config = yaml.load(stream)
endpoint = os.environ['MQTT_ENDPOINT']
mypid = os.getpid()
client_uniq = "sensor_mqtt_"+str(mypid)
mqttc = mosquitto.Mosquitto(client_uniq)
mqttc.username_pw_set(config['mqtt']['username'])
mqttc.connect(config['mqtt']['broker'], config['mqtt']['port'], 60, True)
def publish(sensor, reading_type, reading):
sensor_config = config['sensors'][sensor][reading_type]
if sensor_config:
data = {
'version':'1.0.0',
'datastreams': [
{
"id" : sensor_config['publish_id'],
"datapoints": [
{
"at": time.ctime(),
"value": reading
}
]
}
]
}
mqttc.publish(sensor_config['mqtt_endpoint'], json.dumps(data))
while mqttc.loop() == 0:
publish("R1", "RIVR", random.randrange(0,255))
print "message published"
time.sleep(1)
pass
def cleanup():
print "Ending and cleaning up"
mqttc.disconnect()Handle sensors / types that aren't in config file#!/usr/bin/env python
import mosquitto
import os
import time
import json
import random
import yaml
# Load config
stream = open("config.yml", 'r')
config = yaml.load(stream)
endpoint = os.environ['MQTT_ENDPOINT']
mypid = os.getpid()
client_uniq = "sensor_mqtt_"+str(mypid)
mqttc = mosquitto.Mosquitto(client_uniq)
mqttc.username_pw_set(config['mqtt']['username'])
mqttc.connect(config['mqtt']['broker'], config['mqtt']['port'], 60, True)
def publish(sensor, reading_type, reading):
try:
sensor_config = config['sensors'][sensor][reading_type]
except KeyError:
print "unknown sensor or reading type: " + sensor + " " + reading_type
else:
if sensor_config:
data = {
'version':'1.0.0',
'datastreams': [
{
"id" : sensor_config['publish_id'],
"datapoints": [
{
"at": time.ctime(),
"value": reading
}
]
}
]
}
mqttc.publish(sensor_config['mqtt_endpoint'], json.dumps(data))
print "message published: " + sensor + " " + reading_type
while mqttc.loop() == 0:
publish("R1", "RIVR", random.randrange(0,255))
time.sleep(1)
pass
def cleanup():
print "Ending and cleaning up"
mqttc.disconnect()
|
<commit_before>#!/usr/bin/env python
import mosquitto
import os
import time
import json
import random
import yaml
# Load config
stream = open("config.yml", 'r')
config = yaml.load(stream)
endpoint = os.environ['MQTT_ENDPOINT']
mypid = os.getpid()
client_uniq = "sensor_mqtt_"+str(mypid)
mqttc = mosquitto.Mosquitto(client_uniq)
mqttc.username_pw_set(config['mqtt']['username'])
mqttc.connect(config['mqtt']['broker'], config['mqtt']['port'], 60, True)
def publish(sensor, reading_type, reading):
sensor_config = config['sensors'][sensor][reading_type]
if sensor_config:
data = {
'version':'1.0.0',
'datastreams': [
{
"id" : sensor_config['publish_id'],
"datapoints": [
{
"at": time.ctime(),
"value": reading
}
]
}
]
}
mqttc.publish(sensor_config['mqtt_endpoint'], json.dumps(data))
while mqttc.loop() == 0:
publish("R1", "RIVR", random.randrange(0,255))
print "message published"
time.sleep(1)
pass
def cleanup():
print "Ending and cleaning up"
mqttc.disconnect()<commit_msg>Handle sensors / types that aren't in config file<commit_after>#!/usr/bin/env python
import mosquitto
import os
import time
import json
import random
import yaml
# Load config
stream = open("config.yml", 'r')
config = yaml.load(stream)
endpoint = os.environ['MQTT_ENDPOINT']
mypid = os.getpid()
client_uniq = "sensor_mqtt_"+str(mypid)
mqttc = mosquitto.Mosquitto(client_uniq)
mqttc.username_pw_set(config['mqtt']['username'])
mqttc.connect(config['mqtt']['broker'], config['mqtt']['port'], 60, True)
def publish(sensor, reading_type, reading):
try:
sensor_config = config['sensors'][sensor][reading_type]
except KeyError:
print "unknown sensor or reading type: " + sensor + " " + reading_type
else:
if sensor_config:
data = {
'version':'1.0.0',
'datastreams': [
{
"id" : sensor_config['publish_id'],
"datapoints": [
{
"at": time.ctime(),
"value": reading
}
]
}
]
}
mqttc.publish(sensor_config['mqtt_endpoint'], json.dumps(data))
print "message published: " + sensor + " " + reading_type
while mqttc.loop() == 0:
publish("R1", "RIVR", random.randrange(0,255))
time.sleep(1)
pass
def cleanup():
print "Ending and cleaning up"
mqttc.disconnect()
|
727702d6d5cf8d43ac9c4f8011ff2b6d78cfbe4c
|
account_constraints/model/account_move.py
|
account_constraints/model/account_move.py
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Author Joel Grand-Guillaume. Copyright 2012 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, api
class AccountMove(models.Model):
_inherit = "account.move"
@api.multi
def _check_fiscal_year(self):
for move in self:
if move.journal_id.allow_date_fy:
date_start = move.period_id.fiscalyear_id.date_start
date_stop = move.period_id.fiscalyear_id.date_stop
if not date_start <= move.date <= date_stop:
return False
return True
_constraints = [
(_check_fiscal_year,
'You cannot create entries with date not in the '
'fiscal year of the chosen period',
['line_id']),
]
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Author Joel Grand-Guillaume. Copyright 2012 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, api, exceptions, _
class AccountMove(models.Model):
_inherit = "account.move"
@api.constrains('journal_id', 'period_id', 'date')
def _check_fiscal_year(self):
for move in self:
if move.journal_id.allow_date_fy:
date_start = move.period_id.fiscalyear_id.date_start
date_stop = move.period_id.fiscalyear_id.date_stop
if not date_start <= move.date <= date_stop:
raise exceptions.Warning(
_('You cannot create entries with date not in the '
'fiscal year of the chosen period'))
return True
|
Use constraint decorator on account_constraints+
|
[IMP] Use constraint decorator on account_constraints+
|
Python
|
agpl-3.0
|
pedrobaeza/account-financial-tools,VitalPet/account-financial-tools,amoya-dx/account-financial-tools,VitalPet/account-financial-tools,credativUK/account-financial-tools,lepistone/account-financial-tools,damdam-s/account-financial-tools,acsone/account-financial-tools,Pexego/account-financial-tools,raycarnes/account-financial-tools,cysnake4713/account-financial-tools,charbeljc/account-financial-tools,raycarnes/account-financial-tools,taktik/account-financial-tools,OpenPymeMx/account-financial-tools,akretion/account-financial-tools,pedrobaeza/account-financial-tools,adhoc-dev/oca-account-financial-tools,dvitme/account-financial-tools,adhoc-dev/oca-account-financial-tools,luc-demeyer/account-financial-tools,ClearCorp-dev/account-financial-tools,Endika/account-financial-tools,nagyv/account-financial-tools,andrius-preimantas/account-financial-tools,VitalPet/account-financial-tools,Nowheresly/account-financial-tools,andhit-r/account-financial-tools,factorlibre/account-financial-tools,abstract-open-solutions/account-financial-tools,iDTLabssl/account-financial-tools,credativUK/account-financial-tools,Antiun/account-financial-tools,OpenPymeMx/account-financial-tools,yelizariev/account-financial-tools,alhashash/account-financial-tools,diagramsoftware/account-financial-tools,abstract-open-solutions/account-financial-tools,syci/account-financial-tools,Endika/account-financial-tools,Pexego/account-financial-tools,charbeljc/account-financial-tools,open-synergy/account-financial-tools,damdam-s/account-financial-tools,amoya-dx/account-financial-tools,bringsvor/account-financial-tools,Domatix/account-financial-tools,xpansa/account-financial-tools,bringsvor/account-financial-tools,open-synergy/account-financial-tools,nagyv/account-financial-tools,OpenPymeMx/account-financial-tools,acsone/account-financial-tools,syci/account-financial-tools,yelizariev/account-financial-tools,lepistone/account-financial-tools,andrius-preimantas/account-financial-tools,Nowheresly/account-financial-tools,Domatix/account-financial-tools,andhit-r/account-financial-tools,akretion/account-financial-tools,open-synergy/account-financial-tools,DarkoNikolovski/account-financial-tools,cysnake4713/account-financial-tools,dvitme/account-financial-tools,Antiun/account-financial-tools,factorlibre/account-financial-tools,luc-demeyer/account-financial-tools,iDTLabssl/account-financial-tools,acsone/account-financial-tools,DarkoNikolovski/account-financial-tools,Domatix/account-financial-tools,taktik/account-financial-tools,diagramsoftware/account-financial-tools,xpansa/account-financial-tools,alhashash/account-financial-tools,ClearCorp-dev/account-financial-tools
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Author Joel Grand-Guillaume. Copyright 2012 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, api
class AccountMove(models.Model):
_inherit = "account.move"
@api.multi
def _check_fiscal_year(self):
for move in self:
if move.journal_id.allow_date_fy:
date_start = move.period_id.fiscalyear_id.date_start
date_stop = move.period_id.fiscalyear_id.date_stop
if not date_start <= move.date <= date_stop:
return False
return True
_constraints = [
(_check_fiscal_year,
'You cannot create entries with date not in the '
'fiscal year of the chosen period',
['line_id']),
]
[IMP] Use constraint decorator on account_constraints+
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Author Joel Grand-Guillaume. Copyright 2012 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, api, exceptions, _
class AccountMove(models.Model):
_inherit = "account.move"
@api.constrains('journal_id', 'period_id', 'date')
def _check_fiscal_year(self):
for move in self:
if move.journal_id.allow_date_fy:
date_start = move.period_id.fiscalyear_id.date_start
date_stop = move.period_id.fiscalyear_id.date_stop
if not date_start <= move.date <= date_stop:
raise exceptions.Warning(
_('You cannot create entries with date not in the '
'fiscal year of the chosen period'))
return True
|
<commit_before># -*- coding: utf-8 -*-
##############################################################################
#
# Author Joel Grand-Guillaume. Copyright 2012 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, api
class AccountMove(models.Model):
_inherit = "account.move"
@api.multi
def _check_fiscal_year(self):
for move in self:
if move.journal_id.allow_date_fy:
date_start = move.period_id.fiscalyear_id.date_start
date_stop = move.period_id.fiscalyear_id.date_stop
if not date_start <= move.date <= date_stop:
return False
return True
_constraints = [
(_check_fiscal_year,
'You cannot create entries with date not in the '
'fiscal year of the chosen period',
['line_id']),
]
<commit_msg>[IMP] Use constraint decorator on account_constraints+<commit_after>
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Author Joel Grand-Guillaume. Copyright 2012 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, api, exceptions, _
class AccountMove(models.Model):
_inherit = "account.move"
@api.constrains('journal_id', 'period_id', 'date')
def _check_fiscal_year(self):
for move in self:
if move.journal_id.allow_date_fy:
date_start = move.period_id.fiscalyear_id.date_start
date_stop = move.period_id.fiscalyear_id.date_stop
if not date_start <= move.date <= date_stop:
raise exceptions.Warning(
_('You cannot create entries with date not in the '
'fiscal year of the chosen period'))
return True
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Author Joel Grand-Guillaume. Copyright 2012 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, api
class AccountMove(models.Model):
_inherit = "account.move"
@api.multi
def _check_fiscal_year(self):
for move in self:
if move.journal_id.allow_date_fy:
date_start = move.period_id.fiscalyear_id.date_start
date_stop = move.period_id.fiscalyear_id.date_stop
if not date_start <= move.date <= date_stop:
return False
return True
_constraints = [
(_check_fiscal_year,
'You cannot create entries with date not in the '
'fiscal year of the chosen period',
['line_id']),
]
[IMP] Use constraint decorator on account_constraints+# -*- coding: utf-8 -*-
##############################################################################
#
# Author Joel Grand-Guillaume. Copyright 2012 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, api, exceptions, _
class AccountMove(models.Model):
_inherit = "account.move"
@api.constrains('journal_id', 'period_id', 'date')
def _check_fiscal_year(self):
for move in self:
if move.journal_id.allow_date_fy:
date_start = move.period_id.fiscalyear_id.date_start
date_stop = move.period_id.fiscalyear_id.date_stop
if not date_start <= move.date <= date_stop:
raise exceptions.Warning(
_('You cannot create entries with date not in the '
'fiscal year of the chosen period'))
return True
|
<commit_before># -*- coding: utf-8 -*-
##############################################################################
#
# Author Joel Grand-Guillaume. Copyright 2012 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, api
class AccountMove(models.Model):
_inherit = "account.move"
@api.multi
def _check_fiscal_year(self):
for move in self:
if move.journal_id.allow_date_fy:
date_start = move.period_id.fiscalyear_id.date_start
date_stop = move.period_id.fiscalyear_id.date_stop
if not date_start <= move.date <= date_stop:
return False
return True
_constraints = [
(_check_fiscal_year,
'You cannot create entries with date not in the '
'fiscal year of the chosen period',
['line_id']),
]
<commit_msg>[IMP] Use constraint decorator on account_constraints+<commit_after># -*- coding: utf-8 -*-
##############################################################################
#
# Author Joel Grand-Guillaume. Copyright 2012 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, api, exceptions, _
class AccountMove(models.Model):
_inherit = "account.move"
@api.constrains('journal_id', 'period_id', 'date')
def _check_fiscal_year(self):
for move in self:
if move.journal_id.allow_date_fy:
date_start = move.period_id.fiscalyear_id.date_start
date_stop = move.period_id.fiscalyear_id.date_stop
if not date_start <= move.date <= date_stop:
raise exceptions.Warning(
_('You cannot create entries with date not in the '
'fiscal year of the chosen period'))
return True
|
5a0659ed9e4f8085009c04ade4f66cbd5d3c94bd
|
openedx/core/djangoapps/user_api/accounts/permissions.py
|
openedx/core/djangoapps/user_api/accounts/permissions.py
|
"""
Permissions classes for User accounts API views.
"""
from __future__ import unicode_literals
from rest_framework import permissions
USERNAME_REPLACEMENT_GROUP = "username_replacement_admin"
class CanDeactivateUser(permissions.BasePermission):
"""
Grants access to AccountDeactivationView if the requesting user is a superuser
or has the explicit permission to deactivate a User account.
"""
def has_permission(self, request, view):
return request.user.has_perm('student.can_deactivate_users')
class CanRetireUser(permissions.BasePermission):
"""
Grants access to the various retirement API endpoints if the requesting user is
a superuser, the RETIREMENT_SERVICE_USERNAME, or has the explicit permission to
retire a User account.
"""
def has_permission(self, request, view):
return request.user.has_perm('accounts.can_retire_user')
class CanReplaceUsername(permissions.BasePermission):
"""
Grants access to the Username Replacement API for anyone in the group,
including the service user.
"""
def has_permission(self, request, view):
return request.user.groups.filter(name=USERNAME_REPLACEMENT_GROUP).exists()
|
"""
Permissions classes for User accounts API views.
"""
from __future__ import unicode_literals
from django.conf import settings
from rest_framework import permissions
USERNAME_REPLACEMENT_GROUP = "username_replacement_admin"
class CanDeactivateUser(permissions.BasePermission):
"""
Grants access to AccountDeactivationView if the requesting user is a superuser
or has the explicit permission to deactivate a User account.
"""
def has_permission(self, request, view):
return request.user.has_perm('student.can_deactivate_users')
class CanRetireUser(permissions.BasePermission):
"""
Grants access to the various retirement API endpoints if the requesting user is
a superuser, the RETIREMENT_SERVICE_USERNAME, or has the explicit permission to
retire a User account.
"""
def has_permission(self, request, view):
return request.user.has_perm('accounts.can_retire_user')
class CanReplaceUsername(permissions.BasePermission):
"""
Grants access to the Username Replacement API for anyone in the group,
including the service user.
"""
def has_permission(self, request, view):
return request.user.username == getattr(settings, "USERNAME_REPLACEMENT_WORKER")
|
Replace group with static username
|
Replace group with static username
|
Python
|
agpl-3.0
|
appsembler/edx-platform,stvstnfrd/edx-platform,msegado/edx-platform,edx-solutions/edx-platform,eduNEXT/edx-platform,eduNEXT/edunext-platform,eduNEXT/edx-platform,mitocw/edx-platform,msegado/edx-platform,eduNEXT/edx-platform,ESOedX/edx-platform,cpennington/edx-platform,stvstnfrd/edx-platform,jolyonb/edx-platform,eduNEXT/edunext-platform,cpennington/edx-platform,appsembler/edx-platform,ESOedX/edx-platform,msegado/edx-platform,msegado/edx-platform,stvstnfrd/edx-platform,eduNEXT/edunext-platform,angelapper/edx-platform,EDUlib/edx-platform,arbrandes/edx-platform,arbrandes/edx-platform,mitocw/edx-platform,edx-solutions/edx-platform,eduNEXT/edx-platform,EDUlib/edx-platform,ESOedX/edx-platform,angelapper/edx-platform,EDUlib/edx-platform,edx-solutions/edx-platform,edx-solutions/edx-platform,edx/edx-platform,edx/edx-platform,jolyonb/edx-platform,stvstnfrd/edx-platform,edx/edx-platform,appsembler/edx-platform,arbrandes/edx-platform,eduNEXT/edunext-platform,mitocw/edx-platform,arbrandes/edx-platform,jolyonb/edx-platform,angelapper/edx-platform,jolyonb/edx-platform,angelapper/edx-platform,ESOedX/edx-platform,msegado/edx-platform,mitocw/edx-platform,EDUlib/edx-platform,cpennington/edx-platform,edx/edx-platform,cpennington/edx-platform,appsembler/edx-platform
|
"""
Permissions classes for User accounts API views.
"""
from __future__ import unicode_literals
from rest_framework import permissions
USERNAME_REPLACEMENT_GROUP = "username_replacement_admin"
class CanDeactivateUser(permissions.BasePermission):
"""
Grants access to AccountDeactivationView if the requesting user is a superuser
or has the explicit permission to deactivate a User account.
"""
def has_permission(self, request, view):
return request.user.has_perm('student.can_deactivate_users')
class CanRetireUser(permissions.BasePermission):
"""
Grants access to the various retirement API endpoints if the requesting user is
a superuser, the RETIREMENT_SERVICE_USERNAME, or has the explicit permission to
retire a User account.
"""
def has_permission(self, request, view):
return request.user.has_perm('accounts.can_retire_user')
class CanReplaceUsername(permissions.BasePermission):
"""
Grants access to the Username Replacement API for anyone in the group,
including the service user.
"""
def has_permission(self, request, view):
return request.user.groups.filter(name=USERNAME_REPLACEMENT_GROUP).exists()
Replace group with static username
|
"""
Permissions classes for User accounts API views.
"""
from __future__ import unicode_literals
from django.conf import settings
from rest_framework import permissions
USERNAME_REPLACEMENT_GROUP = "username_replacement_admin"
class CanDeactivateUser(permissions.BasePermission):
"""
Grants access to AccountDeactivationView if the requesting user is a superuser
or has the explicit permission to deactivate a User account.
"""
def has_permission(self, request, view):
return request.user.has_perm('student.can_deactivate_users')
class CanRetireUser(permissions.BasePermission):
"""
Grants access to the various retirement API endpoints if the requesting user is
a superuser, the RETIREMENT_SERVICE_USERNAME, or has the explicit permission to
retire a User account.
"""
def has_permission(self, request, view):
return request.user.has_perm('accounts.can_retire_user')
class CanReplaceUsername(permissions.BasePermission):
"""
Grants access to the Username Replacement API for anyone in the group,
including the service user.
"""
def has_permission(self, request, view):
return request.user.username == getattr(settings, "USERNAME_REPLACEMENT_WORKER")
|
<commit_before>"""
Permissions classes for User accounts API views.
"""
from __future__ import unicode_literals
from rest_framework import permissions
USERNAME_REPLACEMENT_GROUP = "username_replacement_admin"
class CanDeactivateUser(permissions.BasePermission):
"""
Grants access to AccountDeactivationView if the requesting user is a superuser
or has the explicit permission to deactivate a User account.
"""
def has_permission(self, request, view):
return request.user.has_perm('student.can_deactivate_users')
class CanRetireUser(permissions.BasePermission):
"""
Grants access to the various retirement API endpoints if the requesting user is
a superuser, the RETIREMENT_SERVICE_USERNAME, or has the explicit permission to
retire a User account.
"""
def has_permission(self, request, view):
return request.user.has_perm('accounts.can_retire_user')
class CanReplaceUsername(permissions.BasePermission):
"""
Grants access to the Username Replacement API for anyone in the group,
including the service user.
"""
def has_permission(self, request, view):
return request.user.groups.filter(name=USERNAME_REPLACEMENT_GROUP).exists()
<commit_msg>Replace group with static username<commit_after>
|
"""
Permissions classes for User accounts API views.
"""
from __future__ import unicode_literals
from django.conf import settings
from rest_framework import permissions
USERNAME_REPLACEMENT_GROUP = "username_replacement_admin"
class CanDeactivateUser(permissions.BasePermission):
"""
Grants access to AccountDeactivationView if the requesting user is a superuser
or has the explicit permission to deactivate a User account.
"""
def has_permission(self, request, view):
return request.user.has_perm('student.can_deactivate_users')
class CanRetireUser(permissions.BasePermission):
"""
Grants access to the various retirement API endpoints if the requesting user is
a superuser, the RETIREMENT_SERVICE_USERNAME, or has the explicit permission to
retire a User account.
"""
def has_permission(self, request, view):
return request.user.has_perm('accounts.can_retire_user')
class CanReplaceUsername(permissions.BasePermission):
"""
Grants access to the Username Replacement API for anyone in the group,
including the service user.
"""
def has_permission(self, request, view):
return request.user.username == getattr(settings, "USERNAME_REPLACEMENT_WORKER")
|
"""
Permissions classes for User accounts API views.
"""
from __future__ import unicode_literals
from rest_framework import permissions
USERNAME_REPLACEMENT_GROUP = "username_replacement_admin"
class CanDeactivateUser(permissions.BasePermission):
"""
Grants access to AccountDeactivationView if the requesting user is a superuser
or has the explicit permission to deactivate a User account.
"""
def has_permission(self, request, view):
return request.user.has_perm('student.can_deactivate_users')
class CanRetireUser(permissions.BasePermission):
"""
Grants access to the various retirement API endpoints if the requesting user is
a superuser, the RETIREMENT_SERVICE_USERNAME, or has the explicit permission to
retire a User account.
"""
def has_permission(self, request, view):
return request.user.has_perm('accounts.can_retire_user')
class CanReplaceUsername(permissions.BasePermission):
"""
Grants access to the Username Replacement API for anyone in the group,
including the service user.
"""
def has_permission(self, request, view):
return request.user.groups.filter(name=USERNAME_REPLACEMENT_GROUP).exists()
Replace group with static username"""
Permissions classes for User accounts API views.
"""
from __future__ import unicode_literals
from django.conf import settings
from rest_framework import permissions
USERNAME_REPLACEMENT_GROUP = "username_replacement_admin"
class CanDeactivateUser(permissions.BasePermission):
"""
Grants access to AccountDeactivationView if the requesting user is a superuser
or has the explicit permission to deactivate a User account.
"""
def has_permission(self, request, view):
return request.user.has_perm('student.can_deactivate_users')
class CanRetireUser(permissions.BasePermission):
"""
Grants access to the various retirement API endpoints if the requesting user is
a superuser, the RETIREMENT_SERVICE_USERNAME, or has the explicit permission to
retire a User account.
"""
def has_permission(self, request, view):
return request.user.has_perm('accounts.can_retire_user')
class CanReplaceUsername(permissions.BasePermission):
"""
Grants access to the Username Replacement API for anyone in the group,
including the service user.
"""
def has_permission(self, request, view):
return request.user.username == getattr(settings, "USERNAME_REPLACEMENT_WORKER")
|
<commit_before>"""
Permissions classes for User accounts API views.
"""
from __future__ import unicode_literals
from rest_framework import permissions
USERNAME_REPLACEMENT_GROUP = "username_replacement_admin"
class CanDeactivateUser(permissions.BasePermission):
"""
Grants access to AccountDeactivationView if the requesting user is a superuser
or has the explicit permission to deactivate a User account.
"""
def has_permission(self, request, view):
return request.user.has_perm('student.can_deactivate_users')
class CanRetireUser(permissions.BasePermission):
"""
Grants access to the various retirement API endpoints if the requesting user is
a superuser, the RETIREMENT_SERVICE_USERNAME, or has the explicit permission to
retire a User account.
"""
def has_permission(self, request, view):
return request.user.has_perm('accounts.can_retire_user')
class CanReplaceUsername(permissions.BasePermission):
"""
Grants access to the Username Replacement API for anyone in the group,
including the service user.
"""
def has_permission(self, request, view):
return request.user.groups.filter(name=USERNAME_REPLACEMENT_GROUP).exists()
<commit_msg>Replace group with static username<commit_after>"""
Permissions classes for User accounts API views.
"""
from __future__ import unicode_literals
from django.conf import settings
from rest_framework import permissions
USERNAME_REPLACEMENT_GROUP = "username_replacement_admin"
class CanDeactivateUser(permissions.BasePermission):
"""
Grants access to AccountDeactivationView if the requesting user is a superuser
or has the explicit permission to deactivate a User account.
"""
def has_permission(self, request, view):
return request.user.has_perm('student.can_deactivate_users')
class CanRetireUser(permissions.BasePermission):
"""
Grants access to the various retirement API endpoints if the requesting user is
a superuser, the RETIREMENT_SERVICE_USERNAME, or has the explicit permission to
retire a User account.
"""
def has_permission(self, request, view):
return request.user.has_perm('accounts.can_retire_user')
class CanReplaceUsername(permissions.BasePermission):
"""
Grants access to the Username Replacement API for anyone in the group,
including the service user.
"""
def has_permission(self, request, view):
return request.user.username == getattr(settings, "USERNAME_REPLACEMENT_WORKER")
|
e3185bd059becaf83aaeed9951f695db4ac32511
|
schema/remind.py
|
schema/remind.py
|
from asyncqlio.orm.schema.column import Column
from asyncqlio.orm.schema.table import table_base
from asyncqlio.orm.schema.types import BigInt, Serial, Text, Timestamp
Table = table_base()
class Reminder(Table): # type: ignore
id = Column(Serial, primary_key=True)
guild_id = Column(BigInt)
channel_id = Column(BigInt)
message_id = Column(BigInt)
user_id = Column(BigInt)
time = Column(Timestamp)
topic = Column(Text)
|
from asyncqlio.orm.schema.column import Column
from asyncqlio.orm.schema.table import table_base
from asyncqlio.orm.schema.types import BigInt, Serial, Text, Timestamp
Table = table_base()
class Reminder(Table): # type: ignore
id = Column(Serial, primary_key=True)
guild_id = Column(BigInt)
channel_id = Column(BigInt)
message_id = Column(BigInt)
user_id = Column(BigInt)
time = Column(Timestamp)
topic = Column(Text, nullable=True)
|
Allow topic to be nullable in schema
|
Allow topic to be nullable in schema
|
Python
|
mit
|
BeatButton/beattie-bot,BeatButton/beattie
|
from asyncqlio.orm.schema.column import Column
from asyncqlio.orm.schema.table import table_base
from asyncqlio.orm.schema.types import BigInt, Serial, Text, Timestamp
Table = table_base()
class Reminder(Table): # type: ignore
id = Column(Serial, primary_key=True)
guild_id = Column(BigInt)
channel_id = Column(BigInt)
message_id = Column(BigInt)
user_id = Column(BigInt)
time = Column(Timestamp)
topic = Column(Text)
Allow topic to be nullable in schema
|
from asyncqlio.orm.schema.column import Column
from asyncqlio.orm.schema.table import table_base
from asyncqlio.orm.schema.types import BigInt, Serial, Text, Timestamp
Table = table_base()
class Reminder(Table): # type: ignore
id = Column(Serial, primary_key=True)
guild_id = Column(BigInt)
channel_id = Column(BigInt)
message_id = Column(BigInt)
user_id = Column(BigInt)
time = Column(Timestamp)
topic = Column(Text, nullable=True)
|
<commit_before>from asyncqlio.orm.schema.column import Column
from asyncqlio.orm.schema.table import table_base
from asyncqlio.orm.schema.types import BigInt, Serial, Text, Timestamp
Table = table_base()
class Reminder(Table): # type: ignore
id = Column(Serial, primary_key=True)
guild_id = Column(BigInt)
channel_id = Column(BigInt)
message_id = Column(BigInt)
user_id = Column(BigInt)
time = Column(Timestamp)
topic = Column(Text)
<commit_msg>Allow topic to be nullable in schema<commit_after>
|
from asyncqlio.orm.schema.column import Column
from asyncqlio.orm.schema.table import table_base
from asyncqlio.orm.schema.types import BigInt, Serial, Text, Timestamp
Table = table_base()
class Reminder(Table): # type: ignore
id = Column(Serial, primary_key=True)
guild_id = Column(BigInt)
channel_id = Column(BigInt)
message_id = Column(BigInt)
user_id = Column(BigInt)
time = Column(Timestamp)
topic = Column(Text, nullable=True)
|
from asyncqlio.orm.schema.column import Column
from asyncqlio.orm.schema.table import table_base
from asyncqlio.orm.schema.types import BigInt, Serial, Text, Timestamp
Table = table_base()
class Reminder(Table): # type: ignore
id = Column(Serial, primary_key=True)
guild_id = Column(BigInt)
channel_id = Column(BigInt)
message_id = Column(BigInt)
user_id = Column(BigInt)
time = Column(Timestamp)
topic = Column(Text)
Allow topic to be nullable in schemafrom asyncqlio.orm.schema.column import Column
from asyncqlio.orm.schema.table import table_base
from asyncqlio.orm.schema.types import BigInt, Serial, Text, Timestamp
Table = table_base()
class Reminder(Table): # type: ignore
id = Column(Serial, primary_key=True)
guild_id = Column(BigInt)
channel_id = Column(BigInt)
message_id = Column(BigInt)
user_id = Column(BigInt)
time = Column(Timestamp)
topic = Column(Text, nullable=True)
|
<commit_before>from asyncqlio.orm.schema.column import Column
from asyncqlio.orm.schema.table import table_base
from asyncqlio.orm.schema.types import BigInt, Serial, Text, Timestamp
Table = table_base()
class Reminder(Table): # type: ignore
id = Column(Serial, primary_key=True)
guild_id = Column(BigInt)
channel_id = Column(BigInt)
message_id = Column(BigInt)
user_id = Column(BigInt)
time = Column(Timestamp)
topic = Column(Text)
<commit_msg>Allow topic to be nullable in schema<commit_after>from asyncqlio.orm.schema.column import Column
from asyncqlio.orm.schema.table import table_base
from asyncqlio.orm.schema.types import BigInt, Serial, Text, Timestamp
Table = table_base()
class Reminder(Table): # type: ignore
id = Column(Serial, primary_key=True)
guild_id = Column(BigInt)
channel_id = Column(BigInt)
message_id = Column(BigInt)
user_id = Column(BigInt)
time = Column(Timestamp)
topic = Column(Text, nullable=True)
|
6cf8bad4faa15bcbc149db678e2ec232ce82b72a
|
utils/efushell/SocketDriver.py
|
utils/efushell/SocketDriver.py
|
import string
import socket
import sys
import time
import threading
class SimpleSocket:
def __init__(self, hostname="localhost", port=8888, timeout=2):
self.access_semaphor = threading.Semaphore(1)
try:
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
except socket.error:
sys.stderr.write(
"socket() [Socket connection error] Cannot connect to %s, error: %s\n" % (hostname, socket.error))
sys.exit(1)
self.sock.settimeout(timeout)
try:
self.sock.connect((hostname, port))
except socket.error:
sys.stderr.write("connect() [Socket connection error] Cannot connect to %s:%d, error: %s\n" % (
hostname, port, socket.error))
sys.exit(2)
def SendCommand(self, cmd):
self.access_semaphor.acquire()
self.sock.send('{}\n'.format(cmd).encode('utf-8'))
self.access_semaphor.release()
def Ask(self, cmd):
self.access_semaphor.acquire()
self.sock.send('{}\n'.format(cmd).encode('utf-8'))
reply = self.sock.recv(2048).strip(b'\n')
self.access_semaphor.release()
return reply
|
import string
import socket
import sys
import time
import threading
class SimpleSocket:
def __init__(self, hostname="localhost", port=8888, timeout=2):
self.access_semaphor = threading.Semaphore(1)
try:
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
except socket.error:
sys.stderr.write(
"socket() [Socket connection error] Cannot connect to %s, error: %s\n" % (hostname, socket.error))
sys.exit(1)
self.sock.settimeout(timeout)
try:
self.sock.connect((hostname, port))
except socket.error:
sys.stderr.write("connect() [Socket connection error] Cannot connect to %s:%d, error: %s\n" % (
hostname, port, socket.error))
sys.exit(2)
def SendCommand(self, cmd):
self.access_semaphor.acquire()
cmd += '\n'
self.sock.send(cmd.encode('utf-8'))
self.access_semaphor.release()
def Ask(self, cmd):
self.access_semaphor.acquire()
cmd += '\n'
self.sock.send(cmd.encode('utf-8'))
reply = self.sock.recv(2048).strip(b'\n')
self.access_semaphor.release()
return reply
|
Move string formatting onto two lines for readability
|
Move string formatting onto two lines for readability
|
Python
|
bsd-2-clause
|
ess-dmsc/event-formation-unit,ess-dmsc/event-formation-unit,ess-dmsc/event-formation-unit,ess-dmsc/event-formation-unit
|
import string
import socket
import sys
import time
import threading
class SimpleSocket:
def __init__(self, hostname="localhost", port=8888, timeout=2):
self.access_semaphor = threading.Semaphore(1)
try:
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
except socket.error:
sys.stderr.write(
"socket() [Socket connection error] Cannot connect to %s, error: %s\n" % (hostname, socket.error))
sys.exit(1)
self.sock.settimeout(timeout)
try:
self.sock.connect((hostname, port))
except socket.error:
sys.stderr.write("connect() [Socket connection error] Cannot connect to %s:%d, error: %s\n" % (
hostname, port, socket.error))
sys.exit(2)
def SendCommand(self, cmd):
self.access_semaphor.acquire()
self.sock.send('{}\n'.format(cmd).encode('utf-8'))
self.access_semaphor.release()
def Ask(self, cmd):
self.access_semaphor.acquire()
self.sock.send('{}\n'.format(cmd).encode('utf-8'))
reply = self.sock.recv(2048).strip(b'\n')
self.access_semaphor.release()
return reply
Move string formatting onto two lines for readability
|
import string
import socket
import sys
import time
import threading
class SimpleSocket:
def __init__(self, hostname="localhost", port=8888, timeout=2):
self.access_semaphor = threading.Semaphore(1)
try:
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
except socket.error:
sys.stderr.write(
"socket() [Socket connection error] Cannot connect to %s, error: %s\n" % (hostname, socket.error))
sys.exit(1)
self.sock.settimeout(timeout)
try:
self.sock.connect((hostname, port))
except socket.error:
sys.stderr.write("connect() [Socket connection error] Cannot connect to %s:%d, error: %s\n" % (
hostname, port, socket.error))
sys.exit(2)
def SendCommand(self, cmd):
self.access_semaphor.acquire()
cmd += '\n'
self.sock.send(cmd.encode('utf-8'))
self.access_semaphor.release()
def Ask(self, cmd):
self.access_semaphor.acquire()
cmd += '\n'
self.sock.send(cmd.encode('utf-8'))
reply = self.sock.recv(2048).strip(b'\n')
self.access_semaphor.release()
return reply
|
<commit_before>import string
import socket
import sys
import time
import threading
class SimpleSocket:
def __init__(self, hostname="localhost", port=8888, timeout=2):
self.access_semaphor = threading.Semaphore(1)
try:
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
except socket.error:
sys.stderr.write(
"socket() [Socket connection error] Cannot connect to %s, error: %s\n" % (hostname, socket.error))
sys.exit(1)
self.sock.settimeout(timeout)
try:
self.sock.connect((hostname, port))
except socket.error:
sys.stderr.write("connect() [Socket connection error] Cannot connect to %s:%d, error: %s\n" % (
hostname, port, socket.error))
sys.exit(2)
def SendCommand(self, cmd):
self.access_semaphor.acquire()
self.sock.send('{}\n'.format(cmd).encode('utf-8'))
self.access_semaphor.release()
def Ask(self, cmd):
self.access_semaphor.acquire()
self.sock.send('{}\n'.format(cmd).encode('utf-8'))
reply = self.sock.recv(2048).strip(b'\n')
self.access_semaphor.release()
return reply
<commit_msg>Move string formatting onto two lines for readability<commit_after>
|
import string
import socket
import sys
import time
import threading
class SimpleSocket:
def __init__(self, hostname="localhost", port=8888, timeout=2):
self.access_semaphor = threading.Semaphore(1)
try:
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
except socket.error:
sys.stderr.write(
"socket() [Socket connection error] Cannot connect to %s, error: %s\n" % (hostname, socket.error))
sys.exit(1)
self.sock.settimeout(timeout)
try:
self.sock.connect((hostname, port))
except socket.error:
sys.stderr.write("connect() [Socket connection error] Cannot connect to %s:%d, error: %s\n" % (
hostname, port, socket.error))
sys.exit(2)
def SendCommand(self, cmd):
self.access_semaphor.acquire()
cmd += '\n'
self.sock.send(cmd.encode('utf-8'))
self.access_semaphor.release()
def Ask(self, cmd):
self.access_semaphor.acquire()
cmd += '\n'
self.sock.send(cmd.encode('utf-8'))
reply = self.sock.recv(2048).strip(b'\n')
self.access_semaphor.release()
return reply
|
import string
import socket
import sys
import time
import threading
class SimpleSocket:
def __init__(self, hostname="localhost", port=8888, timeout=2):
self.access_semaphor = threading.Semaphore(1)
try:
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
except socket.error:
sys.stderr.write(
"socket() [Socket connection error] Cannot connect to %s, error: %s\n" % (hostname, socket.error))
sys.exit(1)
self.sock.settimeout(timeout)
try:
self.sock.connect((hostname, port))
except socket.error:
sys.stderr.write("connect() [Socket connection error] Cannot connect to %s:%d, error: %s\n" % (
hostname, port, socket.error))
sys.exit(2)
def SendCommand(self, cmd):
self.access_semaphor.acquire()
self.sock.send('{}\n'.format(cmd).encode('utf-8'))
self.access_semaphor.release()
def Ask(self, cmd):
self.access_semaphor.acquire()
self.sock.send('{}\n'.format(cmd).encode('utf-8'))
reply = self.sock.recv(2048).strip(b'\n')
self.access_semaphor.release()
return reply
Move string formatting onto two lines for readabilityimport string
import socket
import sys
import time
import threading
class SimpleSocket:
def __init__(self, hostname="localhost", port=8888, timeout=2):
self.access_semaphor = threading.Semaphore(1)
try:
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
except socket.error:
sys.stderr.write(
"socket() [Socket connection error] Cannot connect to %s, error: %s\n" % (hostname, socket.error))
sys.exit(1)
self.sock.settimeout(timeout)
try:
self.sock.connect((hostname, port))
except socket.error:
sys.stderr.write("connect() [Socket connection error] Cannot connect to %s:%d, error: %s\n" % (
hostname, port, socket.error))
sys.exit(2)
def SendCommand(self, cmd):
self.access_semaphor.acquire()
cmd += '\n'
self.sock.send(cmd.encode('utf-8'))
self.access_semaphor.release()
def Ask(self, cmd):
self.access_semaphor.acquire()
cmd += '\n'
self.sock.send(cmd.encode('utf-8'))
reply = self.sock.recv(2048).strip(b'\n')
self.access_semaphor.release()
return reply
|
<commit_before>import string
import socket
import sys
import time
import threading
class SimpleSocket:
def __init__(self, hostname="localhost", port=8888, timeout=2):
self.access_semaphor = threading.Semaphore(1)
try:
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
except socket.error:
sys.stderr.write(
"socket() [Socket connection error] Cannot connect to %s, error: %s\n" % (hostname, socket.error))
sys.exit(1)
self.sock.settimeout(timeout)
try:
self.sock.connect((hostname, port))
except socket.error:
sys.stderr.write("connect() [Socket connection error] Cannot connect to %s:%d, error: %s\n" % (
hostname, port, socket.error))
sys.exit(2)
def SendCommand(self, cmd):
self.access_semaphor.acquire()
self.sock.send('{}\n'.format(cmd).encode('utf-8'))
self.access_semaphor.release()
def Ask(self, cmd):
self.access_semaphor.acquire()
self.sock.send('{}\n'.format(cmd).encode('utf-8'))
reply = self.sock.recv(2048).strip(b'\n')
self.access_semaphor.release()
return reply
<commit_msg>Move string formatting onto two lines for readability<commit_after>import string
import socket
import sys
import time
import threading
class SimpleSocket:
def __init__(self, hostname="localhost", port=8888, timeout=2):
self.access_semaphor = threading.Semaphore(1)
try:
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
except socket.error:
sys.stderr.write(
"socket() [Socket connection error] Cannot connect to %s, error: %s\n" % (hostname, socket.error))
sys.exit(1)
self.sock.settimeout(timeout)
try:
self.sock.connect((hostname, port))
except socket.error:
sys.stderr.write("connect() [Socket connection error] Cannot connect to %s:%d, error: %s\n" % (
hostname, port, socket.error))
sys.exit(2)
def SendCommand(self, cmd):
self.access_semaphor.acquire()
cmd += '\n'
self.sock.send(cmd.encode('utf-8'))
self.access_semaphor.release()
def Ask(self, cmd):
self.access_semaphor.acquire()
cmd += '\n'
self.sock.send(cmd.encode('utf-8'))
reply = self.sock.recv(2048).strip(b'\n')
self.access_semaphor.release()
return reply
|
66970aed3876cdae30a77eb50960caf29118248f
|
lms/djangoapps/api_manager/management/commands/migrate_orgdata.py
|
lms/djangoapps/api_manager/management/commands/migrate_orgdata.py
|
import json
from django.contrib.auth.models import Group
from django.core.management.base import BaseCommand
from api_manager.models import GroupProfile, Organization
class Command(BaseCommand):
"""
Migrates legacy organization data and user relationships from older Group model approach to newer concrete Organization model
"""
def handle(self, *args, **options):
org_groups = GroupProfile.objects.filter(group_type='organization')
for org in org_groups:
data = json.loads(org.data)
migrated_org = Organization.objects.create(
name=data['name'],
display_name=data['display_name'],
contact_name=data['contact_name'],
contact_email=data['contact_email'],
contact_phone=data['contact_phone']
)
group = Group.objects.get(groupprofile=org.id)
users = group.user_set.all()
for user in users:
migrated_org.users.add(user)
linked_groups = group.grouprelationship.get_linked_group_relationships()
for linked_group in linked_groups:
if linked_group.to_group_relationship_id is not org.id: # Don't need to carry the symmetrical component
actual_group = Group.objects.get(id=linked_group.to_group_relationship_id)
migrated_org.groups.add(actual_group)
|
import json
from django.contrib.auth.models import Group
from django.core.management.base import BaseCommand
from api_manager.models import GroupProfile, Organization
class Command(BaseCommand):
"""
Migrates legacy organization data and user relationships from older Group model approach to newer concrete Organization model
"""
def handle(self, *args, **options):
org_groups = GroupProfile.objects.filter(group_type='organization')
for org in org_groups:
data = json.loads(org.data)
name = org.name
display_name = data.get('display_name', name)
contact_name = data.get('contact_name', None)
contact_email = data.get('email', None)
if contact_email is None:
contact_email = data.get('contact_email', None)
contact_phone = data.get('phone', None)
if contact_phone is None:
contact_phone = data.get('contact_phone', None)
migrated_org = Organization.objects.create(
name=name,
display_name=display_name,
contact_name=contact_name,
contact_email=contact_email,
contact_phone=contact_phone
)
group = Group.objects.get(groupprofile=org.id)
users = group.user_set.all()
for user in users:
migrated_org.users.add(user)
linked_groups = group.grouprelationship.get_linked_group_relationships()
for linked_group in linked_groups:
if linked_group.to_group_relationship_id is not org.id: # Don't need to carry the symmetrical component
actual_group = Group.objects.get(id=linked_group.to_group_relationship_id)
migrated_org.groups.add(actual_group)
|
Tweak to migration in order to accomodate old names for data fields and allow for if data fields were not present
|
Tweak to migration in order to accomodate old names for data fields and allow for if data fields were not present
|
Python
|
agpl-3.0
|
edx-solutions/edx-platform,edx-solutions/edx-platform,edx-solutions/edx-platform,edx-solutions/edx-platform
|
import json
from django.contrib.auth.models import Group
from django.core.management.base import BaseCommand
from api_manager.models import GroupProfile, Organization
class Command(BaseCommand):
"""
Migrates legacy organization data and user relationships from older Group model approach to newer concrete Organization model
"""
def handle(self, *args, **options):
org_groups = GroupProfile.objects.filter(group_type='organization')
for org in org_groups:
data = json.loads(org.data)
migrated_org = Organization.objects.create(
name=data['name'],
display_name=data['display_name'],
contact_name=data['contact_name'],
contact_email=data['contact_email'],
contact_phone=data['contact_phone']
)
group = Group.objects.get(groupprofile=org.id)
users = group.user_set.all()
for user in users:
migrated_org.users.add(user)
linked_groups = group.grouprelationship.get_linked_group_relationships()
for linked_group in linked_groups:
if linked_group.to_group_relationship_id is not org.id: # Don't need to carry the symmetrical component
actual_group = Group.objects.get(id=linked_group.to_group_relationship_id)
migrated_org.groups.add(actual_group)
Tweak to migration in order to accomodate old names for data fields and allow for if data fields were not present
|
import json
from django.contrib.auth.models import Group
from django.core.management.base import BaseCommand
from api_manager.models import GroupProfile, Organization
class Command(BaseCommand):
"""
Migrates legacy organization data and user relationships from older Group model approach to newer concrete Organization model
"""
def handle(self, *args, **options):
org_groups = GroupProfile.objects.filter(group_type='organization')
for org in org_groups:
data = json.loads(org.data)
name = org.name
display_name = data.get('display_name', name)
contact_name = data.get('contact_name', None)
contact_email = data.get('email', None)
if contact_email is None:
contact_email = data.get('contact_email', None)
contact_phone = data.get('phone', None)
if contact_phone is None:
contact_phone = data.get('contact_phone', None)
migrated_org = Organization.objects.create(
name=name,
display_name=display_name,
contact_name=contact_name,
contact_email=contact_email,
contact_phone=contact_phone
)
group = Group.objects.get(groupprofile=org.id)
users = group.user_set.all()
for user in users:
migrated_org.users.add(user)
linked_groups = group.grouprelationship.get_linked_group_relationships()
for linked_group in linked_groups:
if linked_group.to_group_relationship_id is not org.id: # Don't need to carry the symmetrical component
actual_group = Group.objects.get(id=linked_group.to_group_relationship_id)
migrated_org.groups.add(actual_group)
|
<commit_before>import json
from django.contrib.auth.models import Group
from django.core.management.base import BaseCommand
from api_manager.models import GroupProfile, Organization
class Command(BaseCommand):
"""
Migrates legacy organization data and user relationships from older Group model approach to newer concrete Organization model
"""
def handle(self, *args, **options):
org_groups = GroupProfile.objects.filter(group_type='organization')
for org in org_groups:
data = json.loads(org.data)
migrated_org = Organization.objects.create(
name=data['name'],
display_name=data['display_name'],
contact_name=data['contact_name'],
contact_email=data['contact_email'],
contact_phone=data['contact_phone']
)
group = Group.objects.get(groupprofile=org.id)
users = group.user_set.all()
for user in users:
migrated_org.users.add(user)
linked_groups = group.grouprelationship.get_linked_group_relationships()
for linked_group in linked_groups:
if linked_group.to_group_relationship_id is not org.id: # Don't need to carry the symmetrical component
actual_group = Group.objects.get(id=linked_group.to_group_relationship_id)
migrated_org.groups.add(actual_group)
<commit_msg>Tweak to migration in order to accomodate old names for data fields and allow for if data fields were not present<commit_after>
|
import json
from django.contrib.auth.models import Group
from django.core.management.base import BaseCommand
from api_manager.models import GroupProfile, Organization
class Command(BaseCommand):
"""
Migrates legacy organization data and user relationships from older Group model approach to newer concrete Organization model
"""
def handle(self, *args, **options):
org_groups = GroupProfile.objects.filter(group_type='organization')
for org in org_groups:
data = json.loads(org.data)
name = org.name
display_name = data.get('display_name', name)
contact_name = data.get('contact_name', None)
contact_email = data.get('email', None)
if contact_email is None:
contact_email = data.get('contact_email', None)
contact_phone = data.get('phone', None)
if contact_phone is None:
contact_phone = data.get('contact_phone', None)
migrated_org = Organization.objects.create(
name=name,
display_name=display_name,
contact_name=contact_name,
contact_email=contact_email,
contact_phone=contact_phone
)
group = Group.objects.get(groupprofile=org.id)
users = group.user_set.all()
for user in users:
migrated_org.users.add(user)
linked_groups = group.grouprelationship.get_linked_group_relationships()
for linked_group in linked_groups:
if linked_group.to_group_relationship_id is not org.id: # Don't need to carry the symmetrical component
actual_group = Group.objects.get(id=linked_group.to_group_relationship_id)
migrated_org.groups.add(actual_group)
|
import json
from django.contrib.auth.models import Group
from django.core.management.base import BaseCommand
from api_manager.models import GroupProfile, Organization
class Command(BaseCommand):
"""
Migrates legacy organization data and user relationships from older Group model approach to newer concrete Organization model
"""
def handle(self, *args, **options):
org_groups = GroupProfile.objects.filter(group_type='organization')
for org in org_groups:
data = json.loads(org.data)
migrated_org = Organization.objects.create(
name=data['name'],
display_name=data['display_name'],
contact_name=data['contact_name'],
contact_email=data['contact_email'],
contact_phone=data['contact_phone']
)
group = Group.objects.get(groupprofile=org.id)
users = group.user_set.all()
for user in users:
migrated_org.users.add(user)
linked_groups = group.grouprelationship.get_linked_group_relationships()
for linked_group in linked_groups:
if linked_group.to_group_relationship_id is not org.id: # Don't need to carry the symmetrical component
actual_group = Group.objects.get(id=linked_group.to_group_relationship_id)
migrated_org.groups.add(actual_group)
Tweak to migration in order to accomodate old names for data fields and allow for if data fields were not presentimport json
from django.contrib.auth.models import Group
from django.core.management.base import BaseCommand
from api_manager.models import GroupProfile, Organization
class Command(BaseCommand):
"""
Migrates legacy organization data and user relationships from older Group model approach to newer concrete Organization model
"""
def handle(self, *args, **options):
org_groups = GroupProfile.objects.filter(group_type='organization')
for org in org_groups:
data = json.loads(org.data)
name = org.name
display_name = data.get('display_name', name)
contact_name = data.get('contact_name', None)
contact_email = data.get('email', None)
if contact_email is None:
contact_email = data.get('contact_email', None)
contact_phone = data.get('phone', None)
if contact_phone is None:
contact_phone = data.get('contact_phone', None)
migrated_org = Organization.objects.create(
name=name,
display_name=display_name,
contact_name=contact_name,
contact_email=contact_email,
contact_phone=contact_phone
)
group = Group.objects.get(groupprofile=org.id)
users = group.user_set.all()
for user in users:
migrated_org.users.add(user)
linked_groups = group.grouprelationship.get_linked_group_relationships()
for linked_group in linked_groups:
if linked_group.to_group_relationship_id is not org.id: # Don't need to carry the symmetrical component
actual_group = Group.objects.get(id=linked_group.to_group_relationship_id)
migrated_org.groups.add(actual_group)
|
<commit_before>import json
from django.contrib.auth.models import Group
from django.core.management.base import BaseCommand
from api_manager.models import GroupProfile, Organization
class Command(BaseCommand):
"""
Migrates legacy organization data and user relationships from older Group model approach to newer concrete Organization model
"""
def handle(self, *args, **options):
org_groups = GroupProfile.objects.filter(group_type='organization')
for org in org_groups:
data = json.loads(org.data)
migrated_org = Organization.objects.create(
name=data['name'],
display_name=data['display_name'],
contact_name=data['contact_name'],
contact_email=data['contact_email'],
contact_phone=data['contact_phone']
)
group = Group.objects.get(groupprofile=org.id)
users = group.user_set.all()
for user in users:
migrated_org.users.add(user)
linked_groups = group.grouprelationship.get_linked_group_relationships()
for linked_group in linked_groups:
if linked_group.to_group_relationship_id is not org.id: # Don't need to carry the symmetrical component
actual_group = Group.objects.get(id=linked_group.to_group_relationship_id)
migrated_org.groups.add(actual_group)
<commit_msg>Tweak to migration in order to accomodate old names for data fields and allow for if data fields were not present<commit_after>import json
from django.contrib.auth.models import Group
from django.core.management.base import BaseCommand
from api_manager.models import GroupProfile, Organization
class Command(BaseCommand):
"""
Migrates legacy organization data and user relationships from older Group model approach to newer concrete Organization model
"""
def handle(self, *args, **options):
org_groups = GroupProfile.objects.filter(group_type='organization')
for org in org_groups:
data = json.loads(org.data)
name = org.name
display_name = data.get('display_name', name)
contact_name = data.get('contact_name', None)
contact_email = data.get('email', None)
if contact_email is None:
contact_email = data.get('contact_email', None)
contact_phone = data.get('phone', None)
if contact_phone is None:
contact_phone = data.get('contact_phone', None)
migrated_org = Organization.objects.create(
name=name,
display_name=display_name,
contact_name=contact_name,
contact_email=contact_email,
contact_phone=contact_phone
)
group = Group.objects.get(groupprofile=org.id)
users = group.user_set.all()
for user in users:
migrated_org.users.add(user)
linked_groups = group.grouprelationship.get_linked_group_relationships()
for linked_group in linked_groups:
if linked_group.to_group_relationship_id is not org.id: # Don't need to carry the symmetrical component
actual_group = Group.objects.get(id=linked_group.to_group_relationship_id)
migrated_org.groups.add(actual_group)
|
60116f05c86658d4ae929e0f1fb7e4e039515298
|
src/adhocracy/migration/versions/071_add_badge_impact.py
|
src/adhocracy/migration/versions/071_add_badge_impact.py
|
from sqlalchemy import MetaData, Table, Column
from sqlalchemy import Integer
def upgrade(migrate_engine):
meta = MetaData(bind=migrate_engine)
table = Table('badge', meta, autoload=True)
col = Column('impact', Integer, default=0, nullable=False)
col.create(table)
|
from sqlalchemy import MetaData, Table, Column
from sqlalchemy import Integer
def upgrade(migrate_engine):
meta = MetaData(bind=migrate_engine)
table = Table('badge', meta, autoload=True)
col = Column('impact', Integer, default=0, server_default=u'0',
nullable=False)
col.create(table)
|
Fix badge impact migration if badges are present
|
Fix badge impact migration if badges are present
If badges are already present in the system, their impact value must be set to something other than NULL. (default=0 is misleading, since it just applies to newly created badges, see http://stackoverflow.com/q/16097149/35070).
|
Python
|
agpl-3.0
|
phihag/adhocracy,liqd/adhocracy,DanielNeugebauer/adhocracy,liqd/adhocracy,alkadis/vcv,alkadis/vcv,DanielNeugebauer/adhocracy,liqd/adhocracy,alkadis/vcv,phihag/adhocracy,phihag/adhocracy,alkadis/vcv,DanielNeugebauer/adhocracy,phihag/adhocracy,DanielNeugebauer/adhocracy,liqd/adhocracy,alkadis/vcv,DanielNeugebauer/adhocracy,phihag/adhocracy
|
from sqlalchemy import MetaData, Table, Column
from sqlalchemy import Integer
def upgrade(migrate_engine):
meta = MetaData(bind=migrate_engine)
table = Table('badge', meta, autoload=True)
col = Column('impact', Integer, default=0, nullable=False)
col.create(table)
Fix badge impact migration if badges are present
If badges are already present in the system, their impact value must be set to something other than NULL. (default=0 is misleading, since it just applies to newly created badges, see http://stackoverflow.com/q/16097149/35070).
|
from sqlalchemy import MetaData, Table, Column
from sqlalchemy import Integer
def upgrade(migrate_engine):
meta = MetaData(bind=migrate_engine)
table = Table('badge', meta, autoload=True)
col = Column('impact', Integer, default=0, server_default=u'0',
nullable=False)
col.create(table)
|
<commit_before>from sqlalchemy import MetaData, Table, Column
from sqlalchemy import Integer
def upgrade(migrate_engine):
meta = MetaData(bind=migrate_engine)
table = Table('badge', meta, autoload=True)
col = Column('impact', Integer, default=0, nullable=False)
col.create(table)
<commit_msg>Fix badge impact migration if badges are present
If badges are already present in the system, their impact value must be set to something other than NULL. (default=0 is misleading, since it just applies to newly created badges, see http://stackoverflow.com/q/16097149/35070).<commit_after>
|
from sqlalchemy import MetaData, Table, Column
from sqlalchemy import Integer
def upgrade(migrate_engine):
meta = MetaData(bind=migrate_engine)
table = Table('badge', meta, autoload=True)
col = Column('impact', Integer, default=0, server_default=u'0',
nullable=False)
col.create(table)
|
from sqlalchemy import MetaData, Table, Column
from sqlalchemy import Integer
def upgrade(migrate_engine):
meta = MetaData(bind=migrate_engine)
table = Table('badge', meta, autoload=True)
col = Column('impact', Integer, default=0, nullable=False)
col.create(table)
Fix badge impact migration if badges are present
If badges are already present in the system, their impact value must be set to something other than NULL. (default=0 is misleading, since it just applies to newly created badges, see http://stackoverflow.com/q/16097149/35070).from sqlalchemy import MetaData, Table, Column
from sqlalchemy import Integer
def upgrade(migrate_engine):
meta = MetaData(bind=migrate_engine)
table = Table('badge', meta, autoload=True)
col = Column('impact', Integer, default=0, server_default=u'0',
nullable=False)
col.create(table)
|
<commit_before>from sqlalchemy import MetaData, Table, Column
from sqlalchemy import Integer
def upgrade(migrate_engine):
meta = MetaData(bind=migrate_engine)
table = Table('badge', meta, autoload=True)
col = Column('impact', Integer, default=0, nullable=False)
col.create(table)
<commit_msg>Fix badge impact migration if badges are present
If badges are already present in the system, their impact value must be set to something other than NULL. (default=0 is misleading, since it just applies to newly created badges, see http://stackoverflow.com/q/16097149/35070).<commit_after>from sqlalchemy import MetaData, Table, Column
from sqlalchemy import Integer
def upgrade(migrate_engine):
meta = MetaData(bind=migrate_engine)
table = Table('badge', meta, autoload=True)
col = Column('impact', Integer, default=0, server_default=u'0',
nullable=False)
col.create(table)
|
03de222d2e9655606f3a8faedfc2293d138527bf
|
one_time_eval.py
|
one_time_eval.py
|
# usage: python one_time_eval.py hole_cards [board_cards]
# examples:
# python one_time_eval.py as8sqdtc
# python one_time_eval.py as8sqdtc 2skskd
# python one_time_eval.py as8sqdtc3d3c 2skskd
# python one_time_eval.py as8sqdtc3d3c 2skskd3h5s
from convenience import find_pcts_multi, pr, str2cards
import sys
## argv to strings
hole_cards_str = sys.argv[1]
board_str = ''
if len(sys.argv) > 2:
board_str = sys.argv[2]
## strings to lists of Card objects
hole_cards = str2cards(hole_cards_str)
board = str2cards(board_str)
## hole card list to player list-of-lists
assert len(hole_cards) % 2 == 0
n_players = len(hole_cards) / 2
assert n_players > 1
p = []
for i in range(n_players):
pi = hole_cards[i * 2 : i * 2 + 2]
pr(pi)
p.append(pi)
print "Board",
pr(board)
percents = find_pcts_multi(p, board, iter = 10000)
print [round(x, 4) for x in percents]
|
# usage: python one_time_eval.py hole_cards [board_cards]
# examples:
# python one_time_eval.py as8sqdtc
# python one_time_eval.py as8sqdtc 2skskd
# python one_time_eval.py as8sqdtc3d3c 2skskd
# python one_time_eval.py as8sqdtc3d3c 2skskd3h5s
from convenience import find_pcts_multi, pr, str2cards
import sys
## argv to strings
hole_cards_str = sys.argv[1]
board_str = ''
if len(sys.argv) > 2:
board_str = sys.argv[2]
## strings to lists of Card objects
hole_cards = str2cards(hole_cards_str)
board = str2cards(board_str)
assert len(board) <= 5
## hole card list to player list-of-lists
assert len(hole_cards) % 2 == 0
n_players = len(hole_cards) / 2
assert n_players > 1
p = []
for i in range(n_players):
pi = hole_cards[i * 2 : i * 2 + 2]
pr(pi)
p.append(pi)
print "Board",
pr(board)
percents = find_pcts_multi(p, board, iter = 20000)
print [round(x, 4) for x in percents]
|
Increase iterations. Add assertion of max board cards.
|
Increase iterations. Add assertion of max board cards.
|
Python
|
mit
|
zimolzak/poker-experiments,zimolzak/poker-experiments,zimolzak/poker-experiments
|
# usage: python one_time_eval.py hole_cards [board_cards]
# examples:
# python one_time_eval.py as8sqdtc
# python one_time_eval.py as8sqdtc 2skskd
# python one_time_eval.py as8sqdtc3d3c 2skskd
# python one_time_eval.py as8sqdtc3d3c 2skskd3h5s
from convenience import find_pcts_multi, pr, str2cards
import sys
## argv to strings
hole_cards_str = sys.argv[1]
board_str = ''
if len(sys.argv) > 2:
board_str = sys.argv[2]
## strings to lists of Card objects
hole_cards = str2cards(hole_cards_str)
board = str2cards(board_str)
## hole card list to player list-of-lists
assert len(hole_cards) % 2 == 0
n_players = len(hole_cards) / 2
assert n_players > 1
p = []
for i in range(n_players):
pi = hole_cards[i * 2 : i * 2 + 2]
pr(pi)
p.append(pi)
print "Board",
pr(board)
percents = find_pcts_multi(p, board, iter = 10000)
print [round(x, 4) for x in percents]
Increase iterations. Add assertion of max board cards.
|
# usage: python one_time_eval.py hole_cards [board_cards]
# examples:
# python one_time_eval.py as8sqdtc
# python one_time_eval.py as8sqdtc 2skskd
# python one_time_eval.py as8sqdtc3d3c 2skskd
# python one_time_eval.py as8sqdtc3d3c 2skskd3h5s
from convenience import find_pcts_multi, pr, str2cards
import sys
## argv to strings
hole_cards_str = sys.argv[1]
board_str = ''
if len(sys.argv) > 2:
board_str = sys.argv[2]
## strings to lists of Card objects
hole_cards = str2cards(hole_cards_str)
board = str2cards(board_str)
assert len(board) <= 5
## hole card list to player list-of-lists
assert len(hole_cards) % 2 == 0
n_players = len(hole_cards) / 2
assert n_players > 1
p = []
for i in range(n_players):
pi = hole_cards[i * 2 : i * 2 + 2]
pr(pi)
p.append(pi)
print "Board",
pr(board)
percents = find_pcts_multi(p, board, iter = 20000)
print [round(x, 4) for x in percents]
|
<commit_before># usage: python one_time_eval.py hole_cards [board_cards]
# examples:
# python one_time_eval.py as8sqdtc
# python one_time_eval.py as8sqdtc 2skskd
# python one_time_eval.py as8sqdtc3d3c 2skskd
# python one_time_eval.py as8sqdtc3d3c 2skskd3h5s
from convenience import find_pcts_multi, pr, str2cards
import sys
## argv to strings
hole_cards_str = sys.argv[1]
board_str = ''
if len(sys.argv) > 2:
board_str = sys.argv[2]
## strings to lists of Card objects
hole_cards = str2cards(hole_cards_str)
board = str2cards(board_str)
## hole card list to player list-of-lists
assert len(hole_cards) % 2 == 0
n_players = len(hole_cards) / 2
assert n_players > 1
p = []
for i in range(n_players):
pi = hole_cards[i * 2 : i * 2 + 2]
pr(pi)
p.append(pi)
print "Board",
pr(board)
percents = find_pcts_multi(p, board, iter = 10000)
print [round(x, 4) for x in percents]
<commit_msg>Increase iterations. Add assertion of max board cards.<commit_after>
|
# usage: python one_time_eval.py hole_cards [board_cards]
# examples:
# python one_time_eval.py as8sqdtc
# python one_time_eval.py as8sqdtc 2skskd
# python one_time_eval.py as8sqdtc3d3c 2skskd
# python one_time_eval.py as8sqdtc3d3c 2skskd3h5s
from convenience import find_pcts_multi, pr, str2cards
import sys
## argv to strings
hole_cards_str = sys.argv[1]
board_str = ''
if len(sys.argv) > 2:
board_str = sys.argv[2]
## strings to lists of Card objects
hole_cards = str2cards(hole_cards_str)
board = str2cards(board_str)
assert len(board) <= 5
## hole card list to player list-of-lists
assert len(hole_cards) % 2 == 0
n_players = len(hole_cards) / 2
assert n_players > 1
p = []
for i in range(n_players):
pi = hole_cards[i * 2 : i * 2 + 2]
pr(pi)
p.append(pi)
print "Board",
pr(board)
percents = find_pcts_multi(p, board, iter = 20000)
print [round(x, 4) for x in percents]
|
# usage: python one_time_eval.py hole_cards [board_cards]
# examples:
# python one_time_eval.py as8sqdtc
# python one_time_eval.py as8sqdtc 2skskd
# python one_time_eval.py as8sqdtc3d3c 2skskd
# python one_time_eval.py as8sqdtc3d3c 2skskd3h5s
from convenience import find_pcts_multi, pr, str2cards
import sys
## argv to strings
hole_cards_str = sys.argv[1]
board_str = ''
if len(sys.argv) > 2:
board_str = sys.argv[2]
## strings to lists of Card objects
hole_cards = str2cards(hole_cards_str)
board = str2cards(board_str)
## hole card list to player list-of-lists
assert len(hole_cards) % 2 == 0
n_players = len(hole_cards) / 2
assert n_players > 1
p = []
for i in range(n_players):
pi = hole_cards[i * 2 : i * 2 + 2]
pr(pi)
p.append(pi)
print "Board",
pr(board)
percents = find_pcts_multi(p, board, iter = 10000)
print [round(x, 4) for x in percents]
Increase iterations. Add assertion of max board cards.# usage: python one_time_eval.py hole_cards [board_cards]
# examples:
# python one_time_eval.py as8sqdtc
# python one_time_eval.py as8sqdtc 2skskd
# python one_time_eval.py as8sqdtc3d3c 2skskd
# python one_time_eval.py as8sqdtc3d3c 2skskd3h5s
from convenience import find_pcts_multi, pr, str2cards
import sys
## argv to strings
hole_cards_str = sys.argv[1]
board_str = ''
if len(sys.argv) > 2:
board_str = sys.argv[2]
## strings to lists of Card objects
hole_cards = str2cards(hole_cards_str)
board = str2cards(board_str)
assert len(board) <= 5
## hole card list to player list-of-lists
assert len(hole_cards) % 2 == 0
n_players = len(hole_cards) / 2
assert n_players > 1
p = []
for i in range(n_players):
pi = hole_cards[i * 2 : i * 2 + 2]
pr(pi)
p.append(pi)
print "Board",
pr(board)
percents = find_pcts_multi(p, board, iter = 20000)
print [round(x, 4) for x in percents]
|
<commit_before># usage: python one_time_eval.py hole_cards [board_cards]
# examples:
# python one_time_eval.py as8sqdtc
# python one_time_eval.py as8sqdtc 2skskd
# python one_time_eval.py as8sqdtc3d3c 2skskd
# python one_time_eval.py as8sqdtc3d3c 2skskd3h5s
from convenience import find_pcts_multi, pr, str2cards
import sys
## argv to strings
hole_cards_str = sys.argv[1]
board_str = ''
if len(sys.argv) > 2:
board_str = sys.argv[2]
## strings to lists of Card objects
hole_cards = str2cards(hole_cards_str)
board = str2cards(board_str)
## hole card list to player list-of-lists
assert len(hole_cards) % 2 == 0
n_players = len(hole_cards) / 2
assert n_players > 1
p = []
for i in range(n_players):
pi = hole_cards[i * 2 : i * 2 + 2]
pr(pi)
p.append(pi)
print "Board",
pr(board)
percents = find_pcts_multi(p, board, iter = 10000)
print [round(x, 4) for x in percents]
<commit_msg>Increase iterations. Add assertion of max board cards.<commit_after># usage: python one_time_eval.py hole_cards [board_cards]
# examples:
# python one_time_eval.py as8sqdtc
# python one_time_eval.py as8sqdtc 2skskd
# python one_time_eval.py as8sqdtc3d3c 2skskd
# python one_time_eval.py as8sqdtc3d3c 2skskd3h5s
from convenience import find_pcts_multi, pr, str2cards
import sys
## argv to strings
hole_cards_str = sys.argv[1]
board_str = ''
if len(sys.argv) > 2:
board_str = sys.argv[2]
## strings to lists of Card objects
hole_cards = str2cards(hole_cards_str)
board = str2cards(board_str)
assert len(board) <= 5
## hole card list to player list-of-lists
assert len(hole_cards) % 2 == 0
n_players = len(hole_cards) / 2
assert n_players > 1
p = []
for i in range(n_players):
pi = hole_cards[i * 2 : i * 2 + 2]
pr(pi)
p.append(pi)
print "Board",
pr(board)
percents = find_pcts_multi(p, board, iter = 20000)
print [round(x, 4) for x in percents]
|
0de213c88dcee2db8f8cd416ff928e6018329e68
|
passwd_change.py
|
passwd_change.py
|
#!/usr/bin/env python3
import sys
_args = sys.argv
if __name__ == "__main__":
if len(_args) == 5:
keys_file = _args[1]
target_file = _args[2]
result_file = _args[3]
log_file = _args[4]
try:
with open(keys_file, 'r') as k:
keys = k.readlines()
keys = [key.strip().split('@')[0] for key in keys]
keys = [key for key in keys if key != '']
with open(target_file, 'r') as t:
target_lines = t.readlines()
log = open(log_file, 'w')
with open(result_file, 'w') as r:
for line in target_lines:
if line.split(':')[0] in keys or \
line.split(':')[3] != '12':
r.write(line)
else:
log.write(line)
log.close()
except Exception as e:
print(str(e))
sys.exit()
else:
print('==================================================')
print('python passwd_change.py keys passwd passwd_new log')
print('==================================================')
|
#!/usr/bin/env python3
import sys
_args = sys.argv
if __name__ == "__main__":
if len(_args) == 8:
keys_file = _args[1]
passwd_orig = _args[2]
passwd_new = _args[3]
passwd_log = _args[4]
shadow_orig = _args[5]
shadow_new = _args[6]
shadow_log = _args[7]
try:
with open(keys_file, 'r') as k:
keys = k.readlines()
keys = [key.strip().split('@')[0] for key in keys]
keys = [key for key in keys if key != '']
with open(passwd_orig, 'r') as po:
passwd_lines = po.readlines()
passwd_log = open(passwd_log, 'w')
passwd_new_keys = []
with open(passwd_new, 'w') as pn:
for line in passwd_lines:
if line.split(':')[0] in keys or \
line.split(':')[3] != '12':
pn.write(line)
passwd_new_keys.append(line.split(':')[0])
else:
passwd_log.write(line)
passwd_log.close()
with open(shadow_orig, 'r') as so:
shadow_lines = so.readlines()
shadow_log = open(shadow_log, 'w')
with open(shadow_new, 'w') as sn:
for line in shadow_lines:
if line.split(':')[0] in passwd_new_keys:
sn.write(line)
else:
shadow_log.write(line)
shadow_log.close()
except Exception as e:
print(str(e))
sys.exit()
else:
print('==================================================')
print('python passwd_change.py keys passwd passwd_new passwd_log' +
' shadow shadow_new shadow_log')
print('==================================================')
|
Add shadow changing support according to our new passwd.
|
Add shadow changing support according to our new passwd.
|
Python
|
mit
|
maxsocl/oldmailer
|
#!/usr/bin/env python3
import sys
_args = sys.argv
if __name__ == "__main__":
if len(_args) == 5:
keys_file = _args[1]
target_file = _args[2]
result_file = _args[3]
log_file = _args[4]
try:
with open(keys_file, 'r') as k:
keys = k.readlines()
keys = [key.strip().split('@')[0] for key in keys]
keys = [key for key in keys if key != '']
with open(target_file, 'r') as t:
target_lines = t.readlines()
log = open(log_file, 'w')
with open(result_file, 'w') as r:
for line in target_lines:
if line.split(':')[0] in keys or \
line.split(':')[3] != '12':
r.write(line)
else:
log.write(line)
log.close()
except Exception as e:
print(str(e))
sys.exit()
else:
print('==================================================')
print('python passwd_change.py keys passwd passwd_new log')
print('==================================================')
Add shadow changing support according to our new passwd.
|
#!/usr/bin/env python3
import sys
_args = sys.argv
if __name__ == "__main__":
if len(_args) == 8:
keys_file = _args[1]
passwd_orig = _args[2]
passwd_new = _args[3]
passwd_log = _args[4]
shadow_orig = _args[5]
shadow_new = _args[6]
shadow_log = _args[7]
try:
with open(keys_file, 'r') as k:
keys = k.readlines()
keys = [key.strip().split('@')[0] for key in keys]
keys = [key for key in keys if key != '']
with open(passwd_orig, 'r') as po:
passwd_lines = po.readlines()
passwd_log = open(passwd_log, 'w')
passwd_new_keys = []
with open(passwd_new, 'w') as pn:
for line in passwd_lines:
if line.split(':')[0] in keys or \
line.split(':')[3] != '12':
pn.write(line)
passwd_new_keys.append(line.split(':')[0])
else:
passwd_log.write(line)
passwd_log.close()
with open(shadow_orig, 'r') as so:
shadow_lines = so.readlines()
shadow_log = open(shadow_log, 'w')
with open(shadow_new, 'w') as sn:
for line in shadow_lines:
if line.split(':')[0] in passwd_new_keys:
sn.write(line)
else:
shadow_log.write(line)
shadow_log.close()
except Exception as e:
print(str(e))
sys.exit()
else:
print('==================================================')
print('python passwd_change.py keys passwd passwd_new passwd_log' +
' shadow shadow_new shadow_log')
print('==================================================')
|
<commit_before>#!/usr/bin/env python3
import sys
_args = sys.argv
if __name__ == "__main__":
if len(_args) == 5:
keys_file = _args[1]
target_file = _args[2]
result_file = _args[3]
log_file = _args[4]
try:
with open(keys_file, 'r') as k:
keys = k.readlines()
keys = [key.strip().split('@')[0] for key in keys]
keys = [key for key in keys if key != '']
with open(target_file, 'r') as t:
target_lines = t.readlines()
log = open(log_file, 'w')
with open(result_file, 'w') as r:
for line in target_lines:
if line.split(':')[0] in keys or \
line.split(':')[3] != '12':
r.write(line)
else:
log.write(line)
log.close()
except Exception as e:
print(str(e))
sys.exit()
else:
print('==================================================')
print('python passwd_change.py keys passwd passwd_new log')
print('==================================================')
<commit_msg>Add shadow changing support according to our new passwd.<commit_after>
|
#!/usr/bin/env python3
import sys
_args = sys.argv
if __name__ == "__main__":
if len(_args) == 8:
keys_file = _args[1]
passwd_orig = _args[2]
passwd_new = _args[3]
passwd_log = _args[4]
shadow_orig = _args[5]
shadow_new = _args[6]
shadow_log = _args[7]
try:
with open(keys_file, 'r') as k:
keys = k.readlines()
keys = [key.strip().split('@')[0] for key in keys]
keys = [key for key in keys if key != '']
with open(passwd_orig, 'r') as po:
passwd_lines = po.readlines()
passwd_log = open(passwd_log, 'w')
passwd_new_keys = []
with open(passwd_new, 'w') as pn:
for line in passwd_lines:
if line.split(':')[0] in keys or \
line.split(':')[3] != '12':
pn.write(line)
passwd_new_keys.append(line.split(':')[0])
else:
passwd_log.write(line)
passwd_log.close()
with open(shadow_orig, 'r') as so:
shadow_lines = so.readlines()
shadow_log = open(shadow_log, 'w')
with open(shadow_new, 'w') as sn:
for line in shadow_lines:
if line.split(':')[0] in passwd_new_keys:
sn.write(line)
else:
shadow_log.write(line)
shadow_log.close()
except Exception as e:
print(str(e))
sys.exit()
else:
print('==================================================')
print('python passwd_change.py keys passwd passwd_new passwd_log' +
' shadow shadow_new shadow_log')
print('==================================================')
|
#!/usr/bin/env python3
import sys
_args = sys.argv
if __name__ == "__main__":
if len(_args) == 5:
keys_file = _args[1]
target_file = _args[2]
result_file = _args[3]
log_file = _args[4]
try:
with open(keys_file, 'r') as k:
keys = k.readlines()
keys = [key.strip().split('@')[0] for key in keys]
keys = [key for key in keys if key != '']
with open(target_file, 'r') as t:
target_lines = t.readlines()
log = open(log_file, 'w')
with open(result_file, 'w') as r:
for line in target_lines:
if line.split(':')[0] in keys or \
line.split(':')[3] != '12':
r.write(line)
else:
log.write(line)
log.close()
except Exception as e:
print(str(e))
sys.exit()
else:
print('==================================================')
print('python passwd_change.py keys passwd passwd_new log')
print('==================================================')
Add shadow changing support according to our new passwd.#!/usr/bin/env python3
import sys
_args = sys.argv
if __name__ == "__main__":
if len(_args) == 8:
keys_file = _args[1]
passwd_orig = _args[2]
passwd_new = _args[3]
passwd_log = _args[4]
shadow_orig = _args[5]
shadow_new = _args[6]
shadow_log = _args[7]
try:
with open(keys_file, 'r') as k:
keys = k.readlines()
keys = [key.strip().split('@')[0] for key in keys]
keys = [key for key in keys if key != '']
with open(passwd_orig, 'r') as po:
passwd_lines = po.readlines()
passwd_log = open(passwd_log, 'w')
passwd_new_keys = []
with open(passwd_new, 'w') as pn:
for line in passwd_lines:
if line.split(':')[0] in keys or \
line.split(':')[3] != '12':
pn.write(line)
passwd_new_keys.append(line.split(':')[0])
else:
passwd_log.write(line)
passwd_log.close()
with open(shadow_orig, 'r') as so:
shadow_lines = so.readlines()
shadow_log = open(shadow_log, 'w')
with open(shadow_new, 'w') as sn:
for line in shadow_lines:
if line.split(':')[0] in passwd_new_keys:
sn.write(line)
else:
shadow_log.write(line)
shadow_log.close()
except Exception as e:
print(str(e))
sys.exit()
else:
print('==================================================')
print('python passwd_change.py keys passwd passwd_new passwd_log' +
' shadow shadow_new shadow_log')
print('==================================================')
|
<commit_before>#!/usr/bin/env python3
import sys
_args = sys.argv
if __name__ == "__main__":
if len(_args) == 5:
keys_file = _args[1]
target_file = _args[2]
result_file = _args[3]
log_file = _args[4]
try:
with open(keys_file, 'r') as k:
keys = k.readlines()
keys = [key.strip().split('@')[0] for key in keys]
keys = [key for key in keys if key != '']
with open(target_file, 'r') as t:
target_lines = t.readlines()
log = open(log_file, 'w')
with open(result_file, 'w') as r:
for line in target_lines:
if line.split(':')[0] in keys or \
line.split(':')[3] != '12':
r.write(line)
else:
log.write(line)
log.close()
except Exception as e:
print(str(e))
sys.exit()
else:
print('==================================================')
print('python passwd_change.py keys passwd passwd_new log')
print('==================================================')
<commit_msg>Add shadow changing support according to our new passwd.<commit_after>#!/usr/bin/env python3
import sys
_args = sys.argv
if __name__ == "__main__":
if len(_args) == 8:
keys_file = _args[1]
passwd_orig = _args[2]
passwd_new = _args[3]
passwd_log = _args[4]
shadow_orig = _args[5]
shadow_new = _args[6]
shadow_log = _args[7]
try:
with open(keys_file, 'r') as k:
keys = k.readlines()
keys = [key.strip().split('@')[0] for key in keys]
keys = [key for key in keys if key != '']
with open(passwd_orig, 'r') as po:
passwd_lines = po.readlines()
passwd_log = open(passwd_log, 'w')
passwd_new_keys = []
with open(passwd_new, 'w') as pn:
for line in passwd_lines:
if line.split(':')[0] in keys or \
line.split(':')[3] != '12':
pn.write(line)
passwd_new_keys.append(line.split(':')[0])
else:
passwd_log.write(line)
passwd_log.close()
with open(shadow_orig, 'r') as so:
shadow_lines = so.readlines()
shadow_log = open(shadow_log, 'w')
with open(shadow_new, 'w') as sn:
for line in shadow_lines:
if line.split(':')[0] in passwd_new_keys:
sn.write(line)
else:
shadow_log.write(line)
shadow_log.close()
except Exception as e:
print(str(e))
sys.exit()
else:
print('==================================================')
print('python passwd_change.py keys passwd passwd_new passwd_log' +
' shadow shadow_new shadow_log')
print('==================================================')
|
99bb83abc18be1581735dc03c21a680060e9a14c
|
l10n_it_website_portal_fiscalcode/controllers/main.py
|
l10n_it_website_portal_fiscalcode/controllers/main.py
|
# Copyright 2019 Simone Rubino
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
from odoo import _
from odoo.addons.portal.controllers.portal import CustomerPortal
from odoo.http import request
CustomerPortal.OPTIONAL_BILLING_FIELDS.extend(['fiscalcode'])
class WebsitePortalFiscalCode(CustomerPortal):
def details_form_validate(self, data):
error, error_message = \
super(WebsitePortalFiscalCode, self).details_form_validate(data)
# Check fiscalcode
dummy_partner = request.env['res.partner'].new({
'fiscalcode': data.get('fiscalcode'),
'company_name': data.get('company_name'),
})
if not dummy_partner.check_fiscalcode():
error['fiscalcode'] = 'error'
error_message.append(_('Fiscal Code not valid'))
return error, error_message
|
# Copyright 2019 Simone Rubino
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
from odoo import _
from odoo.addons.portal.controllers.portal import CustomerPortal
from odoo.http import request
CustomerPortal.OPTIONAL_BILLING_FIELDS.extend(['fiscalcode'])
class WebsitePortalFiscalCode(CustomerPortal):
def details_form_validate(self, data):
error, error_message = \
super(WebsitePortalFiscalCode, self).details_form_validate(data)
# Check fiscalcode
partner = request.env.user.partner_id
# company_type does not come from page form
company_type = partner.company_type
company_name = False
if 'company_name' in data:
company_name = data.get('company_name')
else:
# when company_name is not posted (readonly)
if partner.company_name:
company_name = partner.company_name
elif partner.company_type == 'company':
company_name = partner.name
dummy_partner = request.env['res.partner'].new({
'fiscalcode': data.get('fiscalcode'),
'company_name': company_name,
'company_type': company_type,
})
if not dummy_partner.check_fiscalcode():
error['fiscalcode'] = 'error'
error_message.append(_('Fiscal Code not valid'))
return error, error_message
|
FIX l10n_it_website_portal_fiscalcode check after invoice issuing
|
FIX l10n_it_website_portal_fiscalcode check after invoice issuing
Steps:
- Create a partner (type company) and give them portal access
- With the new user, access to portal
- Edit partner details setting fiscal code with 11 digits
- Using admin, create an invoice for that partner and validate
- Using the new user, access to portal, open partner details and save
Get "The fiscal code doesn't seem to be correct"
|
Python
|
agpl-3.0
|
OCA/l10n-italy,dcorio/l10n-italy,OCA/l10n-italy,dcorio/l10n-italy,dcorio/l10n-italy,OCA/l10n-italy
|
# Copyright 2019 Simone Rubino
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
from odoo import _
from odoo.addons.portal.controllers.portal import CustomerPortal
from odoo.http import request
CustomerPortal.OPTIONAL_BILLING_FIELDS.extend(['fiscalcode'])
class WebsitePortalFiscalCode(CustomerPortal):
def details_form_validate(self, data):
error, error_message = \
super(WebsitePortalFiscalCode, self).details_form_validate(data)
# Check fiscalcode
dummy_partner = request.env['res.partner'].new({
'fiscalcode': data.get('fiscalcode'),
'company_name': data.get('company_name'),
})
if not dummy_partner.check_fiscalcode():
error['fiscalcode'] = 'error'
error_message.append(_('Fiscal Code not valid'))
return error, error_message
FIX l10n_it_website_portal_fiscalcode check after invoice issuing
Steps:
- Create a partner (type company) and give them portal access
- With the new user, access to portal
- Edit partner details setting fiscal code with 11 digits
- Using admin, create an invoice for that partner and validate
- Using the new user, access to portal, open partner details and save
Get "The fiscal code doesn't seem to be correct"
|
# Copyright 2019 Simone Rubino
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
from odoo import _
from odoo.addons.portal.controllers.portal import CustomerPortal
from odoo.http import request
CustomerPortal.OPTIONAL_BILLING_FIELDS.extend(['fiscalcode'])
class WebsitePortalFiscalCode(CustomerPortal):
def details_form_validate(self, data):
error, error_message = \
super(WebsitePortalFiscalCode, self).details_form_validate(data)
# Check fiscalcode
partner = request.env.user.partner_id
# company_type does not come from page form
company_type = partner.company_type
company_name = False
if 'company_name' in data:
company_name = data.get('company_name')
else:
# when company_name is not posted (readonly)
if partner.company_name:
company_name = partner.company_name
elif partner.company_type == 'company':
company_name = partner.name
dummy_partner = request.env['res.partner'].new({
'fiscalcode': data.get('fiscalcode'),
'company_name': company_name,
'company_type': company_type,
})
if not dummy_partner.check_fiscalcode():
error['fiscalcode'] = 'error'
error_message.append(_('Fiscal Code not valid'))
return error, error_message
|
<commit_before># Copyright 2019 Simone Rubino
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
from odoo import _
from odoo.addons.portal.controllers.portal import CustomerPortal
from odoo.http import request
CustomerPortal.OPTIONAL_BILLING_FIELDS.extend(['fiscalcode'])
class WebsitePortalFiscalCode(CustomerPortal):
def details_form_validate(self, data):
error, error_message = \
super(WebsitePortalFiscalCode, self).details_form_validate(data)
# Check fiscalcode
dummy_partner = request.env['res.partner'].new({
'fiscalcode': data.get('fiscalcode'),
'company_name': data.get('company_name'),
})
if not dummy_partner.check_fiscalcode():
error['fiscalcode'] = 'error'
error_message.append(_('Fiscal Code not valid'))
return error, error_message
<commit_msg>FIX l10n_it_website_portal_fiscalcode check after invoice issuing
Steps:
- Create a partner (type company) and give them portal access
- With the new user, access to portal
- Edit partner details setting fiscal code with 11 digits
- Using admin, create an invoice for that partner and validate
- Using the new user, access to portal, open partner details and save
Get "The fiscal code doesn't seem to be correct"<commit_after>
|
# Copyright 2019 Simone Rubino
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
from odoo import _
from odoo.addons.portal.controllers.portal import CustomerPortal
from odoo.http import request
CustomerPortal.OPTIONAL_BILLING_FIELDS.extend(['fiscalcode'])
class WebsitePortalFiscalCode(CustomerPortal):
def details_form_validate(self, data):
error, error_message = \
super(WebsitePortalFiscalCode, self).details_form_validate(data)
# Check fiscalcode
partner = request.env.user.partner_id
# company_type does not come from page form
company_type = partner.company_type
company_name = False
if 'company_name' in data:
company_name = data.get('company_name')
else:
# when company_name is not posted (readonly)
if partner.company_name:
company_name = partner.company_name
elif partner.company_type == 'company':
company_name = partner.name
dummy_partner = request.env['res.partner'].new({
'fiscalcode': data.get('fiscalcode'),
'company_name': company_name,
'company_type': company_type,
})
if not dummy_partner.check_fiscalcode():
error['fiscalcode'] = 'error'
error_message.append(_('Fiscal Code not valid'))
return error, error_message
|
# Copyright 2019 Simone Rubino
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
from odoo import _
from odoo.addons.portal.controllers.portal import CustomerPortal
from odoo.http import request
CustomerPortal.OPTIONAL_BILLING_FIELDS.extend(['fiscalcode'])
class WebsitePortalFiscalCode(CustomerPortal):
def details_form_validate(self, data):
error, error_message = \
super(WebsitePortalFiscalCode, self).details_form_validate(data)
# Check fiscalcode
dummy_partner = request.env['res.partner'].new({
'fiscalcode': data.get('fiscalcode'),
'company_name': data.get('company_name'),
})
if not dummy_partner.check_fiscalcode():
error['fiscalcode'] = 'error'
error_message.append(_('Fiscal Code not valid'))
return error, error_message
FIX l10n_it_website_portal_fiscalcode check after invoice issuing
Steps:
- Create a partner (type company) and give them portal access
- With the new user, access to portal
- Edit partner details setting fiscal code with 11 digits
- Using admin, create an invoice for that partner and validate
- Using the new user, access to portal, open partner details and save
Get "The fiscal code doesn't seem to be correct"# Copyright 2019 Simone Rubino
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
from odoo import _
from odoo.addons.portal.controllers.portal import CustomerPortal
from odoo.http import request
CustomerPortal.OPTIONAL_BILLING_FIELDS.extend(['fiscalcode'])
class WebsitePortalFiscalCode(CustomerPortal):
def details_form_validate(self, data):
error, error_message = \
super(WebsitePortalFiscalCode, self).details_form_validate(data)
# Check fiscalcode
partner = request.env.user.partner_id
# company_type does not come from page form
company_type = partner.company_type
company_name = False
if 'company_name' in data:
company_name = data.get('company_name')
else:
# when company_name is not posted (readonly)
if partner.company_name:
company_name = partner.company_name
elif partner.company_type == 'company':
company_name = partner.name
dummy_partner = request.env['res.partner'].new({
'fiscalcode': data.get('fiscalcode'),
'company_name': company_name,
'company_type': company_type,
})
if not dummy_partner.check_fiscalcode():
error['fiscalcode'] = 'error'
error_message.append(_('Fiscal Code not valid'))
return error, error_message
|
<commit_before># Copyright 2019 Simone Rubino
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
from odoo import _
from odoo.addons.portal.controllers.portal import CustomerPortal
from odoo.http import request
CustomerPortal.OPTIONAL_BILLING_FIELDS.extend(['fiscalcode'])
class WebsitePortalFiscalCode(CustomerPortal):
def details_form_validate(self, data):
error, error_message = \
super(WebsitePortalFiscalCode, self).details_form_validate(data)
# Check fiscalcode
dummy_partner = request.env['res.partner'].new({
'fiscalcode': data.get('fiscalcode'),
'company_name': data.get('company_name'),
})
if not dummy_partner.check_fiscalcode():
error['fiscalcode'] = 'error'
error_message.append(_('Fiscal Code not valid'))
return error, error_message
<commit_msg>FIX l10n_it_website_portal_fiscalcode check after invoice issuing
Steps:
- Create a partner (type company) and give them portal access
- With the new user, access to portal
- Edit partner details setting fiscal code with 11 digits
- Using admin, create an invoice for that partner and validate
- Using the new user, access to portal, open partner details and save
Get "The fiscal code doesn't seem to be correct"<commit_after># Copyright 2019 Simone Rubino
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
from odoo import _
from odoo.addons.portal.controllers.portal import CustomerPortal
from odoo.http import request
CustomerPortal.OPTIONAL_BILLING_FIELDS.extend(['fiscalcode'])
class WebsitePortalFiscalCode(CustomerPortal):
def details_form_validate(self, data):
error, error_message = \
super(WebsitePortalFiscalCode, self).details_form_validate(data)
# Check fiscalcode
partner = request.env.user.partner_id
# company_type does not come from page form
company_type = partner.company_type
company_name = False
if 'company_name' in data:
company_name = data.get('company_name')
else:
# when company_name is not posted (readonly)
if partner.company_name:
company_name = partner.company_name
elif partner.company_type == 'company':
company_name = partner.name
dummy_partner = request.env['res.partner'].new({
'fiscalcode': data.get('fiscalcode'),
'company_name': company_name,
'company_type': company_type,
})
if not dummy_partner.check_fiscalcode():
error['fiscalcode'] = 'error'
error_message.append(_('Fiscal Code not valid'))
return error, error_message
|
fb58ecee7e3e71f0dbb202f7284c3af20ccbcdaa
|
shared/logger.py
|
shared/logger.py
|
import os
import logging
from datetime import datetime
log_name = '{}/../logs/{}.log'.format(os.path.dirname(os.path.abspath(__file__)),
datetime.now().strftime('%Y.%m.%d.%H.%M.%S'))
logger = logging.getLogger('main')
logger.setLevel(logging.INFO)
logging.basicConfig(format='%(asctime)s %(message)s',
datefmt='%m/%d/%Y %I:%M:%S %p', filename=log_name)
def info(component, message):
if component:
logger.info('[{}] {}'.format(component, message))
else:
logger.info(message)
def error(component, message):
if component:
logger.error('[{}] {}'.format(component, message))
else:
logger.error(message)
|
import os
import logging
from datetime import datetime
log_name = '{}/../logs/{}.log'.format(os.path.dirname(os.path.abspath(__file__)),
datetime.now().strftime('%Y.%m.%d.%H.%M.%S'))
logger = logging.getLogger('main')
logger.setLevel(logging.INFO)
console = logging.StreamHandler()
logger.addHandler(console)
logging.basicConfig(format='%(asctime)s %(message)s',
datefmt='%m/%d/%Y %I:%M:%S %p', filename=log_name)
def info(component, message):
if component:
logger.info('[{}] {}'.format(component, message))
else:
logger.info(message)
def error(component, message):
if component:
logger.error('[{}] {}'.format(component, message))
else:
logger.error(message)
|
Allow logging to console as well as disk
|
Allow logging to console as well as disk
|
Python
|
mit
|
Mo-Talha/Nomad,Mo-Talha/Nomad,Mo-Talha/Nomad,Mo-Talha/Nomad
|
import os
import logging
from datetime import datetime
log_name = '{}/../logs/{}.log'.format(os.path.dirname(os.path.abspath(__file__)),
datetime.now().strftime('%Y.%m.%d.%H.%M.%S'))
logger = logging.getLogger('main')
logger.setLevel(logging.INFO)
logging.basicConfig(format='%(asctime)s %(message)s',
datefmt='%m/%d/%Y %I:%M:%S %p', filename=log_name)
def info(component, message):
if component:
logger.info('[{}] {}'.format(component, message))
else:
logger.info(message)
def error(component, message):
if component:
logger.error('[{}] {}'.format(component, message))
else:
logger.error(message)
Allow logging to console as well as disk
|
import os
import logging
from datetime import datetime
log_name = '{}/../logs/{}.log'.format(os.path.dirname(os.path.abspath(__file__)),
datetime.now().strftime('%Y.%m.%d.%H.%M.%S'))
logger = logging.getLogger('main')
logger.setLevel(logging.INFO)
console = logging.StreamHandler()
logger.addHandler(console)
logging.basicConfig(format='%(asctime)s %(message)s',
datefmt='%m/%d/%Y %I:%M:%S %p', filename=log_name)
def info(component, message):
if component:
logger.info('[{}] {}'.format(component, message))
else:
logger.info(message)
def error(component, message):
if component:
logger.error('[{}] {}'.format(component, message))
else:
logger.error(message)
|
<commit_before>import os
import logging
from datetime import datetime
log_name = '{}/../logs/{}.log'.format(os.path.dirname(os.path.abspath(__file__)),
datetime.now().strftime('%Y.%m.%d.%H.%M.%S'))
logger = logging.getLogger('main')
logger.setLevel(logging.INFO)
logging.basicConfig(format='%(asctime)s %(message)s',
datefmt='%m/%d/%Y %I:%M:%S %p', filename=log_name)
def info(component, message):
if component:
logger.info('[{}] {}'.format(component, message))
else:
logger.info(message)
def error(component, message):
if component:
logger.error('[{}] {}'.format(component, message))
else:
logger.error(message)
<commit_msg>Allow logging to console as well as disk<commit_after>
|
import os
import logging
from datetime import datetime
log_name = '{}/../logs/{}.log'.format(os.path.dirname(os.path.abspath(__file__)),
datetime.now().strftime('%Y.%m.%d.%H.%M.%S'))
logger = logging.getLogger('main')
logger.setLevel(logging.INFO)
console = logging.StreamHandler()
logger.addHandler(console)
logging.basicConfig(format='%(asctime)s %(message)s',
datefmt='%m/%d/%Y %I:%M:%S %p', filename=log_name)
def info(component, message):
if component:
logger.info('[{}] {}'.format(component, message))
else:
logger.info(message)
def error(component, message):
if component:
logger.error('[{}] {}'.format(component, message))
else:
logger.error(message)
|
import os
import logging
from datetime import datetime
log_name = '{}/../logs/{}.log'.format(os.path.dirname(os.path.abspath(__file__)),
datetime.now().strftime('%Y.%m.%d.%H.%M.%S'))
logger = logging.getLogger('main')
logger.setLevel(logging.INFO)
logging.basicConfig(format='%(asctime)s %(message)s',
datefmt='%m/%d/%Y %I:%M:%S %p', filename=log_name)
def info(component, message):
if component:
logger.info('[{}] {}'.format(component, message))
else:
logger.info(message)
def error(component, message):
if component:
logger.error('[{}] {}'.format(component, message))
else:
logger.error(message)
Allow logging to console as well as diskimport os
import logging
from datetime import datetime
log_name = '{}/../logs/{}.log'.format(os.path.dirname(os.path.abspath(__file__)),
datetime.now().strftime('%Y.%m.%d.%H.%M.%S'))
logger = logging.getLogger('main')
logger.setLevel(logging.INFO)
console = logging.StreamHandler()
logger.addHandler(console)
logging.basicConfig(format='%(asctime)s %(message)s',
datefmt='%m/%d/%Y %I:%M:%S %p', filename=log_name)
def info(component, message):
if component:
logger.info('[{}] {}'.format(component, message))
else:
logger.info(message)
def error(component, message):
if component:
logger.error('[{}] {}'.format(component, message))
else:
logger.error(message)
|
<commit_before>import os
import logging
from datetime import datetime
log_name = '{}/../logs/{}.log'.format(os.path.dirname(os.path.abspath(__file__)),
datetime.now().strftime('%Y.%m.%d.%H.%M.%S'))
logger = logging.getLogger('main')
logger.setLevel(logging.INFO)
logging.basicConfig(format='%(asctime)s %(message)s',
datefmt='%m/%d/%Y %I:%M:%S %p', filename=log_name)
def info(component, message):
if component:
logger.info('[{}] {}'.format(component, message))
else:
logger.info(message)
def error(component, message):
if component:
logger.error('[{}] {}'.format(component, message))
else:
logger.error(message)
<commit_msg>Allow logging to console as well as disk<commit_after>import os
import logging
from datetime import datetime
log_name = '{}/../logs/{}.log'.format(os.path.dirname(os.path.abspath(__file__)),
datetime.now().strftime('%Y.%m.%d.%H.%M.%S'))
logger = logging.getLogger('main')
logger.setLevel(logging.INFO)
console = logging.StreamHandler()
logger.addHandler(console)
logging.basicConfig(format='%(asctime)s %(message)s',
datefmt='%m/%d/%Y %I:%M:%S %p', filename=log_name)
def info(component, message):
if component:
logger.info('[{}] {}'.format(component, message))
else:
logger.info(message)
def error(component, message):
if component:
logger.error('[{}] {}'.format(component, message))
else:
logger.error(message)
|
9358b4ebf433a1c23d7c12b99e0253f3741eed8c
|
contrail_provisioning/config/templates/contrail_api_conf.py
|
contrail_provisioning/config/templates/contrail_api_conf.py
|
import string
template = string.Template("""
[DEFAULTS]
ifmap_server_ip=$__contrail_ifmap_server_ip__
ifmap_server_port=$__contrail_ifmap_server_port__
ifmap_username=$__contrail_ifmap_username__
ifmap_password=$__contrail_ifmap_password__
cassandra_server_list=$__contrail_cassandra_server_list__
listen_ip_addr=$__contrail_listen_ip_addr__
listen_port=$__contrail_listen_port__
multi_tenancy=$__contrail_multi_tenancy__
log_file=$__contrail_log_file__
log_local=1
log_level=SYS_NOTICE
disc_server_ip=$__contrail_disc_server_ip__
disc_server_port=$__contrail_disc_server_port__
zk_server_ip=$__contrail_zookeeper_server_ip__
redis_server_ip=$__contrail_redis_ip__
rabbit_server=$__rabbit_server_ip__
rabbit_port=$__rabbit_server_port__
[SECURITY]
use_certs=$__contrail_use_certs__
keyfile=$__contrail_keyfile_location__
certfile=$__contrail_certfile_location__
ca_certs=$__contrail_cacertfile_location__
""")
|
import string
template = string.Template("""
[DEFAULTS]
ifmap_server_ip=$__contrail_ifmap_server_ip__
ifmap_server_port=$__contrail_ifmap_server_port__
ifmap_username=$__contrail_ifmap_username__
ifmap_password=$__contrail_ifmap_password__
cassandra_server_list=$__contrail_cassandra_server_list__
listen_ip_addr=$__contrail_listen_ip_addr__
listen_port=$__contrail_listen_port__
multi_tenancy=$__contrail_multi_tenancy__
log_file=$__contrail_log_file__
log_local=1
log_level=SYS_NOTICE
disc_server_ip=$__contrail_disc_server_ip__
disc_server_port=$__contrail_disc_server_port__
zk_server_ip=$__contrail_zookeeper_server_ip__
redis_server_ip=$__contrail_redis_ip__
rabbit_server=$__rabbit_server_ip__
rabbit_port=$__rabbit_server_port__
list_optimization_enabled=True
[SECURITY]
use_certs=$__contrail_use_certs__
keyfile=$__contrail_keyfile_location__
certfile=$__contrail_certfile_location__
ca_certs=$__contrail_cacertfile_location__
""")
|
Enable port list optimization by default for new install+provision
|
config-perf: Enable port list optimization by default for new install+provision
From R1.05 onwards port is created as child of project. This leads to
better list performance.
Change-Id: Id0acbd1194403c500cdf0ee5851ef6cf5dba1c97
Closes-Bug: #1441924
|
Python
|
apache-2.0
|
Juniper/contrail-provisioning,Juniper/contrail-provisioning
|
import string
template = string.Template("""
[DEFAULTS]
ifmap_server_ip=$__contrail_ifmap_server_ip__
ifmap_server_port=$__contrail_ifmap_server_port__
ifmap_username=$__contrail_ifmap_username__
ifmap_password=$__contrail_ifmap_password__
cassandra_server_list=$__contrail_cassandra_server_list__
listen_ip_addr=$__contrail_listen_ip_addr__
listen_port=$__contrail_listen_port__
multi_tenancy=$__contrail_multi_tenancy__
log_file=$__contrail_log_file__
log_local=1
log_level=SYS_NOTICE
disc_server_ip=$__contrail_disc_server_ip__
disc_server_port=$__contrail_disc_server_port__
zk_server_ip=$__contrail_zookeeper_server_ip__
redis_server_ip=$__contrail_redis_ip__
rabbit_server=$__rabbit_server_ip__
rabbit_port=$__rabbit_server_port__
[SECURITY]
use_certs=$__contrail_use_certs__
keyfile=$__contrail_keyfile_location__
certfile=$__contrail_certfile_location__
ca_certs=$__contrail_cacertfile_location__
""")
config-perf: Enable port list optimization by default for new install+provision
From R1.05 onwards port is created as child of project. This leads to
better list performance.
Change-Id: Id0acbd1194403c500cdf0ee5851ef6cf5dba1c97
Closes-Bug: #1441924
|
import string
template = string.Template("""
[DEFAULTS]
ifmap_server_ip=$__contrail_ifmap_server_ip__
ifmap_server_port=$__contrail_ifmap_server_port__
ifmap_username=$__contrail_ifmap_username__
ifmap_password=$__contrail_ifmap_password__
cassandra_server_list=$__contrail_cassandra_server_list__
listen_ip_addr=$__contrail_listen_ip_addr__
listen_port=$__contrail_listen_port__
multi_tenancy=$__contrail_multi_tenancy__
log_file=$__contrail_log_file__
log_local=1
log_level=SYS_NOTICE
disc_server_ip=$__contrail_disc_server_ip__
disc_server_port=$__contrail_disc_server_port__
zk_server_ip=$__contrail_zookeeper_server_ip__
redis_server_ip=$__contrail_redis_ip__
rabbit_server=$__rabbit_server_ip__
rabbit_port=$__rabbit_server_port__
list_optimization_enabled=True
[SECURITY]
use_certs=$__contrail_use_certs__
keyfile=$__contrail_keyfile_location__
certfile=$__contrail_certfile_location__
ca_certs=$__contrail_cacertfile_location__
""")
|
<commit_before>import string
template = string.Template("""
[DEFAULTS]
ifmap_server_ip=$__contrail_ifmap_server_ip__
ifmap_server_port=$__contrail_ifmap_server_port__
ifmap_username=$__contrail_ifmap_username__
ifmap_password=$__contrail_ifmap_password__
cassandra_server_list=$__contrail_cassandra_server_list__
listen_ip_addr=$__contrail_listen_ip_addr__
listen_port=$__contrail_listen_port__
multi_tenancy=$__contrail_multi_tenancy__
log_file=$__contrail_log_file__
log_local=1
log_level=SYS_NOTICE
disc_server_ip=$__contrail_disc_server_ip__
disc_server_port=$__contrail_disc_server_port__
zk_server_ip=$__contrail_zookeeper_server_ip__
redis_server_ip=$__contrail_redis_ip__
rabbit_server=$__rabbit_server_ip__
rabbit_port=$__rabbit_server_port__
[SECURITY]
use_certs=$__contrail_use_certs__
keyfile=$__contrail_keyfile_location__
certfile=$__contrail_certfile_location__
ca_certs=$__contrail_cacertfile_location__
""")
<commit_msg>config-perf: Enable port list optimization by default for new install+provision
From R1.05 onwards port is created as child of project. This leads to
better list performance.
Change-Id: Id0acbd1194403c500cdf0ee5851ef6cf5dba1c97
Closes-Bug: #1441924<commit_after>
|
import string
template = string.Template("""
[DEFAULTS]
ifmap_server_ip=$__contrail_ifmap_server_ip__
ifmap_server_port=$__contrail_ifmap_server_port__
ifmap_username=$__contrail_ifmap_username__
ifmap_password=$__contrail_ifmap_password__
cassandra_server_list=$__contrail_cassandra_server_list__
listen_ip_addr=$__contrail_listen_ip_addr__
listen_port=$__contrail_listen_port__
multi_tenancy=$__contrail_multi_tenancy__
log_file=$__contrail_log_file__
log_local=1
log_level=SYS_NOTICE
disc_server_ip=$__contrail_disc_server_ip__
disc_server_port=$__contrail_disc_server_port__
zk_server_ip=$__contrail_zookeeper_server_ip__
redis_server_ip=$__contrail_redis_ip__
rabbit_server=$__rabbit_server_ip__
rabbit_port=$__rabbit_server_port__
list_optimization_enabled=True
[SECURITY]
use_certs=$__contrail_use_certs__
keyfile=$__contrail_keyfile_location__
certfile=$__contrail_certfile_location__
ca_certs=$__contrail_cacertfile_location__
""")
|
import string
template = string.Template("""
[DEFAULTS]
ifmap_server_ip=$__contrail_ifmap_server_ip__
ifmap_server_port=$__contrail_ifmap_server_port__
ifmap_username=$__contrail_ifmap_username__
ifmap_password=$__contrail_ifmap_password__
cassandra_server_list=$__contrail_cassandra_server_list__
listen_ip_addr=$__contrail_listen_ip_addr__
listen_port=$__contrail_listen_port__
multi_tenancy=$__contrail_multi_tenancy__
log_file=$__contrail_log_file__
log_local=1
log_level=SYS_NOTICE
disc_server_ip=$__contrail_disc_server_ip__
disc_server_port=$__contrail_disc_server_port__
zk_server_ip=$__contrail_zookeeper_server_ip__
redis_server_ip=$__contrail_redis_ip__
rabbit_server=$__rabbit_server_ip__
rabbit_port=$__rabbit_server_port__
[SECURITY]
use_certs=$__contrail_use_certs__
keyfile=$__contrail_keyfile_location__
certfile=$__contrail_certfile_location__
ca_certs=$__contrail_cacertfile_location__
""")
config-perf: Enable port list optimization by default for new install+provision
From R1.05 onwards port is created as child of project. This leads to
better list performance.
Change-Id: Id0acbd1194403c500cdf0ee5851ef6cf5dba1c97
Closes-Bug: #1441924import string
template = string.Template("""
[DEFAULTS]
ifmap_server_ip=$__contrail_ifmap_server_ip__
ifmap_server_port=$__contrail_ifmap_server_port__
ifmap_username=$__contrail_ifmap_username__
ifmap_password=$__contrail_ifmap_password__
cassandra_server_list=$__contrail_cassandra_server_list__
listen_ip_addr=$__contrail_listen_ip_addr__
listen_port=$__contrail_listen_port__
multi_tenancy=$__contrail_multi_tenancy__
log_file=$__contrail_log_file__
log_local=1
log_level=SYS_NOTICE
disc_server_ip=$__contrail_disc_server_ip__
disc_server_port=$__contrail_disc_server_port__
zk_server_ip=$__contrail_zookeeper_server_ip__
redis_server_ip=$__contrail_redis_ip__
rabbit_server=$__rabbit_server_ip__
rabbit_port=$__rabbit_server_port__
list_optimization_enabled=True
[SECURITY]
use_certs=$__contrail_use_certs__
keyfile=$__contrail_keyfile_location__
certfile=$__contrail_certfile_location__
ca_certs=$__contrail_cacertfile_location__
""")
|
<commit_before>import string
template = string.Template("""
[DEFAULTS]
ifmap_server_ip=$__contrail_ifmap_server_ip__
ifmap_server_port=$__contrail_ifmap_server_port__
ifmap_username=$__contrail_ifmap_username__
ifmap_password=$__contrail_ifmap_password__
cassandra_server_list=$__contrail_cassandra_server_list__
listen_ip_addr=$__contrail_listen_ip_addr__
listen_port=$__contrail_listen_port__
multi_tenancy=$__contrail_multi_tenancy__
log_file=$__contrail_log_file__
log_local=1
log_level=SYS_NOTICE
disc_server_ip=$__contrail_disc_server_ip__
disc_server_port=$__contrail_disc_server_port__
zk_server_ip=$__contrail_zookeeper_server_ip__
redis_server_ip=$__contrail_redis_ip__
rabbit_server=$__rabbit_server_ip__
rabbit_port=$__rabbit_server_port__
[SECURITY]
use_certs=$__contrail_use_certs__
keyfile=$__contrail_keyfile_location__
certfile=$__contrail_certfile_location__
ca_certs=$__contrail_cacertfile_location__
""")
<commit_msg>config-perf: Enable port list optimization by default for new install+provision
From R1.05 onwards port is created as child of project. This leads to
better list performance.
Change-Id: Id0acbd1194403c500cdf0ee5851ef6cf5dba1c97
Closes-Bug: #1441924<commit_after>import string
template = string.Template("""
[DEFAULTS]
ifmap_server_ip=$__contrail_ifmap_server_ip__
ifmap_server_port=$__contrail_ifmap_server_port__
ifmap_username=$__contrail_ifmap_username__
ifmap_password=$__contrail_ifmap_password__
cassandra_server_list=$__contrail_cassandra_server_list__
listen_ip_addr=$__contrail_listen_ip_addr__
listen_port=$__contrail_listen_port__
multi_tenancy=$__contrail_multi_tenancy__
log_file=$__contrail_log_file__
log_local=1
log_level=SYS_NOTICE
disc_server_ip=$__contrail_disc_server_ip__
disc_server_port=$__contrail_disc_server_port__
zk_server_ip=$__contrail_zookeeper_server_ip__
redis_server_ip=$__contrail_redis_ip__
rabbit_server=$__rabbit_server_ip__
rabbit_port=$__rabbit_server_port__
list_optimization_enabled=True
[SECURITY]
use_certs=$__contrail_use_certs__
keyfile=$__contrail_keyfile_location__
certfile=$__contrail_certfile_location__
ca_certs=$__contrail_cacertfile_location__
""")
|
3f2b1fd9d8d7472323de24e971b004d177637c95
|
php4dvd/model/application.py
|
php4dvd/model/application.py
|
Class Application(object):
def __init__(sefl, driver):
self.driver = driver
def go_to_homepage(self):
self.driver.get("http://hub.wart.ru/php4dvd/")
def login(self, user):
driver = self.driver
driver.find_element_by_id("username").clear()
driver.find_element_by_id("username").send_keys(user.username)
driver.find_element_by_id("password").clear()
driver.find_element_by_id("password").send_keys(user.password)
driver.find_element_by_id("submit").click()
def logout(self):
driver = self.driver
driver.find_element_by_link_text("Log out").click()
driver.switch_to_alert().accept()
def is_element_present(driver, how, what):
try:
driver.find_element(by=how, value=what)
except NoSuchElementException, e:
return False
return True
|
Class Application(object):
def __init__(sefl, driver):
self.driver = driver
self.wait = WebDriverWait(driver, 10)
def go_to_homepage(self):
self.driver.get("http://hub.wart.ru/php4dvd/")
def login(self, user):
driver = self.driver
driver.find_element_by_id("username").clear()
driver.find_element_by_id("username").send_keys(user.username)
driver.find_element_by_id("password").clear()
driver.find_element_by_id("password").send_keys(user.password)
driver.find_element_by_id("submit").click()
def logout(self):
driver = self.driver
driver.find_element_by_link_text("Log out").click()
driver.switch_to_alert().accept()
def is_element_present(driver, how, what):
try:
driver.find_element(by=how, value=what)
except NoSuchElementException, e:
return False
return True
|
Set wait variable to 10 seconds.
|
Set wait variable to 10 seconds.
|
Python
|
bsd-2-clause
|
bsamorodov/selenium-py-training-samorodov
|
Class Application(object):
def __init__(sefl, driver):
self.driver = driver
def go_to_homepage(self):
self.driver.get("http://hub.wart.ru/php4dvd/")
def login(self, user):
driver = self.driver
driver.find_element_by_id("username").clear()
driver.find_element_by_id("username").send_keys(user.username)
driver.find_element_by_id("password").clear()
driver.find_element_by_id("password").send_keys(user.password)
driver.find_element_by_id("submit").click()
def logout(self):
driver = self.driver
driver.find_element_by_link_text("Log out").click()
driver.switch_to_alert().accept()
def is_element_present(driver, how, what):
try:
driver.find_element(by=how, value=what)
except NoSuchElementException, e:
return False
return True
Set wait variable to 10 seconds.
|
Class Application(object):
def __init__(sefl, driver):
self.driver = driver
self.wait = WebDriverWait(driver, 10)
def go_to_homepage(self):
self.driver.get("http://hub.wart.ru/php4dvd/")
def login(self, user):
driver = self.driver
driver.find_element_by_id("username").clear()
driver.find_element_by_id("username").send_keys(user.username)
driver.find_element_by_id("password").clear()
driver.find_element_by_id("password").send_keys(user.password)
driver.find_element_by_id("submit").click()
def logout(self):
driver = self.driver
driver.find_element_by_link_text("Log out").click()
driver.switch_to_alert().accept()
def is_element_present(driver, how, what):
try:
driver.find_element(by=how, value=what)
except NoSuchElementException, e:
return False
return True
|
<commit_before>Class Application(object):
def __init__(sefl, driver):
self.driver = driver
def go_to_homepage(self):
self.driver.get("http://hub.wart.ru/php4dvd/")
def login(self, user):
driver = self.driver
driver.find_element_by_id("username").clear()
driver.find_element_by_id("username").send_keys(user.username)
driver.find_element_by_id("password").clear()
driver.find_element_by_id("password").send_keys(user.password)
driver.find_element_by_id("submit").click()
def logout(self):
driver = self.driver
driver.find_element_by_link_text("Log out").click()
driver.switch_to_alert().accept()
def is_element_present(driver, how, what):
try:
driver.find_element(by=how, value=what)
except NoSuchElementException, e:
return False
return True
<commit_msg>Set wait variable to 10 seconds.<commit_after>
|
Class Application(object):
def __init__(sefl, driver):
self.driver = driver
self.wait = WebDriverWait(driver, 10)
def go_to_homepage(self):
self.driver.get("http://hub.wart.ru/php4dvd/")
def login(self, user):
driver = self.driver
driver.find_element_by_id("username").clear()
driver.find_element_by_id("username").send_keys(user.username)
driver.find_element_by_id("password").clear()
driver.find_element_by_id("password").send_keys(user.password)
driver.find_element_by_id("submit").click()
def logout(self):
driver = self.driver
driver.find_element_by_link_text("Log out").click()
driver.switch_to_alert().accept()
def is_element_present(driver, how, what):
try:
driver.find_element(by=how, value=what)
except NoSuchElementException, e:
return False
return True
|
Class Application(object):
def __init__(sefl, driver):
self.driver = driver
def go_to_homepage(self):
self.driver.get("http://hub.wart.ru/php4dvd/")
def login(self, user):
driver = self.driver
driver.find_element_by_id("username").clear()
driver.find_element_by_id("username").send_keys(user.username)
driver.find_element_by_id("password").clear()
driver.find_element_by_id("password").send_keys(user.password)
driver.find_element_by_id("submit").click()
def logout(self):
driver = self.driver
driver.find_element_by_link_text("Log out").click()
driver.switch_to_alert().accept()
def is_element_present(driver, how, what):
try:
driver.find_element(by=how, value=what)
except NoSuchElementException, e:
return False
return True
Set wait variable to 10 seconds.Class Application(object):
def __init__(sefl, driver):
self.driver = driver
self.wait = WebDriverWait(driver, 10)
def go_to_homepage(self):
self.driver.get("http://hub.wart.ru/php4dvd/")
def login(self, user):
driver = self.driver
driver.find_element_by_id("username").clear()
driver.find_element_by_id("username").send_keys(user.username)
driver.find_element_by_id("password").clear()
driver.find_element_by_id("password").send_keys(user.password)
driver.find_element_by_id("submit").click()
def logout(self):
driver = self.driver
driver.find_element_by_link_text("Log out").click()
driver.switch_to_alert().accept()
def is_element_present(driver, how, what):
try:
driver.find_element(by=how, value=what)
except NoSuchElementException, e:
return False
return True
|
<commit_before>Class Application(object):
def __init__(sefl, driver):
self.driver = driver
def go_to_homepage(self):
self.driver.get("http://hub.wart.ru/php4dvd/")
def login(self, user):
driver = self.driver
driver.find_element_by_id("username").clear()
driver.find_element_by_id("username").send_keys(user.username)
driver.find_element_by_id("password").clear()
driver.find_element_by_id("password").send_keys(user.password)
driver.find_element_by_id("submit").click()
def logout(self):
driver = self.driver
driver.find_element_by_link_text("Log out").click()
driver.switch_to_alert().accept()
def is_element_present(driver, how, what):
try:
driver.find_element(by=how, value=what)
except NoSuchElementException, e:
return False
return True
<commit_msg>Set wait variable to 10 seconds.<commit_after>Class Application(object):
def __init__(sefl, driver):
self.driver = driver
self.wait = WebDriverWait(driver, 10)
def go_to_homepage(self):
self.driver.get("http://hub.wart.ru/php4dvd/")
def login(self, user):
driver = self.driver
driver.find_element_by_id("username").clear()
driver.find_element_by_id("username").send_keys(user.username)
driver.find_element_by_id("password").clear()
driver.find_element_by_id("password").send_keys(user.password)
driver.find_element_by_id("submit").click()
def logout(self):
driver = self.driver
driver.find_element_by_link_text("Log out").click()
driver.switch_to_alert().accept()
def is_element_present(driver, how, what):
try:
driver.find_element(by=how, value=what)
except NoSuchElementException, e:
return False
return True
|
aed8f8cde3664389153e9fdf27dbe69cba000e8e
|
wightinvoices/clients/views.py
|
wightinvoices/clients/views.py
|
from django.contrib.auth.decorators import login_required
from django.utils.decorators import method_decorator
from django.views import generic
from django.core.urlresolvers import reverse
from . import models, forms
class ClientMixin(object):
"""
A mixin that describes Client model.
"""
model = models.Client
pk_url_kwarg = 'invoice_id'
form_class = forms.Client
def get_success_url(self):
return reverse('client-detail', args=[self.object.id])
def get_queryset(self):
return super(ClientMixin, self).get_queryset()
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super(ClientMixin, self).dispatch(*args, **kwargs)
def get_context_data(self, **kwargs):
kwargs = super(ClientMixin, self).get_context_data(**kwargs)
kwargs['activemenu'] = 'client'
return kwargs
class ClientList(ClientMixin, generic.ListView):
pass
class ClientCreation(ClientMixin, generic.CreateView):
pass
class ClientUpdate(ClientMixin, generic.UpdateView):
pass
class ClientDetail(ClientMixin, generic.DetailView):
pass
|
from django.contrib.auth.decorators import login_required
from django.utils.decorators import method_decorator
from django.views import generic
from django.core.urlresolvers import reverse
from . import models, forms
class ClientMixin(object):
"""
A mixin that describes Client model.
"""
model = models.Client
pk_url_kwarg = 'client_id'
form_class = forms.Client
def get_success_url(self):
return reverse('client-detail', args=[self.object.id])
def get_queryset(self):
return super(ClientMixin, self).get_queryset()
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super(ClientMixin, self).dispatch(*args, **kwargs)
def get_context_data(self, **kwargs):
kwargs = super(ClientMixin, self).get_context_data(**kwargs)
kwargs['activemenu'] = 'client'
return kwargs
class ClientList(ClientMixin, generic.ListView):
pass
class ClientCreation(ClientMixin, generic.CreateView):
pass
class ClientUpdate(ClientMixin, generic.UpdateView):
pass
class ClientDetail(ClientMixin, generic.DetailView):
pass
|
Use client_id as url identifier.
|
Use client_id as url identifier.
|
Python
|
mit
|
linovia/wight-invoices
|
from django.contrib.auth.decorators import login_required
from django.utils.decorators import method_decorator
from django.views import generic
from django.core.urlresolvers import reverse
from . import models, forms
class ClientMixin(object):
"""
A mixin that describes Client model.
"""
model = models.Client
pk_url_kwarg = 'invoice_id'
form_class = forms.Client
def get_success_url(self):
return reverse('client-detail', args=[self.object.id])
def get_queryset(self):
return super(ClientMixin, self).get_queryset()
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super(ClientMixin, self).dispatch(*args, **kwargs)
def get_context_data(self, **kwargs):
kwargs = super(ClientMixin, self).get_context_data(**kwargs)
kwargs['activemenu'] = 'client'
return kwargs
class ClientList(ClientMixin, generic.ListView):
pass
class ClientCreation(ClientMixin, generic.CreateView):
pass
class ClientUpdate(ClientMixin, generic.UpdateView):
pass
class ClientDetail(ClientMixin, generic.DetailView):
pass
Use client_id as url identifier.
|
from django.contrib.auth.decorators import login_required
from django.utils.decorators import method_decorator
from django.views import generic
from django.core.urlresolvers import reverse
from . import models, forms
class ClientMixin(object):
"""
A mixin that describes Client model.
"""
model = models.Client
pk_url_kwarg = 'client_id'
form_class = forms.Client
def get_success_url(self):
return reverse('client-detail', args=[self.object.id])
def get_queryset(self):
return super(ClientMixin, self).get_queryset()
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super(ClientMixin, self).dispatch(*args, **kwargs)
def get_context_data(self, **kwargs):
kwargs = super(ClientMixin, self).get_context_data(**kwargs)
kwargs['activemenu'] = 'client'
return kwargs
class ClientList(ClientMixin, generic.ListView):
pass
class ClientCreation(ClientMixin, generic.CreateView):
pass
class ClientUpdate(ClientMixin, generic.UpdateView):
pass
class ClientDetail(ClientMixin, generic.DetailView):
pass
|
<commit_before>from django.contrib.auth.decorators import login_required
from django.utils.decorators import method_decorator
from django.views import generic
from django.core.urlresolvers import reverse
from . import models, forms
class ClientMixin(object):
"""
A mixin that describes Client model.
"""
model = models.Client
pk_url_kwarg = 'invoice_id'
form_class = forms.Client
def get_success_url(self):
return reverse('client-detail', args=[self.object.id])
def get_queryset(self):
return super(ClientMixin, self).get_queryset()
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super(ClientMixin, self).dispatch(*args, **kwargs)
def get_context_data(self, **kwargs):
kwargs = super(ClientMixin, self).get_context_data(**kwargs)
kwargs['activemenu'] = 'client'
return kwargs
class ClientList(ClientMixin, generic.ListView):
pass
class ClientCreation(ClientMixin, generic.CreateView):
pass
class ClientUpdate(ClientMixin, generic.UpdateView):
pass
class ClientDetail(ClientMixin, generic.DetailView):
pass
<commit_msg>Use client_id as url identifier.<commit_after>
|
from django.contrib.auth.decorators import login_required
from django.utils.decorators import method_decorator
from django.views import generic
from django.core.urlresolvers import reverse
from . import models, forms
class ClientMixin(object):
"""
A mixin that describes Client model.
"""
model = models.Client
pk_url_kwarg = 'client_id'
form_class = forms.Client
def get_success_url(self):
return reverse('client-detail', args=[self.object.id])
def get_queryset(self):
return super(ClientMixin, self).get_queryset()
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super(ClientMixin, self).dispatch(*args, **kwargs)
def get_context_data(self, **kwargs):
kwargs = super(ClientMixin, self).get_context_data(**kwargs)
kwargs['activemenu'] = 'client'
return kwargs
class ClientList(ClientMixin, generic.ListView):
pass
class ClientCreation(ClientMixin, generic.CreateView):
pass
class ClientUpdate(ClientMixin, generic.UpdateView):
pass
class ClientDetail(ClientMixin, generic.DetailView):
pass
|
from django.contrib.auth.decorators import login_required
from django.utils.decorators import method_decorator
from django.views import generic
from django.core.urlresolvers import reverse
from . import models, forms
class ClientMixin(object):
"""
A mixin that describes Client model.
"""
model = models.Client
pk_url_kwarg = 'invoice_id'
form_class = forms.Client
def get_success_url(self):
return reverse('client-detail', args=[self.object.id])
def get_queryset(self):
return super(ClientMixin, self).get_queryset()
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super(ClientMixin, self).dispatch(*args, **kwargs)
def get_context_data(self, **kwargs):
kwargs = super(ClientMixin, self).get_context_data(**kwargs)
kwargs['activemenu'] = 'client'
return kwargs
class ClientList(ClientMixin, generic.ListView):
pass
class ClientCreation(ClientMixin, generic.CreateView):
pass
class ClientUpdate(ClientMixin, generic.UpdateView):
pass
class ClientDetail(ClientMixin, generic.DetailView):
pass
Use client_id as url identifier.from django.contrib.auth.decorators import login_required
from django.utils.decorators import method_decorator
from django.views import generic
from django.core.urlresolvers import reverse
from . import models, forms
class ClientMixin(object):
"""
A mixin that describes Client model.
"""
model = models.Client
pk_url_kwarg = 'client_id'
form_class = forms.Client
def get_success_url(self):
return reverse('client-detail', args=[self.object.id])
def get_queryset(self):
return super(ClientMixin, self).get_queryset()
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super(ClientMixin, self).dispatch(*args, **kwargs)
def get_context_data(self, **kwargs):
kwargs = super(ClientMixin, self).get_context_data(**kwargs)
kwargs['activemenu'] = 'client'
return kwargs
class ClientList(ClientMixin, generic.ListView):
pass
class ClientCreation(ClientMixin, generic.CreateView):
pass
class ClientUpdate(ClientMixin, generic.UpdateView):
pass
class ClientDetail(ClientMixin, generic.DetailView):
pass
|
<commit_before>from django.contrib.auth.decorators import login_required
from django.utils.decorators import method_decorator
from django.views import generic
from django.core.urlresolvers import reverse
from . import models, forms
class ClientMixin(object):
"""
A mixin that describes Client model.
"""
model = models.Client
pk_url_kwarg = 'invoice_id'
form_class = forms.Client
def get_success_url(self):
return reverse('client-detail', args=[self.object.id])
def get_queryset(self):
return super(ClientMixin, self).get_queryset()
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super(ClientMixin, self).dispatch(*args, **kwargs)
def get_context_data(self, **kwargs):
kwargs = super(ClientMixin, self).get_context_data(**kwargs)
kwargs['activemenu'] = 'client'
return kwargs
class ClientList(ClientMixin, generic.ListView):
pass
class ClientCreation(ClientMixin, generic.CreateView):
pass
class ClientUpdate(ClientMixin, generic.UpdateView):
pass
class ClientDetail(ClientMixin, generic.DetailView):
pass
<commit_msg>Use client_id as url identifier.<commit_after>from django.contrib.auth.decorators import login_required
from django.utils.decorators import method_decorator
from django.views import generic
from django.core.urlresolvers import reverse
from . import models, forms
class ClientMixin(object):
"""
A mixin that describes Client model.
"""
model = models.Client
pk_url_kwarg = 'client_id'
form_class = forms.Client
def get_success_url(self):
return reverse('client-detail', args=[self.object.id])
def get_queryset(self):
return super(ClientMixin, self).get_queryset()
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super(ClientMixin, self).dispatch(*args, **kwargs)
def get_context_data(self, **kwargs):
kwargs = super(ClientMixin, self).get_context_data(**kwargs)
kwargs['activemenu'] = 'client'
return kwargs
class ClientList(ClientMixin, generic.ListView):
pass
class ClientCreation(ClientMixin, generic.CreateView):
pass
class ClientUpdate(ClientMixin, generic.UpdateView):
pass
class ClientDetail(ClientMixin, generic.DetailView):
pass
|
69890f36b1853b3845ff29ec15ccde11f7ac86f2
|
zerver/migrations/0306_custom_profile_field_date_format.py
|
zerver/migrations/0306_custom_profile_field_date_format.py
|
from django.db import migrations
class Migration(migrations.Migration):
"""
We previously accepted invalid ISO 8601 dates like 1909-3-5 for
date values of custom profile fields. Correct them by adding the
missing leading zeros: 1909-03-05.
"""
dependencies = [
("zerver", "0305_realm_deactivated_redirect"),
]
operations = [
migrations.RunSQL(
sql="""\
UPDATE zerver_customprofilefieldvalue
SET value = to_char(to_date(value, 'YYYY-MM-DD'), 'YYYY-MM-DD')
FROM zerver_customprofilefield AS f
WHERE f.id = field_id
AND f.field_type = 4
AND value <> to_char(to_date(value, 'YYYY-MM-DD'), 'YYYY-MM-DD');
""",
reverse_sql="",
),
]
|
from django.db import migrations
class Migration(migrations.Migration):
"""
We previously accepted invalid ISO 8601 dates like 1909-3-5 for
date values of custom profile fields. Correct them by adding the
missing leading zeros: 1909-03-05.
"""
dependencies = [
("zerver", "0305_realm_deactivated_redirect"),
]
operations = [
migrations.RunSQL(
sql="""\
UPDATE zerver_customprofilefieldvalue
SET value = to_char(to_date(value, 'YYYY-MM-DD'), 'YYYY-MM-DD')
FROM zerver_customprofilefield AS f
WHERE f.id = field_id
AND f.field_type = 4
AND CASE
WHEN f.field_type = 4
THEN value <> to_char(to_date(value, 'YYYY-MM-DD'), 'YYYY-MM-DD')
END;
""",
reverse_sql="",
),
]
|
Enforce evaluation order in 0306 WHERE clause.
|
migrations: Enforce evaluation order in 0306 WHERE clause.
Depending on PostgreSQL’s query plan, it was possible for the value
condition to be evaluated before the field_type condition was checked,
leading to errors like
psycopg2.errors.InvalidDatetimeFormat: invalid value "stri" for "YYYY"
DETAIL: Value must be an integer.
Signed-off-by: Anders Kaseorg <dfdb7392591db597bc41cf266a9c3bc12a2706e5@zulip.com>
|
Python
|
apache-2.0
|
zulip/zulip,eeshangarg/zulip,hackerkid/zulip,andersk/zulip,kou/zulip,rht/zulip,kou/zulip,rht/zulip,andersk/zulip,punchagan/zulip,hackerkid/zulip,kou/zulip,andersk/zulip,hackerkid/zulip,punchagan/zulip,punchagan/zulip,kou/zulip,andersk/zulip,punchagan/zulip,rht/zulip,hackerkid/zulip,andersk/zulip,rht/zulip,rht/zulip,eeshangarg/zulip,zulip/zulip,zulip/zulip,hackerkid/zulip,punchagan/zulip,eeshangarg/zulip,zulip/zulip,eeshangarg/zulip,kou/zulip,kou/zulip,zulip/zulip,rht/zulip,zulip/zulip,andersk/zulip,hackerkid/zulip,zulip/zulip,punchagan/zulip,eeshangarg/zulip,rht/zulip,kou/zulip,eeshangarg/zulip,eeshangarg/zulip,andersk/zulip,punchagan/zulip,hackerkid/zulip
|
from django.db import migrations
class Migration(migrations.Migration):
"""
We previously accepted invalid ISO 8601 dates like 1909-3-5 for
date values of custom profile fields. Correct them by adding the
missing leading zeros: 1909-03-05.
"""
dependencies = [
("zerver", "0305_realm_deactivated_redirect"),
]
operations = [
migrations.RunSQL(
sql="""\
UPDATE zerver_customprofilefieldvalue
SET value = to_char(to_date(value, 'YYYY-MM-DD'), 'YYYY-MM-DD')
FROM zerver_customprofilefield AS f
WHERE f.id = field_id
AND f.field_type = 4
AND value <> to_char(to_date(value, 'YYYY-MM-DD'), 'YYYY-MM-DD');
""",
reverse_sql="",
),
]
migrations: Enforce evaluation order in 0306 WHERE clause.
Depending on PostgreSQL’s query plan, it was possible for the value
condition to be evaluated before the field_type condition was checked,
leading to errors like
psycopg2.errors.InvalidDatetimeFormat: invalid value "stri" for "YYYY"
DETAIL: Value must be an integer.
Signed-off-by: Anders Kaseorg <dfdb7392591db597bc41cf266a9c3bc12a2706e5@zulip.com>
|
from django.db import migrations
class Migration(migrations.Migration):
"""
We previously accepted invalid ISO 8601 dates like 1909-3-5 for
date values of custom profile fields. Correct them by adding the
missing leading zeros: 1909-03-05.
"""
dependencies = [
("zerver", "0305_realm_deactivated_redirect"),
]
operations = [
migrations.RunSQL(
sql="""\
UPDATE zerver_customprofilefieldvalue
SET value = to_char(to_date(value, 'YYYY-MM-DD'), 'YYYY-MM-DD')
FROM zerver_customprofilefield AS f
WHERE f.id = field_id
AND f.field_type = 4
AND CASE
WHEN f.field_type = 4
THEN value <> to_char(to_date(value, 'YYYY-MM-DD'), 'YYYY-MM-DD')
END;
""",
reverse_sql="",
),
]
|
<commit_before>from django.db import migrations
class Migration(migrations.Migration):
"""
We previously accepted invalid ISO 8601 dates like 1909-3-5 for
date values of custom profile fields. Correct them by adding the
missing leading zeros: 1909-03-05.
"""
dependencies = [
("zerver", "0305_realm_deactivated_redirect"),
]
operations = [
migrations.RunSQL(
sql="""\
UPDATE zerver_customprofilefieldvalue
SET value = to_char(to_date(value, 'YYYY-MM-DD'), 'YYYY-MM-DD')
FROM zerver_customprofilefield AS f
WHERE f.id = field_id
AND f.field_type = 4
AND value <> to_char(to_date(value, 'YYYY-MM-DD'), 'YYYY-MM-DD');
""",
reverse_sql="",
),
]
<commit_msg>migrations: Enforce evaluation order in 0306 WHERE clause.
Depending on PostgreSQL’s query plan, it was possible for the value
condition to be evaluated before the field_type condition was checked,
leading to errors like
psycopg2.errors.InvalidDatetimeFormat: invalid value "stri" for "YYYY"
DETAIL: Value must be an integer.
Signed-off-by: Anders Kaseorg <dfdb7392591db597bc41cf266a9c3bc12a2706e5@zulip.com><commit_after>
|
from django.db import migrations
class Migration(migrations.Migration):
"""
We previously accepted invalid ISO 8601 dates like 1909-3-5 for
date values of custom profile fields. Correct them by adding the
missing leading zeros: 1909-03-05.
"""
dependencies = [
("zerver", "0305_realm_deactivated_redirect"),
]
operations = [
migrations.RunSQL(
sql="""\
UPDATE zerver_customprofilefieldvalue
SET value = to_char(to_date(value, 'YYYY-MM-DD'), 'YYYY-MM-DD')
FROM zerver_customprofilefield AS f
WHERE f.id = field_id
AND f.field_type = 4
AND CASE
WHEN f.field_type = 4
THEN value <> to_char(to_date(value, 'YYYY-MM-DD'), 'YYYY-MM-DD')
END;
""",
reverse_sql="",
),
]
|
from django.db import migrations
class Migration(migrations.Migration):
"""
We previously accepted invalid ISO 8601 dates like 1909-3-5 for
date values of custom profile fields. Correct them by adding the
missing leading zeros: 1909-03-05.
"""
dependencies = [
("zerver", "0305_realm_deactivated_redirect"),
]
operations = [
migrations.RunSQL(
sql="""\
UPDATE zerver_customprofilefieldvalue
SET value = to_char(to_date(value, 'YYYY-MM-DD'), 'YYYY-MM-DD')
FROM zerver_customprofilefield AS f
WHERE f.id = field_id
AND f.field_type = 4
AND value <> to_char(to_date(value, 'YYYY-MM-DD'), 'YYYY-MM-DD');
""",
reverse_sql="",
),
]
migrations: Enforce evaluation order in 0306 WHERE clause.
Depending on PostgreSQL’s query plan, it was possible for the value
condition to be evaluated before the field_type condition was checked,
leading to errors like
psycopg2.errors.InvalidDatetimeFormat: invalid value "stri" for "YYYY"
DETAIL: Value must be an integer.
Signed-off-by: Anders Kaseorg <dfdb7392591db597bc41cf266a9c3bc12a2706e5@zulip.com>from django.db import migrations
class Migration(migrations.Migration):
"""
We previously accepted invalid ISO 8601 dates like 1909-3-5 for
date values of custom profile fields. Correct them by adding the
missing leading zeros: 1909-03-05.
"""
dependencies = [
("zerver", "0305_realm_deactivated_redirect"),
]
operations = [
migrations.RunSQL(
sql="""\
UPDATE zerver_customprofilefieldvalue
SET value = to_char(to_date(value, 'YYYY-MM-DD'), 'YYYY-MM-DD')
FROM zerver_customprofilefield AS f
WHERE f.id = field_id
AND f.field_type = 4
AND CASE
WHEN f.field_type = 4
THEN value <> to_char(to_date(value, 'YYYY-MM-DD'), 'YYYY-MM-DD')
END;
""",
reverse_sql="",
),
]
|
<commit_before>from django.db import migrations
class Migration(migrations.Migration):
"""
We previously accepted invalid ISO 8601 dates like 1909-3-5 for
date values of custom profile fields. Correct them by adding the
missing leading zeros: 1909-03-05.
"""
dependencies = [
("zerver", "0305_realm_deactivated_redirect"),
]
operations = [
migrations.RunSQL(
sql="""\
UPDATE zerver_customprofilefieldvalue
SET value = to_char(to_date(value, 'YYYY-MM-DD'), 'YYYY-MM-DD')
FROM zerver_customprofilefield AS f
WHERE f.id = field_id
AND f.field_type = 4
AND value <> to_char(to_date(value, 'YYYY-MM-DD'), 'YYYY-MM-DD');
""",
reverse_sql="",
),
]
<commit_msg>migrations: Enforce evaluation order in 0306 WHERE clause.
Depending on PostgreSQL’s query plan, it was possible for the value
condition to be evaluated before the field_type condition was checked,
leading to errors like
psycopg2.errors.InvalidDatetimeFormat: invalid value "stri" for "YYYY"
DETAIL: Value must be an integer.
Signed-off-by: Anders Kaseorg <dfdb7392591db597bc41cf266a9c3bc12a2706e5@zulip.com><commit_after>from django.db import migrations
class Migration(migrations.Migration):
"""
We previously accepted invalid ISO 8601 dates like 1909-3-5 for
date values of custom profile fields. Correct them by adding the
missing leading zeros: 1909-03-05.
"""
dependencies = [
("zerver", "0305_realm_deactivated_redirect"),
]
operations = [
migrations.RunSQL(
sql="""\
UPDATE zerver_customprofilefieldvalue
SET value = to_char(to_date(value, 'YYYY-MM-DD'), 'YYYY-MM-DD')
FROM zerver_customprofilefield AS f
WHERE f.id = field_id
AND f.field_type = 4
AND CASE
WHEN f.field_type = 4
THEN value <> to_char(to_date(value, 'YYYY-MM-DD'), 'YYYY-MM-DD')
END;
""",
reverse_sql="",
),
]
|
72a9dd0f0cff3fc6dcc97a4068b82e4b13bbc127
|
accounts/management/__init__.py
|
accounts/management/__init__.py
|
from django.db.models.signals import post_syncdb
from django.conf import settings
from accounts import models
def ensure_core_accounts_exists(sender, **kwargs):
create_source_account()
create_sales_account()
create_expired_account()
def create_sales_account():
name = getattr(settings, 'ACCOUNTS_SALES_NAME')
__, created = models.Account.objects.get_or_create(name=name)
if created:
print "Created sales account '%s'" % name
def create_expired_account():
name = getattr(settings, 'ACCOUNTS_EXPIRED_NAME')
__, created = models.Account.objects.get_or_create(name=name)
if created:
print "Created expired account '%s'" % name
def create_source_account():
# Create a source account if one does not exist
if not hasattr(settings, 'ACCOUNTS_SOURCE_NAME'):
return
# We only create the source account if there are no accounts already
# created.
if models.Account.objects.all().count() > 0:
return
name = getattr(settings, 'ACCOUNTS_SOURCE_NAME')
__, created = models.Account.objects.get_or_create(name=name,
credit_limit=None)
if created:
print "Created source account '%s'" % name
post_syncdb.connect(ensure_core_accounts_exists, sender=models)
|
from django.db.models.signals import post_syncdb
from django.conf import settings
from accounts import models
def ensure_core_accounts_exists(sender, **kwargs):
create_source_account()
create_sales_account()
create_expired_account()
def create_sales_account():
name = getattr(settings, 'ACCOUNTS_SALES_NAME')
models.Account.objects.get_or_create(name=name)
def create_expired_account():
name = getattr(settings, 'ACCOUNTS_EXPIRED_NAME')
models.Account.objects.get_or_create(name=name)
def create_source_account():
# Create a source account if one does not exist
if not hasattr(settings, 'ACCOUNTS_SOURCE_NAME'):
return
# We only create the source account if there are no accounts already
# created.
if models.Account.objects.all().count() > 0:
return
name = getattr(settings, 'ACCOUNTS_SOURCE_NAME')
models.Account.objects.get_or_create(name=name, credit_limit=None)
post_syncdb.connect(ensure_core_accounts_exists, sender=models)
|
Remove print statements for syncdb receivers
|
Remove print statements for syncdb receivers
|
Python
|
bsd-3-clause
|
django-oscar/django-oscar-accounts,michaelkuty/django-oscar-accounts,Mariana-Tek/django-oscar-accounts,amsys/django-account-balances,carver/django-account-balances,Jannes123/django-oscar-accounts,machtfit/django-oscar-accounts,michaelkuty/django-oscar-accounts,amsys/django-account-balances,django-oscar/django-oscar-accounts,Mariana-Tek/django-oscar-accounts,machtfit/django-oscar-accounts,Jannes123/django-oscar-accounts
|
from django.db.models.signals import post_syncdb
from django.conf import settings
from accounts import models
def ensure_core_accounts_exists(sender, **kwargs):
create_source_account()
create_sales_account()
create_expired_account()
def create_sales_account():
name = getattr(settings, 'ACCOUNTS_SALES_NAME')
__, created = models.Account.objects.get_or_create(name=name)
if created:
print "Created sales account '%s'" % name
def create_expired_account():
name = getattr(settings, 'ACCOUNTS_EXPIRED_NAME')
__, created = models.Account.objects.get_or_create(name=name)
if created:
print "Created expired account '%s'" % name
def create_source_account():
# Create a source account if one does not exist
if not hasattr(settings, 'ACCOUNTS_SOURCE_NAME'):
return
# We only create the source account if there are no accounts already
# created.
if models.Account.objects.all().count() > 0:
return
name = getattr(settings, 'ACCOUNTS_SOURCE_NAME')
__, created = models.Account.objects.get_or_create(name=name,
credit_limit=None)
if created:
print "Created source account '%s'" % name
post_syncdb.connect(ensure_core_accounts_exists, sender=models)
Remove print statements for syncdb receivers
|
from django.db.models.signals import post_syncdb
from django.conf import settings
from accounts import models
def ensure_core_accounts_exists(sender, **kwargs):
create_source_account()
create_sales_account()
create_expired_account()
def create_sales_account():
name = getattr(settings, 'ACCOUNTS_SALES_NAME')
models.Account.objects.get_or_create(name=name)
def create_expired_account():
name = getattr(settings, 'ACCOUNTS_EXPIRED_NAME')
models.Account.objects.get_or_create(name=name)
def create_source_account():
# Create a source account if one does not exist
if not hasattr(settings, 'ACCOUNTS_SOURCE_NAME'):
return
# We only create the source account if there are no accounts already
# created.
if models.Account.objects.all().count() > 0:
return
name = getattr(settings, 'ACCOUNTS_SOURCE_NAME')
models.Account.objects.get_or_create(name=name, credit_limit=None)
post_syncdb.connect(ensure_core_accounts_exists, sender=models)
|
<commit_before>from django.db.models.signals import post_syncdb
from django.conf import settings
from accounts import models
def ensure_core_accounts_exists(sender, **kwargs):
create_source_account()
create_sales_account()
create_expired_account()
def create_sales_account():
name = getattr(settings, 'ACCOUNTS_SALES_NAME')
__, created = models.Account.objects.get_or_create(name=name)
if created:
print "Created sales account '%s'" % name
def create_expired_account():
name = getattr(settings, 'ACCOUNTS_EXPIRED_NAME')
__, created = models.Account.objects.get_or_create(name=name)
if created:
print "Created expired account '%s'" % name
def create_source_account():
# Create a source account if one does not exist
if not hasattr(settings, 'ACCOUNTS_SOURCE_NAME'):
return
# We only create the source account if there are no accounts already
# created.
if models.Account.objects.all().count() > 0:
return
name = getattr(settings, 'ACCOUNTS_SOURCE_NAME')
__, created = models.Account.objects.get_or_create(name=name,
credit_limit=None)
if created:
print "Created source account '%s'" % name
post_syncdb.connect(ensure_core_accounts_exists, sender=models)
<commit_msg>Remove print statements for syncdb receivers<commit_after>
|
from django.db.models.signals import post_syncdb
from django.conf import settings
from accounts import models
def ensure_core_accounts_exists(sender, **kwargs):
create_source_account()
create_sales_account()
create_expired_account()
def create_sales_account():
name = getattr(settings, 'ACCOUNTS_SALES_NAME')
models.Account.objects.get_or_create(name=name)
def create_expired_account():
name = getattr(settings, 'ACCOUNTS_EXPIRED_NAME')
models.Account.objects.get_or_create(name=name)
def create_source_account():
# Create a source account if one does not exist
if not hasattr(settings, 'ACCOUNTS_SOURCE_NAME'):
return
# We only create the source account if there are no accounts already
# created.
if models.Account.objects.all().count() > 0:
return
name = getattr(settings, 'ACCOUNTS_SOURCE_NAME')
models.Account.objects.get_or_create(name=name, credit_limit=None)
post_syncdb.connect(ensure_core_accounts_exists, sender=models)
|
from django.db.models.signals import post_syncdb
from django.conf import settings
from accounts import models
def ensure_core_accounts_exists(sender, **kwargs):
create_source_account()
create_sales_account()
create_expired_account()
def create_sales_account():
name = getattr(settings, 'ACCOUNTS_SALES_NAME')
__, created = models.Account.objects.get_or_create(name=name)
if created:
print "Created sales account '%s'" % name
def create_expired_account():
name = getattr(settings, 'ACCOUNTS_EXPIRED_NAME')
__, created = models.Account.objects.get_or_create(name=name)
if created:
print "Created expired account '%s'" % name
def create_source_account():
# Create a source account if one does not exist
if not hasattr(settings, 'ACCOUNTS_SOURCE_NAME'):
return
# We only create the source account if there are no accounts already
# created.
if models.Account.objects.all().count() > 0:
return
name = getattr(settings, 'ACCOUNTS_SOURCE_NAME')
__, created = models.Account.objects.get_or_create(name=name,
credit_limit=None)
if created:
print "Created source account '%s'" % name
post_syncdb.connect(ensure_core_accounts_exists, sender=models)
Remove print statements for syncdb receiversfrom django.db.models.signals import post_syncdb
from django.conf import settings
from accounts import models
def ensure_core_accounts_exists(sender, **kwargs):
create_source_account()
create_sales_account()
create_expired_account()
def create_sales_account():
name = getattr(settings, 'ACCOUNTS_SALES_NAME')
models.Account.objects.get_or_create(name=name)
def create_expired_account():
name = getattr(settings, 'ACCOUNTS_EXPIRED_NAME')
models.Account.objects.get_or_create(name=name)
def create_source_account():
# Create a source account if one does not exist
if not hasattr(settings, 'ACCOUNTS_SOURCE_NAME'):
return
# We only create the source account if there are no accounts already
# created.
if models.Account.objects.all().count() > 0:
return
name = getattr(settings, 'ACCOUNTS_SOURCE_NAME')
models.Account.objects.get_or_create(name=name, credit_limit=None)
post_syncdb.connect(ensure_core_accounts_exists, sender=models)
|
<commit_before>from django.db.models.signals import post_syncdb
from django.conf import settings
from accounts import models
def ensure_core_accounts_exists(sender, **kwargs):
create_source_account()
create_sales_account()
create_expired_account()
def create_sales_account():
name = getattr(settings, 'ACCOUNTS_SALES_NAME')
__, created = models.Account.objects.get_or_create(name=name)
if created:
print "Created sales account '%s'" % name
def create_expired_account():
name = getattr(settings, 'ACCOUNTS_EXPIRED_NAME')
__, created = models.Account.objects.get_or_create(name=name)
if created:
print "Created expired account '%s'" % name
def create_source_account():
# Create a source account if one does not exist
if not hasattr(settings, 'ACCOUNTS_SOURCE_NAME'):
return
# We only create the source account if there are no accounts already
# created.
if models.Account.objects.all().count() > 0:
return
name = getattr(settings, 'ACCOUNTS_SOURCE_NAME')
__, created = models.Account.objects.get_or_create(name=name,
credit_limit=None)
if created:
print "Created source account '%s'" % name
post_syncdb.connect(ensure_core_accounts_exists, sender=models)
<commit_msg>Remove print statements for syncdb receivers<commit_after>from django.db.models.signals import post_syncdb
from django.conf import settings
from accounts import models
def ensure_core_accounts_exists(sender, **kwargs):
create_source_account()
create_sales_account()
create_expired_account()
def create_sales_account():
name = getattr(settings, 'ACCOUNTS_SALES_NAME')
models.Account.objects.get_or_create(name=name)
def create_expired_account():
name = getattr(settings, 'ACCOUNTS_EXPIRED_NAME')
models.Account.objects.get_or_create(name=name)
def create_source_account():
# Create a source account if one does not exist
if not hasattr(settings, 'ACCOUNTS_SOURCE_NAME'):
return
# We only create the source account if there are no accounts already
# created.
if models.Account.objects.all().count() > 0:
return
name = getattr(settings, 'ACCOUNTS_SOURCE_NAME')
models.Account.objects.get_or_create(name=name, credit_limit=None)
post_syncdb.connect(ensure_core_accounts_exists, sender=models)
|
c29b82ee2ce1d16d60fcd4715d2836c7a6f2d241
|
app/__init__.py
|
app/__init__.py
|
from flask import Flask
app = Flask(__name__)
from app import views
|
from flask import Flask
app = Flask(__name__)
from app import views
# Flask configuration
app.config.from_object(__name__)
app.config.update(dict(
MYSQL_HOST='localhost',
MYSQL_USER='root',
MYSQL_PASSWORD='123456',
MYSQL_DB='flask_todo_app',
MYSQL_CURSORCLASS='DictCursor',
SECRET_KEY='development key'
))
|
Set the MySQL related configuration.
|
Set the MySQL related configuration.
|
Python
|
mit
|
alchermd/flask-todo-app,alchermd/flask-todo-app
|
from flask import Flask
app = Flask(__name__)
from app import viewsSet the MySQL related configuration.
|
from flask import Flask
app = Flask(__name__)
from app import views
# Flask configuration
app.config.from_object(__name__)
app.config.update(dict(
MYSQL_HOST='localhost',
MYSQL_USER='root',
MYSQL_PASSWORD='123456',
MYSQL_DB='flask_todo_app',
MYSQL_CURSORCLASS='DictCursor',
SECRET_KEY='development key'
))
|
<commit_before>from flask import Flask
app = Flask(__name__)
from app import views<commit_msg>Set the MySQL related configuration.<commit_after>
|
from flask import Flask
app = Flask(__name__)
from app import views
# Flask configuration
app.config.from_object(__name__)
app.config.update(dict(
MYSQL_HOST='localhost',
MYSQL_USER='root',
MYSQL_PASSWORD='123456',
MYSQL_DB='flask_todo_app',
MYSQL_CURSORCLASS='DictCursor',
SECRET_KEY='development key'
))
|
from flask import Flask
app = Flask(__name__)
from app import viewsSet the MySQL related configuration.from flask import Flask
app = Flask(__name__)
from app import views
# Flask configuration
app.config.from_object(__name__)
app.config.update(dict(
MYSQL_HOST='localhost',
MYSQL_USER='root',
MYSQL_PASSWORD='123456',
MYSQL_DB='flask_todo_app',
MYSQL_CURSORCLASS='DictCursor',
SECRET_KEY='development key'
))
|
<commit_before>from flask import Flask
app = Flask(__name__)
from app import views<commit_msg>Set the MySQL related configuration.<commit_after>from flask import Flask
app = Flask(__name__)
from app import views
# Flask configuration
app.config.from_object(__name__)
app.config.update(dict(
MYSQL_HOST='localhost',
MYSQL_USER='root',
MYSQL_PASSWORD='123456',
MYSQL_DB='flask_todo_app',
MYSQL_CURSORCLASS='DictCursor',
SECRET_KEY='development key'
))
|
4aa11073a551c8a026daea9175336b63dd9780b2
|
src/poliastro/twobody/events.py
|
src/poliastro/twobody/events.py
|
from astropy import units as u
from numpy.linalg import norm
class LithobrakeEvent:
"""Terminal event that detects impact with the attractor surface.
Parameters
----------
R : float
Radius of the attractor.
"""
def __init__(self, R):
self._R = R
self._last_t = None
@property
def terminal(self):
# Tell SciPy to stop the integration at H = R (impact)
return True
@property
def last_t(self):
return self._last_t * u.s
def __call__(self, t, u, k):
self._last_t = t
H = norm(u[:3])
# SciPy will search for H - R = 0
return H - self._R
|
from astropy import units as u
from numpy.linalg import norm
class LithobrakeEvent:
"""Terminal event that detects impact with the attractor surface.
Parameters
----------
R : float
Radius of the attractor.
"""
def __init__(self, R):
self._R = R
self._last_t = None
@property
def terminal(self):
# Tell SciPy to stop the integration at H = R (impact)
return True
@property
def last_t(self):
return self._last_t * u.s
def __call__(self, t, u, k):
self._last_t = t
H = norm(u[:3])
# SciPy will search for H - R = 0
print(H - self._R)
return H - self._R
class AltitudeCrossEvent:
"""Detect if a satellite crosses a specific threshold altitude.
Parameters
----------
R: ~astropy.units.Quantity
Radius of the attractor (km).
thresh_H: ~astropy.units.Quantity
Threshold altitude (in km), defaults to 100 km.
terminal: bool
Whether to terminate integration if this event occurs, defaults to True.
"""
def __init__(self, R, thresh_H=100*u.km, terminal=True):
self._R = R.to(u.km).value
self._thresh_H = thresh_H.to(u.km).value # Threshold height from the ground.
self._terminal = terminal
self._last_t = None
@property
def terminal(self):
# Orekit's API stops propagation when descending, but not when ascending.
return self._terminal
@property
def last_t(self):
return self._last_t * u.s
def __call__(self, t, u, k):
self._last_t = t
H = norm(u[:3])
# H is from the center of the attractor.
return H - self._R - self._thresh_H # If this goes from +ve to -ve, altitude is decreasing.
|
Add altitude cross event detector
|
Add altitude cross event detector
|
Python
|
mit
|
poliastro/poliastro
|
from astropy import units as u
from numpy.linalg import norm
class LithobrakeEvent:
"""Terminal event that detects impact with the attractor surface.
Parameters
----------
R : float
Radius of the attractor.
"""
def __init__(self, R):
self._R = R
self._last_t = None
@property
def terminal(self):
# Tell SciPy to stop the integration at H = R (impact)
return True
@property
def last_t(self):
return self._last_t * u.s
def __call__(self, t, u, k):
self._last_t = t
H = norm(u[:3])
# SciPy will search for H - R = 0
return H - self._R
Add altitude cross event detector
|
from astropy import units as u
from numpy.linalg import norm
class LithobrakeEvent:
"""Terminal event that detects impact with the attractor surface.
Parameters
----------
R : float
Radius of the attractor.
"""
def __init__(self, R):
self._R = R
self._last_t = None
@property
def terminal(self):
# Tell SciPy to stop the integration at H = R (impact)
return True
@property
def last_t(self):
return self._last_t * u.s
def __call__(self, t, u, k):
self._last_t = t
H = norm(u[:3])
# SciPy will search for H - R = 0
print(H - self._R)
return H - self._R
class AltitudeCrossEvent:
"""Detect if a satellite crosses a specific threshold altitude.
Parameters
----------
R: ~astropy.units.Quantity
Radius of the attractor (km).
thresh_H: ~astropy.units.Quantity
Threshold altitude (in km), defaults to 100 km.
terminal: bool
Whether to terminate integration if this event occurs, defaults to True.
"""
def __init__(self, R, thresh_H=100*u.km, terminal=True):
self._R = R.to(u.km).value
self._thresh_H = thresh_H.to(u.km).value # Threshold height from the ground.
self._terminal = terminal
self._last_t = None
@property
def terminal(self):
# Orekit's API stops propagation when descending, but not when ascending.
return self._terminal
@property
def last_t(self):
return self._last_t * u.s
def __call__(self, t, u, k):
self._last_t = t
H = norm(u[:3])
# H is from the center of the attractor.
return H - self._R - self._thresh_H # If this goes from +ve to -ve, altitude is decreasing.
|
<commit_before>from astropy import units as u
from numpy.linalg import norm
class LithobrakeEvent:
"""Terminal event that detects impact with the attractor surface.
Parameters
----------
R : float
Radius of the attractor.
"""
def __init__(self, R):
self._R = R
self._last_t = None
@property
def terminal(self):
# Tell SciPy to stop the integration at H = R (impact)
return True
@property
def last_t(self):
return self._last_t * u.s
def __call__(self, t, u, k):
self._last_t = t
H = norm(u[:3])
# SciPy will search for H - R = 0
return H - self._R
<commit_msg>Add altitude cross event detector<commit_after>
|
from astropy import units as u
from numpy.linalg import norm
class LithobrakeEvent:
"""Terminal event that detects impact with the attractor surface.
Parameters
----------
R : float
Radius of the attractor.
"""
def __init__(self, R):
self._R = R
self._last_t = None
@property
def terminal(self):
# Tell SciPy to stop the integration at H = R (impact)
return True
@property
def last_t(self):
return self._last_t * u.s
def __call__(self, t, u, k):
self._last_t = t
H = norm(u[:3])
# SciPy will search for H - R = 0
print(H - self._R)
return H - self._R
class AltitudeCrossEvent:
"""Detect if a satellite crosses a specific threshold altitude.
Parameters
----------
R: ~astropy.units.Quantity
Radius of the attractor (km).
thresh_H: ~astropy.units.Quantity
Threshold altitude (in km), defaults to 100 km.
terminal: bool
Whether to terminate integration if this event occurs, defaults to True.
"""
def __init__(self, R, thresh_H=100*u.km, terminal=True):
self._R = R.to(u.km).value
self._thresh_H = thresh_H.to(u.km).value # Threshold height from the ground.
self._terminal = terminal
self._last_t = None
@property
def terminal(self):
# Orekit's API stops propagation when descending, but not when ascending.
return self._terminal
@property
def last_t(self):
return self._last_t * u.s
def __call__(self, t, u, k):
self._last_t = t
H = norm(u[:3])
# H is from the center of the attractor.
return H - self._R - self._thresh_H # If this goes from +ve to -ve, altitude is decreasing.
|
from astropy import units as u
from numpy.linalg import norm
class LithobrakeEvent:
"""Terminal event that detects impact with the attractor surface.
Parameters
----------
R : float
Radius of the attractor.
"""
def __init__(self, R):
self._R = R
self._last_t = None
@property
def terminal(self):
# Tell SciPy to stop the integration at H = R (impact)
return True
@property
def last_t(self):
return self._last_t * u.s
def __call__(self, t, u, k):
self._last_t = t
H = norm(u[:3])
# SciPy will search for H - R = 0
return H - self._R
Add altitude cross event detectorfrom astropy import units as u
from numpy.linalg import norm
class LithobrakeEvent:
"""Terminal event that detects impact with the attractor surface.
Parameters
----------
R : float
Radius of the attractor.
"""
def __init__(self, R):
self._R = R
self._last_t = None
@property
def terminal(self):
# Tell SciPy to stop the integration at H = R (impact)
return True
@property
def last_t(self):
return self._last_t * u.s
def __call__(self, t, u, k):
self._last_t = t
H = norm(u[:3])
# SciPy will search for H - R = 0
print(H - self._R)
return H - self._R
class AltitudeCrossEvent:
"""Detect if a satellite crosses a specific threshold altitude.
Parameters
----------
R: ~astropy.units.Quantity
Radius of the attractor (km).
thresh_H: ~astropy.units.Quantity
Threshold altitude (in km), defaults to 100 km.
terminal: bool
Whether to terminate integration if this event occurs, defaults to True.
"""
def __init__(self, R, thresh_H=100*u.km, terminal=True):
self._R = R.to(u.km).value
self._thresh_H = thresh_H.to(u.km).value # Threshold height from the ground.
self._terminal = terminal
self._last_t = None
@property
def terminal(self):
# Orekit's API stops propagation when descending, but not when ascending.
return self._terminal
@property
def last_t(self):
return self._last_t * u.s
def __call__(self, t, u, k):
self._last_t = t
H = norm(u[:3])
# H is from the center of the attractor.
return H - self._R - self._thresh_H # If this goes from +ve to -ve, altitude is decreasing.
|
<commit_before>from astropy import units as u
from numpy.linalg import norm
class LithobrakeEvent:
"""Terminal event that detects impact with the attractor surface.
Parameters
----------
R : float
Radius of the attractor.
"""
def __init__(self, R):
self._R = R
self._last_t = None
@property
def terminal(self):
# Tell SciPy to stop the integration at H = R (impact)
return True
@property
def last_t(self):
return self._last_t * u.s
def __call__(self, t, u, k):
self._last_t = t
H = norm(u[:3])
# SciPy will search for H - R = 0
return H - self._R
<commit_msg>Add altitude cross event detector<commit_after>from astropy import units as u
from numpy.linalg import norm
class LithobrakeEvent:
"""Terminal event that detects impact with the attractor surface.
Parameters
----------
R : float
Radius of the attractor.
"""
def __init__(self, R):
self._R = R
self._last_t = None
@property
def terminal(self):
# Tell SciPy to stop the integration at H = R (impact)
return True
@property
def last_t(self):
return self._last_t * u.s
def __call__(self, t, u, k):
self._last_t = t
H = norm(u[:3])
# SciPy will search for H - R = 0
print(H - self._R)
return H - self._R
class AltitudeCrossEvent:
"""Detect if a satellite crosses a specific threshold altitude.
Parameters
----------
R: ~astropy.units.Quantity
Radius of the attractor (km).
thresh_H: ~astropy.units.Quantity
Threshold altitude (in km), defaults to 100 km.
terminal: bool
Whether to terminate integration if this event occurs, defaults to True.
"""
def __init__(self, R, thresh_H=100*u.km, terminal=True):
self._R = R.to(u.km).value
self._thresh_H = thresh_H.to(u.km).value # Threshold height from the ground.
self._terminal = terminal
self._last_t = None
@property
def terminal(self):
# Orekit's API stops propagation when descending, but not when ascending.
return self._terminal
@property
def last_t(self):
return self._last_t * u.s
def __call__(self, t, u, k):
self._last_t = t
H = norm(u[:3])
# H is from the center of the attractor.
return H - self._R - self._thresh_H # If this goes from +ve to -ve, altitude is decreasing.
|
3d1625e5e9a6a90cec1f2e18739462b006905c88
|
game.py
|
game.py
|
from deuces import Card, Deck
class Game():
"""
Contains a deck of cards that can be accessed by players to play
various card games.
"""
def __init__(self, name='kiwi'):
self.players = []
self.name = name
self.deck = Deck()
self.visible_cards = []
def add_player(self, player):
self.players.append(player)
def shuffle(self):
self.deck.shuffle()
def deal(self, ncards):
for i in range(ncards):
for p in self.players:
p.cards.append(self.deck.draw())
def flip_flop(self):
"""Create flop.
Flips over 3 cards and makes them publicly visible, as would
happen when creating the flop in Texas Hold'em.
"""
self.visible_cards += self.deck.draw(3)
def flip_cards(self, ncards=1):
"""Like flip_flop, but allows variable number of cards."""
if ncards == 1:
self.visible_cards.append(self.deck.draw(ncards))
else:
self.visible_cards += self.deck.draw(ncards)
def query_players(self):
players = '%s players: ' % self.name
for p in self.players:
players += '%s ' % p.tag
return players
def query_state(self):
info = '%s visible cards: ' % self.name
for c in self.visible_cards:
info += Card.int_to_pretty_str(c)
return info
|
from deuces import Card, Deck
class Game():
"""
Contains a deck of cards that can be accessed by players to play
various card games.
"""
def __init__(self, name='kiwi'):
self.players = []
self.name = name
self.deck = Deck()
self.visible_cards = []
def add_player(self, player):
self.players.append(player)
def shuffle(self):
self.deck.shuffle()
self.visible_cards = []
def deal(self, ncards):
for i in range(ncards):
for p in self.players:
p.cards.append(self.deck.draw())
def flip_flop(self):
"""Create flop.
Flips over 3 cards and makes them publicly visible, as would
happen when creating the flop in Texas Hold'em.
"""
self.visible_cards += self.deck.draw(3)
def flip_cards(self, ncards=1):
"""Like flip_flop, but allows variable number of cards."""
if ncards == 1:
self.visible_cards.append(self.deck.draw(ncards))
else:
self.visible_cards += self.deck.draw(ncards)
def query_players(self):
players = '%s players: ' % self.name
for p in self.players:
players += '%s ' % p.tag
return players
def query_state(self):
info = '%s visible cards: ' % self.name
for c in self.visible_cards:
info += Card.int_to_pretty_str(c)
return info
|
Fix behavioural bug in shuffling
|
Fix behavioural bug in shuffling
Previously shuffling would create a new deck, implying that all the
cards had been "picked up" and were being shuffled for a new hand.
However, this did not pick up the visible cards from the game, so the
visible cards would just continue to grow unchecked. This clears the
visible cards when shuffling occurs so it truly is a new deck.
|
Python
|
bsd-2-clause
|
dramborleg/text-poker
|
from deuces import Card, Deck
class Game():
"""
Contains a deck of cards that can be accessed by players to play
various card games.
"""
def __init__(self, name='kiwi'):
self.players = []
self.name = name
self.deck = Deck()
self.visible_cards = []
def add_player(self, player):
self.players.append(player)
def shuffle(self):
self.deck.shuffle()
def deal(self, ncards):
for i in range(ncards):
for p in self.players:
p.cards.append(self.deck.draw())
def flip_flop(self):
"""Create flop.
Flips over 3 cards and makes them publicly visible, as would
happen when creating the flop in Texas Hold'em.
"""
self.visible_cards += self.deck.draw(3)
def flip_cards(self, ncards=1):
"""Like flip_flop, but allows variable number of cards."""
if ncards == 1:
self.visible_cards.append(self.deck.draw(ncards))
else:
self.visible_cards += self.deck.draw(ncards)
def query_players(self):
players = '%s players: ' % self.name
for p in self.players:
players += '%s ' % p.tag
return players
def query_state(self):
info = '%s visible cards: ' % self.name
for c in self.visible_cards:
info += Card.int_to_pretty_str(c)
return info
Fix behavioural bug in shuffling
Previously shuffling would create a new deck, implying that all the
cards had been "picked up" and were being shuffled for a new hand.
However, this did not pick up the visible cards from the game, so the
visible cards would just continue to grow unchecked. This clears the
visible cards when shuffling occurs so it truly is a new deck.
|
from deuces import Card, Deck
class Game():
"""
Contains a deck of cards that can be accessed by players to play
various card games.
"""
def __init__(self, name='kiwi'):
self.players = []
self.name = name
self.deck = Deck()
self.visible_cards = []
def add_player(self, player):
self.players.append(player)
def shuffle(self):
self.deck.shuffle()
self.visible_cards = []
def deal(self, ncards):
for i in range(ncards):
for p in self.players:
p.cards.append(self.deck.draw())
def flip_flop(self):
"""Create flop.
Flips over 3 cards and makes them publicly visible, as would
happen when creating the flop in Texas Hold'em.
"""
self.visible_cards += self.deck.draw(3)
def flip_cards(self, ncards=1):
"""Like flip_flop, but allows variable number of cards."""
if ncards == 1:
self.visible_cards.append(self.deck.draw(ncards))
else:
self.visible_cards += self.deck.draw(ncards)
def query_players(self):
players = '%s players: ' % self.name
for p in self.players:
players += '%s ' % p.tag
return players
def query_state(self):
info = '%s visible cards: ' % self.name
for c in self.visible_cards:
info += Card.int_to_pretty_str(c)
return info
|
<commit_before>from deuces import Card, Deck
class Game():
"""
Contains a deck of cards that can be accessed by players to play
various card games.
"""
def __init__(self, name='kiwi'):
self.players = []
self.name = name
self.deck = Deck()
self.visible_cards = []
def add_player(self, player):
self.players.append(player)
def shuffle(self):
self.deck.shuffle()
def deal(self, ncards):
for i in range(ncards):
for p in self.players:
p.cards.append(self.deck.draw())
def flip_flop(self):
"""Create flop.
Flips over 3 cards and makes them publicly visible, as would
happen when creating the flop in Texas Hold'em.
"""
self.visible_cards += self.deck.draw(3)
def flip_cards(self, ncards=1):
"""Like flip_flop, but allows variable number of cards."""
if ncards == 1:
self.visible_cards.append(self.deck.draw(ncards))
else:
self.visible_cards += self.deck.draw(ncards)
def query_players(self):
players = '%s players: ' % self.name
for p in self.players:
players += '%s ' % p.tag
return players
def query_state(self):
info = '%s visible cards: ' % self.name
for c in self.visible_cards:
info += Card.int_to_pretty_str(c)
return info
<commit_msg>Fix behavioural bug in shuffling
Previously shuffling would create a new deck, implying that all the
cards had been "picked up" and were being shuffled for a new hand.
However, this did not pick up the visible cards from the game, so the
visible cards would just continue to grow unchecked. This clears the
visible cards when shuffling occurs so it truly is a new deck.<commit_after>
|
from deuces import Card, Deck
class Game():
"""
Contains a deck of cards that can be accessed by players to play
various card games.
"""
def __init__(self, name='kiwi'):
self.players = []
self.name = name
self.deck = Deck()
self.visible_cards = []
def add_player(self, player):
self.players.append(player)
def shuffle(self):
self.deck.shuffle()
self.visible_cards = []
def deal(self, ncards):
for i in range(ncards):
for p in self.players:
p.cards.append(self.deck.draw())
def flip_flop(self):
"""Create flop.
Flips over 3 cards and makes them publicly visible, as would
happen when creating the flop in Texas Hold'em.
"""
self.visible_cards += self.deck.draw(3)
def flip_cards(self, ncards=1):
"""Like flip_flop, but allows variable number of cards."""
if ncards == 1:
self.visible_cards.append(self.deck.draw(ncards))
else:
self.visible_cards += self.deck.draw(ncards)
def query_players(self):
players = '%s players: ' % self.name
for p in self.players:
players += '%s ' % p.tag
return players
def query_state(self):
info = '%s visible cards: ' % self.name
for c in self.visible_cards:
info += Card.int_to_pretty_str(c)
return info
|
from deuces import Card, Deck
class Game():
"""
Contains a deck of cards that can be accessed by players to play
various card games.
"""
def __init__(self, name='kiwi'):
self.players = []
self.name = name
self.deck = Deck()
self.visible_cards = []
def add_player(self, player):
self.players.append(player)
def shuffle(self):
self.deck.shuffle()
def deal(self, ncards):
for i in range(ncards):
for p in self.players:
p.cards.append(self.deck.draw())
def flip_flop(self):
"""Create flop.
Flips over 3 cards and makes them publicly visible, as would
happen when creating the flop in Texas Hold'em.
"""
self.visible_cards += self.deck.draw(3)
def flip_cards(self, ncards=1):
"""Like flip_flop, but allows variable number of cards."""
if ncards == 1:
self.visible_cards.append(self.deck.draw(ncards))
else:
self.visible_cards += self.deck.draw(ncards)
def query_players(self):
players = '%s players: ' % self.name
for p in self.players:
players += '%s ' % p.tag
return players
def query_state(self):
info = '%s visible cards: ' % self.name
for c in self.visible_cards:
info += Card.int_to_pretty_str(c)
return info
Fix behavioural bug in shuffling
Previously shuffling would create a new deck, implying that all the
cards had been "picked up" and were being shuffled for a new hand.
However, this did not pick up the visible cards from the game, so the
visible cards would just continue to grow unchecked. This clears the
visible cards when shuffling occurs so it truly is a new deck.from deuces import Card, Deck
class Game():
"""
Contains a deck of cards that can be accessed by players to play
various card games.
"""
def __init__(self, name='kiwi'):
self.players = []
self.name = name
self.deck = Deck()
self.visible_cards = []
def add_player(self, player):
self.players.append(player)
def shuffle(self):
self.deck.shuffle()
self.visible_cards = []
def deal(self, ncards):
for i in range(ncards):
for p in self.players:
p.cards.append(self.deck.draw())
def flip_flop(self):
"""Create flop.
Flips over 3 cards and makes them publicly visible, as would
happen when creating the flop in Texas Hold'em.
"""
self.visible_cards += self.deck.draw(3)
def flip_cards(self, ncards=1):
"""Like flip_flop, but allows variable number of cards."""
if ncards == 1:
self.visible_cards.append(self.deck.draw(ncards))
else:
self.visible_cards += self.deck.draw(ncards)
def query_players(self):
players = '%s players: ' % self.name
for p in self.players:
players += '%s ' % p.tag
return players
def query_state(self):
info = '%s visible cards: ' % self.name
for c in self.visible_cards:
info += Card.int_to_pretty_str(c)
return info
|
<commit_before>from deuces import Card, Deck
class Game():
"""
Contains a deck of cards that can be accessed by players to play
various card games.
"""
def __init__(self, name='kiwi'):
self.players = []
self.name = name
self.deck = Deck()
self.visible_cards = []
def add_player(self, player):
self.players.append(player)
def shuffle(self):
self.deck.shuffle()
def deal(self, ncards):
for i in range(ncards):
for p in self.players:
p.cards.append(self.deck.draw())
def flip_flop(self):
"""Create flop.
Flips over 3 cards and makes them publicly visible, as would
happen when creating the flop in Texas Hold'em.
"""
self.visible_cards += self.deck.draw(3)
def flip_cards(self, ncards=1):
"""Like flip_flop, but allows variable number of cards."""
if ncards == 1:
self.visible_cards.append(self.deck.draw(ncards))
else:
self.visible_cards += self.deck.draw(ncards)
def query_players(self):
players = '%s players: ' % self.name
for p in self.players:
players += '%s ' % p.tag
return players
def query_state(self):
info = '%s visible cards: ' % self.name
for c in self.visible_cards:
info += Card.int_to_pretty_str(c)
return info
<commit_msg>Fix behavioural bug in shuffling
Previously shuffling would create a new deck, implying that all the
cards had been "picked up" and were being shuffled for a new hand.
However, this did not pick up the visible cards from the game, so the
visible cards would just continue to grow unchecked. This clears the
visible cards when shuffling occurs so it truly is a new deck.<commit_after>from deuces import Card, Deck
class Game():
"""
Contains a deck of cards that can be accessed by players to play
various card games.
"""
def __init__(self, name='kiwi'):
self.players = []
self.name = name
self.deck = Deck()
self.visible_cards = []
def add_player(self, player):
self.players.append(player)
def shuffle(self):
self.deck.shuffle()
self.visible_cards = []
def deal(self, ncards):
for i in range(ncards):
for p in self.players:
p.cards.append(self.deck.draw())
def flip_flop(self):
"""Create flop.
Flips over 3 cards and makes them publicly visible, as would
happen when creating the flop in Texas Hold'em.
"""
self.visible_cards += self.deck.draw(3)
def flip_cards(self, ncards=1):
"""Like flip_flop, but allows variable number of cards."""
if ncards == 1:
self.visible_cards.append(self.deck.draw(ncards))
else:
self.visible_cards += self.deck.draw(ncards)
def query_players(self):
players = '%s players: ' % self.name
for p in self.players:
players += '%s ' % p.tag
return players
def query_state(self):
info = '%s visible cards: ' % self.name
for c in self.visible_cards:
info += Card.int_to_pretty_str(c)
return info
|
4ec2672dc22c3477984e335e3678f3a2e69ecbd2
|
wger/exercises/migrations/0018_delete_pending_exercises.py
|
wger/exercises/migrations/0018_delete_pending_exercises.py
|
# Generated by Django 3.2.15 on 2022-08-25 17:25
from django.db import migrations
from django.conf import settings
def delete_pending_exercises(apps, schema_editor):
"""
Delete all pending exercises
Note that we can't access STATUS_PENDING here because we are not using
a real model.
"""
Exercise = apps.get_model("exercises", "ExerciseBase")
Exercise.objects.filter(status='1').delete()
class Migration(migrations.Migration):
dependencies = [
('core', '0014_merge_20210818_1735'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('exercises', '0017_muscle_name_en'),
]
operations = [migrations.RunPython(delete_pending_exercises)]
|
# Generated by Django 3.2.15 on 2022-08-25 17:25
from django.db import migrations
from django.conf import settings
def delete_pending_bases(apps, schema_editor):
"""
Delete all pending bases
Note that we can't access STATUS_PENDING here because we are not using
a real model.
"""
Base = apps.get_model("exercises", "ExerciseBase")
Base.objects.filter(status='1').delete()
def delete_pending_translations(apps, schema_editor):
"""
Delete all pending translations
Note that we can't access STATUS_PENDING here because we are not using
a real model.
"""
Exercise = apps.get_model("exercises", "Exercise")
Exercise.objects.filter(status='1').delete()
class Migration(migrations.Migration):
dependencies = [
('core', '0014_merge_20210818_1735'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('exercises', '0017_muscle_name_en'),
]
operations = [
migrations.RunPython(delete_pending_bases),
migrations.RunPython(delete_pending_translations),
]
|
Delete both pending bases and translations
|
Delete both pending bases and translations
|
Python
|
agpl-3.0
|
wger-project/wger,wger-project/wger,wger-project/wger,wger-project/wger
|
# Generated by Django 3.2.15 on 2022-08-25 17:25
from django.db import migrations
from django.conf import settings
def delete_pending_exercises(apps, schema_editor):
"""
Delete all pending exercises
Note that we can't access STATUS_PENDING here because we are not using
a real model.
"""
Exercise = apps.get_model("exercises", "ExerciseBase")
Exercise.objects.filter(status='1').delete()
class Migration(migrations.Migration):
dependencies = [
('core', '0014_merge_20210818_1735'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('exercises', '0017_muscle_name_en'),
]
operations = [migrations.RunPython(delete_pending_exercises)]
Delete both pending bases and translations
|
# Generated by Django 3.2.15 on 2022-08-25 17:25
from django.db import migrations
from django.conf import settings
def delete_pending_bases(apps, schema_editor):
"""
Delete all pending bases
Note that we can't access STATUS_PENDING here because we are not using
a real model.
"""
Base = apps.get_model("exercises", "ExerciseBase")
Base.objects.filter(status='1').delete()
def delete_pending_translations(apps, schema_editor):
"""
Delete all pending translations
Note that we can't access STATUS_PENDING here because we are not using
a real model.
"""
Exercise = apps.get_model("exercises", "Exercise")
Exercise.objects.filter(status='1').delete()
class Migration(migrations.Migration):
dependencies = [
('core', '0014_merge_20210818_1735'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('exercises', '0017_muscle_name_en'),
]
operations = [
migrations.RunPython(delete_pending_bases),
migrations.RunPython(delete_pending_translations),
]
|
<commit_before># Generated by Django 3.2.15 on 2022-08-25 17:25
from django.db import migrations
from django.conf import settings
def delete_pending_exercises(apps, schema_editor):
"""
Delete all pending exercises
Note that we can't access STATUS_PENDING here because we are not using
a real model.
"""
Exercise = apps.get_model("exercises", "ExerciseBase")
Exercise.objects.filter(status='1').delete()
class Migration(migrations.Migration):
dependencies = [
('core', '0014_merge_20210818_1735'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('exercises', '0017_muscle_name_en'),
]
operations = [migrations.RunPython(delete_pending_exercises)]
<commit_msg>Delete both pending bases and translations<commit_after>
|
# Generated by Django 3.2.15 on 2022-08-25 17:25
from django.db import migrations
from django.conf import settings
def delete_pending_bases(apps, schema_editor):
"""
Delete all pending bases
Note that we can't access STATUS_PENDING here because we are not using
a real model.
"""
Base = apps.get_model("exercises", "ExerciseBase")
Base.objects.filter(status='1').delete()
def delete_pending_translations(apps, schema_editor):
"""
Delete all pending translations
Note that we can't access STATUS_PENDING here because we are not using
a real model.
"""
Exercise = apps.get_model("exercises", "Exercise")
Exercise.objects.filter(status='1').delete()
class Migration(migrations.Migration):
dependencies = [
('core', '0014_merge_20210818_1735'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('exercises', '0017_muscle_name_en'),
]
operations = [
migrations.RunPython(delete_pending_bases),
migrations.RunPython(delete_pending_translations),
]
|
# Generated by Django 3.2.15 on 2022-08-25 17:25
from django.db import migrations
from django.conf import settings
def delete_pending_exercises(apps, schema_editor):
"""
Delete all pending exercises
Note that we can't access STATUS_PENDING here because we are not using
a real model.
"""
Exercise = apps.get_model("exercises", "ExerciseBase")
Exercise.objects.filter(status='1').delete()
class Migration(migrations.Migration):
dependencies = [
('core', '0014_merge_20210818_1735'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('exercises', '0017_muscle_name_en'),
]
operations = [migrations.RunPython(delete_pending_exercises)]
Delete both pending bases and translations# Generated by Django 3.2.15 on 2022-08-25 17:25
from django.db import migrations
from django.conf import settings
def delete_pending_bases(apps, schema_editor):
"""
Delete all pending bases
Note that we can't access STATUS_PENDING here because we are not using
a real model.
"""
Base = apps.get_model("exercises", "ExerciseBase")
Base.objects.filter(status='1').delete()
def delete_pending_translations(apps, schema_editor):
"""
Delete all pending translations
Note that we can't access STATUS_PENDING here because we are not using
a real model.
"""
Exercise = apps.get_model("exercises", "Exercise")
Exercise.objects.filter(status='1').delete()
class Migration(migrations.Migration):
dependencies = [
('core', '0014_merge_20210818_1735'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('exercises', '0017_muscle_name_en'),
]
operations = [
migrations.RunPython(delete_pending_bases),
migrations.RunPython(delete_pending_translations),
]
|
<commit_before># Generated by Django 3.2.15 on 2022-08-25 17:25
from django.db import migrations
from django.conf import settings
def delete_pending_exercises(apps, schema_editor):
"""
Delete all pending exercises
Note that we can't access STATUS_PENDING here because we are not using
a real model.
"""
Exercise = apps.get_model("exercises", "ExerciseBase")
Exercise.objects.filter(status='1').delete()
class Migration(migrations.Migration):
dependencies = [
('core', '0014_merge_20210818_1735'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('exercises', '0017_muscle_name_en'),
]
operations = [migrations.RunPython(delete_pending_exercises)]
<commit_msg>Delete both pending bases and translations<commit_after># Generated by Django 3.2.15 on 2022-08-25 17:25
from django.db import migrations
from django.conf import settings
def delete_pending_bases(apps, schema_editor):
"""
Delete all pending bases
Note that we can't access STATUS_PENDING here because we are not using
a real model.
"""
Base = apps.get_model("exercises", "ExerciseBase")
Base.objects.filter(status='1').delete()
def delete_pending_translations(apps, schema_editor):
"""
Delete all pending translations
Note that we can't access STATUS_PENDING here because we are not using
a real model.
"""
Exercise = apps.get_model("exercises", "Exercise")
Exercise.objects.filter(status='1').delete()
class Migration(migrations.Migration):
dependencies = [
('core', '0014_merge_20210818_1735'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('exercises', '0017_muscle_name_en'),
]
operations = [
migrations.RunPython(delete_pending_bases),
migrations.RunPython(delete_pending_translations),
]
|
513b23b169e87a92b2fdf0bd6b33778ea68b9b24
|
imagekit/management/commands/ikcachevalidate.py
|
imagekit/management/commands/ikcachevalidate.py
|
from optparse import make_option
from django.core.management.base import BaseCommand
from django.db.models.loading import cache
from ...utils import validate_app_cache
class Command(BaseCommand):
help = ('Validates the image cache for a list of apps.')
args = '[apps]'
requires_model_validation = True
can_import_settings = True
option_list = BaseCommand.option_list + (
make_option('--force',
dest='force_revalidation',
action='store_true',
default=False,
help='Invalidate each image file before validating it, thereby'
' ensuring its revalidation. This is very similar to'
' running ikcacheinvalidate and then running'
' ikcachevalidate; the difference being that this option'
' causes files to be invalidated and validated'
' one-at-a-time, whereas running the two commands in series'
' would invalidate all images before validating any.'
),
)
def handle(self, *args, **options):
apps = args or cache.app_models.keys()
validate_app_cache(apps, options['force_revalidation'])
|
from optparse import make_option
from django.core.management.base import BaseCommand
from django.db.models.loading import cache
from ...utils import validate_app_cache
class Command(BaseCommand):
help = ('Validates the image cache for a list of apps.')
args = '[apps]'
requires_model_validation = True
can_import_settings = True
option_list = BaseCommand.option_list + (
make_option('--force-revalidation',
dest='force_revalidation',
action='store_true',
default=False,
help='Invalidate each image file before validating it, thereby'
' ensuring its revalidation. This is very similar to'
' running ikcacheinvalidate and then running'
' ikcachevalidate; the difference being that this option'
' causes files to be invalidated and validated'
' one-at-a-time, whereas running the two commands in series'
' would invalidate all images before validating any.'
),
)
def handle(self, *args, **options):
apps = args or cache.app_models.keys()
validate_app_cache(apps, options['force_revalidation'])
|
Rename force flag to force-revalidation
|
Rename force flag to force-revalidation
|
Python
|
bsd-3-clause
|
pcompassion/django-imagekit,FundedByMe/django-imagekit,tawanda/django-imagekit,tawanda/django-imagekit,pcompassion/django-imagekit,pcompassion/django-imagekit,FundedByMe/django-imagekit
|
from optparse import make_option
from django.core.management.base import BaseCommand
from django.db.models.loading import cache
from ...utils import validate_app_cache
class Command(BaseCommand):
help = ('Validates the image cache for a list of apps.')
args = '[apps]'
requires_model_validation = True
can_import_settings = True
option_list = BaseCommand.option_list + (
make_option('--force',
dest='force_revalidation',
action='store_true',
default=False,
help='Invalidate each image file before validating it, thereby'
' ensuring its revalidation. This is very similar to'
' running ikcacheinvalidate and then running'
' ikcachevalidate; the difference being that this option'
' causes files to be invalidated and validated'
' one-at-a-time, whereas running the two commands in series'
' would invalidate all images before validating any.'
),
)
def handle(self, *args, **options):
apps = args or cache.app_models.keys()
validate_app_cache(apps, options['force_revalidation'])
Rename force flag to force-revalidation
|
from optparse import make_option
from django.core.management.base import BaseCommand
from django.db.models.loading import cache
from ...utils import validate_app_cache
class Command(BaseCommand):
help = ('Validates the image cache for a list of apps.')
args = '[apps]'
requires_model_validation = True
can_import_settings = True
option_list = BaseCommand.option_list + (
make_option('--force-revalidation',
dest='force_revalidation',
action='store_true',
default=False,
help='Invalidate each image file before validating it, thereby'
' ensuring its revalidation. This is very similar to'
' running ikcacheinvalidate and then running'
' ikcachevalidate; the difference being that this option'
' causes files to be invalidated and validated'
' one-at-a-time, whereas running the two commands in series'
' would invalidate all images before validating any.'
),
)
def handle(self, *args, **options):
apps = args or cache.app_models.keys()
validate_app_cache(apps, options['force_revalidation'])
|
<commit_before>from optparse import make_option
from django.core.management.base import BaseCommand
from django.db.models.loading import cache
from ...utils import validate_app_cache
class Command(BaseCommand):
help = ('Validates the image cache for a list of apps.')
args = '[apps]'
requires_model_validation = True
can_import_settings = True
option_list = BaseCommand.option_list + (
make_option('--force',
dest='force_revalidation',
action='store_true',
default=False,
help='Invalidate each image file before validating it, thereby'
' ensuring its revalidation. This is very similar to'
' running ikcacheinvalidate and then running'
' ikcachevalidate; the difference being that this option'
' causes files to be invalidated and validated'
' one-at-a-time, whereas running the two commands in series'
' would invalidate all images before validating any.'
),
)
def handle(self, *args, **options):
apps = args or cache.app_models.keys()
validate_app_cache(apps, options['force_revalidation'])
<commit_msg>Rename force flag to force-revalidation<commit_after>
|
from optparse import make_option
from django.core.management.base import BaseCommand
from django.db.models.loading import cache
from ...utils import validate_app_cache
class Command(BaseCommand):
help = ('Validates the image cache for a list of apps.')
args = '[apps]'
requires_model_validation = True
can_import_settings = True
option_list = BaseCommand.option_list + (
make_option('--force-revalidation',
dest='force_revalidation',
action='store_true',
default=False,
help='Invalidate each image file before validating it, thereby'
' ensuring its revalidation. This is very similar to'
' running ikcacheinvalidate and then running'
' ikcachevalidate; the difference being that this option'
' causes files to be invalidated and validated'
' one-at-a-time, whereas running the two commands in series'
' would invalidate all images before validating any.'
),
)
def handle(self, *args, **options):
apps = args or cache.app_models.keys()
validate_app_cache(apps, options['force_revalidation'])
|
from optparse import make_option
from django.core.management.base import BaseCommand
from django.db.models.loading import cache
from ...utils import validate_app_cache
class Command(BaseCommand):
help = ('Validates the image cache for a list of apps.')
args = '[apps]'
requires_model_validation = True
can_import_settings = True
option_list = BaseCommand.option_list + (
make_option('--force',
dest='force_revalidation',
action='store_true',
default=False,
help='Invalidate each image file before validating it, thereby'
' ensuring its revalidation. This is very similar to'
' running ikcacheinvalidate and then running'
' ikcachevalidate; the difference being that this option'
' causes files to be invalidated and validated'
' one-at-a-time, whereas running the two commands in series'
' would invalidate all images before validating any.'
),
)
def handle(self, *args, **options):
apps = args or cache.app_models.keys()
validate_app_cache(apps, options['force_revalidation'])
Rename force flag to force-revalidationfrom optparse import make_option
from django.core.management.base import BaseCommand
from django.db.models.loading import cache
from ...utils import validate_app_cache
class Command(BaseCommand):
help = ('Validates the image cache for a list of apps.')
args = '[apps]'
requires_model_validation = True
can_import_settings = True
option_list = BaseCommand.option_list + (
make_option('--force-revalidation',
dest='force_revalidation',
action='store_true',
default=False,
help='Invalidate each image file before validating it, thereby'
' ensuring its revalidation. This is very similar to'
' running ikcacheinvalidate and then running'
' ikcachevalidate; the difference being that this option'
' causes files to be invalidated and validated'
' one-at-a-time, whereas running the two commands in series'
' would invalidate all images before validating any.'
),
)
def handle(self, *args, **options):
apps = args or cache.app_models.keys()
validate_app_cache(apps, options['force_revalidation'])
|
<commit_before>from optparse import make_option
from django.core.management.base import BaseCommand
from django.db.models.loading import cache
from ...utils import validate_app_cache
class Command(BaseCommand):
help = ('Validates the image cache for a list of apps.')
args = '[apps]'
requires_model_validation = True
can_import_settings = True
option_list = BaseCommand.option_list + (
make_option('--force',
dest='force_revalidation',
action='store_true',
default=False,
help='Invalidate each image file before validating it, thereby'
' ensuring its revalidation. This is very similar to'
' running ikcacheinvalidate and then running'
' ikcachevalidate; the difference being that this option'
' causes files to be invalidated and validated'
' one-at-a-time, whereas running the two commands in series'
' would invalidate all images before validating any.'
),
)
def handle(self, *args, **options):
apps = args or cache.app_models.keys()
validate_app_cache(apps, options['force_revalidation'])
<commit_msg>Rename force flag to force-revalidation<commit_after>from optparse import make_option
from django.core.management.base import BaseCommand
from django.db.models.loading import cache
from ...utils import validate_app_cache
class Command(BaseCommand):
help = ('Validates the image cache for a list of apps.')
args = '[apps]'
requires_model_validation = True
can_import_settings = True
option_list = BaseCommand.option_list + (
make_option('--force-revalidation',
dest='force_revalidation',
action='store_true',
default=False,
help='Invalidate each image file before validating it, thereby'
' ensuring its revalidation. This is very similar to'
' running ikcacheinvalidate and then running'
' ikcachevalidate; the difference being that this option'
' causes files to be invalidated and validated'
' one-at-a-time, whereas running the two commands in series'
' would invalidate all images before validating any.'
),
)
def handle(self, *args, **options):
apps = args or cache.app_models.keys()
validate_app_cache(apps, options['force_revalidation'])
|
af2f7338c2c9bdddbb90af2ce96866af98482215
|
concurrency/test_get_websites.py
|
concurrency/test_get_websites.py
|
import unittest
from unittest.mock import patch
from concurrency.get_websites import load_url as load_url
class TestGetWebsites(unittest.TestCase):
@patch('concurrency.get_websites.requests')
def test_load_url(self, m):
""" Check that we're getting the data from a request object """
m.get = lambda req: {'text': 'foo'}
data = load_url('fakeurl')
self.assertEqual(data, 'foo')
if __name__ == "__main__":
unittest.main()
|
import unittest
from unittest.mock import patch, MagicMock
from concurrency.get_websites import load_url as load_url
class TestGetWebsites(unittest.TestCase):
@patch('concurrency.get_websites.requests')
def test_load_url_returns_data(self, m):
""" Check that we're getting the data from a request object """
m.get = MagicMock(return_value={'text': 'foo'})
data = data = load_url('fazzbear')
self.assertEqual(data, 'foo')
@patch('concurrency.get_websites.requests')
def test_load_called_with_correct_url(self, m):
""" Check that we're making the request with the url we pass """
m.get = MagicMock(return_value={'text': 'foo'})
data = load_url('fakeurl')
m.get.assert_called_with('fakeurl')
if __name__ == "__main__":
unittest.main()
|
Add a test case to see that we're making a request with the url we pass in
|
Add a test case to see that we're making a request with the url we pass in
|
Python
|
mit
|
b-ritter/python-notes,b-ritter/python-notes
|
import unittest
from unittest.mock import patch
from concurrency.get_websites import load_url as load_url
class TestGetWebsites(unittest.TestCase):
@patch('concurrency.get_websites.requests')
def test_load_url(self, m):
""" Check that we're getting the data from a request object """
m.get = lambda req: {'text': 'foo'}
data = load_url('fakeurl')
self.assertEqual(data, 'foo')
if __name__ == "__main__":
unittest.main()Add a test case to see that we're making a request with the url we pass in
|
import unittest
from unittest.mock import patch, MagicMock
from concurrency.get_websites import load_url as load_url
class TestGetWebsites(unittest.TestCase):
@patch('concurrency.get_websites.requests')
def test_load_url_returns_data(self, m):
""" Check that we're getting the data from a request object """
m.get = MagicMock(return_value={'text': 'foo'})
data = data = load_url('fazzbear')
self.assertEqual(data, 'foo')
@patch('concurrency.get_websites.requests')
def test_load_called_with_correct_url(self, m):
""" Check that we're making the request with the url we pass """
m.get = MagicMock(return_value={'text': 'foo'})
data = load_url('fakeurl')
m.get.assert_called_with('fakeurl')
if __name__ == "__main__":
unittest.main()
|
<commit_before>import unittest
from unittest.mock import patch
from concurrency.get_websites import load_url as load_url
class TestGetWebsites(unittest.TestCase):
@patch('concurrency.get_websites.requests')
def test_load_url(self, m):
""" Check that we're getting the data from a request object """
m.get = lambda req: {'text': 'foo'}
data = load_url('fakeurl')
self.assertEqual(data, 'foo')
if __name__ == "__main__":
unittest.main()<commit_msg>Add a test case to see that we're making a request with the url we pass in<commit_after>
|
import unittest
from unittest.mock import patch, MagicMock
from concurrency.get_websites import load_url as load_url
class TestGetWebsites(unittest.TestCase):
@patch('concurrency.get_websites.requests')
def test_load_url_returns_data(self, m):
""" Check that we're getting the data from a request object """
m.get = MagicMock(return_value={'text': 'foo'})
data = data = load_url('fazzbear')
self.assertEqual(data, 'foo')
@patch('concurrency.get_websites.requests')
def test_load_called_with_correct_url(self, m):
""" Check that we're making the request with the url we pass """
m.get = MagicMock(return_value={'text': 'foo'})
data = load_url('fakeurl')
m.get.assert_called_with('fakeurl')
if __name__ == "__main__":
unittest.main()
|
import unittest
from unittest.mock import patch
from concurrency.get_websites import load_url as load_url
class TestGetWebsites(unittest.TestCase):
@patch('concurrency.get_websites.requests')
def test_load_url(self, m):
""" Check that we're getting the data from a request object """
m.get = lambda req: {'text': 'foo'}
data = load_url('fakeurl')
self.assertEqual(data, 'foo')
if __name__ == "__main__":
unittest.main()Add a test case to see that we're making a request with the url we pass inimport unittest
from unittest.mock import patch, MagicMock
from concurrency.get_websites import load_url as load_url
class TestGetWebsites(unittest.TestCase):
@patch('concurrency.get_websites.requests')
def test_load_url_returns_data(self, m):
""" Check that we're getting the data from a request object """
m.get = MagicMock(return_value={'text': 'foo'})
data = data = load_url('fazzbear')
self.assertEqual(data, 'foo')
@patch('concurrency.get_websites.requests')
def test_load_called_with_correct_url(self, m):
""" Check that we're making the request with the url we pass """
m.get = MagicMock(return_value={'text': 'foo'})
data = load_url('fakeurl')
m.get.assert_called_with('fakeurl')
if __name__ == "__main__":
unittest.main()
|
<commit_before>import unittest
from unittest.mock import patch
from concurrency.get_websites import load_url as load_url
class TestGetWebsites(unittest.TestCase):
@patch('concurrency.get_websites.requests')
def test_load_url(self, m):
""" Check that we're getting the data from a request object """
m.get = lambda req: {'text': 'foo'}
data = load_url('fakeurl')
self.assertEqual(data, 'foo')
if __name__ == "__main__":
unittest.main()<commit_msg>Add a test case to see that we're making a request with the url we pass in<commit_after>import unittest
from unittest.mock import patch, MagicMock
from concurrency.get_websites import load_url as load_url
class TestGetWebsites(unittest.TestCase):
@patch('concurrency.get_websites.requests')
def test_load_url_returns_data(self, m):
""" Check that we're getting the data from a request object """
m.get = MagicMock(return_value={'text': 'foo'})
data = data = load_url('fazzbear')
self.assertEqual(data, 'foo')
@patch('concurrency.get_websites.requests')
def test_load_called_with_correct_url(self, m):
""" Check that we're making the request with the url we pass """
m.get = MagicMock(return_value={'text': 'foo'})
data = load_url('fakeurl')
m.get.assert_called_with('fakeurl')
if __name__ == "__main__":
unittest.main()
|
a09cf20b13c82b8521e2e36bbd8802a4578cefac
|
csunplugged/tests/topics/models/test_curriculum_integration.py
|
csunplugged/tests/topics/models/test_curriculum_integration.py
|
from model_mommy import mommy
from tests.BaseTestWithDB import BaseTestWithDB
from topics.models import CurriculumIntegration
from topics.models import CurriculumArea
from topics.models import Lesson
from tests.topics import create_topics_test_data
class CurriculumIntegrationModelTest(BaseTestWithDB):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def test_curriculum_integration(self):
# Setup Auxliary Data
topic = create_topics_test_data.create_test_topic(1)
curriculum_area = mommy.make(
CurriculumArea,
make_m2m=True,
name="cats"
)
prerequisite_lesson = mommy.make(
Lesson,
make_m2m=True,
name="dogs"
)
# Test
new_curriculum_integration = CurriculumIntegration.objects.create(
topic=topic,
slug="slug",
number=1,
name="name",
content="content"
)
new_curriculum_integration.curriculum_areas.add(curriculum_area)
new_curriculum_integration.prerequisite_lessons.add(prerequisite_lesson)
query_result = CurriculumIntegration.objects.get(slug="slug")
self.assertEqual(query_result, new_curriculum_integration)
|
from model_mommy import mommy
from tests.BaseTestWithDB import BaseTestWithDB
from topics.models import CurriculumIntegration
from topics.models import CurriculumArea
from topics.models import Lesson
from tests.topics import create_topics_test_data
class CurriculumIntegrationModelTest(BaseTestWithDB):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def test_curriculum_integration(self):
# Setup Auxliary Data
topic = create_topics_test_data.create_test_topic(1)
curriculum_area = mommy.make(
CurriculumArea,
name="cats"
)
prerequisite_lesson = mommy.make(
Lesson,
name="dogs"
)
new_curriculum_integration = CurriculumIntegration.objects.create(
topic=topic,
slug="slug",
number=1,
name="name",
content="content"
)
new_curriculum_integration.curriculum_areas.add(curriculum_area)
new_curriculum_integration.prerequisite_lessons.add(prerequisite_lesson)
query_result = CurriculumIntegration.objects.get(slug="slug")
self.assertEqual(query_result, new_curriculum_integration)
|
Remove many to many key generation in test
|
Remove many to many key generation in test
|
Python
|
mit
|
uccser/cs-unplugged,uccser/cs-unplugged,uccser/cs-unplugged,uccser/cs-unplugged
|
from model_mommy import mommy
from tests.BaseTestWithDB import BaseTestWithDB
from topics.models import CurriculumIntegration
from topics.models import CurriculumArea
from topics.models import Lesson
from tests.topics import create_topics_test_data
class CurriculumIntegrationModelTest(BaseTestWithDB):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def test_curriculum_integration(self):
# Setup Auxliary Data
topic = create_topics_test_data.create_test_topic(1)
curriculum_area = mommy.make(
CurriculumArea,
make_m2m=True,
name="cats"
)
prerequisite_lesson = mommy.make(
Lesson,
make_m2m=True,
name="dogs"
)
# Test
new_curriculum_integration = CurriculumIntegration.objects.create(
topic=topic,
slug="slug",
number=1,
name="name",
content="content"
)
new_curriculum_integration.curriculum_areas.add(curriculum_area)
new_curriculum_integration.prerequisite_lessons.add(prerequisite_lesson)
query_result = CurriculumIntegration.objects.get(slug="slug")
self.assertEqual(query_result, new_curriculum_integration)
Remove many to many key generation in test
|
from model_mommy import mommy
from tests.BaseTestWithDB import BaseTestWithDB
from topics.models import CurriculumIntegration
from topics.models import CurriculumArea
from topics.models import Lesson
from tests.topics import create_topics_test_data
class CurriculumIntegrationModelTest(BaseTestWithDB):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def test_curriculum_integration(self):
# Setup Auxliary Data
topic = create_topics_test_data.create_test_topic(1)
curriculum_area = mommy.make(
CurriculumArea,
name="cats"
)
prerequisite_lesson = mommy.make(
Lesson,
name="dogs"
)
new_curriculum_integration = CurriculumIntegration.objects.create(
topic=topic,
slug="slug",
number=1,
name="name",
content="content"
)
new_curriculum_integration.curriculum_areas.add(curriculum_area)
new_curriculum_integration.prerequisite_lessons.add(prerequisite_lesson)
query_result = CurriculumIntegration.objects.get(slug="slug")
self.assertEqual(query_result, new_curriculum_integration)
|
<commit_before>from model_mommy import mommy
from tests.BaseTestWithDB import BaseTestWithDB
from topics.models import CurriculumIntegration
from topics.models import CurriculumArea
from topics.models import Lesson
from tests.topics import create_topics_test_data
class CurriculumIntegrationModelTest(BaseTestWithDB):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def test_curriculum_integration(self):
# Setup Auxliary Data
topic = create_topics_test_data.create_test_topic(1)
curriculum_area = mommy.make(
CurriculumArea,
make_m2m=True,
name="cats"
)
prerequisite_lesson = mommy.make(
Lesson,
make_m2m=True,
name="dogs"
)
# Test
new_curriculum_integration = CurriculumIntegration.objects.create(
topic=topic,
slug="slug",
number=1,
name="name",
content="content"
)
new_curriculum_integration.curriculum_areas.add(curriculum_area)
new_curriculum_integration.prerequisite_lessons.add(prerequisite_lesson)
query_result = CurriculumIntegration.objects.get(slug="slug")
self.assertEqual(query_result, new_curriculum_integration)
<commit_msg>Remove many to many key generation in test<commit_after>
|
from model_mommy import mommy
from tests.BaseTestWithDB import BaseTestWithDB
from topics.models import CurriculumIntegration
from topics.models import CurriculumArea
from topics.models import Lesson
from tests.topics import create_topics_test_data
class CurriculumIntegrationModelTest(BaseTestWithDB):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def test_curriculum_integration(self):
# Setup Auxliary Data
topic = create_topics_test_data.create_test_topic(1)
curriculum_area = mommy.make(
CurriculumArea,
name="cats"
)
prerequisite_lesson = mommy.make(
Lesson,
name="dogs"
)
new_curriculum_integration = CurriculumIntegration.objects.create(
topic=topic,
slug="slug",
number=1,
name="name",
content="content"
)
new_curriculum_integration.curriculum_areas.add(curriculum_area)
new_curriculum_integration.prerequisite_lessons.add(prerequisite_lesson)
query_result = CurriculumIntegration.objects.get(slug="slug")
self.assertEqual(query_result, new_curriculum_integration)
|
from model_mommy import mommy
from tests.BaseTestWithDB import BaseTestWithDB
from topics.models import CurriculumIntegration
from topics.models import CurriculumArea
from topics.models import Lesson
from tests.topics import create_topics_test_data
class CurriculumIntegrationModelTest(BaseTestWithDB):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def test_curriculum_integration(self):
# Setup Auxliary Data
topic = create_topics_test_data.create_test_topic(1)
curriculum_area = mommy.make(
CurriculumArea,
make_m2m=True,
name="cats"
)
prerequisite_lesson = mommy.make(
Lesson,
make_m2m=True,
name="dogs"
)
# Test
new_curriculum_integration = CurriculumIntegration.objects.create(
topic=topic,
slug="slug",
number=1,
name="name",
content="content"
)
new_curriculum_integration.curriculum_areas.add(curriculum_area)
new_curriculum_integration.prerequisite_lessons.add(prerequisite_lesson)
query_result = CurriculumIntegration.objects.get(slug="slug")
self.assertEqual(query_result, new_curriculum_integration)
Remove many to many key generation in testfrom model_mommy import mommy
from tests.BaseTestWithDB import BaseTestWithDB
from topics.models import CurriculumIntegration
from topics.models import CurriculumArea
from topics.models import Lesson
from tests.topics import create_topics_test_data
class CurriculumIntegrationModelTest(BaseTestWithDB):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def test_curriculum_integration(self):
# Setup Auxliary Data
topic = create_topics_test_data.create_test_topic(1)
curriculum_area = mommy.make(
CurriculumArea,
name="cats"
)
prerequisite_lesson = mommy.make(
Lesson,
name="dogs"
)
new_curriculum_integration = CurriculumIntegration.objects.create(
topic=topic,
slug="slug",
number=1,
name="name",
content="content"
)
new_curriculum_integration.curriculum_areas.add(curriculum_area)
new_curriculum_integration.prerequisite_lessons.add(prerequisite_lesson)
query_result = CurriculumIntegration.objects.get(slug="slug")
self.assertEqual(query_result, new_curriculum_integration)
|
<commit_before>from model_mommy import mommy
from tests.BaseTestWithDB import BaseTestWithDB
from topics.models import CurriculumIntegration
from topics.models import CurriculumArea
from topics.models import Lesson
from tests.topics import create_topics_test_data
class CurriculumIntegrationModelTest(BaseTestWithDB):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def test_curriculum_integration(self):
# Setup Auxliary Data
topic = create_topics_test_data.create_test_topic(1)
curriculum_area = mommy.make(
CurriculumArea,
make_m2m=True,
name="cats"
)
prerequisite_lesson = mommy.make(
Lesson,
make_m2m=True,
name="dogs"
)
# Test
new_curriculum_integration = CurriculumIntegration.objects.create(
topic=topic,
slug="slug",
number=1,
name="name",
content="content"
)
new_curriculum_integration.curriculum_areas.add(curriculum_area)
new_curriculum_integration.prerequisite_lessons.add(prerequisite_lesson)
query_result = CurriculumIntegration.objects.get(slug="slug")
self.assertEqual(query_result, new_curriculum_integration)
<commit_msg>Remove many to many key generation in test<commit_after>from model_mommy import mommy
from tests.BaseTestWithDB import BaseTestWithDB
from topics.models import CurriculumIntegration
from topics.models import CurriculumArea
from topics.models import Lesson
from tests.topics import create_topics_test_data
class CurriculumIntegrationModelTest(BaseTestWithDB):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def test_curriculum_integration(self):
# Setup Auxliary Data
topic = create_topics_test_data.create_test_topic(1)
curriculum_area = mommy.make(
CurriculumArea,
name="cats"
)
prerequisite_lesson = mommy.make(
Lesson,
name="dogs"
)
new_curriculum_integration = CurriculumIntegration.objects.create(
topic=topic,
slug="slug",
number=1,
name="name",
content="content"
)
new_curriculum_integration.curriculum_areas.add(curriculum_area)
new_curriculum_integration.prerequisite_lessons.add(prerequisite_lesson)
query_result = CurriculumIntegration.objects.get(slug="slug")
self.assertEqual(query_result, new_curriculum_integration)
|
a00a657eff2b5ffc4453ef751b1d146ca386fd6a
|
app/views/create_user.py
|
app/views/create_user.py
|
from flask import request, flash, render_template
import bcrypt
from app import app, helpers
@app.route('/create_user', methods=['GET', 'POST'])
def create_user():
if request.method == 'POST':
username = request.form.get('username', None).strip() # Aa09_.- allowed
password = request.form.get('password', None)
role = request.form.get('role', 'Guest')
if re.match(r'^[\w.-]+$', username) and password:
# Form was completed with valid input
matching_user = "SELECT user_pk FROM users WHERE username = %s;"
user_does_exist = helpers.duplicate_check(matching_user, [username])
if user_does_exist:
flash('Username already exists')
else:
salt = bcrypt.gensalt(12)
password = bcrypt.hashpw(password.encode('utf-8'), bytes(salt))
new_user = ("INSERT INTO users (username, password, salt, role_fk) "
"VALUES (%s, %s, %s, %s);")
helpers.db_change(new_user, [username, password, salt, role])
flash('Your account was created!')
else:
flash('Please enter a username and password.')
return render_template('create_user.html')
|
from flask import request, flash, render_template
import re
import bcrypt
from app import app, helpers
@app.route('/create_user', methods=['GET', 'POST'])
def create_user():
if request.method == 'POST':
username = request.form.get('username', None).strip() # Aa09_.- allowed
password = request.form.get('password', None)
role = request.form.get('role', 'Guest')
if re.match(r'^[\w.-]+$', username) and password:
# Form was completed with valid input
matching_user = "SELECT user_pk FROM users WHERE username = %s;"
user_does_exist = helpers.duplicate_check(matching_user, [username])
if user_does_exist:
flash('Username already exists')
else:
salt = bcrypt.gensalt(12)
password = bcrypt.hashpw(password.encode('utf-8'), bytes(salt))
new_user = ("INSERT INTO users (username, password, salt, role_fk) "
"VALUES (%s, %s, %s, %s);")
helpers.db_change(new_user, [username, password, salt, role])
flash('Your account was created!')
else:
flash('Please enter a username and password.')
return render_template('create_user.html')
|
Add re import for regex
|
Add re import for regex
|
Python
|
agpl-3.0
|
kylemh/UO_CIS322,kylemh/UO_CIS322,kylemh/UO_CIS322
|
from flask import request, flash, render_template
import bcrypt
from app import app, helpers
@app.route('/create_user', methods=['GET', 'POST'])
def create_user():
if request.method == 'POST':
username = request.form.get('username', None).strip() # Aa09_.- allowed
password = request.form.get('password', None)
role = request.form.get('role', 'Guest')
if re.match(r'^[\w.-]+$', username) and password:
# Form was completed with valid input
matching_user = "SELECT user_pk FROM users WHERE username = %s;"
user_does_exist = helpers.duplicate_check(matching_user, [username])
if user_does_exist:
flash('Username already exists')
else:
salt = bcrypt.gensalt(12)
password = bcrypt.hashpw(password.encode('utf-8'), bytes(salt))
new_user = ("INSERT INTO users (username, password, salt, role_fk) "
"VALUES (%s, %s, %s, %s);")
helpers.db_change(new_user, [username, password, salt, role])
flash('Your account was created!')
else:
flash('Please enter a username and password.')
return render_template('create_user.html')
Add re import for regex
|
from flask import request, flash, render_template
import re
import bcrypt
from app import app, helpers
@app.route('/create_user', methods=['GET', 'POST'])
def create_user():
if request.method == 'POST':
username = request.form.get('username', None).strip() # Aa09_.- allowed
password = request.form.get('password', None)
role = request.form.get('role', 'Guest')
if re.match(r'^[\w.-]+$', username) and password:
# Form was completed with valid input
matching_user = "SELECT user_pk FROM users WHERE username = %s;"
user_does_exist = helpers.duplicate_check(matching_user, [username])
if user_does_exist:
flash('Username already exists')
else:
salt = bcrypt.gensalt(12)
password = bcrypt.hashpw(password.encode('utf-8'), bytes(salt))
new_user = ("INSERT INTO users (username, password, salt, role_fk) "
"VALUES (%s, %s, %s, %s);")
helpers.db_change(new_user, [username, password, salt, role])
flash('Your account was created!')
else:
flash('Please enter a username and password.')
return render_template('create_user.html')
|
<commit_before>from flask import request, flash, render_template
import bcrypt
from app import app, helpers
@app.route('/create_user', methods=['GET', 'POST'])
def create_user():
if request.method == 'POST':
username = request.form.get('username', None).strip() # Aa09_.- allowed
password = request.form.get('password', None)
role = request.form.get('role', 'Guest')
if re.match(r'^[\w.-]+$', username) and password:
# Form was completed with valid input
matching_user = "SELECT user_pk FROM users WHERE username = %s;"
user_does_exist = helpers.duplicate_check(matching_user, [username])
if user_does_exist:
flash('Username already exists')
else:
salt = bcrypt.gensalt(12)
password = bcrypt.hashpw(password.encode('utf-8'), bytes(salt))
new_user = ("INSERT INTO users (username, password, salt, role_fk) "
"VALUES (%s, %s, %s, %s);")
helpers.db_change(new_user, [username, password, salt, role])
flash('Your account was created!')
else:
flash('Please enter a username and password.')
return render_template('create_user.html')
<commit_msg>Add re import for regex<commit_after>
|
from flask import request, flash, render_template
import re
import bcrypt
from app import app, helpers
@app.route('/create_user', methods=['GET', 'POST'])
def create_user():
if request.method == 'POST':
username = request.form.get('username', None).strip() # Aa09_.- allowed
password = request.form.get('password', None)
role = request.form.get('role', 'Guest')
if re.match(r'^[\w.-]+$', username) and password:
# Form was completed with valid input
matching_user = "SELECT user_pk FROM users WHERE username = %s;"
user_does_exist = helpers.duplicate_check(matching_user, [username])
if user_does_exist:
flash('Username already exists')
else:
salt = bcrypt.gensalt(12)
password = bcrypt.hashpw(password.encode('utf-8'), bytes(salt))
new_user = ("INSERT INTO users (username, password, salt, role_fk) "
"VALUES (%s, %s, %s, %s);")
helpers.db_change(new_user, [username, password, salt, role])
flash('Your account was created!')
else:
flash('Please enter a username and password.')
return render_template('create_user.html')
|
from flask import request, flash, render_template
import bcrypt
from app import app, helpers
@app.route('/create_user', methods=['GET', 'POST'])
def create_user():
if request.method == 'POST':
username = request.form.get('username', None).strip() # Aa09_.- allowed
password = request.form.get('password', None)
role = request.form.get('role', 'Guest')
if re.match(r'^[\w.-]+$', username) and password:
# Form was completed with valid input
matching_user = "SELECT user_pk FROM users WHERE username = %s;"
user_does_exist = helpers.duplicate_check(matching_user, [username])
if user_does_exist:
flash('Username already exists')
else:
salt = bcrypt.gensalt(12)
password = bcrypt.hashpw(password.encode('utf-8'), bytes(salt))
new_user = ("INSERT INTO users (username, password, salt, role_fk) "
"VALUES (%s, %s, %s, %s);")
helpers.db_change(new_user, [username, password, salt, role])
flash('Your account was created!')
else:
flash('Please enter a username and password.')
return render_template('create_user.html')
Add re import for regexfrom flask import request, flash, render_template
import re
import bcrypt
from app import app, helpers
@app.route('/create_user', methods=['GET', 'POST'])
def create_user():
if request.method == 'POST':
username = request.form.get('username', None).strip() # Aa09_.- allowed
password = request.form.get('password', None)
role = request.form.get('role', 'Guest')
if re.match(r'^[\w.-]+$', username) and password:
# Form was completed with valid input
matching_user = "SELECT user_pk FROM users WHERE username = %s;"
user_does_exist = helpers.duplicate_check(matching_user, [username])
if user_does_exist:
flash('Username already exists')
else:
salt = bcrypt.gensalt(12)
password = bcrypt.hashpw(password.encode('utf-8'), bytes(salt))
new_user = ("INSERT INTO users (username, password, salt, role_fk) "
"VALUES (%s, %s, %s, %s);")
helpers.db_change(new_user, [username, password, salt, role])
flash('Your account was created!')
else:
flash('Please enter a username and password.')
return render_template('create_user.html')
|
<commit_before>from flask import request, flash, render_template
import bcrypt
from app import app, helpers
@app.route('/create_user', methods=['GET', 'POST'])
def create_user():
if request.method == 'POST':
username = request.form.get('username', None).strip() # Aa09_.- allowed
password = request.form.get('password', None)
role = request.form.get('role', 'Guest')
if re.match(r'^[\w.-]+$', username) and password:
# Form was completed with valid input
matching_user = "SELECT user_pk FROM users WHERE username = %s;"
user_does_exist = helpers.duplicate_check(matching_user, [username])
if user_does_exist:
flash('Username already exists')
else:
salt = bcrypt.gensalt(12)
password = bcrypt.hashpw(password.encode('utf-8'), bytes(salt))
new_user = ("INSERT INTO users (username, password, salt, role_fk) "
"VALUES (%s, %s, %s, %s);")
helpers.db_change(new_user, [username, password, salt, role])
flash('Your account was created!')
else:
flash('Please enter a username and password.')
return render_template('create_user.html')
<commit_msg>Add re import for regex<commit_after>from flask import request, flash, render_template
import re
import bcrypt
from app import app, helpers
@app.route('/create_user', methods=['GET', 'POST'])
def create_user():
if request.method == 'POST':
username = request.form.get('username', None).strip() # Aa09_.- allowed
password = request.form.get('password', None)
role = request.form.get('role', 'Guest')
if re.match(r'^[\w.-]+$', username) and password:
# Form was completed with valid input
matching_user = "SELECT user_pk FROM users WHERE username = %s;"
user_does_exist = helpers.duplicate_check(matching_user, [username])
if user_does_exist:
flash('Username already exists')
else:
salt = bcrypt.gensalt(12)
password = bcrypt.hashpw(password.encode('utf-8'), bytes(salt))
new_user = ("INSERT INTO users (username, password, salt, role_fk) "
"VALUES (%s, %s, %s, %s);")
helpers.db_change(new_user, [username, password, salt, role])
flash('Your account was created!')
else:
flash('Please enter a username and password.')
return render_template('create_user.html')
|
a7e22ba37a529ef8997cf252a715abd8dffaf763
|
solutions/generalsolution.py
|
solutions/generalsolution.py
|
'''generalsolution.py
Holds the general solution base class
Created on 22 Apr 2010
@author: Ian Huston
'''
import numpy as np
class GeneralSolution(object):
"""General solution base class."""
def __init__(self, fixture, srcclass):
"""Create a GeneralSolution object."""
self.srceqns = srcclass(fixture)
def full_source_from_model(self, m, nix):
pass
def J_A(self, C1, C2):
pass
def J_B(self, C3, C4):
pass
def J_C(self, C5):
pass
def J_D(self, C6, C7):
pass
|
'''generalsolution.py
Holds the general solution base class
Created on 22 Apr 2010
@author: Ian Huston
'''
import numpy as np
class GeneralSolution(object):
"""General solution base class."""
def __init__(self, fixture, srcclass):
"""Create a GeneralSolution object."""
self.srceqns = srcclass(fixture)
def full_source_from_model(self, m, nix):
pass
|
Remove unused functions from general solution.
|
Remove unused functions from general solution.
|
Python
|
bsd-3-clause
|
ihuston/pyflation,ihuston/pyflation
|
'''generalsolution.py
Holds the general solution base class
Created on 22 Apr 2010
@author: Ian Huston
'''
import numpy as np
class GeneralSolution(object):
"""General solution base class."""
def __init__(self, fixture, srcclass):
"""Create a GeneralSolution object."""
self.srceqns = srcclass(fixture)
def full_source_from_model(self, m, nix):
pass
def J_A(self, C1, C2):
pass
def J_B(self, C3, C4):
pass
def J_C(self, C5):
pass
def J_D(self, C6, C7):
pass
Remove unused functions from general solution.
|
'''generalsolution.py
Holds the general solution base class
Created on 22 Apr 2010
@author: Ian Huston
'''
import numpy as np
class GeneralSolution(object):
"""General solution base class."""
def __init__(self, fixture, srcclass):
"""Create a GeneralSolution object."""
self.srceqns = srcclass(fixture)
def full_source_from_model(self, m, nix):
pass
|
<commit_before>'''generalsolution.py
Holds the general solution base class
Created on 22 Apr 2010
@author: Ian Huston
'''
import numpy as np
class GeneralSolution(object):
"""General solution base class."""
def __init__(self, fixture, srcclass):
"""Create a GeneralSolution object."""
self.srceqns = srcclass(fixture)
def full_source_from_model(self, m, nix):
pass
def J_A(self, C1, C2):
pass
def J_B(self, C3, C4):
pass
def J_C(self, C5):
pass
def J_D(self, C6, C7):
pass
<commit_msg>Remove unused functions from general solution.<commit_after>
|
'''generalsolution.py
Holds the general solution base class
Created on 22 Apr 2010
@author: Ian Huston
'''
import numpy as np
class GeneralSolution(object):
"""General solution base class."""
def __init__(self, fixture, srcclass):
"""Create a GeneralSolution object."""
self.srceqns = srcclass(fixture)
def full_source_from_model(self, m, nix):
pass
|
'''generalsolution.py
Holds the general solution base class
Created on 22 Apr 2010
@author: Ian Huston
'''
import numpy as np
class GeneralSolution(object):
"""General solution base class."""
def __init__(self, fixture, srcclass):
"""Create a GeneralSolution object."""
self.srceqns = srcclass(fixture)
def full_source_from_model(self, m, nix):
pass
def J_A(self, C1, C2):
pass
def J_B(self, C3, C4):
pass
def J_C(self, C5):
pass
def J_D(self, C6, C7):
pass
Remove unused functions from general solution.'''generalsolution.py
Holds the general solution base class
Created on 22 Apr 2010
@author: Ian Huston
'''
import numpy as np
class GeneralSolution(object):
"""General solution base class."""
def __init__(self, fixture, srcclass):
"""Create a GeneralSolution object."""
self.srceqns = srcclass(fixture)
def full_source_from_model(self, m, nix):
pass
|
<commit_before>'''generalsolution.py
Holds the general solution base class
Created on 22 Apr 2010
@author: Ian Huston
'''
import numpy as np
class GeneralSolution(object):
"""General solution base class."""
def __init__(self, fixture, srcclass):
"""Create a GeneralSolution object."""
self.srceqns = srcclass(fixture)
def full_source_from_model(self, m, nix):
pass
def J_A(self, C1, C2):
pass
def J_B(self, C3, C4):
pass
def J_C(self, C5):
pass
def J_D(self, C6, C7):
pass
<commit_msg>Remove unused functions from general solution.<commit_after>'''generalsolution.py
Holds the general solution base class
Created on 22 Apr 2010
@author: Ian Huston
'''
import numpy as np
class GeneralSolution(object):
"""General solution base class."""
def __init__(self, fixture, srcclass):
"""Create a GeneralSolution object."""
self.srceqns = srcclass(fixture)
def full_source_from_model(self, m, nix):
pass
|
6fb7ae53fd8f47adcc10f61f11fc6954b93c4e1f
|
test_tododb.py
|
test_tododb.py
|
#!/usr/bin/python
# -*- coding:Utf-8 -*-
import unittest
from tododb import TodoDB, TodoAlreadyExist
class MaTest(unittest.TestCase):
def reinitialise(self):
tododb = TodoDB()
tododb.connect()
tododb.drop_db()
tododb.create_db()
return tododb
def test_create_a_db(self):
tododb = self.reinitialise()
def test_add_a_todo(self):
tododb = self.reinitialise()
was = tododb.todo_len()
tododb.add_todo("This is a new todo")
self.assertEqual(was + 1, tododb.todo_len())
def test_cant_add_two_time_the_same_todo(self):
tododb = self.reinitialise()
was = tododb.todo_len()
tododb.add_todo("This is a new todo")
self.assertEqual(was + 1, tododb.todo_len())
self.assertRaises(TodoAlreadyExist, tododb.add_todo, "This is a new todo")
if __name__ == "__main__":
unittest.main()
|
#!/usr/bin/python
# -*- coding:Utf-8 -*-
import unittest
from tododb import TodoDB, TodoAlreadyExist
class MaTest(unittest.TestCase):
def reinitialise(self):
"""
Reinitialise the db to make test with a clean one
"""
tododb = TodoDB()
tododb.connect()
tododb.drop_db()
tododb.create_db()
return tododb
def test_create_a_db(self):
"""
create a clean database to test
"""
tododb = self.reinitialise()
def test_add_a_todo(self):
"""
You should be able to add a new todo.
This should inscrease the number of todos by one
"""
tododb = self.reinitialise()
was = tododb.todo_len()
tododb.add_todo("This is a new todo")
self.assertEqual(was + 1, tododb.todo_len())
def test_cant_add_two_time_the_same_todo(self):
"""
You shouldn't be able to add two time a todo with the same description
"""
tododb = self.reinitialise()
was = tododb.todo_len()
tododb.add_todo("This is a new todo")
self.assertEqual(was + 1, tododb.todo_len())
self.assertRaises(TodoAlreadyExist, tododb.add_todo, "This is a new todo")
if __name__ == "__main__":
unittest.main()
|
Add some doc to the tests
|
Add some doc to the tests
|
Python
|
agpl-3.0
|
Psycojoker/holygrail
|
#!/usr/bin/python
# -*- coding:Utf-8 -*-
import unittest
from tododb import TodoDB, TodoAlreadyExist
class MaTest(unittest.TestCase):
def reinitialise(self):
tododb = TodoDB()
tododb.connect()
tododb.drop_db()
tododb.create_db()
return tododb
def test_create_a_db(self):
tododb = self.reinitialise()
def test_add_a_todo(self):
tododb = self.reinitialise()
was = tododb.todo_len()
tododb.add_todo("This is a new todo")
self.assertEqual(was + 1, tododb.todo_len())
def test_cant_add_two_time_the_same_todo(self):
tododb = self.reinitialise()
was = tododb.todo_len()
tododb.add_todo("This is a new todo")
self.assertEqual(was + 1, tododb.todo_len())
self.assertRaises(TodoAlreadyExist, tododb.add_todo, "This is a new todo")
if __name__ == "__main__":
unittest.main()
Add some doc to the tests
|
#!/usr/bin/python
# -*- coding:Utf-8 -*-
import unittest
from tododb import TodoDB, TodoAlreadyExist
class MaTest(unittest.TestCase):
def reinitialise(self):
"""
Reinitialise the db to make test with a clean one
"""
tododb = TodoDB()
tododb.connect()
tododb.drop_db()
tododb.create_db()
return tododb
def test_create_a_db(self):
"""
create a clean database to test
"""
tododb = self.reinitialise()
def test_add_a_todo(self):
"""
You should be able to add a new todo.
This should inscrease the number of todos by one
"""
tododb = self.reinitialise()
was = tododb.todo_len()
tododb.add_todo("This is a new todo")
self.assertEqual(was + 1, tododb.todo_len())
def test_cant_add_two_time_the_same_todo(self):
"""
You shouldn't be able to add two time a todo with the same description
"""
tododb = self.reinitialise()
was = tododb.todo_len()
tododb.add_todo("This is a new todo")
self.assertEqual(was + 1, tododb.todo_len())
self.assertRaises(TodoAlreadyExist, tododb.add_todo, "This is a new todo")
if __name__ == "__main__":
unittest.main()
|
<commit_before>#!/usr/bin/python
# -*- coding:Utf-8 -*-
import unittest
from tododb import TodoDB, TodoAlreadyExist
class MaTest(unittest.TestCase):
def reinitialise(self):
tododb = TodoDB()
tododb.connect()
tododb.drop_db()
tododb.create_db()
return tododb
def test_create_a_db(self):
tododb = self.reinitialise()
def test_add_a_todo(self):
tododb = self.reinitialise()
was = tododb.todo_len()
tododb.add_todo("This is a new todo")
self.assertEqual(was + 1, tododb.todo_len())
def test_cant_add_two_time_the_same_todo(self):
tododb = self.reinitialise()
was = tododb.todo_len()
tododb.add_todo("This is a new todo")
self.assertEqual(was + 1, tododb.todo_len())
self.assertRaises(TodoAlreadyExist, tododb.add_todo, "This is a new todo")
if __name__ == "__main__":
unittest.main()
<commit_msg>Add some doc to the tests<commit_after>
|
#!/usr/bin/python
# -*- coding:Utf-8 -*-
import unittest
from tododb import TodoDB, TodoAlreadyExist
class MaTest(unittest.TestCase):
def reinitialise(self):
"""
Reinitialise the db to make test with a clean one
"""
tododb = TodoDB()
tododb.connect()
tododb.drop_db()
tododb.create_db()
return tododb
def test_create_a_db(self):
"""
create a clean database to test
"""
tododb = self.reinitialise()
def test_add_a_todo(self):
"""
You should be able to add a new todo.
This should inscrease the number of todos by one
"""
tododb = self.reinitialise()
was = tododb.todo_len()
tododb.add_todo("This is a new todo")
self.assertEqual(was + 1, tododb.todo_len())
def test_cant_add_two_time_the_same_todo(self):
"""
You shouldn't be able to add two time a todo with the same description
"""
tododb = self.reinitialise()
was = tododb.todo_len()
tododb.add_todo("This is a new todo")
self.assertEqual(was + 1, tododb.todo_len())
self.assertRaises(TodoAlreadyExist, tododb.add_todo, "This is a new todo")
if __name__ == "__main__":
unittest.main()
|
#!/usr/bin/python
# -*- coding:Utf-8 -*-
import unittest
from tododb import TodoDB, TodoAlreadyExist
class MaTest(unittest.TestCase):
def reinitialise(self):
tododb = TodoDB()
tododb.connect()
tododb.drop_db()
tododb.create_db()
return tododb
def test_create_a_db(self):
tododb = self.reinitialise()
def test_add_a_todo(self):
tododb = self.reinitialise()
was = tododb.todo_len()
tododb.add_todo("This is a new todo")
self.assertEqual(was + 1, tododb.todo_len())
def test_cant_add_two_time_the_same_todo(self):
tododb = self.reinitialise()
was = tododb.todo_len()
tododb.add_todo("This is a new todo")
self.assertEqual(was + 1, tododb.todo_len())
self.assertRaises(TodoAlreadyExist, tododb.add_todo, "This is a new todo")
if __name__ == "__main__":
unittest.main()
Add some doc to the tests#!/usr/bin/python
# -*- coding:Utf-8 -*-
import unittest
from tododb import TodoDB, TodoAlreadyExist
class MaTest(unittest.TestCase):
def reinitialise(self):
"""
Reinitialise the db to make test with a clean one
"""
tododb = TodoDB()
tododb.connect()
tododb.drop_db()
tododb.create_db()
return tododb
def test_create_a_db(self):
"""
create a clean database to test
"""
tododb = self.reinitialise()
def test_add_a_todo(self):
"""
You should be able to add a new todo.
This should inscrease the number of todos by one
"""
tododb = self.reinitialise()
was = tododb.todo_len()
tododb.add_todo("This is a new todo")
self.assertEqual(was + 1, tododb.todo_len())
def test_cant_add_two_time_the_same_todo(self):
"""
You shouldn't be able to add two time a todo with the same description
"""
tododb = self.reinitialise()
was = tododb.todo_len()
tododb.add_todo("This is a new todo")
self.assertEqual(was + 1, tododb.todo_len())
self.assertRaises(TodoAlreadyExist, tododb.add_todo, "This is a new todo")
if __name__ == "__main__":
unittest.main()
|
<commit_before>#!/usr/bin/python
# -*- coding:Utf-8 -*-
import unittest
from tododb import TodoDB, TodoAlreadyExist
class MaTest(unittest.TestCase):
def reinitialise(self):
tododb = TodoDB()
tododb.connect()
tododb.drop_db()
tododb.create_db()
return tododb
def test_create_a_db(self):
tododb = self.reinitialise()
def test_add_a_todo(self):
tododb = self.reinitialise()
was = tododb.todo_len()
tododb.add_todo("This is a new todo")
self.assertEqual(was + 1, tododb.todo_len())
def test_cant_add_two_time_the_same_todo(self):
tododb = self.reinitialise()
was = tododb.todo_len()
tododb.add_todo("This is a new todo")
self.assertEqual(was + 1, tododb.todo_len())
self.assertRaises(TodoAlreadyExist, tododb.add_todo, "This is a new todo")
if __name__ == "__main__":
unittest.main()
<commit_msg>Add some doc to the tests<commit_after>#!/usr/bin/python
# -*- coding:Utf-8 -*-
import unittest
from tododb import TodoDB, TodoAlreadyExist
class MaTest(unittest.TestCase):
def reinitialise(self):
"""
Reinitialise the db to make test with a clean one
"""
tododb = TodoDB()
tododb.connect()
tododb.drop_db()
tododb.create_db()
return tododb
def test_create_a_db(self):
"""
create a clean database to test
"""
tododb = self.reinitialise()
def test_add_a_todo(self):
"""
You should be able to add a new todo.
This should inscrease the number of todos by one
"""
tododb = self.reinitialise()
was = tododb.todo_len()
tododb.add_todo("This is a new todo")
self.assertEqual(was + 1, tododb.todo_len())
def test_cant_add_two_time_the_same_todo(self):
"""
You shouldn't be able to add two time a todo with the same description
"""
tododb = self.reinitialise()
was = tododb.todo_len()
tododb.add_todo("This is a new todo")
self.assertEqual(was + 1, tododb.todo_len())
self.assertRaises(TodoAlreadyExist, tododb.add_todo, "This is a new todo")
if __name__ == "__main__":
unittest.main()
|
191ce5f1918ec5a9652475d19d806c5ffc8f0f1b
|
djstripe/__init__.py
|
djstripe/__init__.py
|
"""
dj-stripe - Django + Stripe Made Easy
"""
import pkg_resources
from django.apps import AppConfig
__version__ = pkg_resources.require("dj-stripe")[0].version
default_app_config = "djstripe.DjstripeAppConfig"
class DjstripeAppConfig(AppConfig):
"""
An AppConfig for dj-stripe which loads system checks
and event handlers once Django is ready.
"""
name = "djstripe"
def ready(self):
import stripe
from . import ( # noqa: Register the checks and event handlers
checks,
event_handlers,
)
# Set app info
# https://stripe.com/docs/building-plugins#setappinfo
stripe.set_app_info(
"dj-stripe",
version=__version__,
url="https://github.com/dj-stripe/dj-stripe",
)
|
"""
dj-stripe - Django + Stripe Made Easy
"""
import pkg_resources
from django.apps import AppConfig
__version__ = pkg_resources.get_distribution("dj-stripe").version
default_app_config = "djstripe.DjstripeAppConfig"
class DjstripeAppConfig(AppConfig):
"""
An AppConfig for dj-stripe which loads system checks
and event handlers once Django is ready.
"""
name = "djstripe"
def ready(self):
import stripe
from . import ( # noqa: Register the checks and event handlers
checks,
event_handlers,
)
# Set app info
# https://stripe.com/docs/building-plugins#setappinfo
stripe.set_app_info(
"dj-stripe",
version=__version__,
url="https://github.com/dj-stripe/dj-stripe",
)
|
Use pkg_resources.get_distribution instead of .require for version
|
Use pkg_resources.get_distribution instead of .require for version
|
Python
|
mit
|
dj-stripe/dj-stripe,pydanny/dj-stripe,pydanny/dj-stripe,dj-stripe/dj-stripe
|
"""
dj-stripe - Django + Stripe Made Easy
"""
import pkg_resources
from django.apps import AppConfig
__version__ = pkg_resources.require("dj-stripe")[0].version
default_app_config = "djstripe.DjstripeAppConfig"
class DjstripeAppConfig(AppConfig):
"""
An AppConfig for dj-stripe which loads system checks
and event handlers once Django is ready.
"""
name = "djstripe"
def ready(self):
import stripe
from . import ( # noqa: Register the checks and event handlers
checks,
event_handlers,
)
# Set app info
# https://stripe.com/docs/building-plugins#setappinfo
stripe.set_app_info(
"dj-stripe",
version=__version__,
url="https://github.com/dj-stripe/dj-stripe",
)
Use pkg_resources.get_distribution instead of .require for version
|
"""
dj-stripe - Django + Stripe Made Easy
"""
import pkg_resources
from django.apps import AppConfig
__version__ = pkg_resources.get_distribution("dj-stripe").version
default_app_config = "djstripe.DjstripeAppConfig"
class DjstripeAppConfig(AppConfig):
"""
An AppConfig for dj-stripe which loads system checks
and event handlers once Django is ready.
"""
name = "djstripe"
def ready(self):
import stripe
from . import ( # noqa: Register the checks and event handlers
checks,
event_handlers,
)
# Set app info
# https://stripe.com/docs/building-plugins#setappinfo
stripe.set_app_info(
"dj-stripe",
version=__version__,
url="https://github.com/dj-stripe/dj-stripe",
)
|
<commit_before>"""
dj-stripe - Django + Stripe Made Easy
"""
import pkg_resources
from django.apps import AppConfig
__version__ = pkg_resources.require("dj-stripe")[0].version
default_app_config = "djstripe.DjstripeAppConfig"
class DjstripeAppConfig(AppConfig):
"""
An AppConfig for dj-stripe which loads system checks
and event handlers once Django is ready.
"""
name = "djstripe"
def ready(self):
import stripe
from . import ( # noqa: Register the checks and event handlers
checks,
event_handlers,
)
# Set app info
# https://stripe.com/docs/building-plugins#setappinfo
stripe.set_app_info(
"dj-stripe",
version=__version__,
url="https://github.com/dj-stripe/dj-stripe",
)
<commit_msg>Use pkg_resources.get_distribution instead of .require for version<commit_after>
|
"""
dj-stripe - Django + Stripe Made Easy
"""
import pkg_resources
from django.apps import AppConfig
__version__ = pkg_resources.get_distribution("dj-stripe").version
default_app_config = "djstripe.DjstripeAppConfig"
class DjstripeAppConfig(AppConfig):
"""
An AppConfig for dj-stripe which loads system checks
and event handlers once Django is ready.
"""
name = "djstripe"
def ready(self):
import stripe
from . import ( # noqa: Register the checks and event handlers
checks,
event_handlers,
)
# Set app info
# https://stripe.com/docs/building-plugins#setappinfo
stripe.set_app_info(
"dj-stripe",
version=__version__,
url="https://github.com/dj-stripe/dj-stripe",
)
|
"""
dj-stripe - Django + Stripe Made Easy
"""
import pkg_resources
from django.apps import AppConfig
__version__ = pkg_resources.require("dj-stripe")[0].version
default_app_config = "djstripe.DjstripeAppConfig"
class DjstripeAppConfig(AppConfig):
"""
An AppConfig for dj-stripe which loads system checks
and event handlers once Django is ready.
"""
name = "djstripe"
def ready(self):
import stripe
from . import ( # noqa: Register the checks and event handlers
checks,
event_handlers,
)
# Set app info
# https://stripe.com/docs/building-plugins#setappinfo
stripe.set_app_info(
"dj-stripe",
version=__version__,
url="https://github.com/dj-stripe/dj-stripe",
)
Use pkg_resources.get_distribution instead of .require for version"""
dj-stripe - Django + Stripe Made Easy
"""
import pkg_resources
from django.apps import AppConfig
__version__ = pkg_resources.get_distribution("dj-stripe").version
default_app_config = "djstripe.DjstripeAppConfig"
class DjstripeAppConfig(AppConfig):
"""
An AppConfig for dj-stripe which loads system checks
and event handlers once Django is ready.
"""
name = "djstripe"
def ready(self):
import stripe
from . import ( # noqa: Register the checks and event handlers
checks,
event_handlers,
)
# Set app info
# https://stripe.com/docs/building-plugins#setappinfo
stripe.set_app_info(
"dj-stripe",
version=__version__,
url="https://github.com/dj-stripe/dj-stripe",
)
|
<commit_before>"""
dj-stripe - Django + Stripe Made Easy
"""
import pkg_resources
from django.apps import AppConfig
__version__ = pkg_resources.require("dj-stripe")[0].version
default_app_config = "djstripe.DjstripeAppConfig"
class DjstripeAppConfig(AppConfig):
"""
An AppConfig for dj-stripe which loads system checks
and event handlers once Django is ready.
"""
name = "djstripe"
def ready(self):
import stripe
from . import ( # noqa: Register the checks and event handlers
checks,
event_handlers,
)
# Set app info
# https://stripe.com/docs/building-plugins#setappinfo
stripe.set_app_info(
"dj-stripe",
version=__version__,
url="https://github.com/dj-stripe/dj-stripe",
)
<commit_msg>Use pkg_resources.get_distribution instead of .require for version<commit_after>"""
dj-stripe - Django + Stripe Made Easy
"""
import pkg_resources
from django.apps import AppConfig
__version__ = pkg_resources.get_distribution("dj-stripe").version
default_app_config = "djstripe.DjstripeAppConfig"
class DjstripeAppConfig(AppConfig):
"""
An AppConfig for dj-stripe which loads system checks
and event handlers once Django is ready.
"""
name = "djstripe"
def ready(self):
import stripe
from . import ( # noqa: Register the checks and event handlers
checks,
event_handlers,
)
# Set app info
# https://stripe.com/docs/building-plugins#setappinfo
stripe.set_app_info(
"dj-stripe",
version=__version__,
url="https://github.com/dj-stripe/dj-stripe",
)
|
786903e417c7dfd8773db10fcc7cd5fa1130272a
|
candidates/tests/test_caching.py
|
candidates/tests/test_caching.py
|
from __future__ import unicode_literals
from django_webtest import WebTest
from .auth import TestUserMixin
from .uk_examples import UK2015ExamplesMixin
class TestCaching(TestUserMixin, UK2015ExamplesMixin, WebTest):
def setUp(self):
super(TestCaching, self).setUp()
def test_unauth_user_cache_headers(self):
response = self.app.get(
'/election/2015/post/65808/dulwich-and-west-norwood',
)
headers = response.headerlist
seen_cache = False
for header, value in headers:
if header == 'Cache-Control':
seen_cache = True
self.assertTrue(value == 'max-age=1200')
self.assertTrue(seen_cache)
def test_auth_user_cache_headers(self):
response = self.app.get(
'/election/2015/post/65808/dulwich-and-west-norwood',
user=self.user
)
headers = response.headerlist
seen_cache = False
for header, value in headers:
if header == 'Cache-Control':
seen_cache = True
self.assertTrue(
value == 'no-cache, no-store, must-revalidate, max-age=0'
)
self.assertTrue(seen_cache)
|
from __future__ import unicode_literals
from django_webtest import WebTest
from .auth import TestUserMixin
from .uk_examples import UK2015ExamplesMixin
class TestCaching(TestUserMixin, UK2015ExamplesMixin, WebTest):
def setUp(self):
super(TestCaching, self).setUp()
def test_unauth_user_cache_headers(self):
response = self.app.get(
'/election/2015/post/65808/dulwich-and-west-norwood',
)
headers = response.headerlist
seen_cache = False
for header, value in headers:
if header == 'Cache-Control':
seen_cache = True
self.assertTrue(value == 'max-age=1200')
self.assertTrue(seen_cache)
def test_auth_user_cache_headers(self):
response = self.app.get(
'/election/2015/post/65808/dulwich-and-west-norwood',
user=self.user
)
headers = response.headerlist
seen_cache = False
for header, value in headers:
if header == 'Cache-Control':
seen_cache = True
values = set(value.split(', '))
self.assertEqual(
values,
{'no-cache', 'no-store', 'must-revalidate', 'max-age=0'}
)
self.assertTrue(seen_cache)
|
Make a test of multiple header values insensitve to their order
|
Make a test of multiple header values insensitve to their order
This test sometimes failed on Python 3 because the values in the header
were ordered differently - splitting them and comparing as a set should
fix that.
|
Python
|
agpl-3.0
|
DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative
|
from __future__ import unicode_literals
from django_webtest import WebTest
from .auth import TestUserMixin
from .uk_examples import UK2015ExamplesMixin
class TestCaching(TestUserMixin, UK2015ExamplesMixin, WebTest):
def setUp(self):
super(TestCaching, self).setUp()
def test_unauth_user_cache_headers(self):
response = self.app.get(
'/election/2015/post/65808/dulwich-and-west-norwood',
)
headers = response.headerlist
seen_cache = False
for header, value in headers:
if header == 'Cache-Control':
seen_cache = True
self.assertTrue(value == 'max-age=1200')
self.assertTrue(seen_cache)
def test_auth_user_cache_headers(self):
response = self.app.get(
'/election/2015/post/65808/dulwich-and-west-norwood',
user=self.user
)
headers = response.headerlist
seen_cache = False
for header, value in headers:
if header == 'Cache-Control':
seen_cache = True
self.assertTrue(
value == 'no-cache, no-store, must-revalidate, max-age=0'
)
self.assertTrue(seen_cache)
Make a test of multiple header values insensitve to their order
This test sometimes failed on Python 3 because the values in the header
were ordered differently - splitting them and comparing as a set should
fix that.
|
from __future__ import unicode_literals
from django_webtest import WebTest
from .auth import TestUserMixin
from .uk_examples import UK2015ExamplesMixin
class TestCaching(TestUserMixin, UK2015ExamplesMixin, WebTest):
def setUp(self):
super(TestCaching, self).setUp()
def test_unauth_user_cache_headers(self):
response = self.app.get(
'/election/2015/post/65808/dulwich-and-west-norwood',
)
headers = response.headerlist
seen_cache = False
for header, value in headers:
if header == 'Cache-Control':
seen_cache = True
self.assertTrue(value == 'max-age=1200')
self.assertTrue(seen_cache)
def test_auth_user_cache_headers(self):
response = self.app.get(
'/election/2015/post/65808/dulwich-and-west-norwood',
user=self.user
)
headers = response.headerlist
seen_cache = False
for header, value in headers:
if header == 'Cache-Control':
seen_cache = True
values = set(value.split(', '))
self.assertEqual(
values,
{'no-cache', 'no-store', 'must-revalidate', 'max-age=0'}
)
self.assertTrue(seen_cache)
|
<commit_before>from __future__ import unicode_literals
from django_webtest import WebTest
from .auth import TestUserMixin
from .uk_examples import UK2015ExamplesMixin
class TestCaching(TestUserMixin, UK2015ExamplesMixin, WebTest):
def setUp(self):
super(TestCaching, self).setUp()
def test_unauth_user_cache_headers(self):
response = self.app.get(
'/election/2015/post/65808/dulwich-and-west-norwood',
)
headers = response.headerlist
seen_cache = False
for header, value in headers:
if header == 'Cache-Control':
seen_cache = True
self.assertTrue(value == 'max-age=1200')
self.assertTrue(seen_cache)
def test_auth_user_cache_headers(self):
response = self.app.get(
'/election/2015/post/65808/dulwich-and-west-norwood',
user=self.user
)
headers = response.headerlist
seen_cache = False
for header, value in headers:
if header == 'Cache-Control':
seen_cache = True
self.assertTrue(
value == 'no-cache, no-store, must-revalidate, max-age=0'
)
self.assertTrue(seen_cache)
<commit_msg>Make a test of multiple header values insensitve to their order
This test sometimes failed on Python 3 because the values in the header
were ordered differently - splitting them and comparing as a set should
fix that.<commit_after>
|
from __future__ import unicode_literals
from django_webtest import WebTest
from .auth import TestUserMixin
from .uk_examples import UK2015ExamplesMixin
class TestCaching(TestUserMixin, UK2015ExamplesMixin, WebTest):
def setUp(self):
super(TestCaching, self).setUp()
def test_unauth_user_cache_headers(self):
response = self.app.get(
'/election/2015/post/65808/dulwich-and-west-norwood',
)
headers = response.headerlist
seen_cache = False
for header, value in headers:
if header == 'Cache-Control':
seen_cache = True
self.assertTrue(value == 'max-age=1200')
self.assertTrue(seen_cache)
def test_auth_user_cache_headers(self):
response = self.app.get(
'/election/2015/post/65808/dulwich-and-west-norwood',
user=self.user
)
headers = response.headerlist
seen_cache = False
for header, value in headers:
if header == 'Cache-Control':
seen_cache = True
values = set(value.split(', '))
self.assertEqual(
values,
{'no-cache', 'no-store', 'must-revalidate', 'max-age=0'}
)
self.assertTrue(seen_cache)
|
from __future__ import unicode_literals
from django_webtest import WebTest
from .auth import TestUserMixin
from .uk_examples import UK2015ExamplesMixin
class TestCaching(TestUserMixin, UK2015ExamplesMixin, WebTest):
def setUp(self):
super(TestCaching, self).setUp()
def test_unauth_user_cache_headers(self):
response = self.app.get(
'/election/2015/post/65808/dulwich-and-west-norwood',
)
headers = response.headerlist
seen_cache = False
for header, value in headers:
if header == 'Cache-Control':
seen_cache = True
self.assertTrue(value == 'max-age=1200')
self.assertTrue(seen_cache)
def test_auth_user_cache_headers(self):
response = self.app.get(
'/election/2015/post/65808/dulwich-and-west-norwood',
user=self.user
)
headers = response.headerlist
seen_cache = False
for header, value in headers:
if header == 'Cache-Control':
seen_cache = True
self.assertTrue(
value == 'no-cache, no-store, must-revalidate, max-age=0'
)
self.assertTrue(seen_cache)
Make a test of multiple header values insensitve to their order
This test sometimes failed on Python 3 because the values in the header
were ordered differently - splitting them and comparing as a set should
fix that.from __future__ import unicode_literals
from django_webtest import WebTest
from .auth import TestUserMixin
from .uk_examples import UK2015ExamplesMixin
class TestCaching(TestUserMixin, UK2015ExamplesMixin, WebTest):
def setUp(self):
super(TestCaching, self).setUp()
def test_unauth_user_cache_headers(self):
response = self.app.get(
'/election/2015/post/65808/dulwich-and-west-norwood',
)
headers = response.headerlist
seen_cache = False
for header, value in headers:
if header == 'Cache-Control':
seen_cache = True
self.assertTrue(value == 'max-age=1200')
self.assertTrue(seen_cache)
def test_auth_user_cache_headers(self):
response = self.app.get(
'/election/2015/post/65808/dulwich-and-west-norwood',
user=self.user
)
headers = response.headerlist
seen_cache = False
for header, value in headers:
if header == 'Cache-Control':
seen_cache = True
values = set(value.split(', '))
self.assertEqual(
values,
{'no-cache', 'no-store', 'must-revalidate', 'max-age=0'}
)
self.assertTrue(seen_cache)
|
<commit_before>from __future__ import unicode_literals
from django_webtest import WebTest
from .auth import TestUserMixin
from .uk_examples import UK2015ExamplesMixin
class TestCaching(TestUserMixin, UK2015ExamplesMixin, WebTest):
def setUp(self):
super(TestCaching, self).setUp()
def test_unauth_user_cache_headers(self):
response = self.app.get(
'/election/2015/post/65808/dulwich-and-west-norwood',
)
headers = response.headerlist
seen_cache = False
for header, value in headers:
if header == 'Cache-Control':
seen_cache = True
self.assertTrue(value == 'max-age=1200')
self.assertTrue(seen_cache)
def test_auth_user_cache_headers(self):
response = self.app.get(
'/election/2015/post/65808/dulwich-and-west-norwood',
user=self.user
)
headers = response.headerlist
seen_cache = False
for header, value in headers:
if header == 'Cache-Control':
seen_cache = True
self.assertTrue(
value == 'no-cache, no-store, must-revalidate, max-age=0'
)
self.assertTrue(seen_cache)
<commit_msg>Make a test of multiple header values insensitve to their order
This test sometimes failed on Python 3 because the values in the header
were ordered differently - splitting them and comparing as a set should
fix that.<commit_after>from __future__ import unicode_literals
from django_webtest import WebTest
from .auth import TestUserMixin
from .uk_examples import UK2015ExamplesMixin
class TestCaching(TestUserMixin, UK2015ExamplesMixin, WebTest):
def setUp(self):
super(TestCaching, self).setUp()
def test_unauth_user_cache_headers(self):
response = self.app.get(
'/election/2015/post/65808/dulwich-and-west-norwood',
)
headers = response.headerlist
seen_cache = False
for header, value in headers:
if header == 'Cache-Control':
seen_cache = True
self.assertTrue(value == 'max-age=1200')
self.assertTrue(seen_cache)
def test_auth_user_cache_headers(self):
response = self.app.get(
'/election/2015/post/65808/dulwich-and-west-norwood',
user=self.user
)
headers = response.headerlist
seen_cache = False
for header, value in headers:
if header == 'Cache-Control':
seen_cache = True
values = set(value.split(', '))
self.assertEqual(
values,
{'no-cache', 'no-store', 'must-revalidate', 'max-age=0'}
)
self.assertTrue(seen_cache)
|
17078f38c61fd012121aacb12683864592f31e79
|
bqueryd/util.py
|
bqueryd/util.py
|
import netifaces
def get_my_ip():
eth_interfaces = [ifname for ifname in netifaces.interfaces() if ifname.startswith('eth')]
if len(eth_interfaces) < 1:
ifname = 'lo'
else:
ifname = eth_interfaces[0]
for x in netifaces.ifaddresses(ifname)[netifaces.AF_INET]:
# Return first addr found
return x['addr']
|
import netifaces
import zmq
import random
def get_my_ip():
eth_interfaces = [ifname for ifname in netifaces.interfaces() if ifname.startswith('eth')]
if len(eth_interfaces) < 1:
ifname = 'lo'
else:
ifname = eth_interfaces[0]
for x in netifaces.ifaddresses(ifname)[netifaces.AF_INET]:
# Return first addr found
return x['addr']
def bind_to_random_port(socket, addr, min_port=49152, max_port=65536, max_tries=100):
"We can't just use the zmq.Socket.bind_to_random_port, as we wan't to set the identity before binding"
for i in range(max_tries):
try:
port = random.randrange(min_port, max_port)
socket.identity = '%s:%s' % (addr, port)
socket.bind('tcp://*:%s' % port)
#socket.bind('%s:%s' % (addr, port))
except zmq.ZMQError as exception:
en = exception.errno
if en == zmq.EADDRINUSE:
continue
else:
raise
else:
return socket.identity
raise zmq.ZMQBindError("Could not bind socket to random port.")
|
Add binding to random port with identity
|
Add binding to random port with identity
|
Python
|
bsd-3-clause
|
visualfabriq/bqueryd
|
import netifaces
def get_my_ip():
eth_interfaces = [ifname for ifname in netifaces.interfaces() if ifname.startswith('eth')]
if len(eth_interfaces) < 1:
ifname = 'lo'
else:
ifname = eth_interfaces[0]
for x in netifaces.ifaddresses(ifname)[netifaces.AF_INET]:
# Return first addr found
return x['addr']
Add binding to random port with identity
|
import netifaces
import zmq
import random
def get_my_ip():
eth_interfaces = [ifname for ifname in netifaces.interfaces() if ifname.startswith('eth')]
if len(eth_interfaces) < 1:
ifname = 'lo'
else:
ifname = eth_interfaces[0]
for x in netifaces.ifaddresses(ifname)[netifaces.AF_INET]:
# Return first addr found
return x['addr']
def bind_to_random_port(socket, addr, min_port=49152, max_port=65536, max_tries=100):
"We can't just use the zmq.Socket.bind_to_random_port, as we wan't to set the identity before binding"
for i in range(max_tries):
try:
port = random.randrange(min_port, max_port)
socket.identity = '%s:%s' % (addr, port)
socket.bind('tcp://*:%s' % port)
#socket.bind('%s:%s' % (addr, port))
except zmq.ZMQError as exception:
en = exception.errno
if en == zmq.EADDRINUSE:
continue
else:
raise
else:
return socket.identity
raise zmq.ZMQBindError("Could not bind socket to random port.")
|
<commit_before>import netifaces
def get_my_ip():
eth_interfaces = [ifname for ifname in netifaces.interfaces() if ifname.startswith('eth')]
if len(eth_interfaces) < 1:
ifname = 'lo'
else:
ifname = eth_interfaces[0]
for x in netifaces.ifaddresses(ifname)[netifaces.AF_INET]:
# Return first addr found
return x['addr']
<commit_msg>Add binding to random port with identity<commit_after>
|
import netifaces
import zmq
import random
def get_my_ip():
eth_interfaces = [ifname for ifname in netifaces.interfaces() if ifname.startswith('eth')]
if len(eth_interfaces) < 1:
ifname = 'lo'
else:
ifname = eth_interfaces[0]
for x in netifaces.ifaddresses(ifname)[netifaces.AF_INET]:
# Return first addr found
return x['addr']
def bind_to_random_port(socket, addr, min_port=49152, max_port=65536, max_tries=100):
"We can't just use the zmq.Socket.bind_to_random_port, as we wan't to set the identity before binding"
for i in range(max_tries):
try:
port = random.randrange(min_port, max_port)
socket.identity = '%s:%s' % (addr, port)
socket.bind('tcp://*:%s' % port)
#socket.bind('%s:%s' % (addr, port))
except zmq.ZMQError as exception:
en = exception.errno
if en == zmq.EADDRINUSE:
continue
else:
raise
else:
return socket.identity
raise zmq.ZMQBindError("Could not bind socket to random port.")
|
import netifaces
def get_my_ip():
eth_interfaces = [ifname for ifname in netifaces.interfaces() if ifname.startswith('eth')]
if len(eth_interfaces) < 1:
ifname = 'lo'
else:
ifname = eth_interfaces[0]
for x in netifaces.ifaddresses(ifname)[netifaces.AF_INET]:
# Return first addr found
return x['addr']
Add binding to random port with identityimport netifaces
import zmq
import random
def get_my_ip():
eth_interfaces = [ifname for ifname in netifaces.interfaces() if ifname.startswith('eth')]
if len(eth_interfaces) < 1:
ifname = 'lo'
else:
ifname = eth_interfaces[0]
for x in netifaces.ifaddresses(ifname)[netifaces.AF_INET]:
# Return first addr found
return x['addr']
def bind_to_random_port(socket, addr, min_port=49152, max_port=65536, max_tries=100):
"We can't just use the zmq.Socket.bind_to_random_port, as we wan't to set the identity before binding"
for i in range(max_tries):
try:
port = random.randrange(min_port, max_port)
socket.identity = '%s:%s' % (addr, port)
socket.bind('tcp://*:%s' % port)
#socket.bind('%s:%s' % (addr, port))
except zmq.ZMQError as exception:
en = exception.errno
if en == zmq.EADDRINUSE:
continue
else:
raise
else:
return socket.identity
raise zmq.ZMQBindError("Could not bind socket to random port.")
|
<commit_before>import netifaces
def get_my_ip():
eth_interfaces = [ifname for ifname in netifaces.interfaces() if ifname.startswith('eth')]
if len(eth_interfaces) < 1:
ifname = 'lo'
else:
ifname = eth_interfaces[0]
for x in netifaces.ifaddresses(ifname)[netifaces.AF_INET]:
# Return first addr found
return x['addr']
<commit_msg>Add binding to random port with identity<commit_after>import netifaces
import zmq
import random
def get_my_ip():
eth_interfaces = [ifname for ifname in netifaces.interfaces() if ifname.startswith('eth')]
if len(eth_interfaces) < 1:
ifname = 'lo'
else:
ifname = eth_interfaces[0]
for x in netifaces.ifaddresses(ifname)[netifaces.AF_INET]:
# Return first addr found
return x['addr']
def bind_to_random_port(socket, addr, min_port=49152, max_port=65536, max_tries=100):
"We can't just use the zmq.Socket.bind_to_random_port, as we wan't to set the identity before binding"
for i in range(max_tries):
try:
port = random.randrange(min_port, max_port)
socket.identity = '%s:%s' % (addr, port)
socket.bind('tcp://*:%s' % port)
#socket.bind('%s:%s' % (addr, port))
except zmq.ZMQError as exception:
en = exception.errno
if en == zmq.EADDRINUSE:
continue
else:
raise
else:
return socket.identity
raise zmq.ZMQBindError("Could not bind socket to random port.")
|
10b3e9a6b0af8058694811d2dc2771a476078b59
|
keep/commands/cmd_run.py
|
keep/commands/cmd_run.py
|
import json
import os
import re
import click
from keep import cli, utils
@click.command('run', short_help='Executes a saved command.')
@click.argument('pattern')
@cli.pass_context
def cli(ctx, pattern):
"""Executes a saved command."""
json_path = os.path.join(os.path.expanduser('~'), '.keep', 'commands.json')
if not os.path.exists(json_path):
click.echo('No commands to run. Add one by `keep new`.')
else:
FOUND = False
for cmd, desc in json.loads(open(json_path, 'r').read()).items():
if re.search(pattern, cmd + " :: " + desc):
FOUND = True
if click.confirm("Execute\n\n\t{}\n\n\n?".format(cmd), default=True):
os.system(cmd)
break
# Execute if all the parts of the pattern are in one command/desc
keywords_len = len(pattern.split())
i_keyword = 0
for keyword in pattern.split():
if keyword.lower() in cmd.lower() or keyword.lower() in desc.lower():
FOUND = True
i_keyword += 1
if i_keyword == keywords_len:
if click.confirm("Execute\n\n\t{}\n\n\n?".format(cmd), default=True):
os.system(cmd)
break
if not FOUND:
click.echo('No saved commands matches the pattern "{}"'.format(pattern))
|
import json
import os
import re
import click
from keep import cli, utils
@click.command('run', short_help='Executes a saved command.')
@click.argument('pattern')
@cli.pass_context
def cli(ctx, pattern):
"""Executes a saved command."""
json_path = os.path.join(os.path.expanduser('~'), '.keep', 'commands.json')
if not os.path.exists(json_path):
click.echo('No commands to run. Add one by `keep new`.')
else:
FOUND = False
for cmd, desc in json.loads(open(json_path, 'r').read()).items():
if re.search(pattern, cmd + " :: " + desc):
FOUND = True
if click.confirm("Execute\n\n\t{}\n\n\n?".format(cmd), default=True):
os.system(cmd)
break
# Execute if all the parts of the pattern are in one command/desc
keywords_len = len(pattern.split())
i_keyword = 0
for keyword in pattern.split():
if keyword.lower() in cmd.lower() or keyword.lower() in desc.lower():
FOUND = True
i_keyword += 1
if i_keyword == keywords_len:
if click.confirm("Execute\n\n\t{}\n\n\n?".format(cmd), default=True):
os.system(cmd)
break
if not FOUND:
click.echo('No saved commands matches the pattern "{}"'.format(pattern))
|
Fix repeated prompt bug in keep run
|
Fix repeated prompt bug in keep run
|
Python
|
mit
|
paci4416/keep,OrkoHunter/keep,paci4416/keep,OrkoHunter/keep
|
import json
import os
import re
import click
from keep import cli, utils
@click.command('run', short_help='Executes a saved command.')
@click.argument('pattern')
@cli.pass_context
def cli(ctx, pattern):
"""Executes a saved command."""
json_path = os.path.join(os.path.expanduser('~'), '.keep', 'commands.json')
if not os.path.exists(json_path):
click.echo('No commands to run. Add one by `keep new`.')
else:
FOUND = False
for cmd, desc in json.loads(open(json_path, 'r').read()).items():
if re.search(pattern, cmd + " :: " + desc):
FOUND = True
if click.confirm("Execute\n\n\t{}\n\n\n?".format(cmd), default=True):
os.system(cmd)
break
# Execute if all the parts of the pattern are in one command/desc
keywords_len = len(pattern.split())
i_keyword = 0
for keyword in pattern.split():
if keyword.lower() in cmd.lower() or keyword.lower() in desc.lower():
FOUND = True
i_keyword += 1
if i_keyword == keywords_len:
if click.confirm("Execute\n\n\t{}\n\n\n?".format(cmd), default=True):
os.system(cmd)
break
if not FOUND:
click.echo('No saved commands matches the pattern "{}"'.format(pattern))
Fix repeated prompt bug in keep run
|
import json
import os
import re
import click
from keep import cli, utils
@click.command('run', short_help='Executes a saved command.')
@click.argument('pattern')
@cli.pass_context
def cli(ctx, pattern):
"""Executes a saved command."""
json_path = os.path.join(os.path.expanduser('~'), '.keep', 'commands.json')
if not os.path.exists(json_path):
click.echo('No commands to run. Add one by `keep new`.')
else:
FOUND = False
for cmd, desc in json.loads(open(json_path, 'r').read()).items():
if re.search(pattern, cmd + " :: " + desc):
FOUND = True
if click.confirm("Execute\n\n\t{}\n\n\n?".format(cmd), default=True):
os.system(cmd)
break
# Execute if all the parts of the pattern are in one command/desc
keywords_len = len(pattern.split())
i_keyword = 0
for keyword in pattern.split():
if keyword.lower() in cmd.lower() or keyword.lower() in desc.lower():
FOUND = True
i_keyword += 1
if i_keyword == keywords_len:
if click.confirm("Execute\n\n\t{}\n\n\n?".format(cmd), default=True):
os.system(cmd)
break
if not FOUND:
click.echo('No saved commands matches the pattern "{}"'.format(pattern))
|
<commit_before>import json
import os
import re
import click
from keep import cli, utils
@click.command('run', short_help='Executes a saved command.')
@click.argument('pattern')
@cli.pass_context
def cli(ctx, pattern):
"""Executes a saved command."""
json_path = os.path.join(os.path.expanduser('~'), '.keep', 'commands.json')
if not os.path.exists(json_path):
click.echo('No commands to run. Add one by `keep new`.')
else:
FOUND = False
for cmd, desc in json.loads(open(json_path, 'r').read()).items():
if re.search(pattern, cmd + " :: " + desc):
FOUND = True
if click.confirm("Execute\n\n\t{}\n\n\n?".format(cmd), default=True):
os.system(cmd)
break
# Execute if all the parts of the pattern are in one command/desc
keywords_len = len(pattern.split())
i_keyword = 0
for keyword in pattern.split():
if keyword.lower() in cmd.lower() or keyword.lower() in desc.lower():
FOUND = True
i_keyword += 1
if i_keyword == keywords_len:
if click.confirm("Execute\n\n\t{}\n\n\n?".format(cmd), default=True):
os.system(cmd)
break
if not FOUND:
click.echo('No saved commands matches the pattern "{}"'.format(pattern))
<commit_msg>Fix repeated prompt bug in keep run<commit_after>
|
import json
import os
import re
import click
from keep import cli, utils
@click.command('run', short_help='Executes a saved command.')
@click.argument('pattern')
@cli.pass_context
def cli(ctx, pattern):
"""Executes a saved command."""
json_path = os.path.join(os.path.expanduser('~'), '.keep', 'commands.json')
if not os.path.exists(json_path):
click.echo('No commands to run. Add one by `keep new`.')
else:
FOUND = False
for cmd, desc in json.loads(open(json_path, 'r').read()).items():
if re.search(pattern, cmd + " :: " + desc):
FOUND = True
if click.confirm("Execute\n\n\t{}\n\n\n?".format(cmd), default=True):
os.system(cmd)
break
# Execute if all the parts of the pattern are in one command/desc
keywords_len = len(pattern.split())
i_keyword = 0
for keyword in pattern.split():
if keyword.lower() in cmd.lower() or keyword.lower() in desc.lower():
FOUND = True
i_keyword += 1
if i_keyword == keywords_len:
if click.confirm("Execute\n\n\t{}\n\n\n?".format(cmd), default=True):
os.system(cmd)
break
if not FOUND:
click.echo('No saved commands matches the pattern "{}"'.format(pattern))
|
import json
import os
import re
import click
from keep import cli, utils
@click.command('run', short_help='Executes a saved command.')
@click.argument('pattern')
@cli.pass_context
def cli(ctx, pattern):
"""Executes a saved command."""
json_path = os.path.join(os.path.expanduser('~'), '.keep', 'commands.json')
if not os.path.exists(json_path):
click.echo('No commands to run. Add one by `keep new`.')
else:
FOUND = False
for cmd, desc in json.loads(open(json_path, 'r').read()).items():
if re.search(pattern, cmd + " :: " + desc):
FOUND = True
if click.confirm("Execute\n\n\t{}\n\n\n?".format(cmd), default=True):
os.system(cmd)
break
# Execute if all the parts of the pattern are in one command/desc
keywords_len = len(pattern.split())
i_keyword = 0
for keyword in pattern.split():
if keyword.lower() in cmd.lower() or keyword.lower() in desc.lower():
FOUND = True
i_keyword += 1
if i_keyword == keywords_len:
if click.confirm("Execute\n\n\t{}\n\n\n?".format(cmd), default=True):
os.system(cmd)
break
if not FOUND:
click.echo('No saved commands matches the pattern "{}"'.format(pattern))
Fix repeated prompt bug in keep runimport json
import os
import re
import click
from keep import cli, utils
@click.command('run', short_help='Executes a saved command.')
@click.argument('pattern')
@cli.pass_context
def cli(ctx, pattern):
"""Executes a saved command."""
json_path = os.path.join(os.path.expanduser('~'), '.keep', 'commands.json')
if not os.path.exists(json_path):
click.echo('No commands to run. Add one by `keep new`.')
else:
FOUND = False
for cmd, desc in json.loads(open(json_path, 'r').read()).items():
if re.search(pattern, cmd + " :: " + desc):
FOUND = True
if click.confirm("Execute\n\n\t{}\n\n\n?".format(cmd), default=True):
os.system(cmd)
break
# Execute if all the parts of the pattern are in one command/desc
keywords_len = len(pattern.split())
i_keyword = 0
for keyword in pattern.split():
if keyword.lower() in cmd.lower() or keyword.lower() in desc.lower():
FOUND = True
i_keyword += 1
if i_keyword == keywords_len:
if click.confirm("Execute\n\n\t{}\n\n\n?".format(cmd), default=True):
os.system(cmd)
break
if not FOUND:
click.echo('No saved commands matches the pattern "{}"'.format(pattern))
|
<commit_before>import json
import os
import re
import click
from keep import cli, utils
@click.command('run', short_help='Executes a saved command.')
@click.argument('pattern')
@cli.pass_context
def cli(ctx, pattern):
"""Executes a saved command."""
json_path = os.path.join(os.path.expanduser('~'), '.keep', 'commands.json')
if not os.path.exists(json_path):
click.echo('No commands to run. Add one by `keep new`.')
else:
FOUND = False
for cmd, desc in json.loads(open(json_path, 'r').read()).items():
if re.search(pattern, cmd + " :: " + desc):
FOUND = True
if click.confirm("Execute\n\n\t{}\n\n\n?".format(cmd), default=True):
os.system(cmd)
break
# Execute if all the parts of the pattern are in one command/desc
keywords_len = len(pattern.split())
i_keyword = 0
for keyword in pattern.split():
if keyword.lower() in cmd.lower() or keyword.lower() in desc.lower():
FOUND = True
i_keyword += 1
if i_keyword == keywords_len:
if click.confirm("Execute\n\n\t{}\n\n\n?".format(cmd), default=True):
os.system(cmd)
break
if not FOUND:
click.echo('No saved commands matches the pattern "{}"'.format(pattern))
<commit_msg>Fix repeated prompt bug in keep run<commit_after>import json
import os
import re
import click
from keep import cli, utils
@click.command('run', short_help='Executes a saved command.')
@click.argument('pattern')
@cli.pass_context
def cli(ctx, pattern):
"""Executes a saved command."""
json_path = os.path.join(os.path.expanduser('~'), '.keep', 'commands.json')
if not os.path.exists(json_path):
click.echo('No commands to run. Add one by `keep new`.')
else:
FOUND = False
for cmd, desc in json.loads(open(json_path, 'r').read()).items():
if re.search(pattern, cmd + " :: " + desc):
FOUND = True
if click.confirm("Execute\n\n\t{}\n\n\n?".format(cmd), default=True):
os.system(cmd)
break
# Execute if all the parts of the pattern are in one command/desc
keywords_len = len(pattern.split())
i_keyword = 0
for keyword in pattern.split():
if keyword.lower() in cmd.lower() or keyword.lower() in desc.lower():
FOUND = True
i_keyword += 1
if i_keyword == keywords_len:
if click.confirm("Execute\n\n\t{}\n\n\n?".format(cmd), default=True):
os.system(cmd)
break
if not FOUND:
click.echo('No saved commands matches the pattern "{}"'.format(pattern))
|
b73b8797c3c9c6c9aa92bd6873e15a5b717f4142
|
test/test_nap.py
|
test/test_nap.py
|
"""
Tests for nap module.
These tests only focus that requests is called properly.
Everything related to HTTP requests should be tested in requests' own tests.
"""
import unittest
import requests
from nap.api import Api
class TestNap(unittest.TestCase):
def test_unallowed_method(self):
"""Tries to use non-existent HTTP method"""
api = Api('')
# lambda trickery is necessary, because otherwise it would raise
# AttributeError uncontrolled
self.assertRaises(AttributeError, lambda: api.resource.nonexisting)
def test_requests_raises_error(self):
"""Test that requests properly raises its own errors
>>> requests.get('/kk')
requests.exceptions.MissingSchema: Invalid URL u'/kk':
No schema supplied. Perhaps you meant http:///kk?
"""
api = Api('')
self.assertRaises(requests.exceptions.MissingSchema, api.resource.get)
def test_resource_not_callable(self):
"""Make sure resource can't be called directly"""
api = Api('')
self.assertRaises(TypeError, api.resource)
|
"""
Tests for nap module.
These tests only focus that requests is called properly.
Everything related to HTTP requests should be tested in requests' own tests.
"""
from mock import MagicMock, patch
import unittest
import requests
from nap.api import Api
class TestNap(unittest.TestCase):
def test_unallowed_method(self):
"""Tries to use non-existent HTTP method"""
api = Api('')
# lambda trickery is necessary, because otherwise it would raise
# AttributeError uncontrolled
self.assertRaises(AttributeError, lambda: api.resource.nonexisting)
def test_requests_raises_error(self):
"""Test that requests properly raises its own errors
>>> requests.get('/kk')
requests.exceptions.MissingSchema: Invalid URL u'/kk':
No schema supplied. Perhaps you meant http:///kk?
"""
api = Api('')
self.assertRaises(requests.exceptions.MissingSchema, api.resource.get)
def test_resource_not_callable(self):
"""Make sure resource can't be called directly"""
api = Api('')
self.assertRaises(TypeError, api.resource)
@patch('requests.get')
def test_default_parameters(self, requests_get):
"""Test default parameter behavior"""
api = Api('', auth=('user', 'password'))
requests.get = MagicMock(return_value=None)
# Make sure defaults are passed for each request
api.resource.get()
requests.get.assert_called_with('/resource', auth=('user', 'password'))
# Make sure single calls can override defaults
api.resource.get(auth=('defaults', 'overriden'))
requests.get.assert_called_with(
'/resource',
auth=('defaults', 'overriden')
)
|
Add tests which test default parameters for nap api
|
Add tests which test default parameters for nap api
|
Python
|
mit
|
kimmobrunfeldt/nap
|
"""
Tests for nap module.
These tests only focus that requests is called properly.
Everything related to HTTP requests should be tested in requests' own tests.
"""
import unittest
import requests
from nap.api import Api
class TestNap(unittest.TestCase):
def test_unallowed_method(self):
"""Tries to use non-existent HTTP method"""
api = Api('')
# lambda trickery is necessary, because otherwise it would raise
# AttributeError uncontrolled
self.assertRaises(AttributeError, lambda: api.resource.nonexisting)
def test_requests_raises_error(self):
"""Test that requests properly raises its own errors
>>> requests.get('/kk')
requests.exceptions.MissingSchema: Invalid URL u'/kk':
No schema supplied. Perhaps you meant http:///kk?
"""
api = Api('')
self.assertRaises(requests.exceptions.MissingSchema, api.resource.get)
def test_resource_not_callable(self):
"""Make sure resource can't be called directly"""
api = Api('')
self.assertRaises(TypeError, api.resource)
Add tests which test default parameters for nap api
|
"""
Tests for nap module.
These tests only focus that requests is called properly.
Everything related to HTTP requests should be tested in requests' own tests.
"""
from mock import MagicMock, patch
import unittest
import requests
from nap.api import Api
class TestNap(unittest.TestCase):
def test_unallowed_method(self):
"""Tries to use non-existent HTTP method"""
api = Api('')
# lambda trickery is necessary, because otherwise it would raise
# AttributeError uncontrolled
self.assertRaises(AttributeError, lambda: api.resource.nonexisting)
def test_requests_raises_error(self):
"""Test that requests properly raises its own errors
>>> requests.get('/kk')
requests.exceptions.MissingSchema: Invalid URL u'/kk':
No schema supplied. Perhaps you meant http:///kk?
"""
api = Api('')
self.assertRaises(requests.exceptions.MissingSchema, api.resource.get)
def test_resource_not_callable(self):
"""Make sure resource can't be called directly"""
api = Api('')
self.assertRaises(TypeError, api.resource)
@patch('requests.get')
def test_default_parameters(self, requests_get):
"""Test default parameter behavior"""
api = Api('', auth=('user', 'password'))
requests.get = MagicMock(return_value=None)
# Make sure defaults are passed for each request
api.resource.get()
requests.get.assert_called_with('/resource', auth=('user', 'password'))
# Make sure single calls can override defaults
api.resource.get(auth=('defaults', 'overriden'))
requests.get.assert_called_with(
'/resource',
auth=('defaults', 'overriden')
)
|
<commit_before>"""
Tests for nap module.
These tests only focus that requests is called properly.
Everything related to HTTP requests should be tested in requests' own tests.
"""
import unittest
import requests
from nap.api import Api
class TestNap(unittest.TestCase):
def test_unallowed_method(self):
"""Tries to use non-existent HTTP method"""
api = Api('')
# lambda trickery is necessary, because otherwise it would raise
# AttributeError uncontrolled
self.assertRaises(AttributeError, lambda: api.resource.nonexisting)
def test_requests_raises_error(self):
"""Test that requests properly raises its own errors
>>> requests.get('/kk')
requests.exceptions.MissingSchema: Invalid URL u'/kk':
No schema supplied. Perhaps you meant http:///kk?
"""
api = Api('')
self.assertRaises(requests.exceptions.MissingSchema, api.resource.get)
def test_resource_not_callable(self):
"""Make sure resource can't be called directly"""
api = Api('')
self.assertRaises(TypeError, api.resource)
<commit_msg>Add tests which test default parameters for nap api<commit_after>
|
"""
Tests for nap module.
These tests only focus that requests is called properly.
Everything related to HTTP requests should be tested in requests' own tests.
"""
from mock import MagicMock, patch
import unittest
import requests
from nap.api import Api
class TestNap(unittest.TestCase):
def test_unallowed_method(self):
"""Tries to use non-existent HTTP method"""
api = Api('')
# lambda trickery is necessary, because otherwise it would raise
# AttributeError uncontrolled
self.assertRaises(AttributeError, lambda: api.resource.nonexisting)
def test_requests_raises_error(self):
"""Test that requests properly raises its own errors
>>> requests.get('/kk')
requests.exceptions.MissingSchema: Invalid URL u'/kk':
No schema supplied. Perhaps you meant http:///kk?
"""
api = Api('')
self.assertRaises(requests.exceptions.MissingSchema, api.resource.get)
def test_resource_not_callable(self):
"""Make sure resource can't be called directly"""
api = Api('')
self.assertRaises(TypeError, api.resource)
@patch('requests.get')
def test_default_parameters(self, requests_get):
"""Test default parameter behavior"""
api = Api('', auth=('user', 'password'))
requests.get = MagicMock(return_value=None)
# Make sure defaults are passed for each request
api.resource.get()
requests.get.assert_called_with('/resource', auth=('user', 'password'))
# Make sure single calls can override defaults
api.resource.get(auth=('defaults', 'overriden'))
requests.get.assert_called_with(
'/resource',
auth=('defaults', 'overriden')
)
|
"""
Tests for nap module.
These tests only focus that requests is called properly.
Everything related to HTTP requests should be tested in requests' own tests.
"""
import unittest
import requests
from nap.api import Api
class TestNap(unittest.TestCase):
def test_unallowed_method(self):
"""Tries to use non-existent HTTP method"""
api = Api('')
# lambda trickery is necessary, because otherwise it would raise
# AttributeError uncontrolled
self.assertRaises(AttributeError, lambda: api.resource.nonexisting)
def test_requests_raises_error(self):
"""Test that requests properly raises its own errors
>>> requests.get('/kk')
requests.exceptions.MissingSchema: Invalid URL u'/kk':
No schema supplied. Perhaps you meant http:///kk?
"""
api = Api('')
self.assertRaises(requests.exceptions.MissingSchema, api.resource.get)
def test_resource_not_callable(self):
"""Make sure resource can't be called directly"""
api = Api('')
self.assertRaises(TypeError, api.resource)
Add tests which test default parameters for nap api"""
Tests for nap module.
These tests only focus that requests is called properly.
Everything related to HTTP requests should be tested in requests' own tests.
"""
from mock import MagicMock, patch
import unittest
import requests
from nap.api import Api
class TestNap(unittest.TestCase):
def test_unallowed_method(self):
"""Tries to use non-existent HTTP method"""
api = Api('')
# lambda trickery is necessary, because otherwise it would raise
# AttributeError uncontrolled
self.assertRaises(AttributeError, lambda: api.resource.nonexisting)
def test_requests_raises_error(self):
"""Test that requests properly raises its own errors
>>> requests.get('/kk')
requests.exceptions.MissingSchema: Invalid URL u'/kk':
No schema supplied. Perhaps you meant http:///kk?
"""
api = Api('')
self.assertRaises(requests.exceptions.MissingSchema, api.resource.get)
def test_resource_not_callable(self):
"""Make sure resource can't be called directly"""
api = Api('')
self.assertRaises(TypeError, api.resource)
@patch('requests.get')
def test_default_parameters(self, requests_get):
"""Test default parameter behavior"""
api = Api('', auth=('user', 'password'))
requests.get = MagicMock(return_value=None)
# Make sure defaults are passed for each request
api.resource.get()
requests.get.assert_called_with('/resource', auth=('user', 'password'))
# Make sure single calls can override defaults
api.resource.get(auth=('defaults', 'overriden'))
requests.get.assert_called_with(
'/resource',
auth=('defaults', 'overriden')
)
|
<commit_before>"""
Tests for nap module.
These tests only focus that requests is called properly.
Everything related to HTTP requests should be tested in requests' own tests.
"""
import unittest
import requests
from nap.api import Api
class TestNap(unittest.TestCase):
def test_unallowed_method(self):
"""Tries to use non-existent HTTP method"""
api = Api('')
# lambda trickery is necessary, because otherwise it would raise
# AttributeError uncontrolled
self.assertRaises(AttributeError, lambda: api.resource.nonexisting)
def test_requests_raises_error(self):
"""Test that requests properly raises its own errors
>>> requests.get('/kk')
requests.exceptions.MissingSchema: Invalid URL u'/kk':
No schema supplied. Perhaps you meant http:///kk?
"""
api = Api('')
self.assertRaises(requests.exceptions.MissingSchema, api.resource.get)
def test_resource_not_callable(self):
"""Make sure resource can't be called directly"""
api = Api('')
self.assertRaises(TypeError, api.resource)
<commit_msg>Add tests which test default parameters for nap api<commit_after>"""
Tests for nap module.
These tests only focus that requests is called properly.
Everything related to HTTP requests should be tested in requests' own tests.
"""
from mock import MagicMock, patch
import unittest
import requests
from nap.api import Api
class TestNap(unittest.TestCase):
def test_unallowed_method(self):
"""Tries to use non-existent HTTP method"""
api = Api('')
# lambda trickery is necessary, because otherwise it would raise
# AttributeError uncontrolled
self.assertRaises(AttributeError, lambda: api.resource.nonexisting)
def test_requests_raises_error(self):
"""Test that requests properly raises its own errors
>>> requests.get('/kk')
requests.exceptions.MissingSchema: Invalid URL u'/kk':
No schema supplied. Perhaps you meant http:///kk?
"""
api = Api('')
self.assertRaises(requests.exceptions.MissingSchema, api.resource.get)
def test_resource_not_callable(self):
"""Make sure resource can't be called directly"""
api = Api('')
self.assertRaises(TypeError, api.resource)
@patch('requests.get')
def test_default_parameters(self, requests_get):
"""Test default parameter behavior"""
api = Api('', auth=('user', 'password'))
requests.get = MagicMock(return_value=None)
# Make sure defaults are passed for each request
api.resource.get()
requests.get.assert_called_with('/resource', auth=('user', 'password'))
# Make sure single calls can override defaults
api.resource.get(auth=('defaults', 'overriden'))
requests.get.assert_called_with(
'/resource',
auth=('defaults', 'overriden')
)
|
30c875e1ba1dec3bcbd22850cd703198bcc5a1fb
|
peeringdb/migrations/0013_auto_20201207_2233.py
|
peeringdb/migrations/0013_auto_20201207_2233.py
|
# Generated by Django 3.1.3 on 2020-12-07 21:33
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("peeringdb", "0012_peerrecord_visible"),
]
def flush_peeringdb_tables(apps, schema_editor):
apps.get_model("peeringdb", "Contact").objects.all().delete()
apps.get_model("peeringdb", "Network").objects.all().delete()
apps.get_model("peeringdb", "NetworkIXLAN").objects.all().delete()
apps.get_model("peeringdb", "PeerRecord").objects.all().delete()
operations = [migrations.RunPython(flush_peeringdb_tables)]
|
# Generated by Django 3.1.3 on 2020-12-07 21:33
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("peeringdb", "0012_peerrecord_visible"),
]
def flush_peeringdb_tables(apps, schema_editor):
apps.get_model("peeringdb", "Contact").objects.all().delete()
apps.get_model("peeringdb", "Network").objects.all().delete()
apps.get_model("peeringdb", "NetworkIXLAN").objects.all().delete()
apps.get_model("peeringdb", "PeerRecord").objects.all().delete()
apps.get_model("peeringdb", "Synchronization").objects.all().delete()
operations = [migrations.RunPython(flush_peeringdb_tables)]
|
Remove PeeringDB sync records on migrate
|
Remove PeeringDB sync records on migrate
|
Python
|
apache-2.0
|
respawner/peering-manager,respawner/peering-manager,respawner/peering-manager,respawner/peering-manager
|
# Generated by Django 3.1.3 on 2020-12-07 21:33
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("peeringdb", "0012_peerrecord_visible"),
]
def flush_peeringdb_tables(apps, schema_editor):
apps.get_model("peeringdb", "Contact").objects.all().delete()
apps.get_model("peeringdb", "Network").objects.all().delete()
apps.get_model("peeringdb", "NetworkIXLAN").objects.all().delete()
apps.get_model("peeringdb", "PeerRecord").objects.all().delete()
operations = [migrations.RunPython(flush_peeringdb_tables)]
Remove PeeringDB sync records on migrate
|
# Generated by Django 3.1.3 on 2020-12-07 21:33
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("peeringdb", "0012_peerrecord_visible"),
]
def flush_peeringdb_tables(apps, schema_editor):
apps.get_model("peeringdb", "Contact").objects.all().delete()
apps.get_model("peeringdb", "Network").objects.all().delete()
apps.get_model("peeringdb", "NetworkIXLAN").objects.all().delete()
apps.get_model("peeringdb", "PeerRecord").objects.all().delete()
apps.get_model("peeringdb", "Synchronization").objects.all().delete()
operations = [migrations.RunPython(flush_peeringdb_tables)]
|
<commit_before># Generated by Django 3.1.3 on 2020-12-07 21:33
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("peeringdb", "0012_peerrecord_visible"),
]
def flush_peeringdb_tables(apps, schema_editor):
apps.get_model("peeringdb", "Contact").objects.all().delete()
apps.get_model("peeringdb", "Network").objects.all().delete()
apps.get_model("peeringdb", "NetworkIXLAN").objects.all().delete()
apps.get_model("peeringdb", "PeerRecord").objects.all().delete()
operations = [migrations.RunPython(flush_peeringdb_tables)]
<commit_msg>Remove PeeringDB sync records on migrate<commit_after>
|
# Generated by Django 3.1.3 on 2020-12-07 21:33
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("peeringdb", "0012_peerrecord_visible"),
]
def flush_peeringdb_tables(apps, schema_editor):
apps.get_model("peeringdb", "Contact").objects.all().delete()
apps.get_model("peeringdb", "Network").objects.all().delete()
apps.get_model("peeringdb", "NetworkIXLAN").objects.all().delete()
apps.get_model("peeringdb", "PeerRecord").objects.all().delete()
apps.get_model("peeringdb", "Synchronization").objects.all().delete()
operations = [migrations.RunPython(flush_peeringdb_tables)]
|
# Generated by Django 3.1.3 on 2020-12-07 21:33
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("peeringdb", "0012_peerrecord_visible"),
]
def flush_peeringdb_tables(apps, schema_editor):
apps.get_model("peeringdb", "Contact").objects.all().delete()
apps.get_model("peeringdb", "Network").objects.all().delete()
apps.get_model("peeringdb", "NetworkIXLAN").objects.all().delete()
apps.get_model("peeringdb", "PeerRecord").objects.all().delete()
operations = [migrations.RunPython(flush_peeringdb_tables)]
Remove PeeringDB sync records on migrate# Generated by Django 3.1.3 on 2020-12-07 21:33
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("peeringdb", "0012_peerrecord_visible"),
]
def flush_peeringdb_tables(apps, schema_editor):
apps.get_model("peeringdb", "Contact").objects.all().delete()
apps.get_model("peeringdb", "Network").objects.all().delete()
apps.get_model("peeringdb", "NetworkIXLAN").objects.all().delete()
apps.get_model("peeringdb", "PeerRecord").objects.all().delete()
apps.get_model("peeringdb", "Synchronization").objects.all().delete()
operations = [migrations.RunPython(flush_peeringdb_tables)]
|
<commit_before># Generated by Django 3.1.3 on 2020-12-07 21:33
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("peeringdb", "0012_peerrecord_visible"),
]
def flush_peeringdb_tables(apps, schema_editor):
apps.get_model("peeringdb", "Contact").objects.all().delete()
apps.get_model("peeringdb", "Network").objects.all().delete()
apps.get_model("peeringdb", "NetworkIXLAN").objects.all().delete()
apps.get_model("peeringdb", "PeerRecord").objects.all().delete()
operations = [migrations.RunPython(flush_peeringdb_tables)]
<commit_msg>Remove PeeringDB sync records on migrate<commit_after># Generated by Django 3.1.3 on 2020-12-07 21:33
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("peeringdb", "0012_peerrecord_visible"),
]
def flush_peeringdb_tables(apps, schema_editor):
apps.get_model("peeringdb", "Contact").objects.all().delete()
apps.get_model("peeringdb", "Network").objects.all().delete()
apps.get_model("peeringdb", "NetworkIXLAN").objects.all().delete()
apps.get_model("peeringdb", "PeerRecord").objects.all().delete()
apps.get_model("peeringdb", "Synchronization").objects.all().delete()
operations = [migrations.RunPython(flush_peeringdb_tables)]
|
bfe49055d0e63e37041bf99ecfb36a5584c263c6
|
sale_properties_easy_creation/mrp_property_formula.py
|
sale_properties_easy_creation/mrp_property_formula.py
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014-15 Agile Business Group sagl
# (<http://www.agilebg.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import orm, fields
class MrpPropertyFormula(orm.Model):
_name = 'mrp.property.formula'
_columns = {
'name': fields.char('Name', size=128),
'formula_text': fields.text('Formula'),
}
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014-15 Agile Business Group sagl
# (<http://www.agilebg.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import logging
from openerp.osv import orm, fields
from openerp.tools.translate import _
from openerp.tools.safe_eval import safe_eval
from openerp.exceptions import except_orm
_logger = logging.getLogger(__name__)
class MrpPropertyFormula(orm.Model):
_name = 'mrp.property.formula'
_columns = {
'name': fields.char('Name', size=128),
'formula_text': fields.text('Formula'),
}
def compute_formula(self, local_dict):
if ('result' not in self.formula_text):
raise except_orm(
_('Error'),
_("Formula must contain 'result' variable"))
safe_eval(self.formula_text, local_dict, mode="exec", nocopy=True)
return local_dict['result']
|
Add compute_formula method to MrpPropertyFormula to use in modules which depending on it
|
Add compute_formula method to MrpPropertyFormula to use in modules which depending on it
|
Python
|
agpl-3.0
|
acsone/sale-workflow,thomaspaulb/sale-workflow,factorlibre/sale-workflow,Eficent/sale-workflow,diagramsoftware/sale-workflow,jabibi/sale-workflow,akretion/sale-workflow,brain-tec/sale-workflow,ddico/sale-workflow,acsone/sale-workflow,brain-tec/sale-workflow,open-synergy/sale-workflow,akretion/sale-workflow,fevxie/sale-workflow,Antiun/sale-workflow,Endika/sale-workflow,xpansa/sale-workflow,BT-cserra/sale-workflow
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014-15 Agile Business Group sagl
# (<http://www.agilebg.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import orm, fields
class MrpPropertyFormula(orm.Model):
_name = 'mrp.property.formula'
_columns = {
'name': fields.char('Name', size=128),
'formula_text': fields.text('Formula'),
}
Add compute_formula method to MrpPropertyFormula to use in modules which depending on it
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014-15 Agile Business Group sagl
# (<http://www.agilebg.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import logging
from openerp.osv import orm, fields
from openerp.tools.translate import _
from openerp.tools.safe_eval import safe_eval
from openerp.exceptions import except_orm
_logger = logging.getLogger(__name__)
class MrpPropertyFormula(orm.Model):
_name = 'mrp.property.formula'
_columns = {
'name': fields.char('Name', size=128),
'formula_text': fields.text('Formula'),
}
def compute_formula(self, local_dict):
if ('result' not in self.formula_text):
raise except_orm(
_('Error'),
_("Formula must contain 'result' variable"))
safe_eval(self.formula_text, local_dict, mode="exec", nocopy=True)
return local_dict['result']
|
<commit_before># -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014-15 Agile Business Group sagl
# (<http://www.agilebg.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import orm, fields
class MrpPropertyFormula(orm.Model):
_name = 'mrp.property.formula'
_columns = {
'name': fields.char('Name', size=128),
'formula_text': fields.text('Formula'),
}
<commit_msg>Add compute_formula method to MrpPropertyFormula to use in modules which depending on it<commit_after>
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014-15 Agile Business Group sagl
# (<http://www.agilebg.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import logging
from openerp.osv import orm, fields
from openerp.tools.translate import _
from openerp.tools.safe_eval import safe_eval
from openerp.exceptions import except_orm
_logger = logging.getLogger(__name__)
class MrpPropertyFormula(orm.Model):
_name = 'mrp.property.formula'
_columns = {
'name': fields.char('Name', size=128),
'formula_text': fields.text('Formula'),
}
def compute_formula(self, local_dict):
if ('result' not in self.formula_text):
raise except_orm(
_('Error'),
_("Formula must contain 'result' variable"))
safe_eval(self.formula_text, local_dict, mode="exec", nocopy=True)
return local_dict['result']
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014-15 Agile Business Group sagl
# (<http://www.agilebg.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import orm, fields
class MrpPropertyFormula(orm.Model):
_name = 'mrp.property.formula'
_columns = {
'name': fields.char('Name', size=128),
'formula_text': fields.text('Formula'),
}
Add compute_formula method to MrpPropertyFormula to use in modules which depending on it# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014-15 Agile Business Group sagl
# (<http://www.agilebg.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import logging
from openerp.osv import orm, fields
from openerp.tools.translate import _
from openerp.tools.safe_eval import safe_eval
from openerp.exceptions import except_orm
_logger = logging.getLogger(__name__)
class MrpPropertyFormula(orm.Model):
_name = 'mrp.property.formula'
_columns = {
'name': fields.char('Name', size=128),
'formula_text': fields.text('Formula'),
}
def compute_formula(self, local_dict):
if ('result' not in self.formula_text):
raise except_orm(
_('Error'),
_("Formula must contain 'result' variable"))
safe_eval(self.formula_text, local_dict, mode="exec", nocopy=True)
return local_dict['result']
|
<commit_before># -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014-15 Agile Business Group sagl
# (<http://www.agilebg.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import orm, fields
class MrpPropertyFormula(orm.Model):
_name = 'mrp.property.formula'
_columns = {
'name': fields.char('Name', size=128),
'formula_text': fields.text('Formula'),
}
<commit_msg>Add compute_formula method to MrpPropertyFormula to use in modules which depending on it<commit_after># -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014-15 Agile Business Group sagl
# (<http://www.agilebg.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import logging
from openerp.osv import orm, fields
from openerp.tools.translate import _
from openerp.tools.safe_eval import safe_eval
from openerp.exceptions import except_orm
_logger = logging.getLogger(__name__)
class MrpPropertyFormula(orm.Model):
_name = 'mrp.property.formula'
_columns = {
'name': fields.char('Name', size=128),
'formula_text': fields.text('Formula'),
}
def compute_formula(self, local_dict):
if ('result' not in self.formula_text):
raise except_orm(
_('Error'),
_("Formula must contain 'result' variable"))
safe_eval(self.formula_text, local_dict, mode="exec", nocopy=True)
return local_dict['result']
|
a8e8c8c33075c4e60467da4e1f8e05e35351b07f
|
url_shortener/default_config.py
|
url_shortener/default_config.py
|
# -*- coding: utf-8 -*-
''' Default configuration for the application
This data must be supplemented with custom configuration to which
URL_SHORTENER_CONFIGURATION environment variable points, overriding
some of the values specified here.
:var SQLALCHEMY_DATABASE_URI: uri of database to be used by the application.
The default value servers only as documentation, and it was taken from:
http://docs.sqlalchemy.org/en/latest/core/engines.html#database-urls
'''
SQLALCHEMY_DATABASE_URI = (
'dialect+driver://username:password@host:port/database'
)
|
# -*- coding: utf-8 -*-
''' Default configuration for the application
This data must be supplemented with custom configuration to which
URL_SHORTENER_CONFIGURATION environment variable points, overriding
some of the values specified here.
:var SQLALCHEMY_DATABASE_URI: uri of database to be used by the application.
The default value servers only as documentation, and it was taken from:
http://docs.sqlalchemy.org/en/latest/core/engines.html#database-urls
:var MIN_NEW_ALIAS_LENGTH: a minimum number of characters in a newly
generated alias
:var MAX_NEW_ALIAS_LENGTH: a maximum number of characters in a newly
generated alias
'''
SQLALCHEMY_DATABASE_URI = (
'dialect+driver://username:password@host:port/database'
)
MIN_NEW_ALIAS_LENGTH = 1
MAX_NEW_ALIAS_LENGTH = 4
|
Add configuration values for length of newly generated aliases
|
Add configuration values for length of newly generated aliases
|
Python
|
mit
|
piotr-rusin/url-shortener,piotr-rusin/url-shortener
|
# -*- coding: utf-8 -*-
''' Default configuration for the application
This data must be supplemented with custom configuration to which
URL_SHORTENER_CONFIGURATION environment variable points, overriding
some of the values specified here.
:var SQLALCHEMY_DATABASE_URI: uri of database to be used by the application.
The default value servers only as documentation, and it was taken from:
http://docs.sqlalchemy.org/en/latest/core/engines.html#database-urls
'''
SQLALCHEMY_DATABASE_URI = (
'dialect+driver://username:password@host:port/database'
)
Add configuration values for length of newly generated aliases
|
# -*- coding: utf-8 -*-
''' Default configuration for the application
This data must be supplemented with custom configuration to which
URL_SHORTENER_CONFIGURATION environment variable points, overriding
some of the values specified here.
:var SQLALCHEMY_DATABASE_URI: uri of database to be used by the application.
The default value servers only as documentation, and it was taken from:
http://docs.sqlalchemy.org/en/latest/core/engines.html#database-urls
:var MIN_NEW_ALIAS_LENGTH: a minimum number of characters in a newly
generated alias
:var MAX_NEW_ALIAS_LENGTH: a maximum number of characters in a newly
generated alias
'''
SQLALCHEMY_DATABASE_URI = (
'dialect+driver://username:password@host:port/database'
)
MIN_NEW_ALIAS_LENGTH = 1
MAX_NEW_ALIAS_LENGTH = 4
|
<commit_before># -*- coding: utf-8 -*-
''' Default configuration for the application
This data must be supplemented with custom configuration to which
URL_SHORTENER_CONFIGURATION environment variable points, overriding
some of the values specified here.
:var SQLALCHEMY_DATABASE_URI: uri of database to be used by the application.
The default value servers only as documentation, and it was taken from:
http://docs.sqlalchemy.org/en/latest/core/engines.html#database-urls
'''
SQLALCHEMY_DATABASE_URI = (
'dialect+driver://username:password@host:port/database'
)
<commit_msg>Add configuration values for length of newly generated aliases<commit_after>
|
# -*- coding: utf-8 -*-
''' Default configuration for the application
This data must be supplemented with custom configuration to which
URL_SHORTENER_CONFIGURATION environment variable points, overriding
some of the values specified here.
:var SQLALCHEMY_DATABASE_URI: uri of database to be used by the application.
The default value servers only as documentation, and it was taken from:
http://docs.sqlalchemy.org/en/latest/core/engines.html#database-urls
:var MIN_NEW_ALIAS_LENGTH: a minimum number of characters in a newly
generated alias
:var MAX_NEW_ALIAS_LENGTH: a maximum number of characters in a newly
generated alias
'''
SQLALCHEMY_DATABASE_URI = (
'dialect+driver://username:password@host:port/database'
)
MIN_NEW_ALIAS_LENGTH = 1
MAX_NEW_ALIAS_LENGTH = 4
|
# -*- coding: utf-8 -*-
''' Default configuration for the application
This data must be supplemented with custom configuration to which
URL_SHORTENER_CONFIGURATION environment variable points, overriding
some of the values specified here.
:var SQLALCHEMY_DATABASE_URI: uri of database to be used by the application.
The default value servers only as documentation, and it was taken from:
http://docs.sqlalchemy.org/en/latest/core/engines.html#database-urls
'''
SQLALCHEMY_DATABASE_URI = (
'dialect+driver://username:password@host:port/database'
)
Add configuration values for length of newly generated aliases# -*- coding: utf-8 -*-
''' Default configuration for the application
This data must be supplemented with custom configuration to which
URL_SHORTENER_CONFIGURATION environment variable points, overriding
some of the values specified here.
:var SQLALCHEMY_DATABASE_URI: uri of database to be used by the application.
The default value servers only as documentation, and it was taken from:
http://docs.sqlalchemy.org/en/latest/core/engines.html#database-urls
:var MIN_NEW_ALIAS_LENGTH: a minimum number of characters in a newly
generated alias
:var MAX_NEW_ALIAS_LENGTH: a maximum number of characters in a newly
generated alias
'''
SQLALCHEMY_DATABASE_URI = (
'dialect+driver://username:password@host:port/database'
)
MIN_NEW_ALIAS_LENGTH = 1
MAX_NEW_ALIAS_LENGTH = 4
|
<commit_before># -*- coding: utf-8 -*-
''' Default configuration for the application
This data must be supplemented with custom configuration to which
URL_SHORTENER_CONFIGURATION environment variable points, overriding
some of the values specified here.
:var SQLALCHEMY_DATABASE_URI: uri of database to be used by the application.
The default value servers only as documentation, and it was taken from:
http://docs.sqlalchemy.org/en/latest/core/engines.html#database-urls
'''
SQLALCHEMY_DATABASE_URI = (
'dialect+driver://username:password@host:port/database'
)
<commit_msg>Add configuration values for length of newly generated aliases<commit_after># -*- coding: utf-8 -*-
''' Default configuration for the application
This data must be supplemented with custom configuration to which
URL_SHORTENER_CONFIGURATION environment variable points, overriding
some of the values specified here.
:var SQLALCHEMY_DATABASE_URI: uri of database to be used by the application.
The default value servers only as documentation, and it was taken from:
http://docs.sqlalchemy.org/en/latest/core/engines.html#database-urls
:var MIN_NEW_ALIAS_LENGTH: a minimum number of characters in a newly
generated alias
:var MAX_NEW_ALIAS_LENGTH: a maximum number of characters in a newly
generated alias
'''
SQLALCHEMY_DATABASE_URI = (
'dialect+driver://username:password@host:port/database'
)
MIN_NEW_ALIAS_LENGTH = 1
MAX_NEW_ALIAS_LENGTH = 4
|
564eb46a3e49ce94a913234895053acf3ef772f9
|
cdkmod/setup.py
|
cdkmod/setup.py
|
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
with open('README.rst') as f:
readme = f.read()
with open('LICENSE') as f:
license = f.read()
# I wish there was a way to do this w/o having to put data files in
# package dir. Couldn't ever get data_files arg working correctly...
setup(
name='cdk',
version='0.0.1',
description='Courseware Developement Kit based on asciidoc and deck.js',
long_description=readme,
author='Simeon Franklin',
author_email='simeonf@gmail.com',
url='https://github.com/twitter-university/cdk',
license=license,
packages=find_packages(exclude=('tests', 'docs')),
include_package_data=True,
entry_points = {'console_scripts': ['cdk = cdk:main']},
install_requires=['docopt', 'schema'],
classifiers=[
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"License :: OSI Approved :: BSD License",
]
)
|
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
with open('README.rst') as f:
readme = f.read()
with open('LICENSE') as f:
license = f.read()
# I wish there was a way to do this w/o having to put data files in
# package dir. Couldn't ever get data_files arg working correctly...
setup(
name='cdk',
version='0.0.1',
description='Courseware Developement Kit based on asciidoc and deck.js',
long_description=readme,
author='Simeon Franklin',
author_email='simeonf@gmail.com',
url='https://github.com/twitter-university/cdk',
license=license,
packages=find_packages(exclude=('tests', 'docs')),
include_package_data=True,
entry_points = {'console_scripts': ['cdk = cdk:main']},
install_requires=['docopt', 'pygments'],
classifiers=[
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"License :: OSI Approved :: Apache Software License"
]
)
|
Update requirements where it matters
|
Update requirements where it matters
|
Python
|
apache-2.0
|
twitter/cdk,twitter/cdk,twitter/cdk
|
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
with open('README.rst') as f:
readme = f.read()
with open('LICENSE') as f:
license = f.read()
# I wish there was a way to do this w/o having to put data files in
# package dir. Couldn't ever get data_files arg working correctly...
setup(
name='cdk',
version='0.0.1',
description='Courseware Developement Kit based on asciidoc and deck.js',
long_description=readme,
author='Simeon Franklin',
author_email='simeonf@gmail.com',
url='https://github.com/twitter-university/cdk',
license=license,
packages=find_packages(exclude=('tests', 'docs')),
include_package_data=True,
entry_points = {'console_scripts': ['cdk = cdk:main']},
install_requires=['docopt', 'schema'],
classifiers=[
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"License :: OSI Approved :: BSD License",
]
)
Update requirements where it matters
|
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
with open('README.rst') as f:
readme = f.read()
with open('LICENSE') as f:
license = f.read()
# I wish there was a way to do this w/o having to put data files in
# package dir. Couldn't ever get data_files arg working correctly...
setup(
name='cdk',
version='0.0.1',
description='Courseware Developement Kit based on asciidoc and deck.js',
long_description=readme,
author='Simeon Franklin',
author_email='simeonf@gmail.com',
url='https://github.com/twitter-university/cdk',
license=license,
packages=find_packages(exclude=('tests', 'docs')),
include_package_data=True,
entry_points = {'console_scripts': ['cdk = cdk:main']},
install_requires=['docopt', 'pygments'],
classifiers=[
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"License :: OSI Approved :: Apache Software License"
]
)
|
<commit_before># -*- coding: utf-8 -*-
from setuptools import setup, find_packages
with open('README.rst') as f:
readme = f.read()
with open('LICENSE') as f:
license = f.read()
# I wish there was a way to do this w/o having to put data files in
# package dir. Couldn't ever get data_files arg working correctly...
setup(
name='cdk',
version='0.0.1',
description='Courseware Developement Kit based on asciidoc and deck.js',
long_description=readme,
author='Simeon Franklin',
author_email='simeonf@gmail.com',
url='https://github.com/twitter-university/cdk',
license=license,
packages=find_packages(exclude=('tests', 'docs')),
include_package_data=True,
entry_points = {'console_scripts': ['cdk = cdk:main']},
install_requires=['docopt', 'schema'],
classifiers=[
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"License :: OSI Approved :: BSD License",
]
)
<commit_msg>Update requirements where it matters<commit_after>
|
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
with open('README.rst') as f:
readme = f.read()
with open('LICENSE') as f:
license = f.read()
# I wish there was a way to do this w/o having to put data files in
# package dir. Couldn't ever get data_files arg working correctly...
setup(
name='cdk',
version='0.0.1',
description='Courseware Developement Kit based on asciidoc and deck.js',
long_description=readme,
author='Simeon Franklin',
author_email='simeonf@gmail.com',
url='https://github.com/twitter-university/cdk',
license=license,
packages=find_packages(exclude=('tests', 'docs')),
include_package_data=True,
entry_points = {'console_scripts': ['cdk = cdk:main']},
install_requires=['docopt', 'pygments'],
classifiers=[
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"License :: OSI Approved :: Apache Software License"
]
)
|
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
with open('README.rst') as f:
readme = f.read()
with open('LICENSE') as f:
license = f.read()
# I wish there was a way to do this w/o having to put data files in
# package dir. Couldn't ever get data_files arg working correctly...
setup(
name='cdk',
version='0.0.1',
description='Courseware Developement Kit based on asciidoc and deck.js',
long_description=readme,
author='Simeon Franklin',
author_email='simeonf@gmail.com',
url='https://github.com/twitter-university/cdk',
license=license,
packages=find_packages(exclude=('tests', 'docs')),
include_package_data=True,
entry_points = {'console_scripts': ['cdk = cdk:main']},
install_requires=['docopt', 'schema'],
classifiers=[
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"License :: OSI Approved :: BSD License",
]
)
Update requirements where it matters# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
with open('README.rst') as f:
readme = f.read()
with open('LICENSE') as f:
license = f.read()
# I wish there was a way to do this w/o having to put data files in
# package dir. Couldn't ever get data_files arg working correctly...
setup(
name='cdk',
version='0.0.1',
description='Courseware Developement Kit based on asciidoc and deck.js',
long_description=readme,
author='Simeon Franklin',
author_email='simeonf@gmail.com',
url='https://github.com/twitter-university/cdk',
license=license,
packages=find_packages(exclude=('tests', 'docs')),
include_package_data=True,
entry_points = {'console_scripts': ['cdk = cdk:main']},
install_requires=['docopt', 'pygments'],
classifiers=[
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"License :: OSI Approved :: Apache Software License"
]
)
|
<commit_before># -*- coding: utf-8 -*-
from setuptools import setup, find_packages
with open('README.rst') as f:
readme = f.read()
with open('LICENSE') as f:
license = f.read()
# I wish there was a way to do this w/o having to put data files in
# package dir. Couldn't ever get data_files arg working correctly...
setup(
name='cdk',
version='0.0.1',
description='Courseware Developement Kit based on asciidoc and deck.js',
long_description=readme,
author='Simeon Franklin',
author_email='simeonf@gmail.com',
url='https://github.com/twitter-university/cdk',
license=license,
packages=find_packages(exclude=('tests', 'docs')),
include_package_data=True,
entry_points = {'console_scripts': ['cdk = cdk:main']},
install_requires=['docopt', 'schema'],
classifiers=[
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"License :: OSI Approved :: BSD License",
]
)
<commit_msg>Update requirements where it matters<commit_after># -*- coding: utf-8 -*-
from setuptools import setup, find_packages
with open('README.rst') as f:
readme = f.read()
with open('LICENSE') as f:
license = f.read()
# I wish there was a way to do this w/o having to put data files in
# package dir. Couldn't ever get data_files arg working correctly...
setup(
name='cdk',
version='0.0.1',
description='Courseware Developement Kit based on asciidoc and deck.js',
long_description=readme,
author='Simeon Franklin',
author_email='simeonf@gmail.com',
url='https://github.com/twitter-university/cdk',
license=license,
packages=find_packages(exclude=('tests', 'docs')),
include_package_data=True,
entry_points = {'console_scripts': ['cdk = cdk:main']},
install_requires=['docopt', 'pygments'],
classifiers=[
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"License :: OSI Approved :: Apache Software License"
]
)
|
c42e0974424d056e306e3f51e8345f2a9600b2dc
|
extract_language_package.py
|
extract_language_package.py
|
import optparse
import os
import glob
optparser = optparse.OptionParser()
optparser.add_option("-f", "--filename", dest="filename", help="Language package file")
optparser.add_option("-d", "--destination", dest="destination", help="Base destination folder")
optparser.add_option("-l", "--language", dest="language", help="Language to un-package")
(opts, _) = optparser.parse_args()
full_destination_path = opts.destination + "/" + opts.language
if not os.path.exists(full_destination_path):
os.makedirs(full_destination_path)
outer_untar_command = "tar -xvf " + opts.filename + " -C " + full_destination_path
print(outer_untar_command)
os.system(outer_untar_command)
for inner_filename in glob.glob(full_destination_path+"/*.tar.gz"):
inner_untar_command = "tar -xvzf " + inner_filename + " -C " + full_destination_path
print(inner_untar_command)
os.system(inner_untar_command)
delete_intermediate_files_command = "rm " + full_destination_path + "/*.tar.gz"
print(delete_intermediate_files_command)
os.system(delete_intermediate_files_command)
|
import optparse
import os
import glob
optparser = optparse.OptionParser()
optparser.add_option("-f", "--filename", dest="filename", help="Language package file")
optparser.add_option("-d", "--destination", dest="destination", help="Base destination folder")
optparser.add_option("-l", "--language", dest="language", help="Language to un-package")
(opts, _) = optparser.parse_args()
full_destination_path = opts.destination + "/" + opts.language
if not os.path.exists(full_destination_path):
os.makedirs(full_destination_path)
outer_untar_command = "tar -xvf " + opts.filename + " -C " + full_destination_path
print(outer_untar_command)
os.system(outer_untar_command)
for inner_filename in glob.glob(full_destination_path+"/*.tar.gz"):
inner_untar_command = "tar -xvzf " + inner_filename + " -C " + full_destination_path
print(inner_untar_command)
os.system(inner_untar_command)
inner_delete_command = "rm " + inner_filename
print(inner_delete_command)
os.system(inner_delete_command)
|
Update extract language package to use inner delete commands to reduce the amount of space used at any given point in time.
|
Update extract language package to use inner delete commands to reduce the
amount of space used at any given point in time.
|
Python
|
mit
|
brendandc/multilingual-google-image-scraper
|
import optparse
import os
import glob
optparser = optparse.OptionParser()
optparser.add_option("-f", "--filename", dest="filename", help="Language package file")
optparser.add_option("-d", "--destination", dest="destination", help="Base destination folder")
optparser.add_option("-l", "--language", dest="language", help="Language to un-package")
(opts, _) = optparser.parse_args()
full_destination_path = opts.destination + "/" + opts.language
if not os.path.exists(full_destination_path):
os.makedirs(full_destination_path)
outer_untar_command = "tar -xvf " + opts.filename + " -C " + full_destination_path
print(outer_untar_command)
os.system(outer_untar_command)
for inner_filename in glob.glob(full_destination_path+"/*.tar.gz"):
inner_untar_command = "tar -xvzf " + inner_filename + " -C " + full_destination_path
print(inner_untar_command)
os.system(inner_untar_command)
delete_intermediate_files_command = "rm " + full_destination_path + "/*.tar.gz"
print(delete_intermediate_files_command)
os.system(delete_intermediate_files_command)Update extract language package to use inner delete commands to reduce the
amount of space used at any given point in time.
|
import optparse
import os
import glob
optparser = optparse.OptionParser()
optparser.add_option("-f", "--filename", dest="filename", help="Language package file")
optparser.add_option("-d", "--destination", dest="destination", help="Base destination folder")
optparser.add_option("-l", "--language", dest="language", help="Language to un-package")
(opts, _) = optparser.parse_args()
full_destination_path = opts.destination + "/" + opts.language
if not os.path.exists(full_destination_path):
os.makedirs(full_destination_path)
outer_untar_command = "tar -xvf " + opts.filename + " -C " + full_destination_path
print(outer_untar_command)
os.system(outer_untar_command)
for inner_filename in glob.glob(full_destination_path+"/*.tar.gz"):
inner_untar_command = "tar -xvzf " + inner_filename + " -C " + full_destination_path
print(inner_untar_command)
os.system(inner_untar_command)
inner_delete_command = "rm " + inner_filename
print(inner_delete_command)
os.system(inner_delete_command)
|
<commit_before>import optparse
import os
import glob
optparser = optparse.OptionParser()
optparser.add_option("-f", "--filename", dest="filename", help="Language package file")
optparser.add_option("-d", "--destination", dest="destination", help="Base destination folder")
optparser.add_option("-l", "--language", dest="language", help="Language to un-package")
(opts, _) = optparser.parse_args()
full_destination_path = opts.destination + "/" + opts.language
if not os.path.exists(full_destination_path):
os.makedirs(full_destination_path)
outer_untar_command = "tar -xvf " + opts.filename + " -C " + full_destination_path
print(outer_untar_command)
os.system(outer_untar_command)
for inner_filename in glob.glob(full_destination_path+"/*.tar.gz"):
inner_untar_command = "tar -xvzf " + inner_filename + " -C " + full_destination_path
print(inner_untar_command)
os.system(inner_untar_command)
delete_intermediate_files_command = "rm " + full_destination_path + "/*.tar.gz"
print(delete_intermediate_files_command)
os.system(delete_intermediate_files_command)<commit_msg>Update extract language package to use inner delete commands to reduce the
amount of space used at any given point in time.<commit_after>
|
import optparse
import os
import glob
optparser = optparse.OptionParser()
optparser.add_option("-f", "--filename", dest="filename", help="Language package file")
optparser.add_option("-d", "--destination", dest="destination", help="Base destination folder")
optparser.add_option("-l", "--language", dest="language", help="Language to un-package")
(opts, _) = optparser.parse_args()
full_destination_path = opts.destination + "/" + opts.language
if not os.path.exists(full_destination_path):
os.makedirs(full_destination_path)
outer_untar_command = "tar -xvf " + opts.filename + " -C " + full_destination_path
print(outer_untar_command)
os.system(outer_untar_command)
for inner_filename in glob.glob(full_destination_path+"/*.tar.gz"):
inner_untar_command = "tar -xvzf " + inner_filename + " -C " + full_destination_path
print(inner_untar_command)
os.system(inner_untar_command)
inner_delete_command = "rm " + inner_filename
print(inner_delete_command)
os.system(inner_delete_command)
|
import optparse
import os
import glob
optparser = optparse.OptionParser()
optparser.add_option("-f", "--filename", dest="filename", help="Language package file")
optparser.add_option("-d", "--destination", dest="destination", help="Base destination folder")
optparser.add_option("-l", "--language", dest="language", help="Language to un-package")
(opts, _) = optparser.parse_args()
full_destination_path = opts.destination + "/" + opts.language
if not os.path.exists(full_destination_path):
os.makedirs(full_destination_path)
outer_untar_command = "tar -xvf " + opts.filename + " -C " + full_destination_path
print(outer_untar_command)
os.system(outer_untar_command)
for inner_filename in glob.glob(full_destination_path+"/*.tar.gz"):
inner_untar_command = "tar -xvzf " + inner_filename + " -C " + full_destination_path
print(inner_untar_command)
os.system(inner_untar_command)
delete_intermediate_files_command = "rm " + full_destination_path + "/*.tar.gz"
print(delete_intermediate_files_command)
os.system(delete_intermediate_files_command)Update extract language package to use inner delete commands to reduce the
amount of space used at any given point in time.import optparse
import os
import glob
optparser = optparse.OptionParser()
optparser.add_option("-f", "--filename", dest="filename", help="Language package file")
optparser.add_option("-d", "--destination", dest="destination", help="Base destination folder")
optparser.add_option("-l", "--language", dest="language", help="Language to un-package")
(opts, _) = optparser.parse_args()
full_destination_path = opts.destination + "/" + opts.language
if not os.path.exists(full_destination_path):
os.makedirs(full_destination_path)
outer_untar_command = "tar -xvf " + opts.filename + " -C " + full_destination_path
print(outer_untar_command)
os.system(outer_untar_command)
for inner_filename in glob.glob(full_destination_path+"/*.tar.gz"):
inner_untar_command = "tar -xvzf " + inner_filename + " -C " + full_destination_path
print(inner_untar_command)
os.system(inner_untar_command)
inner_delete_command = "rm " + inner_filename
print(inner_delete_command)
os.system(inner_delete_command)
|
<commit_before>import optparse
import os
import glob
optparser = optparse.OptionParser()
optparser.add_option("-f", "--filename", dest="filename", help="Language package file")
optparser.add_option("-d", "--destination", dest="destination", help="Base destination folder")
optparser.add_option("-l", "--language", dest="language", help="Language to un-package")
(opts, _) = optparser.parse_args()
full_destination_path = opts.destination + "/" + opts.language
if not os.path.exists(full_destination_path):
os.makedirs(full_destination_path)
outer_untar_command = "tar -xvf " + opts.filename + " -C " + full_destination_path
print(outer_untar_command)
os.system(outer_untar_command)
for inner_filename in glob.glob(full_destination_path+"/*.tar.gz"):
inner_untar_command = "tar -xvzf " + inner_filename + " -C " + full_destination_path
print(inner_untar_command)
os.system(inner_untar_command)
delete_intermediate_files_command = "rm " + full_destination_path + "/*.tar.gz"
print(delete_intermediate_files_command)
os.system(delete_intermediate_files_command)<commit_msg>Update extract language package to use inner delete commands to reduce the
amount of space used at any given point in time.<commit_after>import optparse
import os
import glob
optparser = optparse.OptionParser()
optparser.add_option("-f", "--filename", dest="filename", help="Language package file")
optparser.add_option("-d", "--destination", dest="destination", help="Base destination folder")
optparser.add_option("-l", "--language", dest="language", help="Language to un-package")
(opts, _) = optparser.parse_args()
full_destination_path = opts.destination + "/" + opts.language
if not os.path.exists(full_destination_path):
os.makedirs(full_destination_path)
outer_untar_command = "tar -xvf " + opts.filename + " -C " + full_destination_path
print(outer_untar_command)
os.system(outer_untar_command)
for inner_filename in glob.glob(full_destination_path+"/*.tar.gz"):
inner_untar_command = "tar -xvzf " + inner_filename + " -C " + full_destination_path
print(inner_untar_command)
os.system(inner_untar_command)
inner_delete_command = "rm " + inner_filename
print(inner_delete_command)
os.system(inner_delete_command)
|
fa518cdae22c1a762a593f3c4c67fadb04beb5e6
|
corehq/apps/reports/standard/cases/case_list_explorer.py
|
corehq/apps/reports/standard/cases/case_list_explorer.py
|
from __future__ import absolute_import, unicode_literals
from django.utils.translation import ugettext_lazy as _
from corehq.apps.es.case_search import CaseSearchES
from corehq.apps.reports.standard.cases.basic import CaseListReport
class CaseListExplorer(CaseListReport):
name = _('Case List Explorer')
slug = 'case_list_explorer'
search_class = CaseSearchES
|
from __future__ import absolute_import, unicode_literals
from django.utils.translation import ugettext_lazy as _
from corehq.apps.es.case_search import CaseSearchES
from corehq.apps.reports.standard.cases.basic import CaseListReport
from corehq.apps.reports.standard.cases.filters import (
XpathCaseSearchFilter,
)
class CaseListExplorer(CaseListReport):
name = _('Case List Explorer')
slug = 'case_list_explorer'
search_class = CaseSearchES
fields = [
'corehq.apps.reports.filters.case_list.CaseListFilter',
'corehq.apps.reports.filters.select.CaseTypeFilter',
'corehq.apps.reports.filters.select.SelectOpenCloseFilter',
XpathCaseSearchFilter,
]
def get_data(self):
for row in self.es_results['hits'].get('hits', []):
yield flatten_result(row)
def _build_query(self):
query = super(CaseListExplorer, self)._build_query()
xpath = XpathCaseSearchFilter.get_value(self.request, self.domain)
if xpath:
query = query.xpath_query(self.domain, xpath)
return query
|
Add XPath Query filter to report
|
Add XPath Query filter to report
|
Python
|
bsd-3-clause
|
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
|
from __future__ import absolute_import, unicode_literals
from django.utils.translation import ugettext_lazy as _
from corehq.apps.es.case_search import CaseSearchES
from corehq.apps.reports.standard.cases.basic import CaseListReport
class CaseListExplorer(CaseListReport):
name = _('Case List Explorer')
slug = 'case_list_explorer'
search_class = CaseSearchES
Add XPath Query filter to report
|
from __future__ import absolute_import, unicode_literals
from django.utils.translation import ugettext_lazy as _
from corehq.apps.es.case_search import CaseSearchES
from corehq.apps.reports.standard.cases.basic import CaseListReport
from corehq.apps.reports.standard.cases.filters import (
XpathCaseSearchFilter,
)
class CaseListExplorer(CaseListReport):
name = _('Case List Explorer')
slug = 'case_list_explorer'
search_class = CaseSearchES
fields = [
'corehq.apps.reports.filters.case_list.CaseListFilter',
'corehq.apps.reports.filters.select.CaseTypeFilter',
'corehq.apps.reports.filters.select.SelectOpenCloseFilter',
XpathCaseSearchFilter,
]
def get_data(self):
for row in self.es_results['hits'].get('hits', []):
yield flatten_result(row)
def _build_query(self):
query = super(CaseListExplorer, self)._build_query()
xpath = XpathCaseSearchFilter.get_value(self.request, self.domain)
if xpath:
query = query.xpath_query(self.domain, xpath)
return query
|
<commit_before>from __future__ import absolute_import, unicode_literals
from django.utils.translation import ugettext_lazy as _
from corehq.apps.es.case_search import CaseSearchES
from corehq.apps.reports.standard.cases.basic import CaseListReport
class CaseListExplorer(CaseListReport):
name = _('Case List Explorer')
slug = 'case_list_explorer'
search_class = CaseSearchES
<commit_msg>Add XPath Query filter to report<commit_after>
|
from __future__ import absolute_import, unicode_literals
from django.utils.translation import ugettext_lazy as _
from corehq.apps.es.case_search import CaseSearchES
from corehq.apps.reports.standard.cases.basic import CaseListReport
from corehq.apps.reports.standard.cases.filters import (
XpathCaseSearchFilter,
)
class CaseListExplorer(CaseListReport):
name = _('Case List Explorer')
slug = 'case_list_explorer'
search_class = CaseSearchES
fields = [
'corehq.apps.reports.filters.case_list.CaseListFilter',
'corehq.apps.reports.filters.select.CaseTypeFilter',
'corehq.apps.reports.filters.select.SelectOpenCloseFilter',
XpathCaseSearchFilter,
]
def get_data(self):
for row in self.es_results['hits'].get('hits', []):
yield flatten_result(row)
def _build_query(self):
query = super(CaseListExplorer, self)._build_query()
xpath = XpathCaseSearchFilter.get_value(self.request, self.domain)
if xpath:
query = query.xpath_query(self.domain, xpath)
return query
|
from __future__ import absolute_import, unicode_literals
from django.utils.translation import ugettext_lazy as _
from corehq.apps.es.case_search import CaseSearchES
from corehq.apps.reports.standard.cases.basic import CaseListReport
class CaseListExplorer(CaseListReport):
name = _('Case List Explorer')
slug = 'case_list_explorer'
search_class = CaseSearchES
Add XPath Query filter to reportfrom __future__ import absolute_import, unicode_literals
from django.utils.translation import ugettext_lazy as _
from corehq.apps.es.case_search import CaseSearchES
from corehq.apps.reports.standard.cases.basic import CaseListReport
from corehq.apps.reports.standard.cases.filters import (
XpathCaseSearchFilter,
)
class CaseListExplorer(CaseListReport):
name = _('Case List Explorer')
slug = 'case_list_explorer'
search_class = CaseSearchES
fields = [
'corehq.apps.reports.filters.case_list.CaseListFilter',
'corehq.apps.reports.filters.select.CaseTypeFilter',
'corehq.apps.reports.filters.select.SelectOpenCloseFilter',
XpathCaseSearchFilter,
]
def get_data(self):
for row in self.es_results['hits'].get('hits', []):
yield flatten_result(row)
def _build_query(self):
query = super(CaseListExplorer, self)._build_query()
xpath = XpathCaseSearchFilter.get_value(self.request, self.domain)
if xpath:
query = query.xpath_query(self.domain, xpath)
return query
|
<commit_before>from __future__ import absolute_import, unicode_literals
from django.utils.translation import ugettext_lazy as _
from corehq.apps.es.case_search import CaseSearchES
from corehq.apps.reports.standard.cases.basic import CaseListReport
class CaseListExplorer(CaseListReport):
name = _('Case List Explorer')
slug = 'case_list_explorer'
search_class = CaseSearchES
<commit_msg>Add XPath Query filter to report<commit_after>from __future__ import absolute_import, unicode_literals
from django.utils.translation import ugettext_lazy as _
from corehq.apps.es.case_search import CaseSearchES
from corehq.apps.reports.standard.cases.basic import CaseListReport
from corehq.apps.reports.standard.cases.filters import (
XpathCaseSearchFilter,
)
class CaseListExplorer(CaseListReport):
name = _('Case List Explorer')
slug = 'case_list_explorer'
search_class = CaseSearchES
fields = [
'corehq.apps.reports.filters.case_list.CaseListFilter',
'corehq.apps.reports.filters.select.CaseTypeFilter',
'corehq.apps.reports.filters.select.SelectOpenCloseFilter',
XpathCaseSearchFilter,
]
def get_data(self):
for row in self.es_results['hits'].get('hits', []):
yield flatten_result(row)
def _build_query(self):
query = super(CaseListExplorer, self)._build_query()
xpath = XpathCaseSearchFilter.get_value(self.request, self.domain)
if xpath:
query = query.xpath_query(self.domain, xpath)
return query
|
0ec3481ddd1ecf76cd7225427532075ce83be1b9
|
partner_firstname/migrations/12.0.1.0.0/pre-ir_config_param.py
|
partner_firstname/migrations/12.0.1.0.0/pre-ir_config_param.py
|
def store_ir_config_param(cr):
"""Prior to version 12.0 the default order of partner
names was last_first. In order to retain this behaviour we
store the config parameter if it is not present.
"""
cr.execute("SELECT 1 FROM ir_config_parameter "
"WHERE name = 'partner_names_order'")
if not cr.fetchone():
cr.execute("INSERT INTO ir_config_parameter (key, value) VALUES "
"('partner_names_order', 'last_first')")
def migrate(cr, version):
store_ir_config_param(cr)
|
def store_ir_config_param(cr):
"""Prior to version 12.0 the default order of partner
names was last_first. In order to retain this behaviour we
store the config parameter if it is not present.
"""
cr.execute("SELECT 1 FROM ir_config_parameter "
"WHERE key = 'partner_names_order'")
if not cr.fetchone():
cr.execute("INSERT INTO ir_config_parameter (key, value) VALUES "
"('partner_names_order', 'last_first')")
def migrate(cr, version):
store_ir_config_param(cr)
|
Fix migration script 12.0.1.0.0 of partner_firstname
|
[12.0] Fix migration script 12.0.1.0.0 of partner_firstname
|
Python
|
agpl-3.0
|
Vauxoo/partner-contact,Vauxoo/partner-contact
|
def store_ir_config_param(cr):
"""Prior to version 12.0 the default order of partner
names was last_first. In order to retain this behaviour we
store the config parameter if it is not present.
"""
cr.execute("SELECT 1 FROM ir_config_parameter "
"WHERE name = 'partner_names_order'")
if not cr.fetchone():
cr.execute("INSERT INTO ir_config_parameter (key, value) VALUES "
"('partner_names_order', 'last_first')")
def migrate(cr, version):
store_ir_config_param(cr)
[12.0] Fix migration script 12.0.1.0.0 of partner_firstname
|
def store_ir_config_param(cr):
"""Prior to version 12.0 the default order of partner
names was last_first. In order to retain this behaviour we
store the config parameter if it is not present.
"""
cr.execute("SELECT 1 FROM ir_config_parameter "
"WHERE key = 'partner_names_order'")
if not cr.fetchone():
cr.execute("INSERT INTO ir_config_parameter (key, value) VALUES "
"('partner_names_order', 'last_first')")
def migrate(cr, version):
store_ir_config_param(cr)
|
<commit_before>def store_ir_config_param(cr):
"""Prior to version 12.0 the default order of partner
names was last_first. In order to retain this behaviour we
store the config parameter if it is not present.
"""
cr.execute("SELECT 1 FROM ir_config_parameter "
"WHERE name = 'partner_names_order'")
if not cr.fetchone():
cr.execute("INSERT INTO ir_config_parameter (key, value) VALUES "
"('partner_names_order', 'last_first')")
def migrate(cr, version):
store_ir_config_param(cr)
<commit_msg>[12.0] Fix migration script 12.0.1.0.0 of partner_firstname<commit_after>
|
def store_ir_config_param(cr):
"""Prior to version 12.0 the default order of partner
names was last_first. In order to retain this behaviour we
store the config parameter if it is not present.
"""
cr.execute("SELECT 1 FROM ir_config_parameter "
"WHERE key = 'partner_names_order'")
if not cr.fetchone():
cr.execute("INSERT INTO ir_config_parameter (key, value) VALUES "
"('partner_names_order', 'last_first')")
def migrate(cr, version):
store_ir_config_param(cr)
|
def store_ir_config_param(cr):
"""Prior to version 12.0 the default order of partner
names was last_first. In order to retain this behaviour we
store the config parameter if it is not present.
"""
cr.execute("SELECT 1 FROM ir_config_parameter "
"WHERE name = 'partner_names_order'")
if not cr.fetchone():
cr.execute("INSERT INTO ir_config_parameter (key, value) VALUES "
"('partner_names_order', 'last_first')")
def migrate(cr, version):
store_ir_config_param(cr)
[12.0] Fix migration script 12.0.1.0.0 of partner_firstnamedef store_ir_config_param(cr):
"""Prior to version 12.0 the default order of partner
names was last_first. In order to retain this behaviour we
store the config parameter if it is not present.
"""
cr.execute("SELECT 1 FROM ir_config_parameter "
"WHERE key = 'partner_names_order'")
if not cr.fetchone():
cr.execute("INSERT INTO ir_config_parameter (key, value) VALUES "
"('partner_names_order', 'last_first')")
def migrate(cr, version):
store_ir_config_param(cr)
|
<commit_before>def store_ir_config_param(cr):
"""Prior to version 12.0 the default order of partner
names was last_first. In order to retain this behaviour we
store the config parameter if it is not present.
"""
cr.execute("SELECT 1 FROM ir_config_parameter "
"WHERE name = 'partner_names_order'")
if not cr.fetchone():
cr.execute("INSERT INTO ir_config_parameter (key, value) VALUES "
"('partner_names_order', 'last_first')")
def migrate(cr, version):
store_ir_config_param(cr)
<commit_msg>[12.0] Fix migration script 12.0.1.0.0 of partner_firstname<commit_after>def store_ir_config_param(cr):
"""Prior to version 12.0 the default order of partner
names was last_first. In order to retain this behaviour we
store the config parameter if it is not present.
"""
cr.execute("SELECT 1 FROM ir_config_parameter "
"WHERE key = 'partner_names_order'")
if not cr.fetchone():
cr.execute("INSERT INTO ir_config_parameter (key, value) VALUES "
"('partner_names_order', 'last_first')")
def migrate(cr, version):
store_ir_config_param(cr)
|
0e807b46ba044e1accb8fb767f6f2ed4ffb2d0ba
|
dataportal/tests/test_broker.py
|
dataportal/tests/test_broker.py
|
import unittest
from datetime import datetime
import numpy as np
import pandas as pd
from ..sources import channelarchiver as ca
from ..sources import switch
class TestBroker(unittest.TestCase):
def setUp(self):
switch(channelarchiver=False, metadatastore=False, filestore=False)
start, end = '2015-01-01 00:00:00', '2015-01-01 00:01:00'
simulated_ca_data = generate_ca_data(['ch1', 'ch2'], start, end)
ca.insert_data(simulated_ca_data)
def tearDown(self):
switch(channelarchiver=True, metadatastore=True, filestore=True)
def generate_ca_data(channels, start_time, end_time):
timestamps = pd.date_range(start_time, end_time, freq='T').to_series()
timestamps = list(timestamps.dt.to_pydatetime()) # list of datetime objects
values = list(np.arange(len(timestamps)))
return {channel: (timestamps, values) for channel in channels}
|
import unittest
from datetime import datetime
import numpy as np
import pandas as pd
from ..sources import channelarchiver as ca
from ..sources import switch
from ..examples.sample_data import temperature_ramp
from ..broker import DataBroker as db
class TestBroker(unittest.TestCase):
def setUp(self):
switch(channelarchiver=False, metadatastore=True, filestore=True)
start, end = '2015-01-01 00:00:00', '2015-01-01 00:01:00'
simulated_ca_data = generate_ca_data(['ch1', 'ch2'], start, end)
ca.insert_data(simulated_ca_data)
temperature_ramp.run()
def test_basic_usage(self):
header = db[-1]
events = db.fetch_events(header)
def tearDown(self):
switch(channelarchiver=True, metadatastore=True, filestore=True)
def generate_ca_data(channels, start_time, end_time):
timestamps = pd.date_range(start_time, end_time, freq='T').to_series()
timestamps = list(timestamps.dt.to_pydatetime()) # list of datetime objects
values = list(np.arange(len(timestamps)))
return {channel: (timestamps, values) for channel in channels}
|
Add coverage for basic broker usage.
|
TST: Add coverage for basic broker usage.
|
Python
|
bsd-3-clause
|
danielballan/dataportal,ericdill/datamuxer,danielballan/dataportal,NSLS-II/dataportal,NSLS-II/datamuxer,ericdill/datamuxer,tacaswell/dataportal,ericdill/databroker,danielballan/datamuxer,tacaswell/dataportal,danielballan/datamuxer,NSLS-II/dataportal,ericdill/databroker
|
import unittest
from datetime import datetime
import numpy as np
import pandas as pd
from ..sources import channelarchiver as ca
from ..sources import switch
class TestBroker(unittest.TestCase):
def setUp(self):
switch(channelarchiver=False, metadatastore=False, filestore=False)
start, end = '2015-01-01 00:00:00', '2015-01-01 00:01:00'
simulated_ca_data = generate_ca_data(['ch1', 'ch2'], start, end)
ca.insert_data(simulated_ca_data)
def tearDown(self):
switch(channelarchiver=True, metadatastore=True, filestore=True)
def generate_ca_data(channels, start_time, end_time):
timestamps = pd.date_range(start_time, end_time, freq='T').to_series()
timestamps = list(timestamps.dt.to_pydatetime()) # list of datetime objects
values = list(np.arange(len(timestamps)))
return {channel: (timestamps, values) for channel in channels}
TST: Add coverage for basic broker usage.
|
import unittest
from datetime import datetime
import numpy as np
import pandas as pd
from ..sources import channelarchiver as ca
from ..sources import switch
from ..examples.sample_data import temperature_ramp
from ..broker import DataBroker as db
class TestBroker(unittest.TestCase):
def setUp(self):
switch(channelarchiver=False, metadatastore=True, filestore=True)
start, end = '2015-01-01 00:00:00', '2015-01-01 00:01:00'
simulated_ca_data = generate_ca_data(['ch1', 'ch2'], start, end)
ca.insert_data(simulated_ca_data)
temperature_ramp.run()
def test_basic_usage(self):
header = db[-1]
events = db.fetch_events(header)
def tearDown(self):
switch(channelarchiver=True, metadatastore=True, filestore=True)
def generate_ca_data(channels, start_time, end_time):
timestamps = pd.date_range(start_time, end_time, freq='T').to_series()
timestamps = list(timestamps.dt.to_pydatetime()) # list of datetime objects
values = list(np.arange(len(timestamps)))
return {channel: (timestamps, values) for channel in channels}
|
<commit_before>import unittest
from datetime import datetime
import numpy as np
import pandas as pd
from ..sources import channelarchiver as ca
from ..sources import switch
class TestBroker(unittest.TestCase):
def setUp(self):
switch(channelarchiver=False, metadatastore=False, filestore=False)
start, end = '2015-01-01 00:00:00', '2015-01-01 00:01:00'
simulated_ca_data = generate_ca_data(['ch1', 'ch2'], start, end)
ca.insert_data(simulated_ca_data)
def tearDown(self):
switch(channelarchiver=True, metadatastore=True, filestore=True)
def generate_ca_data(channels, start_time, end_time):
timestamps = pd.date_range(start_time, end_time, freq='T').to_series()
timestamps = list(timestamps.dt.to_pydatetime()) # list of datetime objects
values = list(np.arange(len(timestamps)))
return {channel: (timestamps, values) for channel in channels}
<commit_msg>TST: Add coverage for basic broker usage.<commit_after>
|
import unittest
from datetime import datetime
import numpy as np
import pandas as pd
from ..sources import channelarchiver as ca
from ..sources import switch
from ..examples.sample_data import temperature_ramp
from ..broker import DataBroker as db
class TestBroker(unittest.TestCase):
def setUp(self):
switch(channelarchiver=False, metadatastore=True, filestore=True)
start, end = '2015-01-01 00:00:00', '2015-01-01 00:01:00'
simulated_ca_data = generate_ca_data(['ch1', 'ch2'], start, end)
ca.insert_data(simulated_ca_data)
temperature_ramp.run()
def test_basic_usage(self):
header = db[-1]
events = db.fetch_events(header)
def tearDown(self):
switch(channelarchiver=True, metadatastore=True, filestore=True)
def generate_ca_data(channels, start_time, end_time):
timestamps = pd.date_range(start_time, end_time, freq='T').to_series()
timestamps = list(timestamps.dt.to_pydatetime()) # list of datetime objects
values = list(np.arange(len(timestamps)))
return {channel: (timestamps, values) for channel in channels}
|
import unittest
from datetime import datetime
import numpy as np
import pandas as pd
from ..sources import channelarchiver as ca
from ..sources import switch
class TestBroker(unittest.TestCase):
def setUp(self):
switch(channelarchiver=False, metadatastore=False, filestore=False)
start, end = '2015-01-01 00:00:00', '2015-01-01 00:01:00'
simulated_ca_data = generate_ca_data(['ch1', 'ch2'], start, end)
ca.insert_data(simulated_ca_data)
def tearDown(self):
switch(channelarchiver=True, metadatastore=True, filestore=True)
def generate_ca_data(channels, start_time, end_time):
timestamps = pd.date_range(start_time, end_time, freq='T').to_series()
timestamps = list(timestamps.dt.to_pydatetime()) # list of datetime objects
values = list(np.arange(len(timestamps)))
return {channel: (timestamps, values) for channel in channels}
TST: Add coverage for basic broker usage.import unittest
from datetime import datetime
import numpy as np
import pandas as pd
from ..sources import channelarchiver as ca
from ..sources import switch
from ..examples.sample_data import temperature_ramp
from ..broker import DataBroker as db
class TestBroker(unittest.TestCase):
def setUp(self):
switch(channelarchiver=False, metadatastore=True, filestore=True)
start, end = '2015-01-01 00:00:00', '2015-01-01 00:01:00'
simulated_ca_data = generate_ca_data(['ch1', 'ch2'], start, end)
ca.insert_data(simulated_ca_data)
temperature_ramp.run()
def test_basic_usage(self):
header = db[-1]
events = db.fetch_events(header)
def tearDown(self):
switch(channelarchiver=True, metadatastore=True, filestore=True)
def generate_ca_data(channels, start_time, end_time):
timestamps = pd.date_range(start_time, end_time, freq='T').to_series()
timestamps = list(timestamps.dt.to_pydatetime()) # list of datetime objects
values = list(np.arange(len(timestamps)))
return {channel: (timestamps, values) for channel in channels}
|
<commit_before>import unittest
from datetime import datetime
import numpy as np
import pandas as pd
from ..sources import channelarchiver as ca
from ..sources import switch
class TestBroker(unittest.TestCase):
def setUp(self):
switch(channelarchiver=False, metadatastore=False, filestore=False)
start, end = '2015-01-01 00:00:00', '2015-01-01 00:01:00'
simulated_ca_data = generate_ca_data(['ch1', 'ch2'], start, end)
ca.insert_data(simulated_ca_data)
def tearDown(self):
switch(channelarchiver=True, metadatastore=True, filestore=True)
def generate_ca_data(channels, start_time, end_time):
timestamps = pd.date_range(start_time, end_time, freq='T').to_series()
timestamps = list(timestamps.dt.to_pydatetime()) # list of datetime objects
values = list(np.arange(len(timestamps)))
return {channel: (timestamps, values) for channel in channels}
<commit_msg>TST: Add coverage for basic broker usage.<commit_after>import unittest
from datetime import datetime
import numpy as np
import pandas as pd
from ..sources import channelarchiver as ca
from ..sources import switch
from ..examples.sample_data import temperature_ramp
from ..broker import DataBroker as db
class TestBroker(unittest.TestCase):
def setUp(self):
switch(channelarchiver=False, metadatastore=True, filestore=True)
start, end = '2015-01-01 00:00:00', '2015-01-01 00:01:00'
simulated_ca_data = generate_ca_data(['ch1', 'ch2'], start, end)
ca.insert_data(simulated_ca_data)
temperature_ramp.run()
def test_basic_usage(self):
header = db[-1]
events = db.fetch_events(header)
def tearDown(self):
switch(channelarchiver=True, metadatastore=True, filestore=True)
def generate_ca_data(channels, start_time, end_time):
timestamps = pd.date_range(start_time, end_time, freq='T').to_series()
timestamps = list(timestamps.dt.to_pydatetime()) # list of datetime objects
values = list(np.arange(len(timestamps)))
return {channel: (timestamps, values) for channel in channels}
|
8857ab5642e49761cc65093132352071ec28dba2
|
dataviva/utils/upload_helper.py
|
dataviva/utils/upload_helper.py
|
import boto3
from boto3.s3.transfer import S3Transfer
from config import AWS_ACCESS_KEY, AWS_SECRET_KEY
def delete_s3_file(file_id):
client = boto3.client(
's3',
aws_access_key_id=AWS_ACCESS_KEY,
aws_secret_access_key=AWS_SECRET_KEY
)
return client.delete_object(
Bucket='dataviva',
Key=file_id
)
def upload_s3_file(file_path, bucket, file_id, extra_args={'ContentType': "html/text"}):
client = boto3.client(
's3',
aws_access_key_id=AWS_ACCESS_KEY,
aws_secret_access_key=AWS_SECRET_KEY
)
transfer = S3Transfer(client)
return transfer.upload_file(file_path, bucket, file_id, extra_args)
|
import boto3
from boto3.s3.transfer import S3Transfer
from config import AWS_ACCESS_KEY, AWS_SECRET_KEY
def delete_s3_file(file_id):
client = boto3.client(
's3',
aws_access_key_id=AWS_ACCESS_KEY,
aws_secret_access_key=AWS_SECRET_KEY
)
return client.delete_object(
Bucket='dataviva',
Key=file_id
)
def upload_s3_file(file_path, bucket, file_id, extra_args={'ContentType': "html/text"}):
client = boto3.client(
's3',
aws_access_key_id=AWS_ACCESS_KEY,
aws_secret_access_key=AWS_SECRET_KEY
)
transfer = S3Transfer(client)
return transfer.upload_file(file_path, bucket, file_id, extra_args=extra_args)
|
Fix s3 upload file extra_args named paramether
|
Fix s3 upload file extra_args named paramether
|
Python
|
mit
|
DataViva/dataviva-site,DataViva/dataviva-site,DataViva/dataviva-site,DataViva/dataviva-site
|
import boto3
from boto3.s3.transfer import S3Transfer
from config import AWS_ACCESS_KEY, AWS_SECRET_KEY
def delete_s3_file(file_id):
client = boto3.client(
's3',
aws_access_key_id=AWS_ACCESS_KEY,
aws_secret_access_key=AWS_SECRET_KEY
)
return client.delete_object(
Bucket='dataviva',
Key=file_id
)
def upload_s3_file(file_path, bucket, file_id, extra_args={'ContentType': "html/text"}):
client = boto3.client(
's3',
aws_access_key_id=AWS_ACCESS_KEY,
aws_secret_access_key=AWS_SECRET_KEY
)
transfer = S3Transfer(client)
return transfer.upload_file(file_path, bucket, file_id, extra_args)
Fix s3 upload file extra_args named paramether
|
import boto3
from boto3.s3.transfer import S3Transfer
from config import AWS_ACCESS_KEY, AWS_SECRET_KEY
def delete_s3_file(file_id):
client = boto3.client(
's3',
aws_access_key_id=AWS_ACCESS_KEY,
aws_secret_access_key=AWS_SECRET_KEY
)
return client.delete_object(
Bucket='dataviva',
Key=file_id
)
def upload_s3_file(file_path, bucket, file_id, extra_args={'ContentType': "html/text"}):
client = boto3.client(
's3',
aws_access_key_id=AWS_ACCESS_KEY,
aws_secret_access_key=AWS_SECRET_KEY
)
transfer = S3Transfer(client)
return transfer.upload_file(file_path, bucket, file_id, extra_args=extra_args)
|
<commit_before>import boto3
from boto3.s3.transfer import S3Transfer
from config import AWS_ACCESS_KEY, AWS_SECRET_KEY
def delete_s3_file(file_id):
client = boto3.client(
's3',
aws_access_key_id=AWS_ACCESS_KEY,
aws_secret_access_key=AWS_SECRET_KEY
)
return client.delete_object(
Bucket='dataviva',
Key=file_id
)
def upload_s3_file(file_path, bucket, file_id, extra_args={'ContentType': "html/text"}):
client = boto3.client(
's3',
aws_access_key_id=AWS_ACCESS_KEY,
aws_secret_access_key=AWS_SECRET_KEY
)
transfer = S3Transfer(client)
return transfer.upload_file(file_path, bucket, file_id, extra_args)
<commit_msg>Fix s3 upload file extra_args named paramether<commit_after>
|
import boto3
from boto3.s3.transfer import S3Transfer
from config import AWS_ACCESS_KEY, AWS_SECRET_KEY
def delete_s3_file(file_id):
client = boto3.client(
's3',
aws_access_key_id=AWS_ACCESS_KEY,
aws_secret_access_key=AWS_SECRET_KEY
)
return client.delete_object(
Bucket='dataviva',
Key=file_id
)
def upload_s3_file(file_path, bucket, file_id, extra_args={'ContentType': "html/text"}):
client = boto3.client(
's3',
aws_access_key_id=AWS_ACCESS_KEY,
aws_secret_access_key=AWS_SECRET_KEY
)
transfer = S3Transfer(client)
return transfer.upload_file(file_path, bucket, file_id, extra_args=extra_args)
|
import boto3
from boto3.s3.transfer import S3Transfer
from config import AWS_ACCESS_KEY, AWS_SECRET_KEY
def delete_s3_file(file_id):
client = boto3.client(
's3',
aws_access_key_id=AWS_ACCESS_KEY,
aws_secret_access_key=AWS_SECRET_KEY
)
return client.delete_object(
Bucket='dataviva',
Key=file_id
)
def upload_s3_file(file_path, bucket, file_id, extra_args={'ContentType': "html/text"}):
client = boto3.client(
's3',
aws_access_key_id=AWS_ACCESS_KEY,
aws_secret_access_key=AWS_SECRET_KEY
)
transfer = S3Transfer(client)
return transfer.upload_file(file_path, bucket, file_id, extra_args)
Fix s3 upload file extra_args named parametherimport boto3
from boto3.s3.transfer import S3Transfer
from config import AWS_ACCESS_KEY, AWS_SECRET_KEY
def delete_s3_file(file_id):
client = boto3.client(
's3',
aws_access_key_id=AWS_ACCESS_KEY,
aws_secret_access_key=AWS_SECRET_KEY
)
return client.delete_object(
Bucket='dataviva',
Key=file_id
)
def upload_s3_file(file_path, bucket, file_id, extra_args={'ContentType': "html/text"}):
client = boto3.client(
's3',
aws_access_key_id=AWS_ACCESS_KEY,
aws_secret_access_key=AWS_SECRET_KEY
)
transfer = S3Transfer(client)
return transfer.upload_file(file_path, bucket, file_id, extra_args=extra_args)
|
<commit_before>import boto3
from boto3.s3.transfer import S3Transfer
from config import AWS_ACCESS_KEY, AWS_SECRET_KEY
def delete_s3_file(file_id):
client = boto3.client(
's3',
aws_access_key_id=AWS_ACCESS_KEY,
aws_secret_access_key=AWS_SECRET_KEY
)
return client.delete_object(
Bucket='dataviva',
Key=file_id
)
def upload_s3_file(file_path, bucket, file_id, extra_args={'ContentType': "html/text"}):
client = boto3.client(
's3',
aws_access_key_id=AWS_ACCESS_KEY,
aws_secret_access_key=AWS_SECRET_KEY
)
transfer = S3Transfer(client)
return transfer.upload_file(file_path, bucket, file_id, extra_args)
<commit_msg>Fix s3 upload file extra_args named paramether<commit_after>import boto3
from boto3.s3.transfer import S3Transfer
from config import AWS_ACCESS_KEY, AWS_SECRET_KEY
def delete_s3_file(file_id):
client = boto3.client(
's3',
aws_access_key_id=AWS_ACCESS_KEY,
aws_secret_access_key=AWS_SECRET_KEY
)
return client.delete_object(
Bucket='dataviva',
Key=file_id
)
def upload_s3_file(file_path, bucket, file_id, extra_args={'ContentType': "html/text"}):
client = boto3.client(
's3',
aws_access_key_id=AWS_ACCESS_KEY,
aws_secret_access_key=AWS_SECRET_KEY
)
transfer = S3Transfer(client)
return transfer.upload_file(file_path, bucket, file_id, extra_args=extra_args)
|
315a5c25429b3910446714238c28382ba727add8
|
copywriting/urls.py
|
copywriting/urls.py
|
from django.conf.urls.defaults import *
from .feed import blogFeed
urlpatterns = patterns('copywriting',
(r'^feed\.rss$', blogFeed()),
(r'^feed/$', blogFeed()),
(r'^tag/(?P<in_tag>\w+)/$', 'views.withTag'),
# (r'^(?P<year>\d+)/(?P<month>\d+)/(?P<day>\d+)/$', 'views.listBlogEntriesByYearMonthDay'),
(r'^(?P<requestYear>\d+)/(?P<requestMonth>\d+)/$', 'views.listArticlesByYearMonth'),
(r'^(?P<requestYear>\d+)/$', 'views.listArticlesByYear'),
(r'^(?P<slug>[^\.]+)/$', 'views.showArticle'),
(r'^$', 'views.listArticles'),
)
|
from django.conf.urls.defaults import *
from .feed import blogFeed
urlpatterns = patterns('copywriting',
(r'^feed\.rss$', blogFeed()),
(r'^feed/$', blogFeed()),
(r'^tag/(?P<in_tag>\w[\w-]+)/$', 'views.withTag'),
# (r'^(?P<year>\d+)/(?P<month>\d+)/(?P<day>\d+)/$', 'views.listBlogEntriesByYearMonthDay'),
(r'^(?P<requestYear>\d+)/(?P<requestMonth>\d+)/$', 'views.listArticlesByYearMonth'),
(r'^(?P<requestYear>\d+)/$', 'views.listArticlesByYear'),
(r'^(?P<slug>[^\.]+)/$', 'views.showArticle'),
(r'^$', 'views.listArticles'),
)
|
Allow slugs in url patterns
|
Allow slugs in url patterns
|
Python
|
mit
|
arteria/django-copywriting,arteria/django-copywriting
|
from django.conf.urls.defaults import *
from .feed import blogFeed
urlpatterns = patterns('copywriting',
(r'^feed\.rss$', blogFeed()),
(r'^feed/$', blogFeed()),
(r'^tag/(?P<in_tag>\w+)/$', 'views.withTag'),
# (r'^(?P<year>\d+)/(?P<month>\d+)/(?P<day>\d+)/$', 'views.listBlogEntriesByYearMonthDay'),
(r'^(?P<requestYear>\d+)/(?P<requestMonth>\d+)/$', 'views.listArticlesByYearMonth'),
(r'^(?P<requestYear>\d+)/$', 'views.listArticlesByYear'),
(r'^(?P<slug>[^\.]+)/$', 'views.showArticle'),
(r'^$', 'views.listArticles'),
)
Allow slugs in url patterns
|
from django.conf.urls.defaults import *
from .feed import blogFeed
urlpatterns = patterns('copywriting',
(r'^feed\.rss$', blogFeed()),
(r'^feed/$', blogFeed()),
(r'^tag/(?P<in_tag>\w[\w-]+)/$', 'views.withTag'),
# (r'^(?P<year>\d+)/(?P<month>\d+)/(?P<day>\d+)/$', 'views.listBlogEntriesByYearMonthDay'),
(r'^(?P<requestYear>\d+)/(?P<requestMonth>\d+)/$', 'views.listArticlesByYearMonth'),
(r'^(?P<requestYear>\d+)/$', 'views.listArticlesByYear'),
(r'^(?P<slug>[^\.]+)/$', 'views.showArticle'),
(r'^$', 'views.listArticles'),
)
|
<commit_before>from django.conf.urls.defaults import *
from .feed import blogFeed
urlpatterns = patterns('copywriting',
(r'^feed\.rss$', blogFeed()),
(r'^feed/$', blogFeed()),
(r'^tag/(?P<in_tag>\w+)/$', 'views.withTag'),
# (r'^(?P<year>\d+)/(?P<month>\d+)/(?P<day>\d+)/$', 'views.listBlogEntriesByYearMonthDay'),
(r'^(?P<requestYear>\d+)/(?P<requestMonth>\d+)/$', 'views.listArticlesByYearMonth'),
(r'^(?P<requestYear>\d+)/$', 'views.listArticlesByYear'),
(r'^(?P<slug>[^\.]+)/$', 'views.showArticle'),
(r'^$', 'views.listArticles'),
)
<commit_msg>Allow slugs in url patterns<commit_after>
|
from django.conf.urls.defaults import *
from .feed import blogFeed
urlpatterns = patterns('copywriting',
(r'^feed\.rss$', blogFeed()),
(r'^feed/$', blogFeed()),
(r'^tag/(?P<in_tag>\w[\w-]+)/$', 'views.withTag'),
# (r'^(?P<year>\d+)/(?P<month>\d+)/(?P<day>\d+)/$', 'views.listBlogEntriesByYearMonthDay'),
(r'^(?P<requestYear>\d+)/(?P<requestMonth>\d+)/$', 'views.listArticlesByYearMonth'),
(r'^(?P<requestYear>\d+)/$', 'views.listArticlesByYear'),
(r'^(?P<slug>[^\.]+)/$', 'views.showArticle'),
(r'^$', 'views.listArticles'),
)
|
from django.conf.urls.defaults import *
from .feed import blogFeed
urlpatterns = patterns('copywriting',
(r'^feed\.rss$', blogFeed()),
(r'^feed/$', blogFeed()),
(r'^tag/(?P<in_tag>\w+)/$', 'views.withTag'),
# (r'^(?P<year>\d+)/(?P<month>\d+)/(?P<day>\d+)/$', 'views.listBlogEntriesByYearMonthDay'),
(r'^(?P<requestYear>\d+)/(?P<requestMonth>\d+)/$', 'views.listArticlesByYearMonth'),
(r'^(?P<requestYear>\d+)/$', 'views.listArticlesByYear'),
(r'^(?P<slug>[^\.]+)/$', 'views.showArticle'),
(r'^$', 'views.listArticles'),
)
Allow slugs in url patternsfrom django.conf.urls.defaults import *
from .feed import blogFeed
urlpatterns = patterns('copywriting',
(r'^feed\.rss$', blogFeed()),
(r'^feed/$', blogFeed()),
(r'^tag/(?P<in_tag>\w[\w-]+)/$', 'views.withTag'),
# (r'^(?P<year>\d+)/(?P<month>\d+)/(?P<day>\d+)/$', 'views.listBlogEntriesByYearMonthDay'),
(r'^(?P<requestYear>\d+)/(?P<requestMonth>\d+)/$', 'views.listArticlesByYearMonth'),
(r'^(?P<requestYear>\d+)/$', 'views.listArticlesByYear'),
(r'^(?P<slug>[^\.]+)/$', 'views.showArticle'),
(r'^$', 'views.listArticles'),
)
|
<commit_before>from django.conf.urls.defaults import *
from .feed import blogFeed
urlpatterns = patterns('copywriting',
(r'^feed\.rss$', blogFeed()),
(r'^feed/$', blogFeed()),
(r'^tag/(?P<in_tag>\w+)/$', 'views.withTag'),
# (r'^(?P<year>\d+)/(?P<month>\d+)/(?P<day>\d+)/$', 'views.listBlogEntriesByYearMonthDay'),
(r'^(?P<requestYear>\d+)/(?P<requestMonth>\d+)/$', 'views.listArticlesByYearMonth'),
(r'^(?P<requestYear>\d+)/$', 'views.listArticlesByYear'),
(r'^(?P<slug>[^\.]+)/$', 'views.showArticle'),
(r'^$', 'views.listArticles'),
)
<commit_msg>Allow slugs in url patterns<commit_after>from django.conf.urls.defaults import *
from .feed import blogFeed
urlpatterns = patterns('copywriting',
(r'^feed\.rss$', blogFeed()),
(r'^feed/$', blogFeed()),
(r'^tag/(?P<in_tag>\w[\w-]+)/$', 'views.withTag'),
# (r'^(?P<year>\d+)/(?P<month>\d+)/(?P<day>\d+)/$', 'views.listBlogEntriesByYearMonthDay'),
(r'^(?P<requestYear>\d+)/(?P<requestMonth>\d+)/$', 'views.listArticlesByYearMonth'),
(r'^(?P<requestYear>\d+)/$', 'views.listArticlesByYear'),
(r'^(?P<slug>[^\.]+)/$', 'views.showArticle'),
(r'^$', 'views.listArticles'),
)
|
82430e9ec30be2003293640867c14af306dc9ca5
|
chmvh_website/gallery/management/commands/generatethumbnails.py
|
chmvh_website/gallery/management/commands/generatethumbnails.py
|
from django.core.management.base import BaseCommand
from gallery import models
from gallery.tasks import create_thumbnail
class Command(BaseCommand):
help = 'Generates thumbnails for gallery images without thumbnails'
def add_arguments(self, parser):
parser.add_argument(
'--overwrite',
action='store_true',
default=False,
dest='overwrite',
help='Generate thumbnails for all pictures.')
def handle(self, *args, **kwargs):
if kwargs['overwrite']:
self.stdout.write(self.style.WARNING(
'Overwriting previously generated thumbnails.'))
patients = models.Patient.objects.all()
else:
patients = models.Patient.objects.filter(thumbnail=None)
count = patients.count()
if count == 0:
self.stdout.write("No thumbnails to generate.")
return
elif count == 1:
count_bit = '1 thumbnail'
else:
count_bit = '{0} thumbnails'.format(count)
self.stdout.write('Generating {}...'.format(count_bit))
for patient in patients:
if kwargs['overwrite'] and patient.thumbnail:
patient.thumbnail.delete()
create_thumbnail(patient)
self.stdout.write(self.style.SUCCESS(
"Successfully generated {}.".format(count_bit)))
|
from django.core.management.base import BaseCommand
from gallery import models
from gallery.tasks import create_thumbnail
class Command(BaseCommand):
help = 'Generates thumbnails for gallery images without thumbnails'
def add_arguments(self, parser):
parser.add_argument(
'--overwrite',
action='store_true',
default=False,
dest='overwrite',
help='Generate thumbnails for all pictures.')
def handle(self, *args, **kwargs):
if kwargs['overwrite']:
self.stdout.write(self.style.WARNING(
'Overwriting previously generated thumbnails.'))
patients = models.Patient.objects.all()
else:
patients = models.Patient.objects.filter(thumbnail=None)
count = patients.count()
if count == 0:
self.stdout.write("No thumbnails to generate.")
return
elif count == 1:
count_bit = '1 thumbnail'
else:
count_bit = '{0} thumbnails'.format(count)
self.stdout.write('Generating {}...'.format(count_bit))
for patient in patients:
if kwargs['overwrite'] and patient.thumbnail:
patient.thumbnail.delete(save=False)
patient.save(update_fields=['thumbnail'])
create_thumbnail(patient)
self.stdout.write(self.style.SUCCESS(
"Successfully generated {}.".format(count_bit)))
|
Fix error in thumbnail deletion.
|
Fix error in thumbnail deletion.
|
Python
|
mit
|
cdriehuys/chmvh-website,cdriehuys/chmvh-website,cdriehuys/chmvh-website
|
from django.core.management.base import BaseCommand
from gallery import models
from gallery.tasks import create_thumbnail
class Command(BaseCommand):
help = 'Generates thumbnails for gallery images without thumbnails'
def add_arguments(self, parser):
parser.add_argument(
'--overwrite',
action='store_true',
default=False,
dest='overwrite',
help='Generate thumbnails for all pictures.')
def handle(self, *args, **kwargs):
if kwargs['overwrite']:
self.stdout.write(self.style.WARNING(
'Overwriting previously generated thumbnails.'))
patients = models.Patient.objects.all()
else:
patients = models.Patient.objects.filter(thumbnail=None)
count = patients.count()
if count == 0:
self.stdout.write("No thumbnails to generate.")
return
elif count == 1:
count_bit = '1 thumbnail'
else:
count_bit = '{0} thumbnails'.format(count)
self.stdout.write('Generating {}...'.format(count_bit))
for patient in patients:
if kwargs['overwrite'] and patient.thumbnail:
patient.thumbnail.delete()
create_thumbnail(patient)
self.stdout.write(self.style.SUCCESS(
"Successfully generated {}.".format(count_bit)))
Fix error in thumbnail deletion.
|
from django.core.management.base import BaseCommand
from gallery import models
from gallery.tasks import create_thumbnail
class Command(BaseCommand):
help = 'Generates thumbnails for gallery images without thumbnails'
def add_arguments(self, parser):
parser.add_argument(
'--overwrite',
action='store_true',
default=False,
dest='overwrite',
help='Generate thumbnails for all pictures.')
def handle(self, *args, **kwargs):
if kwargs['overwrite']:
self.stdout.write(self.style.WARNING(
'Overwriting previously generated thumbnails.'))
patients = models.Patient.objects.all()
else:
patients = models.Patient.objects.filter(thumbnail=None)
count = patients.count()
if count == 0:
self.stdout.write("No thumbnails to generate.")
return
elif count == 1:
count_bit = '1 thumbnail'
else:
count_bit = '{0} thumbnails'.format(count)
self.stdout.write('Generating {}...'.format(count_bit))
for patient in patients:
if kwargs['overwrite'] and patient.thumbnail:
patient.thumbnail.delete(save=False)
patient.save(update_fields=['thumbnail'])
create_thumbnail(patient)
self.stdout.write(self.style.SUCCESS(
"Successfully generated {}.".format(count_bit)))
|
<commit_before>from django.core.management.base import BaseCommand
from gallery import models
from gallery.tasks import create_thumbnail
class Command(BaseCommand):
help = 'Generates thumbnails for gallery images without thumbnails'
def add_arguments(self, parser):
parser.add_argument(
'--overwrite',
action='store_true',
default=False,
dest='overwrite',
help='Generate thumbnails for all pictures.')
def handle(self, *args, **kwargs):
if kwargs['overwrite']:
self.stdout.write(self.style.WARNING(
'Overwriting previously generated thumbnails.'))
patients = models.Patient.objects.all()
else:
patients = models.Patient.objects.filter(thumbnail=None)
count = patients.count()
if count == 0:
self.stdout.write("No thumbnails to generate.")
return
elif count == 1:
count_bit = '1 thumbnail'
else:
count_bit = '{0} thumbnails'.format(count)
self.stdout.write('Generating {}...'.format(count_bit))
for patient in patients:
if kwargs['overwrite'] and patient.thumbnail:
patient.thumbnail.delete()
create_thumbnail(patient)
self.stdout.write(self.style.SUCCESS(
"Successfully generated {}.".format(count_bit)))
<commit_msg>Fix error in thumbnail deletion.<commit_after>
|
from django.core.management.base import BaseCommand
from gallery import models
from gallery.tasks import create_thumbnail
class Command(BaseCommand):
help = 'Generates thumbnails for gallery images without thumbnails'
def add_arguments(self, parser):
parser.add_argument(
'--overwrite',
action='store_true',
default=False,
dest='overwrite',
help='Generate thumbnails for all pictures.')
def handle(self, *args, **kwargs):
if kwargs['overwrite']:
self.stdout.write(self.style.WARNING(
'Overwriting previously generated thumbnails.'))
patients = models.Patient.objects.all()
else:
patients = models.Patient.objects.filter(thumbnail=None)
count = patients.count()
if count == 0:
self.stdout.write("No thumbnails to generate.")
return
elif count == 1:
count_bit = '1 thumbnail'
else:
count_bit = '{0} thumbnails'.format(count)
self.stdout.write('Generating {}...'.format(count_bit))
for patient in patients:
if kwargs['overwrite'] and patient.thumbnail:
patient.thumbnail.delete(save=False)
patient.save(update_fields=['thumbnail'])
create_thumbnail(patient)
self.stdout.write(self.style.SUCCESS(
"Successfully generated {}.".format(count_bit)))
|
from django.core.management.base import BaseCommand
from gallery import models
from gallery.tasks import create_thumbnail
class Command(BaseCommand):
help = 'Generates thumbnails for gallery images without thumbnails'
def add_arguments(self, parser):
parser.add_argument(
'--overwrite',
action='store_true',
default=False,
dest='overwrite',
help='Generate thumbnails for all pictures.')
def handle(self, *args, **kwargs):
if kwargs['overwrite']:
self.stdout.write(self.style.WARNING(
'Overwriting previously generated thumbnails.'))
patients = models.Patient.objects.all()
else:
patients = models.Patient.objects.filter(thumbnail=None)
count = patients.count()
if count == 0:
self.stdout.write("No thumbnails to generate.")
return
elif count == 1:
count_bit = '1 thumbnail'
else:
count_bit = '{0} thumbnails'.format(count)
self.stdout.write('Generating {}...'.format(count_bit))
for patient in patients:
if kwargs['overwrite'] and patient.thumbnail:
patient.thumbnail.delete()
create_thumbnail(patient)
self.stdout.write(self.style.SUCCESS(
"Successfully generated {}.".format(count_bit)))
Fix error in thumbnail deletion.from django.core.management.base import BaseCommand
from gallery import models
from gallery.tasks import create_thumbnail
class Command(BaseCommand):
help = 'Generates thumbnails for gallery images without thumbnails'
def add_arguments(self, parser):
parser.add_argument(
'--overwrite',
action='store_true',
default=False,
dest='overwrite',
help='Generate thumbnails for all pictures.')
def handle(self, *args, **kwargs):
if kwargs['overwrite']:
self.stdout.write(self.style.WARNING(
'Overwriting previously generated thumbnails.'))
patients = models.Patient.objects.all()
else:
patients = models.Patient.objects.filter(thumbnail=None)
count = patients.count()
if count == 0:
self.stdout.write("No thumbnails to generate.")
return
elif count == 1:
count_bit = '1 thumbnail'
else:
count_bit = '{0} thumbnails'.format(count)
self.stdout.write('Generating {}...'.format(count_bit))
for patient in patients:
if kwargs['overwrite'] and patient.thumbnail:
patient.thumbnail.delete(save=False)
patient.save(update_fields=['thumbnail'])
create_thumbnail(patient)
self.stdout.write(self.style.SUCCESS(
"Successfully generated {}.".format(count_bit)))
|
<commit_before>from django.core.management.base import BaseCommand
from gallery import models
from gallery.tasks import create_thumbnail
class Command(BaseCommand):
help = 'Generates thumbnails for gallery images without thumbnails'
def add_arguments(self, parser):
parser.add_argument(
'--overwrite',
action='store_true',
default=False,
dest='overwrite',
help='Generate thumbnails for all pictures.')
def handle(self, *args, **kwargs):
if kwargs['overwrite']:
self.stdout.write(self.style.WARNING(
'Overwriting previously generated thumbnails.'))
patients = models.Patient.objects.all()
else:
patients = models.Patient.objects.filter(thumbnail=None)
count = patients.count()
if count == 0:
self.stdout.write("No thumbnails to generate.")
return
elif count == 1:
count_bit = '1 thumbnail'
else:
count_bit = '{0} thumbnails'.format(count)
self.stdout.write('Generating {}...'.format(count_bit))
for patient in patients:
if kwargs['overwrite'] and patient.thumbnail:
patient.thumbnail.delete()
create_thumbnail(patient)
self.stdout.write(self.style.SUCCESS(
"Successfully generated {}.".format(count_bit)))
<commit_msg>Fix error in thumbnail deletion.<commit_after>from django.core.management.base import BaseCommand
from gallery import models
from gallery.tasks import create_thumbnail
class Command(BaseCommand):
help = 'Generates thumbnails for gallery images without thumbnails'
def add_arguments(self, parser):
parser.add_argument(
'--overwrite',
action='store_true',
default=False,
dest='overwrite',
help='Generate thumbnails for all pictures.')
def handle(self, *args, **kwargs):
if kwargs['overwrite']:
self.stdout.write(self.style.WARNING(
'Overwriting previously generated thumbnails.'))
patients = models.Patient.objects.all()
else:
patients = models.Patient.objects.filter(thumbnail=None)
count = patients.count()
if count == 0:
self.stdout.write("No thumbnails to generate.")
return
elif count == 1:
count_bit = '1 thumbnail'
else:
count_bit = '{0} thumbnails'.format(count)
self.stdout.write('Generating {}...'.format(count_bit))
for patient in patients:
if kwargs['overwrite'] and patient.thumbnail:
patient.thumbnail.delete(save=False)
patient.save(update_fields=['thumbnail'])
create_thumbnail(patient)
self.stdout.write(self.style.SUCCESS(
"Successfully generated {}.".format(count_bit)))
|
c6db8701986e8eb075b92916067e5904cd13fe9f
|
deploy/delete_stale_projects.py
|
deploy/delete_stale_projects.py
|
import shutil
import os
from projects.models import Project
slugs = [p.slug for p in Project.objects.all()]
build_projects = os.listdir('/home/docs/checkouts/readthedocs.org/user_builds/')
final = []
for slug in build_projects:
if slug not in slugs and slug.replace('_', '-') not in slugs:
final.append(slug)
print "To delete: %s" % len(final)
for to_del in final:
root = '/home/docs/checkouts/readthedocs.org'
print "Deleting " + to_del
shutil.rmtree('{root}/user_builds/{slug}'.format(root=root, slug=to_del), ignore_errors=True)
shutil.rmtree('{root}/media/pdf/{slug}'.format(root=root, slug=to_del), ignore_errors=True)
shutil.rmtree('{root}/media/epub/{slug}'.format(root=root, slug=to_del), ignore_errors=True)
shutil.rmtree('{root}/media/json/{slug}'.format(root=root, slug=to_del), ignore_errors=True)
shutil.rmtree('{root}/media/man/{slug}'.format(root=root, slug=to_del), ignore_errors=True)
shutil.rmtree('{root}/media/htmlzip/{slug}'.format(root=root, slug=to_del), ignore_errors=True)
|
import shutil
import os
from readthedocs.projects.models import Project
slugs = [p.slug for p in Project.objects.all()]
build_projects = os.listdir('/home/docs/checkouts/readthedocs.org/user_builds/')
final = []
for slug in build_projects:
if slug not in slugs and slug.replace('_', '-') not in slugs:
final.append(slug)
print "To delete: %s" % len(final)
for to_del in final:
root = '/home/docs/checkouts/readthedocs.org'
print "Deleting " + to_del
shutil.rmtree('{root}/user_builds/{slug}'.format(root=root, slug=to_del), ignore_errors=True)
shutil.rmtree('{root}/media/pdf/{slug}'.format(root=root, slug=to_del), ignore_errors=True)
shutil.rmtree('{root}/media/epub/{slug}'.format(root=root, slug=to_del), ignore_errors=True)
shutil.rmtree('{root}/media/json/{slug}'.format(root=root, slug=to_del), ignore_errors=True)
shutil.rmtree('{root}/media/man/{slug}'.format(root=root, slug=to_del), ignore_errors=True)
shutil.rmtree('{root}/media/htmlzip/{slug}'.format(root=root, slug=to_del), ignore_errors=True)
|
Add rtd import to master
|
Add rtd import to master
|
Python
|
mit
|
safwanrahman/readthedocs.org,istresearch/readthedocs.org,tddv/readthedocs.org,pombredanne/readthedocs.org,espdev/readthedocs.org,safwanrahman/readthedocs.org,rtfd/readthedocs.org,espdev/readthedocs.org,safwanrahman/readthedocs.org,espdev/readthedocs.org,tddv/readthedocs.org,davidfischer/readthedocs.org,istresearch/readthedocs.org,stevepiercy/readthedocs.org,tddv/readthedocs.org,stevepiercy/readthedocs.org,espdev/readthedocs.org,stevepiercy/readthedocs.org,pombredanne/readthedocs.org,stevepiercy/readthedocs.org,istresearch/readthedocs.org,pombredanne/readthedocs.org,rtfd/readthedocs.org,espdev/readthedocs.org,rtfd/readthedocs.org,safwanrahman/readthedocs.org,rtfd/readthedocs.org,davidfischer/readthedocs.org,davidfischer/readthedocs.org,istresearch/readthedocs.org,davidfischer/readthedocs.org
|
import shutil
import os
from projects.models import Project
slugs = [p.slug for p in Project.objects.all()]
build_projects = os.listdir('/home/docs/checkouts/readthedocs.org/user_builds/')
final = []
for slug in build_projects:
if slug not in slugs and slug.replace('_', '-') not in slugs:
final.append(slug)
print "To delete: %s" % len(final)
for to_del in final:
root = '/home/docs/checkouts/readthedocs.org'
print "Deleting " + to_del
shutil.rmtree('{root}/user_builds/{slug}'.format(root=root, slug=to_del), ignore_errors=True)
shutil.rmtree('{root}/media/pdf/{slug}'.format(root=root, slug=to_del), ignore_errors=True)
shutil.rmtree('{root}/media/epub/{slug}'.format(root=root, slug=to_del), ignore_errors=True)
shutil.rmtree('{root}/media/json/{slug}'.format(root=root, slug=to_del), ignore_errors=True)
shutil.rmtree('{root}/media/man/{slug}'.format(root=root, slug=to_del), ignore_errors=True)
shutil.rmtree('{root}/media/htmlzip/{slug}'.format(root=root, slug=to_del), ignore_errors=True)
Add rtd import to master
|
import shutil
import os
from readthedocs.projects.models import Project
slugs = [p.slug for p in Project.objects.all()]
build_projects = os.listdir('/home/docs/checkouts/readthedocs.org/user_builds/')
final = []
for slug in build_projects:
if slug not in slugs and slug.replace('_', '-') not in slugs:
final.append(slug)
print "To delete: %s" % len(final)
for to_del in final:
root = '/home/docs/checkouts/readthedocs.org'
print "Deleting " + to_del
shutil.rmtree('{root}/user_builds/{slug}'.format(root=root, slug=to_del), ignore_errors=True)
shutil.rmtree('{root}/media/pdf/{slug}'.format(root=root, slug=to_del), ignore_errors=True)
shutil.rmtree('{root}/media/epub/{slug}'.format(root=root, slug=to_del), ignore_errors=True)
shutil.rmtree('{root}/media/json/{slug}'.format(root=root, slug=to_del), ignore_errors=True)
shutil.rmtree('{root}/media/man/{slug}'.format(root=root, slug=to_del), ignore_errors=True)
shutil.rmtree('{root}/media/htmlzip/{slug}'.format(root=root, slug=to_del), ignore_errors=True)
|
<commit_before>import shutil
import os
from projects.models import Project
slugs = [p.slug for p in Project.objects.all()]
build_projects = os.listdir('/home/docs/checkouts/readthedocs.org/user_builds/')
final = []
for slug in build_projects:
if slug not in slugs and slug.replace('_', '-') not in slugs:
final.append(slug)
print "To delete: %s" % len(final)
for to_del in final:
root = '/home/docs/checkouts/readthedocs.org'
print "Deleting " + to_del
shutil.rmtree('{root}/user_builds/{slug}'.format(root=root, slug=to_del), ignore_errors=True)
shutil.rmtree('{root}/media/pdf/{slug}'.format(root=root, slug=to_del), ignore_errors=True)
shutil.rmtree('{root}/media/epub/{slug}'.format(root=root, slug=to_del), ignore_errors=True)
shutil.rmtree('{root}/media/json/{slug}'.format(root=root, slug=to_del), ignore_errors=True)
shutil.rmtree('{root}/media/man/{slug}'.format(root=root, slug=to_del), ignore_errors=True)
shutil.rmtree('{root}/media/htmlzip/{slug}'.format(root=root, slug=to_del), ignore_errors=True)
<commit_msg>Add rtd import to master<commit_after>
|
import shutil
import os
from readthedocs.projects.models import Project
slugs = [p.slug for p in Project.objects.all()]
build_projects = os.listdir('/home/docs/checkouts/readthedocs.org/user_builds/')
final = []
for slug in build_projects:
if slug not in slugs and slug.replace('_', '-') not in slugs:
final.append(slug)
print "To delete: %s" % len(final)
for to_del in final:
root = '/home/docs/checkouts/readthedocs.org'
print "Deleting " + to_del
shutil.rmtree('{root}/user_builds/{slug}'.format(root=root, slug=to_del), ignore_errors=True)
shutil.rmtree('{root}/media/pdf/{slug}'.format(root=root, slug=to_del), ignore_errors=True)
shutil.rmtree('{root}/media/epub/{slug}'.format(root=root, slug=to_del), ignore_errors=True)
shutil.rmtree('{root}/media/json/{slug}'.format(root=root, slug=to_del), ignore_errors=True)
shutil.rmtree('{root}/media/man/{slug}'.format(root=root, slug=to_del), ignore_errors=True)
shutil.rmtree('{root}/media/htmlzip/{slug}'.format(root=root, slug=to_del), ignore_errors=True)
|
import shutil
import os
from projects.models import Project
slugs = [p.slug for p in Project.objects.all()]
build_projects = os.listdir('/home/docs/checkouts/readthedocs.org/user_builds/')
final = []
for slug in build_projects:
if slug not in slugs and slug.replace('_', '-') not in slugs:
final.append(slug)
print "To delete: %s" % len(final)
for to_del in final:
root = '/home/docs/checkouts/readthedocs.org'
print "Deleting " + to_del
shutil.rmtree('{root}/user_builds/{slug}'.format(root=root, slug=to_del), ignore_errors=True)
shutil.rmtree('{root}/media/pdf/{slug}'.format(root=root, slug=to_del), ignore_errors=True)
shutil.rmtree('{root}/media/epub/{slug}'.format(root=root, slug=to_del), ignore_errors=True)
shutil.rmtree('{root}/media/json/{slug}'.format(root=root, slug=to_del), ignore_errors=True)
shutil.rmtree('{root}/media/man/{slug}'.format(root=root, slug=to_del), ignore_errors=True)
shutil.rmtree('{root}/media/htmlzip/{slug}'.format(root=root, slug=to_del), ignore_errors=True)
Add rtd import to masterimport shutil
import os
from readthedocs.projects.models import Project
slugs = [p.slug for p in Project.objects.all()]
build_projects = os.listdir('/home/docs/checkouts/readthedocs.org/user_builds/')
final = []
for slug in build_projects:
if slug not in slugs and slug.replace('_', '-') not in slugs:
final.append(slug)
print "To delete: %s" % len(final)
for to_del in final:
root = '/home/docs/checkouts/readthedocs.org'
print "Deleting " + to_del
shutil.rmtree('{root}/user_builds/{slug}'.format(root=root, slug=to_del), ignore_errors=True)
shutil.rmtree('{root}/media/pdf/{slug}'.format(root=root, slug=to_del), ignore_errors=True)
shutil.rmtree('{root}/media/epub/{slug}'.format(root=root, slug=to_del), ignore_errors=True)
shutil.rmtree('{root}/media/json/{slug}'.format(root=root, slug=to_del), ignore_errors=True)
shutil.rmtree('{root}/media/man/{slug}'.format(root=root, slug=to_del), ignore_errors=True)
shutil.rmtree('{root}/media/htmlzip/{slug}'.format(root=root, slug=to_del), ignore_errors=True)
|
<commit_before>import shutil
import os
from projects.models import Project
slugs = [p.slug for p in Project.objects.all()]
build_projects = os.listdir('/home/docs/checkouts/readthedocs.org/user_builds/')
final = []
for slug in build_projects:
if slug not in slugs and slug.replace('_', '-') not in slugs:
final.append(slug)
print "To delete: %s" % len(final)
for to_del in final:
root = '/home/docs/checkouts/readthedocs.org'
print "Deleting " + to_del
shutil.rmtree('{root}/user_builds/{slug}'.format(root=root, slug=to_del), ignore_errors=True)
shutil.rmtree('{root}/media/pdf/{slug}'.format(root=root, slug=to_del), ignore_errors=True)
shutil.rmtree('{root}/media/epub/{slug}'.format(root=root, slug=to_del), ignore_errors=True)
shutil.rmtree('{root}/media/json/{slug}'.format(root=root, slug=to_del), ignore_errors=True)
shutil.rmtree('{root}/media/man/{slug}'.format(root=root, slug=to_del), ignore_errors=True)
shutil.rmtree('{root}/media/htmlzip/{slug}'.format(root=root, slug=to_del), ignore_errors=True)
<commit_msg>Add rtd import to master<commit_after>import shutil
import os
from readthedocs.projects.models import Project
slugs = [p.slug for p in Project.objects.all()]
build_projects = os.listdir('/home/docs/checkouts/readthedocs.org/user_builds/')
final = []
for slug in build_projects:
if slug not in slugs and slug.replace('_', '-') not in slugs:
final.append(slug)
print "To delete: %s" % len(final)
for to_del in final:
root = '/home/docs/checkouts/readthedocs.org'
print "Deleting " + to_del
shutil.rmtree('{root}/user_builds/{slug}'.format(root=root, slug=to_del), ignore_errors=True)
shutil.rmtree('{root}/media/pdf/{slug}'.format(root=root, slug=to_del), ignore_errors=True)
shutil.rmtree('{root}/media/epub/{slug}'.format(root=root, slug=to_del), ignore_errors=True)
shutil.rmtree('{root}/media/json/{slug}'.format(root=root, slug=to_del), ignore_errors=True)
shutil.rmtree('{root}/media/man/{slug}'.format(root=root, slug=to_del), ignore_errors=True)
shutil.rmtree('{root}/media/htmlzip/{slug}'.format(root=root, slug=to_del), ignore_errors=True)
|
c1dc3c503d09e95321fc6f3fe3d7ab114ff58fc9
|
patty/segmentation/pointCloudMeasurer.py
|
patty/segmentation/pointCloudMeasurer.py
|
import numpy as np
from sklearn.decomposition import PCA
def measureLength(pointCloud):
"""Returns the length of a point cloud in its longest direction."""
if len(pointCloud) == 0:
return 0
pca = PCA(n_components = 1)
pca.fit(np.asarray(pointCloud))
primary_axis = np.dot(pointCloud, np.transpose(pca.components_))[:,0]
return np.max(primary_axis) - np.min(primary_axis)
|
import numpy as np
from sklearn.decomposition import PCA
def measureLength(pointCloud):
"""Returns the length of a point cloud in its longest direction."""
if len(pointCloud) == 0:
return 0
pca = PCA(n_components = 1)
pc_array = np.asarray(pointCloud)
pca.fit(pc_array)
primary_axis = np.dot(pc_array, np.transpose(pca.components_))[:,0]
return np.max(primary_axis) - np.min(primary_axis)
|
Make sure np.array is used for PCA in measureLength
|
Make sure np.array is used for PCA in measureLength
|
Python
|
apache-2.0
|
NLeSC/PattyAnalytics
|
import numpy as np
from sklearn.decomposition import PCA
def measureLength(pointCloud):
"""Returns the length of a point cloud in its longest direction."""
if len(pointCloud) == 0:
return 0
pca = PCA(n_components = 1)
pca.fit(np.asarray(pointCloud))
primary_axis = np.dot(pointCloud, np.transpose(pca.components_))[:,0]
return np.max(primary_axis) - np.min(primary_axis)
Make sure np.array is used for PCA in measureLength
|
import numpy as np
from sklearn.decomposition import PCA
def measureLength(pointCloud):
"""Returns the length of a point cloud in its longest direction."""
if len(pointCloud) == 0:
return 0
pca = PCA(n_components = 1)
pc_array = np.asarray(pointCloud)
pca.fit(pc_array)
primary_axis = np.dot(pc_array, np.transpose(pca.components_))[:,0]
return np.max(primary_axis) - np.min(primary_axis)
|
<commit_before>import numpy as np
from sklearn.decomposition import PCA
def measureLength(pointCloud):
"""Returns the length of a point cloud in its longest direction."""
if len(pointCloud) == 0:
return 0
pca = PCA(n_components = 1)
pca.fit(np.asarray(pointCloud))
primary_axis = np.dot(pointCloud, np.transpose(pca.components_))[:,0]
return np.max(primary_axis) - np.min(primary_axis)
<commit_msg>Make sure np.array is used for PCA in measureLength<commit_after>
|
import numpy as np
from sklearn.decomposition import PCA
def measureLength(pointCloud):
"""Returns the length of a point cloud in its longest direction."""
if len(pointCloud) == 0:
return 0
pca = PCA(n_components = 1)
pc_array = np.asarray(pointCloud)
pca.fit(pc_array)
primary_axis = np.dot(pc_array, np.transpose(pca.components_))[:,0]
return np.max(primary_axis) - np.min(primary_axis)
|
import numpy as np
from sklearn.decomposition import PCA
def measureLength(pointCloud):
"""Returns the length of a point cloud in its longest direction."""
if len(pointCloud) == 0:
return 0
pca = PCA(n_components = 1)
pca.fit(np.asarray(pointCloud))
primary_axis = np.dot(pointCloud, np.transpose(pca.components_))[:,0]
return np.max(primary_axis) - np.min(primary_axis)
Make sure np.array is used for PCA in measureLengthimport numpy as np
from sklearn.decomposition import PCA
def measureLength(pointCloud):
"""Returns the length of a point cloud in its longest direction."""
if len(pointCloud) == 0:
return 0
pca = PCA(n_components = 1)
pc_array = np.asarray(pointCloud)
pca.fit(pc_array)
primary_axis = np.dot(pc_array, np.transpose(pca.components_))[:,0]
return np.max(primary_axis) - np.min(primary_axis)
|
<commit_before>import numpy as np
from sklearn.decomposition import PCA
def measureLength(pointCloud):
"""Returns the length of a point cloud in its longest direction."""
if len(pointCloud) == 0:
return 0
pca = PCA(n_components = 1)
pca.fit(np.asarray(pointCloud))
primary_axis = np.dot(pointCloud, np.transpose(pca.components_))[:,0]
return np.max(primary_axis) - np.min(primary_axis)
<commit_msg>Make sure np.array is used for PCA in measureLength<commit_after>import numpy as np
from sklearn.decomposition import PCA
def measureLength(pointCloud):
"""Returns the length of a point cloud in its longest direction."""
if len(pointCloud) == 0:
return 0
pca = PCA(n_components = 1)
pc_array = np.asarray(pointCloud)
pca.fit(pc_array)
primary_axis = np.dot(pc_array, np.transpose(pca.components_))[:,0]
return np.max(primary_axis) - np.min(primary_axis)
|
42f21057388361e50416197b25be9dfbdb2764b0
|
any_imagefield/forms/widgets.py
|
any_imagefield/forms/widgets.py
|
import mimetypes
from django.contrib.admin.widgets import AdminFileWidget
from django.template.loader import render_to_string
class ImagePreviewWidget(AdminFileWidget):
"""
An :class:`~django.forms.FileInput` widget that also displays a preview of the image.
"""
template_with_initial = u'%(clear_template)s</p><p>%(input_text)s: %(input)s'
def render(self, name, value, attrs=None):
is_image = False
if value:
if hasattr(value, 'path'):
(mime_type, encoding) = mimetypes.guess_type(value.path)
else:
# Try to guess mime_type from name alone, for remote FileSystems (S3, etc...)
(mime_type, encoding) = mimetypes.guess_type(value.name)
is_image = mime_type and mime_type.startswith('image/')
# Render different field for replacing
input_field = super(ImagePreviewWidget, self).render(name, value, attrs)
if not value:
return input_field
else:
return render_to_string("any_imagefield/imagepreviewwidget/update.html", {
'value': value,
'is_image': is_image,
'input_field': input_field,
'input_text': self.input_text,
})
|
import mimetypes
from django.contrib.admin.widgets import AdminFileWidget
from django.template.loader import render_to_string
class ImagePreviewWidget(AdminFileWidget):
"""
An :class:`~django.forms.FileInput` widget that also displays a preview of the image.
"""
template_with_initial = u'%(clear_template)s</p><p>%(input_text)s: %(input)s'
def render(self, name, value, *args, **kwargs):
is_image = False
if value:
if hasattr(value, 'path'):
(mime_type, encoding) = mimetypes.guess_type(value.path)
else:
# Try to guess mime_type from name alone, for remote FileSystems (S3, etc...)
(mime_type, encoding) = mimetypes.guess_type(value.name)
is_image = mime_type and mime_type.startswith('image/')
# Render different field for replacing
input_field = super(ImagePreviewWidget, self).render(name, value, *args, **kwargs)
if not value:
return input_field
else:
return render_to_string("any_imagefield/imagepreviewwidget/update.html", {
'value': value,
'is_image': is_image,
'input_field': input_field,
'input_text': self.input_text,
})
|
Fix render() kwargs for Django 2.1
|
Fix render() kwargs for Django 2.1
|
Python
|
apache-2.0
|
edoburu/django-any-imagefield,edoburu/django-any-imagefield
|
import mimetypes
from django.contrib.admin.widgets import AdminFileWidget
from django.template.loader import render_to_string
class ImagePreviewWidget(AdminFileWidget):
"""
An :class:`~django.forms.FileInput` widget that also displays a preview of the image.
"""
template_with_initial = u'%(clear_template)s</p><p>%(input_text)s: %(input)s'
def render(self, name, value, attrs=None):
is_image = False
if value:
if hasattr(value, 'path'):
(mime_type, encoding) = mimetypes.guess_type(value.path)
else:
# Try to guess mime_type from name alone, for remote FileSystems (S3, etc...)
(mime_type, encoding) = mimetypes.guess_type(value.name)
is_image = mime_type and mime_type.startswith('image/')
# Render different field for replacing
input_field = super(ImagePreviewWidget, self).render(name, value, attrs)
if not value:
return input_field
else:
return render_to_string("any_imagefield/imagepreviewwidget/update.html", {
'value': value,
'is_image': is_image,
'input_field': input_field,
'input_text': self.input_text,
})
Fix render() kwargs for Django 2.1
|
import mimetypes
from django.contrib.admin.widgets import AdminFileWidget
from django.template.loader import render_to_string
class ImagePreviewWidget(AdminFileWidget):
"""
An :class:`~django.forms.FileInput` widget that also displays a preview of the image.
"""
template_with_initial = u'%(clear_template)s</p><p>%(input_text)s: %(input)s'
def render(self, name, value, *args, **kwargs):
is_image = False
if value:
if hasattr(value, 'path'):
(mime_type, encoding) = mimetypes.guess_type(value.path)
else:
# Try to guess mime_type from name alone, for remote FileSystems (S3, etc...)
(mime_type, encoding) = mimetypes.guess_type(value.name)
is_image = mime_type and mime_type.startswith('image/')
# Render different field for replacing
input_field = super(ImagePreviewWidget, self).render(name, value, *args, **kwargs)
if not value:
return input_field
else:
return render_to_string("any_imagefield/imagepreviewwidget/update.html", {
'value': value,
'is_image': is_image,
'input_field': input_field,
'input_text': self.input_text,
})
|
<commit_before>import mimetypes
from django.contrib.admin.widgets import AdminFileWidget
from django.template.loader import render_to_string
class ImagePreviewWidget(AdminFileWidget):
"""
An :class:`~django.forms.FileInput` widget that also displays a preview of the image.
"""
template_with_initial = u'%(clear_template)s</p><p>%(input_text)s: %(input)s'
def render(self, name, value, attrs=None):
is_image = False
if value:
if hasattr(value, 'path'):
(mime_type, encoding) = mimetypes.guess_type(value.path)
else:
# Try to guess mime_type from name alone, for remote FileSystems (S3, etc...)
(mime_type, encoding) = mimetypes.guess_type(value.name)
is_image = mime_type and mime_type.startswith('image/')
# Render different field for replacing
input_field = super(ImagePreviewWidget, self).render(name, value, attrs)
if not value:
return input_field
else:
return render_to_string("any_imagefield/imagepreviewwidget/update.html", {
'value': value,
'is_image': is_image,
'input_field': input_field,
'input_text': self.input_text,
})
<commit_msg>Fix render() kwargs for Django 2.1<commit_after>
|
import mimetypes
from django.contrib.admin.widgets import AdminFileWidget
from django.template.loader import render_to_string
class ImagePreviewWidget(AdminFileWidget):
"""
An :class:`~django.forms.FileInput` widget that also displays a preview of the image.
"""
template_with_initial = u'%(clear_template)s</p><p>%(input_text)s: %(input)s'
def render(self, name, value, *args, **kwargs):
is_image = False
if value:
if hasattr(value, 'path'):
(mime_type, encoding) = mimetypes.guess_type(value.path)
else:
# Try to guess mime_type from name alone, for remote FileSystems (S3, etc...)
(mime_type, encoding) = mimetypes.guess_type(value.name)
is_image = mime_type and mime_type.startswith('image/')
# Render different field for replacing
input_field = super(ImagePreviewWidget, self).render(name, value, *args, **kwargs)
if not value:
return input_field
else:
return render_to_string("any_imagefield/imagepreviewwidget/update.html", {
'value': value,
'is_image': is_image,
'input_field': input_field,
'input_text': self.input_text,
})
|
import mimetypes
from django.contrib.admin.widgets import AdminFileWidget
from django.template.loader import render_to_string
class ImagePreviewWidget(AdminFileWidget):
"""
An :class:`~django.forms.FileInput` widget that also displays a preview of the image.
"""
template_with_initial = u'%(clear_template)s</p><p>%(input_text)s: %(input)s'
def render(self, name, value, attrs=None):
is_image = False
if value:
if hasattr(value, 'path'):
(mime_type, encoding) = mimetypes.guess_type(value.path)
else:
# Try to guess mime_type from name alone, for remote FileSystems (S3, etc...)
(mime_type, encoding) = mimetypes.guess_type(value.name)
is_image = mime_type and mime_type.startswith('image/')
# Render different field for replacing
input_field = super(ImagePreviewWidget, self).render(name, value, attrs)
if not value:
return input_field
else:
return render_to_string("any_imagefield/imagepreviewwidget/update.html", {
'value': value,
'is_image': is_image,
'input_field': input_field,
'input_text': self.input_text,
})
Fix render() kwargs for Django 2.1import mimetypes
from django.contrib.admin.widgets import AdminFileWidget
from django.template.loader import render_to_string
class ImagePreviewWidget(AdminFileWidget):
"""
An :class:`~django.forms.FileInput` widget that also displays a preview of the image.
"""
template_with_initial = u'%(clear_template)s</p><p>%(input_text)s: %(input)s'
def render(self, name, value, *args, **kwargs):
is_image = False
if value:
if hasattr(value, 'path'):
(mime_type, encoding) = mimetypes.guess_type(value.path)
else:
# Try to guess mime_type from name alone, for remote FileSystems (S3, etc...)
(mime_type, encoding) = mimetypes.guess_type(value.name)
is_image = mime_type and mime_type.startswith('image/')
# Render different field for replacing
input_field = super(ImagePreviewWidget, self).render(name, value, *args, **kwargs)
if not value:
return input_field
else:
return render_to_string("any_imagefield/imagepreviewwidget/update.html", {
'value': value,
'is_image': is_image,
'input_field': input_field,
'input_text': self.input_text,
})
|
<commit_before>import mimetypes
from django.contrib.admin.widgets import AdminFileWidget
from django.template.loader import render_to_string
class ImagePreviewWidget(AdminFileWidget):
"""
An :class:`~django.forms.FileInput` widget that also displays a preview of the image.
"""
template_with_initial = u'%(clear_template)s</p><p>%(input_text)s: %(input)s'
def render(self, name, value, attrs=None):
is_image = False
if value:
if hasattr(value, 'path'):
(mime_type, encoding) = mimetypes.guess_type(value.path)
else:
# Try to guess mime_type from name alone, for remote FileSystems (S3, etc...)
(mime_type, encoding) = mimetypes.guess_type(value.name)
is_image = mime_type and mime_type.startswith('image/')
# Render different field for replacing
input_field = super(ImagePreviewWidget, self).render(name, value, attrs)
if not value:
return input_field
else:
return render_to_string("any_imagefield/imagepreviewwidget/update.html", {
'value': value,
'is_image': is_image,
'input_field': input_field,
'input_text': self.input_text,
})
<commit_msg>Fix render() kwargs for Django 2.1<commit_after>import mimetypes
from django.contrib.admin.widgets import AdminFileWidget
from django.template.loader import render_to_string
class ImagePreviewWidget(AdminFileWidget):
"""
An :class:`~django.forms.FileInput` widget that also displays a preview of the image.
"""
template_with_initial = u'%(clear_template)s</p><p>%(input_text)s: %(input)s'
def render(self, name, value, *args, **kwargs):
is_image = False
if value:
if hasattr(value, 'path'):
(mime_type, encoding) = mimetypes.guess_type(value.path)
else:
# Try to guess mime_type from name alone, for remote FileSystems (S3, etc...)
(mime_type, encoding) = mimetypes.guess_type(value.name)
is_image = mime_type and mime_type.startswith('image/')
# Render different field for replacing
input_field = super(ImagePreviewWidget, self).render(name, value, *args, **kwargs)
if not value:
return input_field
else:
return render_to_string("any_imagefield/imagepreviewwidget/update.html", {
'value': value,
'is_image': is_image,
'input_field': input_field,
'input_text': self.input_text,
})
|
8a309491f6370814f88d8be7e5b7c697c4b59dcd
|
great_expectations/__init__.py
|
great_expectations/__init__.py
|
import pandas as pd
from util import *
import dataset
from pkg_resources import get_distribution
try:
__version__ = get_distribution('great_expectations').version
except:
pass
def list_sources():
raise NotImplementedError
def connect_to_datasource():
raise NotImplementedError
def connect_to_dataset():
raise NotImplementedError
def read_csv(filename, dataset_config=None, *args, **kwargs):
df = pd.read_csv(filename, *args, **kwargs)
df.__class__ = dataset.pandas_dataset.PandasDataSet
df.initialize_expectations(dataset_config)
return df
def expect(data_source_str, expectation):
raise NotImplementedError
|
import pandas as pd
from .util import *
import dataset
from pkg_resources import get_distribution
try:
__version__ = get_distribution('great_expectations').version
except:
pass
def list_sources():
raise NotImplementedError
def connect_to_datasource():
raise NotImplementedError
def connect_to_dataset():
raise NotImplementedError
def read_csv(filename, dataset_config=None, *args, **kwargs):
df = pd.read_csv(filename, *args, **kwargs)
df.__class__ = dataset.pandas_dataset.PandasDataSet
df.initialize_expectations(dataset_config)
return df
def df(df, dataset_config=None, *args, **kwargs):
df.__class__ = dataset.pandas_dataset.PandasDataSet
df.initialize_expectations(dataset_config)
return df
def expect(data_source_str, expectation):
raise NotImplementedError
|
Change import util to .util to support python 3
|
Change import util to .util to support python 3
|
Python
|
apache-2.0
|
great-expectations/great_expectations,great-expectations/great_expectations,great-expectations/great_expectations,great-expectations/great_expectations
|
import pandas as pd
from util import *
import dataset
from pkg_resources import get_distribution
try:
__version__ = get_distribution('great_expectations').version
except:
pass
def list_sources():
raise NotImplementedError
def connect_to_datasource():
raise NotImplementedError
def connect_to_dataset():
raise NotImplementedError
def read_csv(filename, dataset_config=None, *args, **kwargs):
df = pd.read_csv(filename, *args, **kwargs)
df.__class__ = dataset.pandas_dataset.PandasDataSet
df.initialize_expectations(dataset_config)
return df
def expect(data_source_str, expectation):
raise NotImplementedError
Change import util to .util to support python 3
|
import pandas as pd
from .util import *
import dataset
from pkg_resources import get_distribution
try:
__version__ = get_distribution('great_expectations').version
except:
pass
def list_sources():
raise NotImplementedError
def connect_to_datasource():
raise NotImplementedError
def connect_to_dataset():
raise NotImplementedError
def read_csv(filename, dataset_config=None, *args, **kwargs):
df = pd.read_csv(filename, *args, **kwargs)
df.__class__ = dataset.pandas_dataset.PandasDataSet
df.initialize_expectations(dataset_config)
return df
def df(df, dataset_config=None, *args, **kwargs):
df.__class__ = dataset.pandas_dataset.PandasDataSet
df.initialize_expectations(dataset_config)
return df
def expect(data_source_str, expectation):
raise NotImplementedError
|
<commit_before>import pandas as pd
from util import *
import dataset
from pkg_resources import get_distribution
try:
__version__ = get_distribution('great_expectations').version
except:
pass
def list_sources():
raise NotImplementedError
def connect_to_datasource():
raise NotImplementedError
def connect_to_dataset():
raise NotImplementedError
def read_csv(filename, dataset_config=None, *args, **kwargs):
df = pd.read_csv(filename, *args, **kwargs)
df.__class__ = dataset.pandas_dataset.PandasDataSet
df.initialize_expectations(dataset_config)
return df
def expect(data_source_str, expectation):
raise NotImplementedError
<commit_msg>Change import util to .util to support python 3<commit_after>
|
import pandas as pd
from .util import *
import dataset
from pkg_resources import get_distribution
try:
__version__ = get_distribution('great_expectations').version
except:
pass
def list_sources():
raise NotImplementedError
def connect_to_datasource():
raise NotImplementedError
def connect_to_dataset():
raise NotImplementedError
def read_csv(filename, dataset_config=None, *args, **kwargs):
df = pd.read_csv(filename, *args, **kwargs)
df.__class__ = dataset.pandas_dataset.PandasDataSet
df.initialize_expectations(dataset_config)
return df
def df(df, dataset_config=None, *args, **kwargs):
df.__class__ = dataset.pandas_dataset.PandasDataSet
df.initialize_expectations(dataset_config)
return df
def expect(data_source_str, expectation):
raise NotImplementedError
|
import pandas as pd
from util import *
import dataset
from pkg_resources import get_distribution
try:
__version__ = get_distribution('great_expectations').version
except:
pass
def list_sources():
raise NotImplementedError
def connect_to_datasource():
raise NotImplementedError
def connect_to_dataset():
raise NotImplementedError
def read_csv(filename, dataset_config=None, *args, **kwargs):
df = pd.read_csv(filename, *args, **kwargs)
df.__class__ = dataset.pandas_dataset.PandasDataSet
df.initialize_expectations(dataset_config)
return df
def expect(data_source_str, expectation):
raise NotImplementedError
Change import util to .util to support python 3import pandas as pd
from .util import *
import dataset
from pkg_resources import get_distribution
try:
__version__ = get_distribution('great_expectations').version
except:
pass
def list_sources():
raise NotImplementedError
def connect_to_datasource():
raise NotImplementedError
def connect_to_dataset():
raise NotImplementedError
def read_csv(filename, dataset_config=None, *args, **kwargs):
df = pd.read_csv(filename, *args, **kwargs)
df.__class__ = dataset.pandas_dataset.PandasDataSet
df.initialize_expectations(dataset_config)
return df
def df(df, dataset_config=None, *args, **kwargs):
df.__class__ = dataset.pandas_dataset.PandasDataSet
df.initialize_expectations(dataset_config)
return df
def expect(data_source_str, expectation):
raise NotImplementedError
|
<commit_before>import pandas as pd
from util import *
import dataset
from pkg_resources import get_distribution
try:
__version__ = get_distribution('great_expectations').version
except:
pass
def list_sources():
raise NotImplementedError
def connect_to_datasource():
raise NotImplementedError
def connect_to_dataset():
raise NotImplementedError
def read_csv(filename, dataset_config=None, *args, **kwargs):
df = pd.read_csv(filename, *args, **kwargs)
df.__class__ = dataset.pandas_dataset.PandasDataSet
df.initialize_expectations(dataset_config)
return df
def expect(data_source_str, expectation):
raise NotImplementedError
<commit_msg>Change import util to .util to support python 3<commit_after>import pandas as pd
from .util import *
import dataset
from pkg_resources import get_distribution
try:
__version__ = get_distribution('great_expectations').version
except:
pass
def list_sources():
raise NotImplementedError
def connect_to_datasource():
raise NotImplementedError
def connect_to_dataset():
raise NotImplementedError
def read_csv(filename, dataset_config=None, *args, **kwargs):
df = pd.read_csv(filename, *args, **kwargs)
df.__class__ = dataset.pandas_dataset.PandasDataSet
df.initialize_expectations(dataset_config)
return df
def df(df, dataset_config=None, *args, **kwargs):
df.__class__ = dataset.pandas_dataset.PandasDataSet
df.initialize_expectations(dataset_config)
return df
def expect(data_source_str, expectation):
raise NotImplementedError
|
ccafc2164b0b4a1734a1d4c47bf237eea293eae4
|
nodeconductor/logging/admin.py
|
nodeconductor/logging/admin.py
|
from django.contrib import admin
from nodeconductor.logging import models
class AlertAdmin(admin.ModelAdmin):
list_display = ('uuid', 'alert_type', 'closed', 'scope', 'severity')
ordering = ('alert_type',)
base_model = models.Alert
admin.site.register(models.Alert, AlertAdmin)
|
from django.contrib import admin
from nodeconductor.logging import models
class AlertAdmin(admin.ModelAdmin):
list_display = ('uuid', 'alert_type', 'created', 'closed', 'scope', 'severity')
list_filter = ('alert_type', 'created', 'closed', 'severity')
ordering = ('alert_type',)
base_model = models.Alert
admin.site.register(models.Alert, AlertAdmin)
|
Add list filtering for alerts
|
Add list filtering for alerts
|
Python
|
mit
|
opennode/nodeconductor,opennode/nodeconductor,opennode/nodeconductor
|
from django.contrib import admin
from nodeconductor.logging import models
class AlertAdmin(admin.ModelAdmin):
list_display = ('uuid', 'alert_type', 'closed', 'scope', 'severity')
ordering = ('alert_type',)
base_model = models.Alert
admin.site.register(models.Alert, AlertAdmin)
Add list filtering for alerts
|
from django.contrib import admin
from nodeconductor.logging import models
class AlertAdmin(admin.ModelAdmin):
list_display = ('uuid', 'alert_type', 'created', 'closed', 'scope', 'severity')
list_filter = ('alert_type', 'created', 'closed', 'severity')
ordering = ('alert_type',)
base_model = models.Alert
admin.site.register(models.Alert, AlertAdmin)
|
<commit_before>from django.contrib import admin
from nodeconductor.logging import models
class AlertAdmin(admin.ModelAdmin):
list_display = ('uuid', 'alert_type', 'closed', 'scope', 'severity')
ordering = ('alert_type',)
base_model = models.Alert
admin.site.register(models.Alert, AlertAdmin)
<commit_msg>Add list filtering for alerts<commit_after>
|
from django.contrib import admin
from nodeconductor.logging import models
class AlertAdmin(admin.ModelAdmin):
list_display = ('uuid', 'alert_type', 'created', 'closed', 'scope', 'severity')
list_filter = ('alert_type', 'created', 'closed', 'severity')
ordering = ('alert_type',)
base_model = models.Alert
admin.site.register(models.Alert, AlertAdmin)
|
from django.contrib import admin
from nodeconductor.logging import models
class AlertAdmin(admin.ModelAdmin):
list_display = ('uuid', 'alert_type', 'closed', 'scope', 'severity')
ordering = ('alert_type',)
base_model = models.Alert
admin.site.register(models.Alert, AlertAdmin)
Add list filtering for alertsfrom django.contrib import admin
from nodeconductor.logging import models
class AlertAdmin(admin.ModelAdmin):
list_display = ('uuid', 'alert_type', 'created', 'closed', 'scope', 'severity')
list_filter = ('alert_type', 'created', 'closed', 'severity')
ordering = ('alert_type',)
base_model = models.Alert
admin.site.register(models.Alert, AlertAdmin)
|
<commit_before>from django.contrib import admin
from nodeconductor.logging import models
class AlertAdmin(admin.ModelAdmin):
list_display = ('uuid', 'alert_type', 'closed', 'scope', 'severity')
ordering = ('alert_type',)
base_model = models.Alert
admin.site.register(models.Alert, AlertAdmin)
<commit_msg>Add list filtering for alerts<commit_after>from django.contrib import admin
from nodeconductor.logging import models
class AlertAdmin(admin.ModelAdmin):
list_display = ('uuid', 'alert_type', 'created', 'closed', 'scope', 'severity')
list_filter = ('alert_type', 'created', 'closed', 'severity')
ordering = ('alert_type',)
base_model = models.Alert
admin.site.register(models.Alert, AlertAdmin)
|
f9b09b751d7d3a61016ee49ef5fb27f904a70100
|
reppy/exceptions.py
|
reppy/exceptions.py
|
#! /usr/bin/env python
#
# Copyright (c) 2011 SEOmoz
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
'''All of our exceptions'''
class ReppyException(Exception):
'''Any internal exception'''
pass
class ServerError(ReppyException):
'''When the remote server returns an error'''
def __init__(self, *args, **kwargs):
self.status = kwargs.get('status')
if self.status is None and len(args) >= 2:
self.status = args[1]
ReppyException.init(self, *args, **kwargs)
|
#! /usr/bin/env python
#
# Copyright (c) 2011 SEOmoz
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
'''All of our exceptions'''
class ReppyException(Exception):
'''Any internal exception'''
pass
class ServerError(ReppyException):
'''When the remote server returns an error'''
def __init__(self, *args, **kwargs):
self.status = kwargs.get('status')
if self.status is None and len(args) >= 2:
self.status = args[1]
ReppyException.__init__(self, *args, **kwargs)
|
Fix a rather stupid bug.
|
Fix a rather stupid bug.
|
Python
|
mit
|
pombredanne/reppy,seomoz/reppy,seomoz/reppy,pombredanne/reppy
|
#! /usr/bin/env python
#
# Copyright (c) 2011 SEOmoz
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
'''All of our exceptions'''
class ReppyException(Exception):
'''Any internal exception'''
pass
class ServerError(ReppyException):
'''When the remote server returns an error'''
def __init__(self, *args, **kwargs):
self.status = kwargs.get('status')
if self.status is None and len(args) >= 2:
self.status = args[1]
ReppyException.init(self, *args, **kwargs)
Fix a rather stupid bug.
|
#! /usr/bin/env python
#
# Copyright (c) 2011 SEOmoz
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
'''All of our exceptions'''
class ReppyException(Exception):
'''Any internal exception'''
pass
class ServerError(ReppyException):
'''When the remote server returns an error'''
def __init__(self, *args, **kwargs):
self.status = kwargs.get('status')
if self.status is None and len(args) >= 2:
self.status = args[1]
ReppyException.__init__(self, *args, **kwargs)
|
<commit_before>#! /usr/bin/env python
#
# Copyright (c) 2011 SEOmoz
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
'''All of our exceptions'''
class ReppyException(Exception):
'''Any internal exception'''
pass
class ServerError(ReppyException):
'''When the remote server returns an error'''
def __init__(self, *args, **kwargs):
self.status = kwargs.get('status')
if self.status is None and len(args) >= 2:
self.status = args[1]
ReppyException.init(self, *args, **kwargs)
<commit_msg>Fix a rather stupid bug.<commit_after>
|
#! /usr/bin/env python
#
# Copyright (c) 2011 SEOmoz
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
'''All of our exceptions'''
class ReppyException(Exception):
'''Any internal exception'''
pass
class ServerError(ReppyException):
'''When the remote server returns an error'''
def __init__(self, *args, **kwargs):
self.status = kwargs.get('status')
if self.status is None and len(args) >= 2:
self.status = args[1]
ReppyException.__init__(self, *args, **kwargs)
|
#! /usr/bin/env python
#
# Copyright (c) 2011 SEOmoz
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
'''All of our exceptions'''
class ReppyException(Exception):
'''Any internal exception'''
pass
class ServerError(ReppyException):
'''When the remote server returns an error'''
def __init__(self, *args, **kwargs):
self.status = kwargs.get('status')
if self.status is None and len(args) >= 2:
self.status = args[1]
ReppyException.init(self, *args, **kwargs)
Fix a rather stupid bug.#! /usr/bin/env python
#
# Copyright (c) 2011 SEOmoz
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
'''All of our exceptions'''
class ReppyException(Exception):
'''Any internal exception'''
pass
class ServerError(ReppyException):
'''When the remote server returns an error'''
def __init__(self, *args, **kwargs):
self.status = kwargs.get('status')
if self.status is None and len(args) >= 2:
self.status = args[1]
ReppyException.__init__(self, *args, **kwargs)
|
<commit_before>#! /usr/bin/env python
#
# Copyright (c) 2011 SEOmoz
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
'''All of our exceptions'''
class ReppyException(Exception):
'''Any internal exception'''
pass
class ServerError(ReppyException):
'''When the remote server returns an error'''
def __init__(self, *args, **kwargs):
self.status = kwargs.get('status')
if self.status is None and len(args) >= 2:
self.status = args[1]
ReppyException.init(self, *args, **kwargs)
<commit_msg>Fix a rather stupid bug.<commit_after>#! /usr/bin/env python
#
# Copyright (c) 2011 SEOmoz
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
'''All of our exceptions'''
class ReppyException(Exception):
'''Any internal exception'''
pass
class ServerError(ReppyException):
'''When the remote server returns an error'''
def __init__(self, *args, **kwargs):
self.status = kwargs.get('status')
if self.status is None and len(args) >= 2:
self.status = args[1]
ReppyException.__init__(self, *args, **kwargs)
|
47428a8645203e3bf7b7c204b3157b41a0cddb33
|
linguist/tests/test_translation.py
|
linguist/tests/test_translation.py
|
# -*- coding: utf-8 -*-
from django.test import TestCase
from django.test.utils import override_settings
from ..registry import LinguistRegistry as Registry
from . import settings
from .translations import (
FooModel,
FooTranslation
)
LANGUAGES = [l[0] for l in settings.LANGUAGES]
@override_settings(DEBUG=True)
class TranslationTest(TestCase):
"""
Tests the Linguist's Translation class.
"""
def setUp(self):
self.registry = Registry()
self.registry.register(FooTranslation)
self.instance = FooModel()
def test_fields(self):
for language in LANGUAGES:
self.assertIn('title_%s' % language, dir(FooModel))
def test_getter_setter(self):
with self.assertNumQueries(3):
# save = 1 query
self.instance.save()
# get / create "en" translation = 2 queries
self.instance.title = 'Hello'
self.assertEqual(self.instance.title_en, 'Hello')
self.assertIsNone(self.instance.title_fr)
self.instance.set_current_language('fr')
self.instance.title = 'Bonjour'
self.assertEqual(self.instance.title_en, 'Hello')
self.assertEqual(self.instance.title_fr, 'Bonjour')
|
# -*- coding: utf-8 -*-
from django.test import TestCase
from django.test.utils import override_settings
from ..registry import LinguistRegistry as Registry
from . import settings
from .translations import (
FooModel,
FooTranslation
)
LANGUAGES = [l[0] for l in settings.LANGUAGES]
@override_settings(DEBUG=True)
class TranslationTest(TestCase):
"""
Tests the Linguist's Translation class.
"""
def setUp(self):
self.registry = Registry()
self.registry.register(FooTranslation)
self.instance = FooModel()
def test_fields(self):
for language in LANGUAGES:
self.assertIn('title_%s' % language, dir(FooModel))
def test_getter_setter(self):
with self.assertNumQueries(3):
# save = 1 query
self.instance.save()
# get / create "en" translation = 2 queries
self.instance.title = 'Hello'
self.assertEqual(self.instance.title_en, 'Hello')
self.assertIsNone(self.instance.title_fr)
self.instance.language = 'fr'
self.instance.title = 'Bonjour'
self.assertEqual(self.instance.title_en, 'Hello')
self.assertEqual(self.instance.title_fr, 'Bonjour')
|
Test translations now uses instance.language.
|
Test translations now uses instance.language.
|
Python
|
mit
|
ulule/django-linguist
|
# -*- coding: utf-8 -*-
from django.test import TestCase
from django.test.utils import override_settings
from ..registry import LinguistRegistry as Registry
from . import settings
from .translations import (
FooModel,
FooTranslation
)
LANGUAGES = [l[0] for l in settings.LANGUAGES]
@override_settings(DEBUG=True)
class TranslationTest(TestCase):
"""
Tests the Linguist's Translation class.
"""
def setUp(self):
self.registry = Registry()
self.registry.register(FooTranslation)
self.instance = FooModel()
def test_fields(self):
for language in LANGUAGES:
self.assertIn('title_%s' % language, dir(FooModel))
def test_getter_setter(self):
with self.assertNumQueries(3):
# save = 1 query
self.instance.save()
# get / create "en" translation = 2 queries
self.instance.title = 'Hello'
self.assertEqual(self.instance.title_en, 'Hello')
self.assertIsNone(self.instance.title_fr)
self.instance.set_current_language('fr')
self.instance.title = 'Bonjour'
self.assertEqual(self.instance.title_en, 'Hello')
self.assertEqual(self.instance.title_fr, 'Bonjour')
Test translations now uses instance.language.
|
# -*- coding: utf-8 -*-
from django.test import TestCase
from django.test.utils import override_settings
from ..registry import LinguistRegistry as Registry
from . import settings
from .translations import (
FooModel,
FooTranslation
)
LANGUAGES = [l[0] for l in settings.LANGUAGES]
@override_settings(DEBUG=True)
class TranslationTest(TestCase):
"""
Tests the Linguist's Translation class.
"""
def setUp(self):
self.registry = Registry()
self.registry.register(FooTranslation)
self.instance = FooModel()
def test_fields(self):
for language in LANGUAGES:
self.assertIn('title_%s' % language, dir(FooModel))
def test_getter_setter(self):
with self.assertNumQueries(3):
# save = 1 query
self.instance.save()
# get / create "en" translation = 2 queries
self.instance.title = 'Hello'
self.assertEqual(self.instance.title_en, 'Hello')
self.assertIsNone(self.instance.title_fr)
self.instance.language = 'fr'
self.instance.title = 'Bonjour'
self.assertEqual(self.instance.title_en, 'Hello')
self.assertEqual(self.instance.title_fr, 'Bonjour')
|
<commit_before># -*- coding: utf-8 -*-
from django.test import TestCase
from django.test.utils import override_settings
from ..registry import LinguistRegistry as Registry
from . import settings
from .translations import (
FooModel,
FooTranslation
)
LANGUAGES = [l[0] for l in settings.LANGUAGES]
@override_settings(DEBUG=True)
class TranslationTest(TestCase):
"""
Tests the Linguist's Translation class.
"""
def setUp(self):
self.registry = Registry()
self.registry.register(FooTranslation)
self.instance = FooModel()
def test_fields(self):
for language in LANGUAGES:
self.assertIn('title_%s' % language, dir(FooModel))
def test_getter_setter(self):
with self.assertNumQueries(3):
# save = 1 query
self.instance.save()
# get / create "en" translation = 2 queries
self.instance.title = 'Hello'
self.assertEqual(self.instance.title_en, 'Hello')
self.assertIsNone(self.instance.title_fr)
self.instance.set_current_language('fr')
self.instance.title = 'Bonjour'
self.assertEqual(self.instance.title_en, 'Hello')
self.assertEqual(self.instance.title_fr, 'Bonjour')
<commit_msg>Test translations now uses instance.language.<commit_after>
|
# -*- coding: utf-8 -*-
from django.test import TestCase
from django.test.utils import override_settings
from ..registry import LinguistRegistry as Registry
from . import settings
from .translations import (
FooModel,
FooTranslation
)
LANGUAGES = [l[0] for l in settings.LANGUAGES]
@override_settings(DEBUG=True)
class TranslationTest(TestCase):
"""
Tests the Linguist's Translation class.
"""
def setUp(self):
self.registry = Registry()
self.registry.register(FooTranslation)
self.instance = FooModel()
def test_fields(self):
for language in LANGUAGES:
self.assertIn('title_%s' % language, dir(FooModel))
def test_getter_setter(self):
with self.assertNumQueries(3):
# save = 1 query
self.instance.save()
# get / create "en" translation = 2 queries
self.instance.title = 'Hello'
self.assertEqual(self.instance.title_en, 'Hello')
self.assertIsNone(self.instance.title_fr)
self.instance.language = 'fr'
self.instance.title = 'Bonjour'
self.assertEqual(self.instance.title_en, 'Hello')
self.assertEqual(self.instance.title_fr, 'Bonjour')
|
# -*- coding: utf-8 -*-
from django.test import TestCase
from django.test.utils import override_settings
from ..registry import LinguistRegistry as Registry
from . import settings
from .translations import (
FooModel,
FooTranslation
)
LANGUAGES = [l[0] for l in settings.LANGUAGES]
@override_settings(DEBUG=True)
class TranslationTest(TestCase):
"""
Tests the Linguist's Translation class.
"""
def setUp(self):
self.registry = Registry()
self.registry.register(FooTranslation)
self.instance = FooModel()
def test_fields(self):
for language in LANGUAGES:
self.assertIn('title_%s' % language, dir(FooModel))
def test_getter_setter(self):
with self.assertNumQueries(3):
# save = 1 query
self.instance.save()
# get / create "en" translation = 2 queries
self.instance.title = 'Hello'
self.assertEqual(self.instance.title_en, 'Hello')
self.assertIsNone(self.instance.title_fr)
self.instance.set_current_language('fr')
self.instance.title = 'Bonjour'
self.assertEqual(self.instance.title_en, 'Hello')
self.assertEqual(self.instance.title_fr, 'Bonjour')
Test translations now uses instance.language.# -*- coding: utf-8 -*-
from django.test import TestCase
from django.test.utils import override_settings
from ..registry import LinguistRegistry as Registry
from . import settings
from .translations import (
FooModel,
FooTranslation
)
LANGUAGES = [l[0] for l in settings.LANGUAGES]
@override_settings(DEBUG=True)
class TranslationTest(TestCase):
"""
Tests the Linguist's Translation class.
"""
def setUp(self):
self.registry = Registry()
self.registry.register(FooTranslation)
self.instance = FooModel()
def test_fields(self):
for language in LANGUAGES:
self.assertIn('title_%s' % language, dir(FooModel))
def test_getter_setter(self):
with self.assertNumQueries(3):
# save = 1 query
self.instance.save()
# get / create "en" translation = 2 queries
self.instance.title = 'Hello'
self.assertEqual(self.instance.title_en, 'Hello')
self.assertIsNone(self.instance.title_fr)
self.instance.language = 'fr'
self.instance.title = 'Bonjour'
self.assertEqual(self.instance.title_en, 'Hello')
self.assertEqual(self.instance.title_fr, 'Bonjour')
|
<commit_before># -*- coding: utf-8 -*-
from django.test import TestCase
from django.test.utils import override_settings
from ..registry import LinguistRegistry as Registry
from . import settings
from .translations import (
FooModel,
FooTranslation
)
LANGUAGES = [l[0] for l in settings.LANGUAGES]
@override_settings(DEBUG=True)
class TranslationTest(TestCase):
"""
Tests the Linguist's Translation class.
"""
def setUp(self):
self.registry = Registry()
self.registry.register(FooTranslation)
self.instance = FooModel()
def test_fields(self):
for language in LANGUAGES:
self.assertIn('title_%s' % language, dir(FooModel))
def test_getter_setter(self):
with self.assertNumQueries(3):
# save = 1 query
self.instance.save()
# get / create "en" translation = 2 queries
self.instance.title = 'Hello'
self.assertEqual(self.instance.title_en, 'Hello')
self.assertIsNone(self.instance.title_fr)
self.instance.set_current_language('fr')
self.instance.title = 'Bonjour'
self.assertEqual(self.instance.title_en, 'Hello')
self.assertEqual(self.instance.title_fr, 'Bonjour')
<commit_msg>Test translations now uses instance.language.<commit_after># -*- coding: utf-8 -*-
from django.test import TestCase
from django.test.utils import override_settings
from ..registry import LinguistRegistry as Registry
from . import settings
from .translations import (
FooModel,
FooTranslation
)
LANGUAGES = [l[0] for l in settings.LANGUAGES]
@override_settings(DEBUG=True)
class TranslationTest(TestCase):
"""
Tests the Linguist's Translation class.
"""
def setUp(self):
self.registry = Registry()
self.registry.register(FooTranslation)
self.instance = FooModel()
def test_fields(self):
for language in LANGUAGES:
self.assertIn('title_%s' % language, dir(FooModel))
def test_getter_setter(self):
with self.assertNumQueries(3):
# save = 1 query
self.instance.save()
# get / create "en" translation = 2 queries
self.instance.title = 'Hello'
self.assertEqual(self.instance.title_en, 'Hello')
self.assertIsNone(self.instance.title_fr)
self.instance.language = 'fr'
self.instance.title = 'Bonjour'
self.assertEqual(self.instance.title_en, 'Hello')
self.assertEqual(self.instance.title_fr, 'Bonjour')
|
cc4764da88f1629554ec3760f08ad6b299aae821
|
examples/basics/scene/sphere.py
|
examples/basics/scene/sphere.py
|
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) 2015, Vispy Development Team. All Rights Reserved.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
# -----------------------------------------------------------------------------
"""
This example demonstrates how to create a sphere.
"""
import sys
from vispy import scene
canvas = scene.SceneCanvas(keys='interactive', size=(800, 600), show=True)
view = canvas.central_widget.add_view()
view.camera = 'arcball'
view.padding = 100
sphere = scene.visuals.Sphere(radius=1, method='ico', parent=view.scene,
edge_color='black')
if __name__ == '__main__' and sys.flags.interactive == 0:
canvas.app.run()
|
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) 2015, Vispy Development Team. All Rights Reserved.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
# -----------------------------------------------------------------------------
"""
This example demonstrates how to create a sphere.
"""
import sys
from vispy import scene
from vispy.visuals.transforms import STTransform
canvas = scene.SceneCanvas(keys='interactive', bgcolor='white',
size=(800, 600), show=True)
view = canvas.central_widget.add_view()
view.camera = 'arcball'
sphere1 = scene.visuals.Sphere(radius=1, method='latlon', parent=view.scene,
edge_color='black')
sphere2 = scene.visuals.Sphere(radius=1, method='ico', parent=view.scene,
edge_color='black')
sphere3 = scene.visuals.Sphere(radius=1, method='cube', parent=view.scene,
edge_color='black')
sphere1.transform = STTransform(translate=[-2.5, 0, 0])
sphere3.transform = STTransform(translate=[2.5, 0, 0])
view.camera.set_range(x=[-3,3])
if __name__ == '__main__' and sys.flags.interactive == 0:
canvas.app.run()
|
Update example to demo various tessellations
|
Update example to demo various tessellations
|
Python
|
bsd-3-clause
|
inclement/vispy,drufat/vispy,ghisvail/vispy,michaelaye/vispy,ghisvail/vispy,srinathv/vispy,RebeccaWPerry/vispy,julienr/vispy,inclement/vispy,bollu/vispy,inclement/vispy,dchilds7/Deysha-Star-Formation,Eric89GXL/vispy,michaelaye/vispy,kkuunnddaannkk/vispy,michaelaye/vispy,dchilds7/Deysha-Star-Formation,jdreaver/vispy,drufat/vispy,QuLogic/vispy,srinathv/vispy,kkuunnddaannkk/vispy,drufat/vispy,jdreaver/vispy,Eric89GXL/vispy,RebeccaWPerry/vispy,srinathv/vispy,ghisvail/vispy,jdreaver/vispy,julienr/vispy,kkuunnddaannkk/vispy,julienr/vispy,dchilds7/Deysha-Star-Formation,bollu/vispy,QuLogic/vispy,QuLogic/vispy,RebeccaWPerry/vispy,bollu/vispy,Eric89GXL/vispy
|
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) 2015, Vispy Development Team. All Rights Reserved.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
# -----------------------------------------------------------------------------
"""
This example demonstrates how to create a sphere.
"""
import sys
from vispy import scene
canvas = scene.SceneCanvas(keys='interactive', size=(800, 600), show=True)
view = canvas.central_widget.add_view()
view.camera = 'arcball'
view.padding = 100
sphere = scene.visuals.Sphere(radius=1, method='ico', parent=view.scene,
edge_color='black')
if __name__ == '__main__' and sys.flags.interactive == 0:
canvas.app.run()
Update example to demo various tessellations
|
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) 2015, Vispy Development Team. All Rights Reserved.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
# -----------------------------------------------------------------------------
"""
This example demonstrates how to create a sphere.
"""
import sys
from vispy import scene
from vispy.visuals.transforms import STTransform
canvas = scene.SceneCanvas(keys='interactive', bgcolor='white',
size=(800, 600), show=True)
view = canvas.central_widget.add_view()
view.camera = 'arcball'
sphere1 = scene.visuals.Sphere(radius=1, method='latlon', parent=view.scene,
edge_color='black')
sphere2 = scene.visuals.Sphere(radius=1, method='ico', parent=view.scene,
edge_color='black')
sphere3 = scene.visuals.Sphere(radius=1, method='cube', parent=view.scene,
edge_color='black')
sphere1.transform = STTransform(translate=[-2.5, 0, 0])
sphere3.transform = STTransform(translate=[2.5, 0, 0])
view.camera.set_range(x=[-3,3])
if __name__ == '__main__' and sys.flags.interactive == 0:
canvas.app.run()
|
<commit_before># -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) 2015, Vispy Development Team. All Rights Reserved.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
# -----------------------------------------------------------------------------
"""
This example demonstrates how to create a sphere.
"""
import sys
from vispy import scene
canvas = scene.SceneCanvas(keys='interactive', size=(800, 600), show=True)
view = canvas.central_widget.add_view()
view.camera = 'arcball'
view.padding = 100
sphere = scene.visuals.Sphere(radius=1, method='ico', parent=view.scene,
edge_color='black')
if __name__ == '__main__' and sys.flags.interactive == 0:
canvas.app.run()
<commit_msg>Update example to demo various tessellations<commit_after>
|
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) 2015, Vispy Development Team. All Rights Reserved.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
# -----------------------------------------------------------------------------
"""
This example demonstrates how to create a sphere.
"""
import sys
from vispy import scene
from vispy.visuals.transforms import STTransform
canvas = scene.SceneCanvas(keys='interactive', bgcolor='white',
size=(800, 600), show=True)
view = canvas.central_widget.add_view()
view.camera = 'arcball'
sphere1 = scene.visuals.Sphere(radius=1, method='latlon', parent=view.scene,
edge_color='black')
sphere2 = scene.visuals.Sphere(radius=1, method='ico', parent=view.scene,
edge_color='black')
sphere3 = scene.visuals.Sphere(radius=1, method='cube', parent=view.scene,
edge_color='black')
sphere1.transform = STTransform(translate=[-2.5, 0, 0])
sphere3.transform = STTransform(translate=[2.5, 0, 0])
view.camera.set_range(x=[-3,3])
if __name__ == '__main__' and sys.flags.interactive == 0:
canvas.app.run()
|
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) 2015, Vispy Development Team. All Rights Reserved.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
# -----------------------------------------------------------------------------
"""
This example demonstrates how to create a sphere.
"""
import sys
from vispy import scene
canvas = scene.SceneCanvas(keys='interactive', size=(800, 600), show=True)
view = canvas.central_widget.add_view()
view.camera = 'arcball'
view.padding = 100
sphere = scene.visuals.Sphere(radius=1, method='ico', parent=view.scene,
edge_color='black')
if __name__ == '__main__' and sys.flags.interactive == 0:
canvas.app.run()
Update example to demo various tessellations# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) 2015, Vispy Development Team. All Rights Reserved.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
# -----------------------------------------------------------------------------
"""
This example demonstrates how to create a sphere.
"""
import sys
from vispy import scene
from vispy.visuals.transforms import STTransform
canvas = scene.SceneCanvas(keys='interactive', bgcolor='white',
size=(800, 600), show=True)
view = canvas.central_widget.add_view()
view.camera = 'arcball'
sphere1 = scene.visuals.Sphere(radius=1, method='latlon', parent=view.scene,
edge_color='black')
sphere2 = scene.visuals.Sphere(radius=1, method='ico', parent=view.scene,
edge_color='black')
sphere3 = scene.visuals.Sphere(radius=1, method='cube', parent=view.scene,
edge_color='black')
sphere1.transform = STTransform(translate=[-2.5, 0, 0])
sphere3.transform = STTransform(translate=[2.5, 0, 0])
view.camera.set_range(x=[-3,3])
if __name__ == '__main__' and sys.flags.interactive == 0:
canvas.app.run()
|
<commit_before># -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) 2015, Vispy Development Team. All Rights Reserved.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
# -----------------------------------------------------------------------------
"""
This example demonstrates how to create a sphere.
"""
import sys
from vispy import scene
canvas = scene.SceneCanvas(keys='interactive', size=(800, 600), show=True)
view = canvas.central_widget.add_view()
view.camera = 'arcball'
view.padding = 100
sphere = scene.visuals.Sphere(radius=1, method='ico', parent=view.scene,
edge_color='black')
if __name__ == '__main__' and sys.flags.interactive == 0:
canvas.app.run()
<commit_msg>Update example to demo various tessellations<commit_after># -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) 2015, Vispy Development Team. All Rights Reserved.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
# -----------------------------------------------------------------------------
"""
This example demonstrates how to create a sphere.
"""
import sys
from vispy import scene
from vispy.visuals.transforms import STTransform
canvas = scene.SceneCanvas(keys='interactive', bgcolor='white',
size=(800, 600), show=True)
view = canvas.central_widget.add_view()
view.camera = 'arcball'
sphere1 = scene.visuals.Sphere(radius=1, method='latlon', parent=view.scene,
edge_color='black')
sphere2 = scene.visuals.Sphere(radius=1, method='ico', parent=view.scene,
edge_color='black')
sphere3 = scene.visuals.Sphere(radius=1, method='cube', parent=view.scene,
edge_color='black')
sphere1.transform = STTransform(translate=[-2.5, 0, 0])
sphere3.transform = STTransform(translate=[2.5, 0, 0])
view.camera.set_range(x=[-3,3])
if __name__ == '__main__' and sys.flags.interactive == 0:
canvas.app.run()
|
ccf6626d86dd00b3f9848e19b3cb1139dba17e56
|
tests/integration-test/test_junctions_create.py
|
tests/integration-test/test_junctions_create.py
|
#!/usr/bin/env python
from integrationtest import IntegrationTest, main
import unittest
class TestCreate(IntegrationTest, unittest.TestCase):
def test_junctions_create(self):
bam1 = self.inputFiles("test_hcc1395.bam")[0]
output_file = self.tempFile("create.out")
print "BAM1 is ", bam1
for anchor in ["", "30"]:
expected_file = self.inputFiles("junctions-create/expected-a" +
anchor + ".out")[0]
if anchor != "":
anchor = "-a " + anchor
params = [ "junctions", "create", bam1, anchor, "-o", output_file]
rv, err = self.execute(params)
self.assertEqual(rv, 0)
#self.assertEqual('', err)
self.assertFilesEqual(expected_file, output_file)
if __name__ == "__main__":
main()
|
#!/usr/bin/env python
from integrationtest import IntegrationTest, main
import unittest
class TestCreate(IntegrationTest, unittest.TestCase):
def test_junctions_create(self):
bam1 = self.inputFiles("test_hcc1395.bam")[0]
output_file = self.tempFile("create.out")
print "BAM1 is ", bam1
for anchor in ["", "30"]:
expected_file = self.inputFiles("junctions-create/expected-a" +
anchor + ".out")[0]
if anchor != "":
anchor = "-a " + anchor
params = [ "junctions", "create", anchor, "-o", output_file, bam1 ]
rv, err = self.execute(params)
self.assertEqual(rv, 0)
#self.assertEqual('', err)
self.assertFilesEqual(expected_file, output_file)
if __name__ == "__main__":
main()
|
Move positional argument to the end.
|
Move positional argument to the end.
This doesn't seem to work on a Mac when option comes after the positional
argument, not sure why this is, something to do with options parsing.
|
Python
|
mit
|
tabbott/regtools,griffithlab/regtools,tabbott/regtools,tabbott/regtools,griffithlab/regtools,gatoravi/regtools,griffithlab/regtools,griffithlab/regtools,griffithlab/regtools,gatoravi/regtools,gatoravi/regtools,gatoravi/regtools,gatoravi/regtools,griffithlab/regtools,tabbott/regtools,tabbott/regtools
|
#!/usr/bin/env python
from integrationtest import IntegrationTest, main
import unittest
class TestCreate(IntegrationTest, unittest.TestCase):
def test_junctions_create(self):
bam1 = self.inputFiles("test_hcc1395.bam")[0]
output_file = self.tempFile("create.out")
print "BAM1 is ", bam1
for anchor in ["", "30"]:
expected_file = self.inputFiles("junctions-create/expected-a" +
anchor + ".out")[0]
if anchor != "":
anchor = "-a " + anchor
params = [ "junctions", "create", bam1, anchor, "-o", output_file]
rv, err = self.execute(params)
self.assertEqual(rv, 0)
#self.assertEqual('', err)
self.assertFilesEqual(expected_file, output_file)
if __name__ == "__main__":
main()
Move positional argument to the end.
This doesn't seem to work on a Mac when option comes after the positional
argument, not sure why this is, something to do with options parsing.
|
#!/usr/bin/env python
from integrationtest import IntegrationTest, main
import unittest
class TestCreate(IntegrationTest, unittest.TestCase):
def test_junctions_create(self):
bam1 = self.inputFiles("test_hcc1395.bam")[0]
output_file = self.tempFile("create.out")
print "BAM1 is ", bam1
for anchor in ["", "30"]:
expected_file = self.inputFiles("junctions-create/expected-a" +
anchor + ".out")[0]
if anchor != "":
anchor = "-a " + anchor
params = [ "junctions", "create", anchor, "-o", output_file, bam1 ]
rv, err = self.execute(params)
self.assertEqual(rv, 0)
#self.assertEqual('', err)
self.assertFilesEqual(expected_file, output_file)
if __name__ == "__main__":
main()
|
<commit_before>#!/usr/bin/env python
from integrationtest import IntegrationTest, main
import unittest
class TestCreate(IntegrationTest, unittest.TestCase):
def test_junctions_create(self):
bam1 = self.inputFiles("test_hcc1395.bam")[0]
output_file = self.tempFile("create.out")
print "BAM1 is ", bam1
for anchor in ["", "30"]:
expected_file = self.inputFiles("junctions-create/expected-a" +
anchor + ".out")[0]
if anchor != "":
anchor = "-a " + anchor
params = [ "junctions", "create", bam1, anchor, "-o", output_file]
rv, err = self.execute(params)
self.assertEqual(rv, 0)
#self.assertEqual('', err)
self.assertFilesEqual(expected_file, output_file)
if __name__ == "__main__":
main()
<commit_msg>Move positional argument to the end.
This doesn't seem to work on a Mac when option comes after the positional
argument, not sure why this is, something to do with options parsing.<commit_after>
|
#!/usr/bin/env python
from integrationtest import IntegrationTest, main
import unittest
class TestCreate(IntegrationTest, unittest.TestCase):
def test_junctions_create(self):
bam1 = self.inputFiles("test_hcc1395.bam")[0]
output_file = self.tempFile("create.out")
print "BAM1 is ", bam1
for anchor in ["", "30"]:
expected_file = self.inputFiles("junctions-create/expected-a" +
anchor + ".out")[0]
if anchor != "":
anchor = "-a " + anchor
params = [ "junctions", "create", anchor, "-o", output_file, bam1 ]
rv, err = self.execute(params)
self.assertEqual(rv, 0)
#self.assertEqual('', err)
self.assertFilesEqual(expected_file, output_file)
if __name__ == "__main__":
main()
|
#!/usr/bin/env python
from integrationtest import IntegrationTest, main
import unittest
class TestCreate(IntegrationTest, unittest.TestCase):
def test_junctions_create(self):
bam1 = self.inputFiles("test_hcc1395.bam")[0]
output_file = self.tempFile("create.out")
print "BAM1 is ", bam1
for anchor in ["", "30"]:
expected_file = self.inputFiles("junctions-create/expected-a" +
anchor + ".out")[0]
if anchor != "":
anchor = "-a " + anchor
params = [ "junctions", "create", bam1, anchor, "-o", output_file]
rv, err = self.execute(params)
self.assertEqual(rv, 0)
#self.assertEqual('', err)
self.assertFilesEqual(expected_file, output_file)
if __name__ == "__main__":
main()
Move positional argument to the end.
This doesn't seem to work on a Mac when option comes after the positional
argument, not sure why this is, something to do with options parsing.#!/usr/bin/env python
from integrationtest import IntegrationTest, main
import unittest
class TestCreate(IntegrationTest, unittest.TestCase):
def test_junctions_create(self):
bam1 = self.inputFiles("test_hcc1395.bam")[0]
output_file = self.tempFile("create.out")
print "BAM1 is ", bam1
for anchor in ["", "30"]:
expected_file = self.inputFiles("junctions-create/expected-a" +
anchor + ".out")[0]
if anchor != "":
anchor = "-a " + anchor
params = [ "junctions", "create", anchor, "-o", output_file, bam1 ]
rv, err = self.execute(params)
self.assertEqual(rv, 0)
#self.assertEqual('', err)
self.assertFilesEqual(expected_file, output_file)
if __name__ == "__main__":
main()
|
<commit_before>#!/usr/bin/env python
from integrationtest import IntegrationTest, main
import unittest
class TestCreate(IntegrationTest, unittest.TestCase):
def test_junctions_create(self):
bam1 = self.inputFiles("test_hcc1395.bam")[0]
output_file = self.tempFile("create.out")
print "BAM1 is ", bam1
for anchor in ["", "30"]:
expected_file = self.inputFiles("junctions-create/expected-a" +
anchor + ".out")[0]
if anchor != "":
anchor = "-a " + anchor
params = [ "junctions", "create", bam1, anchor, "-o", output_file]
rv, err = self.execute(params)
self.assertEqual(rv, 0)
#self.assertEqual('', err)
self.assertFilesEqual(expected_file, output_file)
if __name__ == "__main__":
main()
<commit_msg>Move positional argument to the end.
This doesn't seem to work on a Mac when option comes after the positional
argument, not sure why this is, something to do with options parsing.<commit_after>#!/usr/bin/env python
from integrationtest import IntegrationTest, main
import unittest
class TestCreate(IntegrationTest, unittest.TestCase):
def test_junctions_create(self):
bam1 = self.inputFiles("test_hcc1395.bam")[0]
output_file = self.tempFile("create.out")
print "BAM1 is ", bam1
for anchor in ["", "30"]:
expected_file = self.inputFiles("junctions-create/expected-a" +
anchor + ".out")[0]
if anchor != "":
anchor = "-a " + anchor
params = [ "junctions", "create", anchor, "-o", output_file, bam1 ]
rv, err = self.execute(params)
self.assertEqual(rv, 0)
#self.assertEqual('', err)
self.assertFilesEqual(expected_file, output_file)
if __name__ == "__main__":
main()
|
8d0b472f6e84ac1167f8d8cedfb063f74f7fc3b0
|
diamondash/widgets/text/text.py
|
diamondash/widgets/text/text.py
|
from pkg_resources import resource_string
from twisted.web.template import renderer, XMLString
from diamondash.widgets.widget.widget import Widget
class TextWidget(Widget):
"""A widget that simply displays static text."""
loader = XMLString(resource_string(__name__, 'template.xml'))
TYPE_NAME = 'text'
MIN_COLUMN_SPAN = 2
STYLESHEETS = ('text/style.css',)
def __init__(self, text, **kwargs):
super(TextWidget, self).__init__(**kwargs)
self.text = text
@renderer
def text_renderer(self, request, tag):
return tag(self.text)
|
from pkg_resources import resource_string
from twisted.web.template import renderer, XMLString
from diamondash.widgets.widget.widget import Widget, WidgetConfig
class TextWidgetConfig(WidgetConfig):
TYPE_NAME = 'text'
MIN_COLUMN_SPAN = 2
class TextWidget(Widget):
"""A widget that simply displays static text."""
loader = XMLString(resource_string(__name__, 'template.xml'))
STYLESHEETS = ('text/style.css',)
def __init__(self, text, **kwargs):
super(TextWidget, self).__init__(**kwargs)
self.text = text
@renderer
def text_renderer(self, request, tag):
return tag(self.text)
|
Refactor TextWidgetConfig out of TextWidget
|
Refactor TextWidgetConfig out of TextWidget
|
Python
|
bsd-3-clause
|
praekelt/diamondash,praekelt/diamondash,praekelt/diamondash
|
from pkg_resources import resource_string
from twisted.web.template import renderer, XMLString
from diamondash.widgets.widget.widget import Widget
class TextWidget(Widget):
"""A widget that simply displays static text."""
loader = XMLString(resource_string(__name__, 'template.xml'))
TYPE_NAME = 'text'
MIN_COLUMN_SPAN = 2
STYLESHEETS = ('text/style.css',)
def __init__(self, text, **kwargs):
super(TextWidget, self).__init__(**kwargs)
self.text = text
@renderer
def text_renderer(self, request, tag):
return tag(self.text)
Refactor TextWidgetConfig out of TextWidget
|
from pkg_resources import resource_string
from twisted.web.template import renderer, XMLString
from diamondash.widgets.widget.widget import Widget, WidgetConfig
class TextWidgetConfig(WidgetConfig):
TYPE_NAME = 'text'
MIN_COLUMN_SPAN = 2
class TextWidget(Widget):
"""A widget that simply displays static text."""
loader = XMLString(resource_string(__name__, 'template.xml'))
STYLESHEETS = ('text/style.css',)
def __init__(self, text, **kwargs):
super(TextWidget, self).__init__(**kwargs)
self.text = text
@renderer
def text_renderer(self, request, tag):
return tag(self.text)
|
<commit_before>from pkg_resources import resource_string
from twisted.web.template import renderer, XMLString
from diamondash.widgets.widget.widget import Widget
class TextWidget(Widget):
"""A widget that simply displays static text."""
loader = XMLString(resource_string(__name__, 'template.xml'))
TYPE_NAME = 'text'
MIN_COLUMN_SPAN = 2
STYLESHEETS = ('text/style.css',)
def __init__(self, text, **kwargs):
super(TextWidget, self).__init__(**kwargs)
self.text = text
@renderer
def text_renderer(self, request, tag):
return tag(self.text)
<commit_msg>Refactor TextWidgetConfig out of TextWidget<commit_after>
|
from pkg_resources import resource_string
from twisted.web.template import renderer, XMLString
from diamondash.widgets.widget.widget import Widget, WidgetConfig
class TextWidgetConfig(WidgetConfig):
TYPE_NAME = 'text'
MIN_COLUMN_SPAN = 2
class TextWidget(Widget):
"""A widget that simply displays static text."""
loader = XMLString(resource_string(__name__, 'template.xml'))
STYLESHEETS = ('text/style.css',)
def __init__(self, text, **kwargs):
super(TextWidget, self).__init__(**kwargs)
self.text = text
@renderer
def text_renderer(self, request, tag):
return tag(self.text)
|
from pkg_resources import resource_string
from twisted.web.template import renderer, XMLString
from diamondash.widgets.widget.widget import Widget
class TextWidget(Widget):
"""A widget that simply displays static text."""
loader = XMLString(resource_string(__name__, 'template.xml'))
TYPE_NAME = 'text'
MIN_COLUMN_SPAN = 2
STYLESHEETS = ('text/style.css',)
def __init__(self, text, **kwargs):
super(TextWidget, self).__init__(**kwargs)
self.text = text
@renderer
def text_renderer(self, request, tag):
return tag(self.text)
Refactor TextWidgetConfig out of TextWidgetfrom pkg_resources import resource_string
from twisted.web.template import renderer, XMLString
from diamondash.widgets.widget.widget import Widget, WidgetConfig
class TextWidgetConfig(WidgetConfig):
TYPE_NAME = 'text'
MIN_COLUMN_SPAN = 2
class TextWidget(Widget):
"""A widget that simply displays static text."""
loader = XMLString(resource_string(__name__, 'template.xml'))
STYLESHEETS = ('text/style.css',)
def __init__(self, text, **kwargs):
super(TextWidget, self).__init__(**kwargs)
self.text = text
@renderer
def text_renderer(self, request, tag):
return tag(self.text)
|
<commit_before>from pkg_resources import resource_string
from twisted.web.template import renderer, XMLString
from diamondash.widgets.widget.widget import Widget
class TextWidget(Widget):
"""A widget that simply displays static text."""
loader = XMLString(resource_string(__name__, 'template.xml'))
TYPE_NAME = 'text'
MIN_COLUMN_SPAN = 2
STYLESHEETS = ('text/style.css',)
def __init__(self, text, **kwargs):
super(TextWidget, self).__init__(**kwargs)
self.text = text
@renderer
def text_renderer(self, request, tag):
return tag(self.text)
<commit_msg>Refactor TextWidgetConfig out of TextWidget<commit_after>from pkg_resources import resource_string
from twisted.web.template import renderer, XMLString
from diamondash.widgets.widget.widget import Widget, WidgetConfig
class TextWidgetConfig(WidgetConfig):
TYPE_NAME = 'text'
MIN_COLUMN_SPAN = 2
class TextWidget(Widget):
"""A widget that simply displays static text."""
loader = XMLString(resource_string(__name__, 'template.xml'))
STYLESHEETS = ('text/style.css',)
def __init__(self, text, **kwargs):
super(TextWidget, self).__init__(**kwargs)
self.text = text
@renderer
def text_renderer(self, request, tag):
return tag(self.text)
|
564a567baed6b24ded3e9940f11c67398af81bef
|
disco_aws_automation/version.py
|
disco_aws_automation/version.py
|
"""Place of record for the package version"""
__version__ = "1.1.15"
__rpm_version__ = "WILL_BE_SET_BY_RPM_BUILD"
__git_hash__ = "WILL_BE_SET_BY_EGG_BUILD"
|
"""Place of record for the package version"""
__version__ = "1.1.16"
__rpm_version__ = "WILL_BE_SET_BY_RPM_BUILD"
__git_hash__ = "WILL_BE_SET_BY_EGG_BUILD"
|
Add volume disk usage to snapshot tags
|
Add volume disk usage to snapshot tags
|
Python
|
bsd-2-clause
|
amplifylitco/asiaq,amplifylitco/asiaq,amplifylitco/asiaq
|
"""Place of record for the package version"""
__version__ = "1.1.15"
__rpm_version__ = "WILL_BE_SET_BY_RPM_BUILD"
__git_hash__ = "WILL_BE_SET_BY_EGG_BUILD"
Add volume disk usage to snapshot tags
|
"""Place of record for the package version"""
__version__ = "1.1.16"
__rpm_version__ = "WILL_BE_SET_BY_RPM_BUILD"
__git_hash__ = "WILL_BE_SET_BY_EGG_BUILD"
|
<commit_before>"""Place of record for the package version"""
__version__ = "1.1.15"
__rpm_version__ = "WILL_BE_SET_BY_RPM_BUILD"
__git_hash__ = "WILL_BE_SET_BY_EGG_BUILD"
<commit_msg>Add volume disk usage to snapshot tags<commit_after>
|
"""Place of record for the package version"""
__version__ = "1.1.16"
__rpm_version__ = "WILL_BE_SET_BY_RPM_BUILD"
__git_hash__ = "WILL_BE_SET_BY_EGG_BUILD"
|
"""Place of record for the package version"""
__version__ = "1.1.15"
__rpm_version__ = "WILL_BE_SET_BY_RPM_BUILD"
__git_hash__ = "WILL_BE_SET_BY_EGG_BUILD"
Add volume disk usage to snapshot tags"""Place of record for the package version"""
__version__ = "1.1.16"
__rpm_version__ = "WILL_BE_SET_BY_RPM_BUILD"
__git_hash__ = "WILL_BE_SET_BY_EGG_BUILD"
|
<commit_before>"""Place of record for the package version"""
__version__ = "1.1.15"
__rpm_version__ = "WILL_BE_SET_BY_RPM_BUILD"
__git_hash__ = "WILL_BE_SET_BY_EGG_BUILD"
<commit_msg>Add volume disk usage to snapshot tags<commit_after>"""Place of record for the package version"""
__version__ = "1.1.16"
__rpm_version__ = "WILL_BE_SET_BY_RPM_BUILD"
__git_hash__ = "WILL_BE_SET_BY_EGG_BUILD"
|
9de3f433a1c323831becbbe0d799475da96a92ae
|
virtualfish/__main__.py
|
virtualfish/__main__.py
|
from __future__ import print_function
import os
import sys
import inspect
if __name__ == "__main__":
base_path = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
commands = ['. {}'.format(os.path.join(base_path, 'virtual.fish'))]
for plugin in sys.argv[1:]:
path = os.path.join(base_path, plugin + '.fish')
if os.path.exists(path):
commands.append('. {}'.format(path))
else:
print('virtualfish loader error: plugin {} does not exist!'.format(plugin), file=sys.stderr)
commands.append('emit virtualfish_did_setup_plugins')
print(';'.join(commands))
|
from __future__ import print_function
import os
import sys
import inspect
if __name__ == "__main__":
base_path = os.path.dirname(os.path.abspath(__file__))
commands = ['. {}'.format(os.path.join(base_path, 'virtual.fish'))]
for plugin in sys.argv[1:]:
path = os.path.join(base_path, plugin + '.fish')
if os.path.exists(path):
commands.append('. {}'.format(path))
else:
print('virtualfish loader error: plugin {} does not exist!'.format(plugin), file=sys.stderr)
commands.append('emit virtualfish_did_setup_plugins')
print(';'.join(commands))
|
Use __file__ to find find the module
|
Use __file__ to find find the module
`inspect.getfile(inspect.currentframe())` seems to return a relative path that
ends up with the virtualfish functions not being loaded.
|
Python
|
mit
|
scorphus/virtualfish,adambrenecki/virtualfish,adambrenecki/virtualfish,scorphus/virtualfish
|
from __future__ import print_function
import os
import sys
import inspect
if __name__ == "__main__":
base_path = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
commands = ['. {}'.format(os.path.join(base_path, 'virtual.fish'))]
for plugin in sys.argv[1:]:
path = os.path.join(base_path, plugin + '.fish')
if os.path.exists(path):
commands.append('. {}'.format(path))
else:
print('virtualfish loader error: plugin {} does not exist!'.format(plugin), file=sys.stderr)
commands.append('emit virtualfish_did_setup_plugins')
print(';'.join(commands))Use __file__ to find find the module
`inspect.getfile(inspect.currentframe())` seems to return a relative path that
ends up with the virtualfish functions not being loaded.
|
from __future__ import print_function
import os
import sys
import inspect
if __name__ == "__main__":
base_path = os.path.dirname(os.path.abspath(__file__))
commands = ['. {}'.format(os.path.join(base_path, 'virtual.fish'))]
for plugin in sys.argv[1:]:
path = os.path.join(base_path, plugin + '.fish')
if os.path.exists(path):
commands.append('. {}'.format(path))
else:
print('virtualfish loader error: plugin {} does not exist!'.format(plugin), file=sys.stderr)
commands.append('emit virtualfish_did_setup_plugins')
print(';'.join(commands))
|
<commit_before>from __future__ import print_function
import os
import sys
import inspect
if __name__ == "__main__":
base_path = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
commands = ['. {}'.format(os.path.join(base_path, 'virtual.fish'))]
for plugin in sys.argv[1:]:
path = os.path.join(base_path, plugin + '.fish')
if os.path.exists(path):
commands.append('. {}'.format(path))
else:
print('virtualfish loader error: plugin {} does not exist!'.format(plugin), file=sys.stderr)
commands.append('emit virtualfish_did_setup_plugins')
print(';'.join(commands))<commit_msg>Use __file__ to find find the module
`inspect.getfile(inspect.currentframe())` seems to return a relative path that
ends up with the virtualfish functions not being loaded.<commit_after>
|
from __future__ import print_function
import os
import sys
import inspect
if __name__ == "__main__":
base_path = os.path.dirname(os.path.abspath(__file__))
commands = ['. {}'.format(os.path.join(base_path, 'virtual.fish'))]
for plugin in sys.argv[1:]:
path = os.path.join(base_path, plugin + '.fish')
if os.path.exists(path):
commands.append('. {}'.format(path))
else:
print('virtualfish loader error: plugin {} does not exist!'.format(plugin), file=sys.stderr)
commands.append('emit virtualfish_did_setup_plugins')
print(';'.join(commands))
|
from __future__ import print_function
import os
import sys
import inspect
if __name__ == "__main__":
base_path = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
commands = ['. {}'.format(os.path.join(base_path, 'virtual.fish'))]
for plugin in sys.argv[1:]:
path = os.path.join(base_path, plugin + '.fish')
if os.path.exists(path):
commands.append('. {}'.format(path))
else:
print('virtualfish loader error: plugin {} does not exist!'.format(plugin), file=sys.stderr)
commands.append('emit virtualfish_did_setup_plugins')
print(';'.join(commands))Use __file__ to find find the module
`inspect.getfile(inspect.currentframe())` seems to return a relative path that
ends up with the virtualfish functions not being loaded.from __future__ import print_function
import os
import sys
import inspect
if __name__ == "__main__":
base_path = os.path.dirname(os.path.abspath(__file__))
commands = ['. {}'.format(os.path.join(base_path, 'virtual.fish'))]
for plugin in sys.argv[1:]:
path = os.path.join(base_path, plugin + '.fish')
if os.path.exists(path):
commands.append('. {}'.format(path))
else:
print('virtualfish loader error: plugin {} does not exist!'.format(plugin), file=sys.stderr)
commands.append('emit virtualfish_did_setup_plugins')
print(';'.join(commands))
|
<commit_before>from __future__ import print_function
import os
import sys
import inspect
if __name__ == "__main__":
base_path = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
commands = ['. {}'.format(os.path.join(base_path, 'virtual.fish'))]
for plugin in sys.argv[1:]:
path = os.path.join(base_path, plugin + '.fish')
if os.path.exists(path):
commands.append('. {}'.format(path))
else:
print('virtualfish loader error: plugin {} does not exist!'.format(plugin), file=sys.stderr)
commands.append('emit virtualfish_did_setup_plugins')
print(';'.join(commands))<commit_msg>Use __file__ to find find the module
`inspect.getfile(inspect.currentframe())` seems to return a relative path that
ends up with the virtualfish functions not being loaded.<commit_after>from __future__ import print_function
import os
import sys
import inspect
if __name__ == "__main__":
base_path = os.path.dirname(os.path.abspath(__file__))
commands = ['. {}'.format(os.path.join(base_path, 'virtual.fish'))]
for plugin in sys.argv[1:]:
path = os.path.join(base_path, plugin + '.fish')
if os.path.exists(path):
commands.append('. {}'.format(path))
else:
print('virtualfish loader error: plugin {} does not exist!'.format(plugin), file=sys.stderr)
commands.append('emit virtualfish_did_setup_plugins')
print(';'.join(commands))
|
1e88a3de5ed96847baf17eb1beb2599f5c79fb6b
|
djangobb_forum/search_indexes.py
|
djangobb_forum/search_indexes.py
|
from haystack.indexes import *
from haystack import site
from celery_haystack.indexes import CelerySearchIndex
import djangobb_forum.models as models
class PostIndex(CelerySearchIndex):
text = CharField(document=True, use_template=True)
author = CharField(model_attr='user')
created = DateTimeField(model_attr='created')
topic = CharField(model_attr='topic')
category = CharField(model_attr='topic__forum__category__name')
forum = IntegerField(model_attr='topic__forum__pk')
site.register(models.Post, PostIndex)
|
from haystack.indexes import *
from haystack import site
from gargoyle import gargoyle
try:
if gargoyle.is_active('solr_indexing_enabled'):
from celery_haystack.indexes import CelerySearchIndex as SearchIndex
except:
# Allow migrations to run
from celery_haystack.indexes import CelerySearchIndex as SearchIndex
import djangobb_forum.models as models
class PostIndex(SearchIndex):
text = CharField(document=True, use_template=True)
author = CharField(model_attr='user')
created = DateTimeField(model_attr='created')
topic = CharField(model_attr='topic')
category = CharField(model_attr='topic__forum__category__name')
forum = IntegerField(model_attr='topic__forum__pk')
site.register(models.Post, PostIndex)
|
Disable indexing through celery when it's disabled.
|
Disable indexing through celery when it's disabled.
|
Python
|
bsd-3-clause
|
tjvr/s2forums,LLK/s2forums,LLK/s2forums,LLK/s2forums,tjvr/s2forums,tjvr/s2forums
|
from haystack.indexes import *
from haystack import site
from celery_haystack.indexes import CelerySearchIndex
import djangobb_forum.models as models
class PostIndex(CelerySearchIndex):
text = CharField(document=True, use_template=True)
author = CharField(model_attr='user')
created = DateTimeField(model_attr='created')
topic = CharField(model_attr='topic')
category = CharField(model_attr='topic__forum__category__name')
forum = IntegerField(model_attr='topic__forum__pk')
site.register(models.Post, PostIndex)
Disable indexing through celery when it's disabled.
|
from haystack.indexes import *
from haystack import site
from gargoyle import gargoyle
try:
if gargoyle.is_active('solr_indexing_enabled'):
from celery_haystack.indexes import CelerySearchIndex as SearchIndex
except:
# Allow migrations to run
from celery_haystack.indexes import CelerySearchIndex as SearchIndex
import djangobb_forum.models as models
class PostIndex(SearchIndex):
text = CharField(document=True, use_template=True)
author = CharField(model_attr='user')
created = DateTimeField(model_attr='created')
topic = CharField(model_attr='topic')
category = CharField(model_attr='topic__forum__category__name')
forum = IntegerField(model_attr='topic__forum__pk')
site.register(models.Post, PostIndex)
|
<commit_before>from haystack.indexes import *
from haystack import site
from celery_haystack.indexes import CelerySearchIndex
import djangobb_forum.models as models
class PostIndex(CelerySearchIndex):
text = CharField(document=True, use_template=True)
author = CharField(model_attr='user')
created = DateTimeField(model_attr='created')
topic = CharField(model_attr='topic')
category = CharField(model_attr='topic__forum__category__name')
forum = IntegerField(model_attr='topic__forum__pk')
site.register(models.Post, PostIndex)
<commit_msg>Disable indexing through celery when it's disabled.<commit_after>
|
from haystack.indexes import *
from haystack import site
from gargoyle import gargoyle
try:
if gargoyle.is_active('solr_indexing_enabled'):
from celery_haystack.indexes import CelerySearchIndex as SearchIndex
except:
# Allow migrations to run
from celery_haystack.indexes import CelerySearchIndex as SearchIndex
import djangobb_forum.models as models
class PostIndex(SearchIndex):
text = CharField(document=True, use_template=True)
author = CharField(model_attr='user')
created = DateTimeField(model_attr='created')
topic = CharField(model_attr='topic')
category = CharField(model_attr='topic__forum__category__name')
forum = IntegerField(model_attr='topic__forum__pk')
site.register(models.Post, PostIndex)
|
from haystack.indexes import *
from haystack import site
from celery_haystack.indexes import CelerySearchIndex
import djangobb_forum.models as models
class PostIndex(CelerySearchIndex):
text = CharField(document=True, use_template=True)
author = CharField(model_attr='user')
created = DateTimeField(model_attr='created')
topic = CharField(model_attr='topic')
category = CharField(model_attr='topic__forum__category__name')
forum = IntegerField(model_attr='topic__forum__pk')
site.register(models.Post, PostIndex)
Disable indexing through celery when it's disabled.from haystack.indexes import *
from haystack import site
from gargoyle import gargoyle
try:
if gargoyle.is_active('solr_indexing_enabled'):
from celery_haystack.indexes import CelerySearchIndex as SearchIndex
except:
# Allow migrations to run
from celery_haystack.indexes import CelerySearchIndex as SearchIndex
import djangobb_forum.models as models
class PostIndex(SearchIndex):
text = CharField(document=True, use_template=True)
author = CharField(model_attr='user')
created = DateTimeField(model_attr='created')
topic = CharField(model_attr='topic')
category = CharField(model_attr='topic__forum__category__name')
forum = IntegerField(model_attr='topic__forum__pk')
site.register(models.Post, PostIndex)
|
<commit_before>from haystack.indexes import *
from haystack import site
from celery_haystack.indexes import CelerySearchIndex
import djangobb_forum.models as models
class PostIndex(CelerySearchIndex):
text = CharField(document=True, use_template=True)
author = CharField(model_attr='user')
created = DateTimeField(model_attr='created')
topic = CharField(model_attr='topic')
category = CharField(model_attr='topic__forum__category__name')
forum = IntegerField(model_attr='topic__forum__pk')
site.register(models.Post, PostIndex)
<commit_msg>Disable indexing through celery when it's disabled.<commit_after>from haystack.indexes import *
from haystack import site
from gargoyle import gargoyle
try:
if gargoyle.is_active('solr_indexing_enabled'):
from celery_haystack.indexes import CelerySearchIndex as SearchIndex
except:
# Allow migrations to run
from celery_haystack.indexes import CelerySearchIndex as SearchIndex
import djangobb_forum.models as models
class PostIndex(SearchIndex):
text = CharField(document=True, use_template=True)
author = CharField(model_attr='user')
created = DateTimeField(model_attr='created')
topic = CharField(model_attr='topic')
category = CharField(model_attr='topic__forum__category__name')
forum = IntegerField(model_attr='topic__forum__pk')
site.register(models.Post, PostIndex)
|
2ca115789b96287ba0c8a32c514d1fe2beedb750
|
girc/capabilities.py
|
girc/capabilities.py
|
#!/usr/bin/env python3
# Written by Daniel Oaks <daniel@danieloaks.net>
# Released under the ISC license
from .utils import CaseInsensitiveDict, CaseInsensitiveList
class Capabilities:
"""Ingests sets of client capabilities and provides access to them."""
def __init__(self, wanted=[]):
self.available = CaseInsensitiveDict()
self.wanted = CaseInsensitiveList(wanted)
self.enabled = CaseInsensitiveList()
def ingest(self, cmd, parameters):
cmd = cmd.casefold()
if cmd == 'ls':
caps = parameters[0].split(' ')
for cap in caps:
# strip first initial =/~
if cap.startswith('=') or cap.startswith('~'):
cap = cap[1:]
if '=' in cap:
cap, value = cap.rsplit('=', 1)
else:
value = True
self.available[cap] = value
@property
def to_enable(self):
l = []
for cap in self.wanted:
if cap in self.available and cap not in self.enabled:
l.append(cap)
return l
def get(self, key, default=None):
return self._dict.get(key, default)
|
#!/usr/bin/env python3
# Written by Daniel Oaks <daniel@danieloaks.net>
# Released under the ISC license
from .utils import CaseInsensitiveDict, CaseInsensitiveList
class Capabilities:
"""Ingests sets of client capabilities and provides access to them."""
def __init__(self, wanted=[]):
self.available = CaseInsensitiveDict()
self.wanted = CaseInsensitiveList(wanted)
self.enabled = CaseInsensitiveList()
def ingest(self, cmd, parameters):
cmd = cmd.casefold()
if cmd == 'ls':
caps = parameters[0].split(' ')
for cap in caps:
# strip first initial =/~
if cap.startswith('=') or cap.startswith('~'):
cap = cap[1:]
if '=' in cap:
cap, value = cap.rsplit('=', 1)
if value = '':
value = None
else:
value = True
self.available[cap] = value
@property
def to_enable(self):
l = []
for cap in self.wanted:
if cap in self.available and cap not in self.enabled:
l.append(cap)
return l
def get(self, key, default=None):
return self._dict.get(key, default)
|
Mark CAPA= as None instead of True
|
[caps] Mark CAPA= as None instead of True
|
Python
|
isc
|
DanielOaks/girc,DanielOaks/girc
|
#!/usr/bin/env python3
# Written by Daniel Oaks <daniel@danieloaks.net>
# Released under the ISC license
from .utils import CaseInsensitiveDict, CaseInsensitiveList
class Capabilities:
"""Ingests sets of client capabilities and provides access to them."""
def __init__(self, wanted=[]):
self.available = CaseInsensitiveDict()
self.wanted = CaseInsensitiveList(wanted)
self.enabled = CaseInsensitiveList()
def ingest(self, cmd, parameters):
cmd = cmd.casefold()
if cmd == 'ls':
caps = parameters[0].split(' ')
for cap in caps:
# strip first initial =/~
if cap.startswith('=') or cap.startswith('~'):
cap = cap[1:]
if '=' in cap:
cap, value = cap.rsplit('=', 1)
else:
value = True
self.available[cap] = value
@property
def to_enable(self):
l = []
for cap in self.wanted:
if cap in self.available and cap not in self.enabled:
l.append(cap)
return l
def get(self, key, default=None):
return self._dict.get(key, default)
[caps] Mark CAPA= as None instead of True
|
#!/usr/bin/env python3
# Written by Daniel Oaks <daniel@danieloaks.net>
# Released under the ISC license
from .utils import CaseInsensitiveDict, CaseInsensitiveList
class Capabilities:
"""Ingests sets of client capabilities and provides access to them."""
def __init__(self, wanted=[]):
self.available = CaseInsensitiveDict()
self.wanted = CaseInsensitiveList(wanted)
self.enabled = CaseInsensitiveList()
def ingest(self, cmd, parameters):
cmd = cmd.casefold()
if cmd == 'ls':
caps = parameters[0].split(' ')
for cap in caps:
# strip first initial =/~
if cap.startswith('=') or cap.startswith('~'):
cap = cap[1:]
if '=' in cap:
cap, value = cap.rsplit('=', 1)
if value = '':
value = None
else:
value = True
self.available[cap] = value
@property
def to_enable(self):
l = []
for cap in self.wanted:
if cap in self.available and cap not in self.enabled:
l.append(cap)
return l
def get(self, key, default=None):
return self._dict.get(key, default)
|
<commit_before>#!/usr/bin/env python3
# Written by Daniel Oaks <daniel@danieloaks.net>
# Released under the ISC license
from .utils import CaseInsensitiveDict, CaseInsensitiveList
class Capabilities:
"""Ingests sets of client capabilities and provides access to them."""
def __init__(self, wanted=[]):
self.available = CaseInsensitiveDict()
self.wanted = CaseInsensitiveList(wanted)
self.enabled = CaseInsensitiveList()
def ingest(self, cmd, parameters):
cmd = cmd.casefold()
if cmd == 'ls':
caps = parameters[0].split(' ')
for cap in caps:
# strip first initial =/~
if cap.startswith('=') or cap.startswith('~'):
cap = cap[1:]
if '=' in cap:
cap, value = cap.rsplit('=', 1)
else:
value = True
self.available[cap] = value
@property
def to_enable(self):
l = []
for cap in self.wanted:
if cap in self.available and cap not in self.enabled:
l.append(cap)
return l
def get(self, key, default=None):
return self._dict.get(key, default)
<commit_msg>[caps] Mark CAPA= as None instead of True<commit_after>
|
#!/usr/bin/env python3
# Written by Daniel Oaks <daniel@danieloaks.net>
# Released under the ISC license
from .utils import CaseInsensitiveDict, CaseInsensitiveList
class Capabilities:
"""Ingests sets of client capabilities and provides access to them."""
def __init__(self, wanted=[]):
self.available = CaseInsensitiveDict()
self.wanted = CaseInsensitiveList(wanted)
self.enabled = CaseInsensitiveList()
def ingest(self, cmd, parameters):
cmd = cmd.casefold()
if cmd == 'ls':
caps = parameters[0].split(' ')
for cap in caps:
# strip first initial =/~
if cap.startswith('=') or cap.startswith('~'):
cap = cap[1:]
if '=' in cap:
cap, value = cap.rsplit('=', 1)
if value = '':
value = None
else:
value = True
self.available[cap] = value
@property
def to_enable(self):
l = []
for cap in self.wanted:
if cap in self.available and cap not in self.enabled:
l.append(cap)
return l
def get(self, key, default=None):
return self._dict.get(key, default)
|
#!/usr/bin/env python3
# Written by Daniel Oaks <daniel@danieloaks.net>
# Released under the ISC license
from .utils import CaseInsensitiveDict, CaseInsensitiveList
class Capabilities:
"""Ingests sets of client capabilities and provides access to them."""
def __init__(self, wanted=[]):
self.available = CaseInsensitiveDict()
self.wanted = CaseInsensitiveList(wanted)
self.enabled = CaseInsensitiveList()
def ingest(self, cmd, parameters):
cmd = cmd.casefold()
if cmd == 'ls':
caps = parameters[0].split(' ')
for cap in caps:
# strip first initial =/~
if cap.startswith('=') or cap.startswith('~'):
cap = cap[1:]
if '=' in cap:
cap, value = cap.rsplit('=', 1)
else:
value = True
self.available[cap] = value
@property
def to_enable(self):
l = []
for cap in self.wanted:
if cap in self.available and cap not in self.enabled:
l.append(cap)
return l
def get(self, key, default=None):
return self._dict.get(key, default)
[caps] Mark CAPA= as None instead of True#!/usr/bin/env python3
# Written by Daniel Oaks <daniel@danieloaks.net>
# Released under the ISC license
from .utils import CaseInsensitiveDict, CaseInsensitiveList
class Capabilities:
"""Ingests sets of client capabilities and provides access to them."""
def __init__(self, wanted=[]):
self.available = CaseInsensitiveDict()
self.wanted = CaseInsensitiveList(wanted)
self.enabled = CaseInsensitiveList()
def ingest(self, cmd, parameters):
cmd = cmd.casefold()
if cmd == 'ls':
caps = parameters[0].split(' ')
for cap in caps:
# strip first initial =/~
if cap.startswith('=') or cap.startswith('~'):
cap = cap[1:]
if '=' in cap:
cap, value = cap.rsplit('=', 1)
if value = '':
value = None
else:
value = True
self.available[cap] = value
@property
def to_enable(self):
l = []
for cap in self.wanted:
if cap in self.available and cap not in self.enabled:
l.append(cap)
return l
def get(self, key, default=None):
return self._dict.get(key, default)
|
<commit_before>#!/usr/bin/env python3
# Written by Daniel Oaks <daniel@danieloaks.net>
# Released under the ISC license
from .utils import CaseInsensitiveDict, CaseInsensitiveList
class Capabilities:
"""Ingests sets of client capabilities and provides access to them."""
def __init__(self, wanted=[]):
self.available = CaseInsensitiveDict()
self.wanted = CaseInsensitiveList(wanted)
self.enabled = CaseInsensitiveList()
def ingest(self, cmd, parameters):
cmd = cmd.casefold()
if cmd == 'ls':
caps = parameters[0].split(' ')
for cap in caps:
# strip first initial =/~
if cap.startswith('=') or cap.startswith('~'):
cap = cap[1:]
if '=' in cap:
cap, value = cap.rsplit('=', 1)
else:
value = True
self.available[cap] = value
@property
def to_enable(self):
l = []
for cap in self.wanted:
if cap in self.available and cap not in self.enabled:
l.append(cap)
return l
def get(self, key, default=None):
return self._dict.get(key, default)
<commit_msg>[caps] Mark CAPA= as None instead of True<commit_after>#!/usr/bin/env python3
# Written by Daniel Oaks <daniel@danieloaks.net>
# Released under the ISC license
from .utils import CaseInsensitiveDict, CaseInsensitiveList
class Capabilities:
"""Ingests sets of client capabilities and provides access to them."""
def __init__(self, wanted=[]):
self.available = CaseInsensitiveDict()
self.wanted = CaseInsensitiveList(wanted)
self.enabled = CaseInsensitiveList()
def ingest(self, cmd, parameters):
cmd = cmd.casefold()
if cmd == 'ls':
caps = parameters[0].split(' ')
for cap in caps:
# strip first initial =/~
if cap.startswith('=') or cap.startswith('~'):
cap = cap[1:]
if '=' in cap:
cap, value = cap.rsplit('=', 1)
if value = '':
value = None
else:
value = True
self.available[cap] = value
@property
def to_enable(self):
l = []
for cap in self.wanted:
if cap in self.available and cap not in self.enabled:
l.append(cap)
return l
def get(self, key, default=None):
return self._dict.get(key, default)
|
88aafb9cc5b9cf8f2bc75f7960d33782368603f7
|
eduid_dashboard_amp/__init__.py
|
eduid_dashboard_amp/__init__.py
|
from eduid_am.exceptions import UserDoesNotExist
WHITELIST_SET_ATTRS = (
'givenName',
'sn',
'displayName',
'photo',
'preferredLanguage',
'mail',
'date', # last modification
# TODO: Arrays must use put or pop, not set, but need more deep refacts
'norEduPersonNIN',
'eduPersonEntitlement',
'mobile',
'mailAliases',
'postalAddress',
'passwords',
)
def attribute_fetcher(db, user_id):
attributes = {}
user = db.profiles.find_one({'_id': user_id})
if user is None:
raise UserDoesNotExist("No user matching _id='%s'" % user_id)
# white list of valid attributes for security reasons
attributes_set = {}
for attr in WHITELIST_SET_ATTRS:
value = user.get(attr, None)
if value is not None:
attributes_set[attr] = value
attributes['$set'] = attributes_set
return attributes
|
from eduid_am.exceptions import UserDoesNotExist
WHITELIST_SET_ATTRS = (
'givenName',
'sn',
'displayName',
'photo',
'preferredLanguage',
'mail',
'date', # last modification
# TODO: Arrays must use put or pop, not set, but need more deep refacts
'norEduPersonNIN',
'eduPersonEntitlement',
'mobile',
'mailAliases',
'postalAddress',
'passwords',
)
def attribute_fetcher(db, user_id):
attributes = {}
user = db.profiles.find_one({'_id': user_id})
if user is None:
raise UserDoesNotExist("No user matching _id='%s'" % user_id)
# white list of valid attributes for security reasons
attributes_set = {}
attributes_unset = {}
for attr in WHITELIST_SET_ATTRS:
value = user.get(attr, None)
if value:
attributes_set[attr] = value
else:
attributes_unset[attr] = value
attributes['$set'] = attributes_set
attributes['$unset'] = attributes_unset
return attributes
|
Remove attributes instead of setting a false value like [].
|
Remove attributes instead of setting a false value like [].
|
Python
|
bsd-3-clause
|
SUNET/eduid-dashboard-amp
|
from eduid_am.exceptions import UserDoesNotExist
WHITELIST_SET_ATTRS = (
'givenName',
'sn',
'displayName',
'photo',
'preferredLanguage',
'mail',
'date', # last modification
# TODO: Arrays must use put or pop, not set, but need more deep refacts
'norEduPersonNIN',
'eduPersonEntitlement',
'mobile',
'mailAliases',
'postalAddress',
'passwords',
)
def attribute_fetcher(db, user_id):
attributes = {}
user = db.profiles.find_one({'_id': user_id})
if user is None:
raise UserDoesNotExist("No user matching _id='%s'" % user_id)
# white list of valid attributes for security reasons
attributes_set = {}
for attr in WHITELIST_SET_ATTRS:
value = user.get(attr, None)
if value is not None:
attributes_set[attr] = value
attributes['$set'] = attributes_set
return attributes
Remove attributes instead of setting a false value like [].
|
from eduid_am.exceptions import UserDoesNotExist
WHITELIST_SET_ATTRS = (
'givenName',
'sn',
'displayName',
'photo',
'preferredLanguage',
'mail',
'date', # last modification
# TODO: Arrays must use put or pop, not set, but need more deep refacts
'norEduPersonNIN',
'eduPersonEntitlement',
'mobile',
'mailAliases',
'postalAddress',
'passwords',
)
def attribute_fetcher(db, user_id):
attributes = {}
user = db.profiles.find_one({'_id': user_id})
if user is None:
raise UserDoesNotExist("No user matching _id='%s'" % user_id)
# white list of valid attributes for security reasons
attributes_set = {}
attributes_unset = {}
for attr in WHITELIST_SET_ATTRS:
value = user.get(attr, None)
if value:
attributes_set[attr] = value
else:
attributes_unset[attr] = value
attributes['$set'] = attributes_set
attributes['$unset'] = attributes_unset
return attributes
|
<commit_before>from eduid_am.exceptions import UserDoesNotExist
WHITELIST_SET_ATTRS = (
'givenName',
'sn',
'displayName',
'photo',
'preferredLanguage',
'mail',
'date', # last modification
# TODO: Arrays must use put or pop, not set, but need more deep refacts
'norEduPersonNIN',
'eduPersonEntitlement',
'mobile',
'mailAliases',
'postalAddress',
'passwords',
)
def attribute_fetcher(db, user_id):
attributes = {}
user = db.profiles.find_one({'_id': user_id})
if user is None:
raise UserDoesNotExist("No user matching _id='%s'" % user_id)
# white list of valid attributes for security reasons
attributes_set = {}
for attr in WHITELIST_SET_ATTRS:
value = user.get(attr, None)
if value is not None:
attributes_set[attr] = value
attributes['$set'] = attributes_set
return attributes
<commit_msg>Remove attributes instead of setting a false value like [].<commit_after>
|
from eduid_am.exceptions import UserDoesNotExist
WHITELIST_SET_ATTRS = (
'givenName',
'sn',
'displayName',
'photo',
'preferredLanguage',
'mail',
'date', # last modification
# TODO: Arrays must use put or pop, not set, but need more deep refacts
'norEduPersonNIN',
'eduPersonEntitlement',
'mobile',
'mailAliases',
'postalAddress',
'passwords',
)
def attribute_fetcher(db, user_id):
attributes = {}
user = db.profiles.find_one({'_id': user_id})
if user is None:
raise UserDoesNotExist("No user matching _id='%s'" % user_id)
# white list of valid attributes for security reasons
attributes_set = {}
attributes_unset = {}
for attr in WHITELIST_SET_ATTRS:
value = user.get(attr, None)
if value:
attributes_set[attr] = value
else:
attributes_unset[attr] = value
attributes['$set'] = attributes_set
attributes['$unset'] = attributes_unset
return attributes
|
from eduid_am.exceptions import UserDoesNotExist
WHITELIST_SET_ATTRS = (
'givenName',
'sn',
'displayName',
'photo',
'preferredLanguage',
'mail',
'date', # last modification
# TODO: Arrays must use put or pop, not set, but need more deep refacts
'norEduPersonNIN',
'eduPersonEntitlement',
'mobile',
'mailAliases',
'postalAddress',
'passwords',
)
def attribute_fetcher(db, user_id):
attributes = {}
user = db.profiles.find_one({'_id': user_id})
if user is None:
raise UserDoesNotExist("No user matching _id='%s'" % user_id)
# white list of valid attributes for security reasons
attributes_set = {}
for attr in WHITELIST_SET_ATTRS:
value = user.get(attr, None)
if value is not None:
attributes_set[attr] = value
attributes['$set'] = attributes_set
return attributes
Remove attributes instead of setting a false value like [].from eduid_am.exceptions import UserDoesNotExist
WHITELIST_SET_ATTRS = (
'givenName',
'sn',
'displayName',
'photo',
'preferredLanguage',
'mail',
'date', # last modification
# TODO: Arrays must use put or pop, not set, but need more deep refacts
'norEduPersonNIN',
'eduPersonEntitlement',
'mobile',
'mailAliases',
'postalAddress',
'passwords',
)
def attribute_fetcher(db, user_id):
attributes = {}
user = db.profiles.find_one({'_id': user_id})
if user is None:
raise UserDoesNotExist("No user matching _id='%s'" % user_id)
# white list of valid attributes for security reasons
attributes_set = {}
attributes_unset = {}
for attr in WHITELIST_SET_ATTRS:
value = user.get(attr, None)
if value:
attributes_set[attr] = value
else:
attributes_unset[attr] = value
attributes['$set'] = attributes_set
attributes['$unset'] = attributes_unset
return attributes
|
<commit_before>from eduid_am.exceptions import UserDoesNotExist
WHITELIST_SET_ATTRS = (
'givenName',
'sn',
'displayName',
'photo',
'preferredLanguage',
'mail',
'date', # last modification
# TODO: Arrays must use put or pop, not set, but need more deep refacts
'norEduPersonNIN',
'eduPersonEntitlement',
'mobile',
'mailAliases',
'postalAddress',
'passwords',
)
def attribute_fetcher(db, user_id):
attributes = {}
user = db.profiles.find_one({'_id': user_id})
if user is None:
raise UserDoesNotExist("No user matching _id='%s'" % user_id)
# white list of valid attributes for security reasons
attributes_set = {}
for attr in WHITELIST_SET_ATTRS:
value = user.get(attr, None)
if value is not None:
attributes_set[attr] = value
attributes['$set'] = attributes_set
return attributes
<commit_msg>Remove attributes instead of setting a false value like [].<commit_after>from eduid_am.exceptions import UserDoesNotExist
WHITELIST_SET_ATTRS = (
'givenName',
'sn',
'displayName',
'photo',
'preferredLanguage',
'mail',
'date', # last modification
# TODO: Arrays must use put or pop, not set, but need more deep refacts
'norEduPersonNIN',
'eduPersonEntitlement',
'mobile',
'mailAliases',
'postalAddress',
'passwords',
)
def attribute_fetcher(db, user_id):
attributes = {}
user = db.profiles.find_one({'_id': user_id})
if user is None:
raise UserDoesNotExist("No user matching _id='%s'" % user_id)
# white list of valid attributes for security reasons
attributes_set = {}
attributes_unset = {}
for attr in WHITELIST_SET_ATTRS:
value = user.get(attr, None)
if value:
attributes_set[attr] = value
else:
attributes_unset[attr] = value
attributes['$set'] = attributes_set
attributes['$unset'] = attributes_unset
return attributes
|
cc143597dd7673fb13d8257c4dd7bdafa31c2dd4
|
examples/distributed_workers.py
|
examples/distributed_workers.py
|
import sc2
from sc2 import run_game, maps, Race, Difficulty
from sc2.player import Bot, Computer
from sc2.constants import *
class TerranBot(sc2.BotAI):
async def on_step(self, iteration):
await self.distribute_workers()
await self.build_supply()
await self.build_workers()
await self.expand()
async def build_workers(self):
for cc in self.units(UnitTypeId.COMMANDCENTER).ready.noqueue:
if self.can_afford(UnitTypeId.SCV):
await self.do(cc.train(UnitTypeId.SCV))
async def expand(self):
if self.units(UnitTypeId.COMMANDCENTER).amount < 3 and self.can_afford(UnitTypeId.COMMANDCENTER):
await self.expand_now()
async def build_supply(self):
cc = self.units(UnitTypeId.COMMANDCENTER).ready.first
if self.supply_left < 4 and not self.already_pending(UnitTypeId.SUPPLYDEPOT):
if self.can_afford(UnitTypeId.SUPPLYDEPOT):
await self.build(UnitTypeId.SUPPLYDEPOT, near=cc.position.towards(self.game_info.map_center, 5))
run_game(maps.get("Abyssal Reef LE"), [
Bot(Race.Terran, TerranBot()),
Computer(Race.Protoss, Difficulty.Medium)
], realtime=False)
|
import sc2
from sc2 import run_game, maps, Race, Difficulty
from sc2.player import Bot, Computer
from sc2.constants import *
class TerranBot(sc2.BotAI):
async def on_step(self, iteration):
await self.distribute_workers()
await self.build_supply()
await self.build_workers()
await self.expand()
async def build_workers(self):
for cc in self.units(UnitTypeId.COMMANDCENTER).ready.noqueue:
if self.can_afford(UnitTypeId.SCV):
await self.do(cc.train(UnitTypeId.SCV))
async def expand(self):
if self.units(UnitTypeId.COMMANDCENTER).amount < 3 and self.can_afford(UnitTypeId.COMMANDCENTER):
await self.expand_now()
async def build_supply(self):
ccs = self.units(UnitTypeId.COMMANDCENTER).ready
if ccs.exists:
cc = ccs.first
if self.supply_left < 4 and not self.already_pending(UnitTypeId.SUPPLYDEPOT):
if self.can_afford(UnitTypeId.SUPPLYDEPOT):
await self.build(UnitTypeId.SUPPLYDEPOT, near=cc.position.towards(self.game_info.map_center, 5))
run_game(maps.get("Abyssal Reef LE"), [
Bot(Race.Terran, TerranBot()),
Computer(Race.Protoss, Difficulty.Medium)
], realtime=False)
|
Fix command center selection in example
|
Fix command center selection in example
|
Python
|
mit
|
Dentosal/python-sc2
|
import sc2
from sc2 import run_game, maps, Race, Difficulty
from sc2.player import Bot, Computer
from sc2.constants import *
class TerranBot(sc2.BotAI):
async def on_step(self, iteration):
await self.distribute_workers()
await self.build_supply()
await self.build_workers()
await self.expand()
async def build_workers(self):
for cc in self.units(UnitTypeId.COMMANDCENTER).ready.noqueue:
if self.can_afford(UnitTypeId.SCV):
await self.do(cc.train(UnitTypeId.SCV))
async def expand(self):
if self.units(UnitTypeId.COMMANDCENTER).amount < 3 and self.can_afford(UnitTypeId.COMMANDCENTER):
await self.expand_now()
async def build_supply(self):
cc = self.units(UnitTypeId.COMMANDCENTER).ready.first
if self.supply_left < 4 and not self.already_pending(UnitTypeId.SUPPLYDEPOT):
if self.can_afford(UnitTypeId.SUPPLYDEPOT):
await self.build(UnitTypeId.SUPPLYDEPOT, near=cc.position.towards(self.game_info.map_center, 5))
run_game(maps.get("Abyssal Reef LE"), [
Bot(Race.Terran, TerranBot()),
Computer(Race.Protoss, Difficulty.Medium)
], realtime=False)
Fix command center selection in example
|
import sc2
from sc2 import run_game, maps, Race, Difficulty
from sc2.player import Bot, Computer
from sc2.constants import *
class TerranBot(sc2.BotAI):
async def on_step(self, iteration):
await self.distribute_workers()
await self.build_supply()
await self.build_workers()
await self.expand()
async def build_workers(self):
for cc in self.units(UnitTypeId.COMMANDCENTER).ready.noqueue:
if self.can_afford(UnitTypeId.SCV):
await self.do(cc.train(UnitTypeId.SCV))
async def expand(self):
if self.units(UnitTypeId.COMMANDCENTER).amount < 3 and self.can_afford(UnitTypeId.COMMANDCENTER):
await self.expand_now()
async def build_supply(self):
ccs = self.units(UnitTypeId.COMMANDCENTER).ready
if ccs.exists:
cc = ccs.first
if self.supply_left < 4 and not self.already_pending(UnitTypeId.SUPPLYDEPOT):
if self.can_afford(UnitTypeId.SUPPLYDEPOT):
await self.build(UnitTypeId.SUPPLYDEPOT, near=cc.position.towards(self.game_info.map_center, 5))
run_game(maps.get("Abyssal Reef LE"), [
Bot(Race.Terran, TerranBot()),
Computer(Race.Protoss, Difficulty.Medium)
], realtime=False)
|
<commit_before>import sc2
from sc2 import run_game, maps, Race, Difficulty
from sc2.player import Bot, Computer
from sc2.constants import *
class TerranBot(sc2.BotAI):
async def on_step(self, iteration):
await self.distribute_workers()
await self.build_supply()
await self.build_workers()
await self.expand()
async def build_workers(self):
for cc in self.units(UnitTypeId.COMMANDCENTER).ready.noqueue:
if self.can_afford(UnitTypeId.SCV):
await self.do(cc.train(UnitTypeId.SCV))
async def expand(self):
if self.units(UnitTypeId.COMMANDCENTER).amount < 3 and self.can_afford(UnitTypeId.COMMANDCENTER):
await self.expand_now()
async def build_supply(self):
cc = self.units(UnitTypeId.COMMANDCENTER).ready.first
if self.supply_left < 4 and not self.already_pending(UnitTypeId.SUPPLYDEPOT):
if self.can_afford(UnitTypeId.SUPPLYDEPOT):
await self.build(UnitTypeId.SUPPLYDEPOT, near=cc.position.towards(self.game_info.map_center, 5))
run_game(maps.get("Abyssal Reef LE"), [
Bot(Race.Terran, TerranBot()),
Computer(Race.Protoss, Difficulty.Medium)
], realtime=False)
<commit_msg>Fix command center selection in example<commit_after>
|
import sc2
from sc2 import run_game, maps, Race, Difficulty
from sc2.player import Bot, Computer
from sc2.constants import *
class TerranBot(sc2.BotAI):
async def on_step(self, iteration):
await self.distribute_workers()
await self.build_supply()
await self.build_workers()
await self.expand()
async def build_workers(self):
for cc in self.units(UnitTypeId.COMMANDCENTER).ready.noqueue:
if self.can_afford(UnitTypeId.SCV):
await self.do(cc.train(UnitTypeId.SCV))
async def expand(self):
if self.units(UnitTypeId.COMMANDCENTER).amount < 3 and self.can_afford(UnitTypeId.COMMANDCENTER):
await self.expand_now()
async def build_supply(self):
ccs = self.units(UnitTypeId.COMMANDCENTER).ready
if ccs.exists:
cc = ccs.first
if self.supply_left < 4 and not self.already_pending(UnitTypeId.SUPPLYDEPOT):
if self.can_afford(UnitTypeId.SUPPLYDEPOT):
await self.build(UnitTypeId.SUPPLYDEPOT, near=cc.position.towards(self.game_info.map_center, 5))
run_game(maps.get("Abyssal Reef LE"), [
Bot(Race.Terran, TerranBot()),
Computer(Race.Protoss, Difficulty.Medium)
], realtime=False)
|
import sc2
from sc2 import run_game, maps, Race, Difficulty
from sc2.player import Bot, Computer
from sc2.constants import *
class TerranBot(sc2.BotAI):
async def on_step(self, iteration):
await self.distribute_workers()
await self.build_supply()
await self.build_workers()
await self.expand()
async def build_workers(self):
for cc in self.units(UnitTypeId.COMMANDCENTER).ready.noqueue:
if self.can_afford(UnitTypeId.SCV):
await self.do(cc.train(UnitTypeId.SCV))
async def expand(self):
if self.units(UnitTypeId.COMMANDCENTER).amount < 3 and self.can_afford(UnitTypeId.COMMANDCENTER):
await self.expand_now()
async def build_supply(self):
cc = self.units(UnitTypeId.COMMANDCENTER).ready.first
if self.supply_left < 4 and not self.already_pending(UnitTypeId.SUPPLYDEPOT):
if self.can_afford(UnitTypeId.SUPPLYDEPOT):
await self.build(UnitTypeId.SUPPLYDEPOT, near=cc.position.towards(self.game_info.map_center, 5))
run_game(maps.get("Abyssal Reef LE"), [
Bot(Race.Terran, TerranBot()),
Computer(Race.Protoss, Difficulty.Medium)
], realtime=False)
Fix command center selection in exampleimport sc2
from sc2 import run_game, maps, Race, Difficulty
from sc2.player import Bot, Computer
from sc2.constants import *
class TerranBot(sc2.BotAI):
async def on_step(self, iteration):
await self.distribute_workers()
await self.build_supply()
await self.build_workers()
await self.expand()
async def build_workers(self):
for cc in self.units(UnitTypeId.COMMANDCENTER).ready.noqueue:
if self.can_afford(UnitTypeId.SCV):
await self.do(cc.train(UnitTypeId.SCV))
async def expand(self):
if self.units(UnitTypeId.COMMANDCENTER).amount < 3 and self.can_afford(UnitTypeId.COMMANDCENTER):
await self.expand_now()
async def build_supply(self):
ccs = self.units(UnitTypeId.COMMANDCENTER).ready
if ccs.exists:
cc = ccs.first
if self.supply_left < 4 and not self.already_pending(UnitTypeId.SUPPLYDEPOT):
if self.can_afford(UnitTypeId.SUPPLYDEPOT):
await self.build(UnitTypeId.SUPPLYDEPOT, near=cc.position.towards(self.game_info.map_center, 5))
run_game(maps.get("Abyssal Reef LE"), [
Bot(Race.Terran, TerranBot()),
Computer(Race.Protoss, Difficulty.Medium)
], realtime=False)
|
<commit_before>import sc2
from sc2 import run_game, maps, Race, Difficulty
from sc2.player import Bot, Computer
from sc2.constants import *
class TerranBot(sc2.BotAI):
async def on_step(self, iteration):
await self.distribute_workers()
await self.build_supply()
await self.build_workers()
await self.expand()
async def build_workers(self):
for cc in self.units(UnitTypeId.COMMANDCENTER).ready.noqueue:
if self.can_afford(UnitTypeId.SCV):
await self.do(cc.train(UnitTypeId.SCV))
async def expand(self):
if self.units(UnitTypeId.COMMANDCENTER).amount < 3 and self.can_afford(UnitTypeId.COMMANDCENTER):
await self.expand_now()
async def build_supply(self):
cc = self.units(UnitTypeId.COMMANDCENTER).ready.first
if self.supply_left < 4 and not self.already_pending(UnitTypeId.SUPPLYDEPOT):
if self.can_afford(UnitTypeId.SUPPLYDEPOT):
await self.build(UnitTypeId.SUPPLYDEPOT, near=cc.position.towards(self.game_info.map_center, 5))
run_game(maps.get("Abyssal Reef LE"), [
Bot(Race.Terran, TerranBot()),
Computer(Race.Protoss, Difficulty.Medium)
], realtime=False)
<commit_msg>Fix command center selection in example<commit_after>import sc2
from sc2 import run_game, maps, Race, Difficulty
from sc2.player import Bot, Computer
from sc2.constants import *
class TerranBot(sc2.BotAI):
async def on_step(self, iteration):
await self.distribute_workers()
await self.build_supply()
await self.build_workers()
await self.expand()
async def build_workers(self):
for cc in self.units(UnitTypeId.COMMANDCENTER).ready.noqueue:
if self.can_afford(UnitTypeId.SCV):
await self.do(cc.train(UnitTypeId.SCV))
async def expand(self):
if self.units(UnitTypeId.COMMANDCENTER).amount < 3 and self.can_afford(UnitTypeId.COMMANDCENTER):
await self.expand_now()
async def build_supply(self):
ccs = self.units(UnitTypeId.COMMANDCENTER).ready
if ccs.exists:
cc = ccs.first
if self.supply_left < 4 and not self.already_pending(UnitTypeId.SUPPLYDEPOT):
if self.can_afford(UnitTypeId.SUPPLYDEPOT):
await self.build(UnitTypeId.SUPPLYDEPOT, near=cc.position.towards(self.game_info.map_center, 5))
run_game(maps.get("Abyssal Reef LE"), [
Bot(Race.Terran, TerranBot()),
Computer(Race.Protoss, Difficulty.Medium)
], realtime=False)
|
fd9f69cbc5512ea91837ff4512d4c9549b2f9eeb
|
plugin/DebianUtils/__init__.py
|
plugin/DebianUtils/__init__.py
|
# -*- coding: utf-8 -*-
#
# Debian Changes Bot
# Copyright (C) 2008 Chris Lamb <chris@chris-lamb.co.uk>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import sys
__version__ = "1"
__author__ = 'Chris Lamb <chris@chris-lamb.co.uk>'
__contributors__ = {}
__url__ = ''
basedir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
if basedir not in sys.path:
sys.path.append(basedir)
import config
import plugin
reload(plugin)
Class = plugin.Class
configure = config.configure
|
# -*- coding: utf-8 -*-
#
# Debian Changes Bot
# Copyright (C) 2008 Chris Lamb <chris@chris-lamb.co.uk>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import sys
__version__ = "1"
__author__ = 'Chris Lamb <chris@chris-lamb.co.uk>'
__contributors__ = {}
__url__ = ''
basedir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
if basedir not in sys.path:
sys.path.append(basedir)
import DebianDevelChangesBot
reload(DebianDevelChangesBot)
import config
import plugin
reload(plugin)
Class = plugin.Class
configure = config.configure
|
Add reload routines to DebianUtils plugin
|
Add reload routines to DebianUtils plugin
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@chris-lamb.co.uk>
|
Python
|
agpl-3.0
|
lamby/debian-devel-changes-bot,lamby/debian-devel-changes-bot,lamby/debian-devel-changes-bot,xtaran/debian-devel-changes-bot,sebastinas/debian-devel-changes-bot,xtaran/debian-devel-changes-bot
|
# -*- coding: utf-8 -*-
#
# Debian Changes Bot
# Copyright (C) 2008 Chris Lamb <chris@chris-lamb.co.uk>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import sys
__version__ = "1"
__author__ = 'Chris Lamb <chris@chris-lamb.co.uk>'
__contributors__ = {}
__url__ = ''
basedir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
if basedir not in sys.path:
sys.path.append(basedir)
import config
import plugin
reload(plugin)
Class = plugin.Class
configure = config.configure
Add reload routines to DebianUtils plugin
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@chris-lamb.co.uk>
|
# -*- coding: utf-8 -*-
#
# Debian Changes Bot
# Copyright (C) 2008 Chris Lamb <chris@chris-lamb.co.uk>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import sys
__version__ = "1"
__author__ = 'Chris Lamb <chris@chris-lamb.co.uk>'
__contributors__ = {}
__url__ = ''
basedir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
if basedir not in sys.path:
sys.path.append(basedir)
import DebianDevelChangesBot
reload(DebianDevelChangesBot)
import config
import plugin
reload(plugin)
Class = plugin.Class
configure = config.configure
|
<commit_before># -*- coding: utf-8 -*-
#
# Debian Changes Bot
# Copyright (C) 2008 Chris Lamb <chris@chris-lamb.co.uk>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import sys
__version__ = "1"
__author__ = 'Chris Lamb <chris@chris-lamb.co.uk>'
__contributors__ = {}
__url__ = ''
basedir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
if basedir not in sys.path:
sys.path.append(basedir)
import config
import plugin
reload(plugin)
Class = plugin.Class
configure = config.configure
<commit_msg>Add reload routines to DebianUtils plugin
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@chris-lamb.co.uk><commit_after>
|
# -*- coding: utf-8 -*-
#
# Debian Changes Bot
# Copyright (C) 2008 Chris Lamb <chris@chris-lamb.co.uk>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import sys
__version__ = "1"
__author__ = 'Chris Lamb <chris@chris-lamb.co.uk>'
__contributors__ = {}
__url__ = ''
basedir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
if basedir not in sys.path:
sys.path.append(basedir)
import DebianDevelChangesBot
reload(DebianDevelChangesBot)
import config
import plugin
reload(plugin)
Class = plugin.Class
configure = config.configure
|
# -*- coding: utf-8 -*-
#
# Debian Changes Bot
# Copyright (C) 2008 Chris Lamb <chris@chris-lamb.co.uk>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import sys
__version__ = "1"
__author__ = 'Chris Lamb <chris@chris-lamb.co.uk>'
__contributors__ = {}
__url__ = ''
basedir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
if basedir not in sys.path:
sys.path.append(basedir)
import config
import plugin
reload(plugin)
Class = plugin.Class
configure = config.configure
Add reload routines to DebianUtils plugin
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@chris-lamb.co.uk># -*- coding: utf-8 -*-
#
# Debian Changes Bot
# Copyright (C) 2008 Chris Lamb <chris@chris-lamb.co.uk>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import sys
__version__ = "1"
__author__ = 'Chris Lamb <chris@chris-lamb.co.uk>'
__contributors__ = {}
__url__ = ''
basedir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
if basedir not in sys.path:
sys.path.append(basedir)
import DebianDevelChangesBot
reload(DebianDevelChangesBot)
import config
import plugin
reload(plugin)
Class = plugin.Class
configure = config.configure
|
<commit_before># -*- coding: utf-8 -*-
#
# Debian Changes Bot
# Copyright (C) 2008 Chris Lamb <chris@chris-lamb.co.uk>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import sys
__version__ = "1"
__author__ = 'Chris Lamb <chris@chris-lamb.co.uk>'
__contributors__ = {}
__url__ = ''
basedir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
if basedir not in sys.path:
sys.path.append(basedir)
import config
import plugin
reload(plugin)
Class = plugin.Class
configure = config.configure
<commit_msg>Add reload routines to DebianUtils plugin
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@chris-lamb.co.uk><commit_after># -*- coding: utf-8 -*-
#
# Debian Changes Bot
# Copyright (C) 2008 Chris Lamb <chris@chris-lamb.co.uk>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import sys
__version__ = "1"
__author__ = 'Chris Lamb <chris@chris-lamb.co.uk>'
__contributors__ = {}
__url__ = ''
basedir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
if basedir not in sys.path:
sys.path.append(basedir)
import DebianDevelChangesBot
reload(DebianDevelChangesBot)
import config
import plugin
reload(plugin)
Class = plugin.Class
configure = config.configure
|
8f30a7d3794891373a1f707bdf6afa083717dfc0
|
ggplot/scales/scale_identity.py
|
ggplot/scales/scale_identity.py
|
from __future__ import absolute_import, division, print_function
from ..utils import identity, alias
from .scale import scale_discrete, scale_continuous
class scale_color_identity(scale_discrete):
aesthetics = ['color']
palette = staticmethod(identity)
class scale_fill_identity(scale_color_identity):
aesthetics = ['fill']
class scale_shape_identity(scale_discrete):
aesthetics = ['shape']
palette = staticmethod(identity)
class scale_linetype_identity(scale_discrete):
aesthetics = ['linetype']
palette = staticmethod(identity)
class scale_alpha_identity(scale_continuous):
aesthetics = ['alpha']
palette = staticmethod(identity)
class scale_size_identity(scale_continuous):
aesthetics = ['size']
palette = staticmethod(identity)
# American to British spelling
alias('scale_colour_identity', scale_color_identity)
|
from __future__ import absolute_import, division, print_function
from ..utils import identity, alias
from .scale import scale_discrete, scale_continuous
class MapTrainMixin(object):
"""
Override map and train methods
"""
def map(self, x):
return x
def train(self, x):
# do nothing if no guide,
# otherwise train so we know what breaks to use
if self.guide is None:
return
return super(MapTrainMixin, self).train(x)
class scale_color_identity(MapTrainMixin, scale_discrete):
aesthetics = ['color']
palette = staticmethod(identity)
guide = None
class scale_fill_identity(scale_color_identity):
aesthetics = ['fill']
class scale_shape_identity(MapTrainMixin, scale_discrete):
aesthetics = ['shape']
palette = staticmethod(identity)
guide = None
class scale_linetype_identity(MapTrainMixin, scale_discrete):
aesthetics = ['linetype']
palette = staticmethod(identity)
guide = None
class scale_alpha_identity(MapTrainMixin, scale_continuous):
aesthetics = ['alpha']
palette = staticmethod(identity)
guide = None
class scale_size_identity(MapTrainMixin, scale_continuous):
aesthetics = ['size']
palette = staticmethod(identity)
guide = None
# American to British spelling
alias('scale_colour_identity', scale_color_identity)
|
Fix identity scales, override map & train methods
|
Fix identity scales, override map & train methods
|
Python
|
mit
|
has2k1/plotnine,has2k1/plotnine
|
from __future__ import absolute_import, division, print_function
from ..utils import identity, alias
from .scale import scale_discrete, scale_continuous
class scale_color_identity(scale_discrete):
aesthetics = ['color']
palette = staticmethod(identity)
class scale_fill_identity(scale_color_identity):
aesthetics = ['fill']
class scale_shape_identity(scale_discrete):
aesthetics = ['shape']
palette = staticmethod(identity)
class scale_linetype_identity(scale_discrete):
aesthetics = ['linetype']
palette = staticmethod(identity)
class scale_alpha_identity(scale_continuous):
aesthetics = ['alpha']
palette = staticmethod(identity)
class scale_size_identity(scale_continuous):
aesthetics = ['size']
palette = staticmethod(identity)
# American to British spelling
alias('scale_colour_identity', scale_color_identity)
Fix identity scales, override map & train methods
|
from __future__ import absolute_import, division, print_function
from ..utils import identity, alias
from .scale import scale_discrete, scale_continuous
class MapTrainMixin(object):
"""
Override map and train methods
"""
def map(self, x):
return x
def train(self, x):
# do nothing if no guide,
# otherwise train so we know what breaks to use
if self.guide is None:
return
return super(MapTrainMixin, self).train(x)
class scale_color_identity(MapTrainMixin, scale_discrete):
aesthetics = ['color']
palette = staticmethod(identity)
guide = None
class scale_fill_identity(scale_color_identity):
aesthetics = ['fill']
class scale_shape_identity(MapTrainMixin, scale_discrete):
aesthetics = ['shape']
palette = staticmethod(identity)
guide = None
class scale_linetype_identity(MapTrainMixin, scale_discrete):
aesthetics = ['linetype']
palette = staticmethod(identity)
guide = None
class scale_alpha_identity(MapTrainMixin, scale_continuous):
aesthetics = ['alpha']
palette = staticmethod(identity)
guide = None
class scale_size_identity(MapTrainMixin, scale_continuous):
aesthetics = ['size']
palette = staticmethod(identity)
guide = None
# American to British spelling
alias('scale_colour_identity', scale_color_identity)
|
<commit_before>from __future__ import absolute_import, division, print_function
from ..utils import identity, alias
from .scale import scale_discrete, scale_continuous
class scale_color_identity(scale_discrete):
aesthetics = ['color']
palette = staticmethod(identity)
class scale_fill_identity(scale_color_identity):
aesthetics = ['fill']
class scale_shape_identity(scale_discrete):
aesthetics = ['shape']
palette = staticmethod(identity)
class scale_linetype_identity(scale_discrete):
aesthetics = ['linetype']
palette = staticmethod(identity)
class scale_alpha_identity(scale_continuous):
aesthetics = ['alpha']
palette = staticmethod(identity)
class scale_size_identity(scale_continuous):
aesthetics = ['size']
palette = staticmethod(identity)
# American to British spelling
alias('scale_colour_identity', scale_color_identity)
<commit_msg>Fix identity scales, override map & train methods<commit_after>
|
from __future__ import absolute_import, division, print_function
from ..utils import identity, alias
from .scale import scale_discrete, scale_continuous
class MapTrainMixin(object):
"""
Override map and train methods
"""
def map(self, x):
return x
def train(self, x):
# do nothing if no guide,
# otherwise train so we know what breaks to use
if self.guide is None:
return
return super(MapTrainMixin, self).train(x)
class scale_color_identity(MapTrainMixin, scale_discrete):
aesthetics = ['color']
palette = staticmethod(identity)
guide = None
class scale_fill_identity(scale_color_identity):
aesthetics = ['fill']
class scale_shape_identity(MapTrainMixin, scale_discrete):
aesthetics = ['shape']
palette = staticmethod(identity)
guide = None
class scale_linetype_identity(MapTrainMixin, scale_discrete):
aesthetics = ['linetype']
palette = staticmethod(identity)
guide = None
class scale_alpha_identity(MapTrainMixin, scale_continuous):
aesthetics = ['alpha']
palette = staticmethod(identity)
guide = None
class scale_size_identity(MapTrainMixin, scale_continuous):
aesthetics = ['size']
palette = staticmethod(identity)
guide = None
# American to British spelling
alias('scale_colour_identity', scale_color_identity)
|
from __future__ import absolute_import, division, print_function
from ..utils import identity, alias
from .scale import scale_discrete, scale_continuous
class scale_color_identity(scale_discrete):
aesthetics = ['color']
palette = staticmethod(identity)
class scale_fill_identity(scale_color_identity):
aesthetics = ['fill']
class scale_shape_identity(scale_discrete):
aesthetics = ['shape']
palette = staticmethod(identity)
class scale_linetype_identity(scale_discrete):
aesthetics = ['linetype']
palette = staticmethod(identity)
class scale_alpha_identity(scale_continuous):
aesthetics = ['alpha']
palette = staticmethod(identity)
class scale_size_identity(scale_continuous):
aesthetics = ['size']
palette = staticmethod(identity)
# American to British spelling
alias('scale_colour_identity', scale_color_identity)
Fix identity scales, override map & train methodsfrom __future__ import absolute_import, division, print_function
from ..utils import identity, alias
from .scale import scale_discrete, scale_continuous
class MapTrainMixin(object):
"""
Override map and train methods
"""
def map(self, x):
return x
def train(self, x):
# do nothing if no guide,
# otherwise train so we know what breaks to use
if self.guide is None:
return
return super(MapTrainMixin, self).train(x)
class scale_color_identity(MapTrainMixin, scale_discrete):
aesthetics = ['color']
palette = staticmethod(identity)
guide = None
class scale_fill_identity(scale_color_identity):
aesthetics = ['fill']
class scale_shape_identity(MapTrainMixin, scale_discrete):
aesthetics = ['shape']
palette = staticmethod(identity)
guide = None
class scale_linetype_identity(MapTrainMixin, scale_discrete):
aesthetics = ['linetype']
palette = staticmethod(identity)
guide = None
class scale_alpha_identity(MapTrainMixin, scale_continuous):
aesthetics = ['alpha']
palette = staticmethod(identity)
guide = None
class scale_size_identity(MapTrainMixin, scale_continuous):
aesthetics = ['size']
palette = staticmethod(identity)
guide = None
# American to British spelling
alias('scale_colour_identity', scale_color_identity)
|
<commit_before>from __future__ import absolute_import, division, print_function
from ..utils import identity, alias
from .scale import scale_discrete, scale_continuous
class scale_color_identity(scale_discrete):
aesthetics = ['color']
palette = staticmethod(identity)
class scale_fill_identity(scale_color_identity):
aesthetics = ['fill']
class scale_shape_identity(scale_discrete):
aesthetics = ['shape']
palette = staticmethod(identity)
class scale_linetype_identity(scale_discrete):
aesthetics = ['linetype']
palette = staticmethod(identity)
class scale_alpha_identity(scale_continuous):
aesthetics = ['alpha']
palette = staticmethod(identity)
class scale_size_identity(scale_continuous):
aesthetics = ['size']
palette = staticmethod(identity)
# American to British spelling
alias('scale_colour_identity', scale_color_identity)
<commit_msg>Fix identity scales, override map & train methods<commit_after>from __future__ import absolute_import, division, print_function
from ..utils import identity, alias
from .scale import scale_discrete, scale_continuous
class MapTrainMixin(object):
"""
Override map and train methods
"""
def map(self, x):
return x
def train(self, x):
# do nothing if no guide,
# otherwise train so we know what breaks to use
if self.guide is None:
return
return super(MapTrainMixin, self).train(x)
class scale_color_identity(MapTrainMixin, scale_discrete):
aesthetics = ['color']
palette = staticmethod(identity)
guide = None
class scale_fill_identity(scale_color_identity):
aesthetics = ['fill']
class scale_shape_identity(MapTrainMixin, scale_discrete):
aesthetics = ['shape']
palette = staticmethod(identity)
guide = None
class scale_linetype_identity(MapTrainMixin, scale_discrete):
aesthetics = ['linetype']
palette = staticmethod(identity)
guide = None
class scale_alpha_identity(MapTrainMixin, scale_continuous):
aesthetics = ['alpha']
palette = staticmethod(identity)
guide = None
class scale_size_identity(MapTrainMixin, scale_continuous):
aesthetics = ['size']
palette = staticmethod(identity)
guide = None
# American to British spelling
alias('scale_colour_identity', scale_color_identity)
|
f04ec6d18ac1dd9932c517ef19d0d7a9d7bae003
|
app/__init__.py
|
app/__init__.py
|
import os
from flask import Flask, jsonify, g
from flask.ext.sqlalchemy import SQLAlchemy
db = SQLAlchemy()
def create_app(config_name):
"""Create an application instance.
This is called by a runner script, such as /run.py.
"""
from .auth import password_auth
app = Flask(__name__)
# apply configuration
cfg = os.path.join(os.getcwd(), 'config', config_name + '.py')
app.config.from_pyfile(cfg)
# initialize extensions
db.init_app(app)
# register blueprints
from .api_v1 import api as api_blueprint
app.register_blueprint(api_blueprint, url_prefix='/v1')
# authentication token route
@app.route('/token')
@password_auth.login_required
def get_auth_token():
return jsonify({'token': g.user.generate_auth_token()})
return app
|
import os
from flask import Flask, jsonify, g
from flask.ext.sqlalchemy import SQLAlchemy
db = SQLAlchemy()
def create_app(config_name):
"""Create an application instance.
This is called by a runner script, such as /run.py.
"""
from .auth import password_auth
app = Flask(__name__)
# apply configuration
_this_dir = os.path.dirname(os.path.abspath(__file__))
cfg = os.path.join(_this_dir, '../config', config_name + '.py')
app.config.from_pyfile(cfg)
# initialize extensions
db.init_app(app)
# register blueprints
from .api_v1 import api as api_blueprint
app.register_blueprint(api_blueprint, url_prefix='/v1')
# authentication token route
@app.route('/token')
@password_auth.login_required
def get_auth_token():
return jsonify({'token': g.user.generate_auth_token()})
return app
|
Allow app to be imported
|
Allow app to be imported
Flask httpdomain needs to import the Flask app. It turns out the the
way to get the path to the configuration file assumed the app was always
being run/imported from the base of the ltd-keeper repo, which isn't
always true, especially for Sphinx doc builds.
This correctly derives a path from the absolute path of the Python
module itself.
For DM-5100.
|
Python
|
mit
|
lsst-sqre/ltd-keeper,lsst-sqre/ltd-keeper
|
import os
from flask import Flask, jsonify, g
from flask.ext.sqlalchemy import SQLAlchemy
db = SQLAlchemy()
def create_app(config_name):
"""Create an application instance.
This is called by a runner script, such as /run.py.
"""
from .auth import password_auth
app = Flask(__name__)
# apply configuration
cfg = os.path.join(os.getcwd(), 'config', config_name + '.py')
app.config.from_pyfile(cfg)
# initialize extensions
db.init_app(app)
# register blueprints
from .api_v1 import api as api_blueprint
app.register_blueprint(api_blueprint, url_prefix='/v1')
# authentication token route
@app.route('/token')
@password_auth.login_required
def get_auth_token():
return jsonify({'token': g.user.generate_auth_token()})
return app
Allow app to be imported
Flask httpdomain needs to import the Flask app. It turns out the the
way to get the path to the configuration file assumed the app was always
being run/imported from the base of the ltd-keeper repo, which isn't
always true, especially for Sphinx doc builds.
This correctly derives a path from the absolute path of the Python
module itself.
For DM-5100.
|
import os
from flask import Flask, jsonify, g
from flask.ext.sqlalchemy import SQLAlchemy
db = SQLAlchemy()
def create_app(config_name):
"""Create an application instance.
This is called by a runner script, such as /run.py.
"""
from .auth import password_auth
app = Flask(__name__)
# apply configuration
_this_dir = os.path.dirname(os.path.abspath(__file__))
cfg = os.path.join(_this_dir, '../config', config_name + '.py')
app.config.from_pyfile(cfg)
# initialize extensions
db.init_app(app)
# register blueprints
from .api_v1 import api as api_blueprint
app.register_blueprint(api_blueprint, url_prefix='/v1')
# authentication token route
@app.route('/token')
@password_auth.login_required
def get_auth_token():
return jsonify({'token': g.user.generate_auth_token()})
return app
|
<commit_before>import os
from flask import Flask, jsonify, g
from flask.ext.sqlalchemy import SQLAlchemy
db = SQLAlchemy()
def create_app(config_name):
"""Create an application instance.
This is called by a runner script, such as /run.py.
"""
from .auth import password_auth
app = Flask(__name__)
# apply configuration
cfg = os.path.join(os.getcwd(), 'config', config_name + '.py')
app.config.from_pyfile(cfg)
# initialize extensions
db.init_app(app)
# register blueprints
from .api_v1 import api as api_blueprint
app.register_blueprint(api_blueprint, url_prefix='/v1')
# authentication token route
@app.route('/token')
@password_auth.login_required
def get_auth_token():
return jsonify({'token': g.user.generate_auth_token()})
return app
<commit_msg>Allow app to be imported
Flask httpdomain needs to import the Flask app. It turns out the the
way to get the path to the configuration file assumed the app was always
being run/imported from the base of the ltd-keeper repo, which isn't
always true, especially for Sphinx doc builds.
This correctly derives a path from the absolute path of the Python
module itself.
For DM-5100.<commit_after>
|
import os
from flask import Flask, jsonify, g
from flask.ext.sqlalchemy import SQLAlchemy
db = SQLAlchemy()
def create_app(config_name):
"""Create an application instance.
This is called by a runner script, such as /run.py.
"""
from .auth import password_auth
app = Flask(__name__)
# apply configuration
_this_dir = os.path.dirname(os.path.abspath(__file__))
cfg = os.path.join(_this_dir, '../config', config_name + '.py')
app.config.from_pyfile(cfg)
# initialize extensions
db.init_app(app)
# register blueprints
from .api_v1 import api as api_blueprint
app.register_blueprint(api_blueprint, url_prefix='/v1')
# authentication token route
@app.route('/token')
@password_auth.login_required
def get_auth_token():
return jsonify({'token': g.user.generate_auth_token()})
return app
|
import os
from flask import Flask, jsonify, g
from flask.ext.sqlalchemy import SQLAlchemy
db = SQLAlchemy()
def create_app(config_name):
"""Create an application instance.
This is called by a runner script, such as /run.py.
"""
from .auth import password_auth
app = Flask(__name__)
# apply configuration
cfg = os.path.join(os.getcwd(), 'config', config_name + '.py')
app.config.from_pyfile(cfg)
# initialize extensions
db.init_app(app)
# register blueprints
from .api_v1 import api as api_blueprint
app.register_blueprint(api_blueprint, url_prefix='/v1')
# authentication token route
@app.route('/token')
@password_auth.login_required
def get_auth_token():
return jsonify({'token': g.user.generate_auth_token()})
return app
Allow app to be imported
Flask httpdomain needs to import the Flask app. It turns out the the
way to get the path to the configuration file assumed the app was always
being run/imported from the base of the ltd-keeper repo, which isn't
always true, especially for Sphinx doc builds.
This correctly derives a path from the absolute path of the Python
module itself.
For DM-5100.import os
from flask import Flask, jsonify, g
from flask.ext.sqlalchemy import SQLAlchemy
db = SQLAlchemy()
def create_app(config_name):
"""Create an application instance.
This is called by a runner script, such as /run.py.
"""
from .auth import password_auth
app = Flask(__name__)
# apply configuration
_this_dir = os.path.dirname(os.path.abspath(__file__))
cfg = os.path.join(_this_dir, '../config', config_name + '.py')
app.config.from_pyfile(cfg)
# initialize extensions
db.init_app(app)
# register blueprints
from .api_v1 import api as api_blueprint
app.register_blueprint(api_blueprint, url_prefix='/v1')
# authentication token route
@app.route('/token')
@password_auth.login_required
def get_auth_token():
return jsonify({'token': g.user.generate_auth_token()})
return app
|
<commit_before>import os
from flask import Flask, jsonify, g
from flask.ext.sqlalchemy import SQLAlchemy
db = SQLAlchemy()
def create_app(config_name):
"""Create an application instance.
This is called by a runner script, such as /run.py.
"""
from .auth import password_auth
app = Flask(__name__)
# apply configuration
cfg = os.path.join(os.getcwd(), 'config', config_name + '.py')
app.config.from_pyfile(cfg)
# initialize extensions
db.init_app(app)
# register blueprints
from .api_v1 import api as api_blueprint
app.register_blueprint(api_blueprint, url_prefix='/v1')
# authentication token route
@app.route('/token')
@password_auth.login_required
def get_auth_token():
return jsonify({'token': g.user.generate_auth_token()})
return app
<commit_msg>Allow app to be imported
Flask httpdomain needs to import the Flask app. It turns out the the
way to get the path to the configuration file assumed the app was always
being run/imported from the base of the ltd-keeper repo, which isn't
always true, especially for Sphinx doc builds.
This correctly derives a path from the absolute path of the Python
module itself.
For DM-5100.<commit_after>import os
from flask import Flask, jsonify, g
from flask.ext.sqlalchemy import SQLAlchemy
db = SQLAlchemy()
def create_app(config_name):
"""Create an application instance.
This is called by a runner script, such as /run.py.
"""
from .auth import password_auth
app = Flask(__name__)
# apply configuration
_this_dir = os.path.dirname(os.path.abspath(__file__))
cfg = os.path.join(_this_dir, '../config', config_name + '.py')
app.config.from_pyfile(cfg)
# initialize extensions
db.init_app(app)
# register blueprints
from .api_v1 import api as api_blueprint
app.register_blueprint(api_blueprint, url_prefix='/v1')
# authentication token route
@app.route('/token')
@password_auth.login_required
def get_auth_token():
return jsonify({'token': g.user.generate_auth_token()})
return app
|
5c1fc4b6ebbd2ee54318c5bc9877868858ea03ee
|
auth0/v2/authentication/base.py
|
auth0/v2/authentication/base.py
|
import json
import requests
from ..exceptions import Auth0Error
class AuthenticationBase(object):
def post(self, url, data={}, headers={}):
response = requests.post(url=url, data=json.dumps(data),
headers=headers)
return self._process_response(response)
def _process_response(self, response):
text = json.loads(response.text) if response.text else {}
if 'error' in text:
raise Auth0Error(status_code=text['error'],
error_code=text['error'],
message=text['error_description'])
return text
|
import json
import requests
from ..exceptions import Auth0Error
class AuthenticationBase(object):
def post(self, url, data={}, headers={}):
response = requests.post(url=url, data=json.dumps(data),
headers=headers)
return self._process_response(response)
def get(self, url, params={}, headers={}):
return requests.get(url=url, params=params, headers=headers).text
def _process_response(self, response):
text = json.loads(response.text) if response.text else {}
if 'error' in text:
raise Auth0Error(status_code=text['error'],
error_code=text['error'],
message=text['error_description'])
return text
|
Add .get() method to AuthenticationBase
|
Add .get() method to AuthenticationBase
|
Python
|
mit
|
auth0/auth0-python,auth0/auth0-python
|
import json
import requests
from ..exceptions import Auth0Error
class AuthenticationBase(object):
def post(self, url, data={}, headers={}):
response = requests.post(url=url, data=json.dumps(data),
headers=headers)
return self._process_response(response)
def _process_response(self, response):
text = json.loads(response.text) if response.text else {}
if 'error' in text:
raise Auth0Error(status_code=text['error'],
error_code=text['error'],
message=text['error_description'])
return text
Add .get() method to AuthenticationBase
|
import json
import requests
from ..exceptions import Auth0Error
class AuthenticationBase(object):
def post(self, url, data={}, headers={}):
response = requests.post(url=url, data=json.dumps(data),
headers=headers)
return self._process_response(response)
def get(self, url, params={}, headers={}):
return requests.get(url=url, params=params, headers=headers).text
def _process_response(self, response):
text = json.loads(response.text) if response.text else {}
if 'error' in text:
raise Auth0Error(status_code=text['error'],
error_code=text['error'],
message=text['error_description'])
return text
|
<commit_before>import json
import requests
from ..exceptions import Auth0Error
class AuthenticationBase(object):
def post(self, url, data={}, headers={}):
response = requests.post(url=url, data=json.dumps(data),
headers=headers)
return self._process_response(response)
def _process_response(self, response):
text = json.loads(response.text) if response.text else {}
if 'error' in text:
raise Auth0Error(status_code=text['error'],
error_code=text['error'],
message=text['error_description'])
return text
<commit_msg>Add .get() method to AuthenticationBase<commit_after>
|
import json
import requests
from ..exceptions import Auth0Error
class AuthenticationBase(object):
def post(self, url, data={}, headers={}):
response = requests.post(url=url, data=json.dumps(data),
headers=headers)
return self._process_response(response)
def get(self, url, params={}, headers={}):
return requests.get(url=url, params=params, headers=headers).text
def _process_response(self, response):
text = json.loads(response.text) if response.text else {}
if 'error' in text:
raise Auth0Error(status_code=text['error'],
error_code=text['error'],
message=text['error_description'])
return text
|
import json
import requests
from ..exceptions import Auth0Error
class AuthenticationBase(object):
def post(self, url, data={}, headers={}):
response = requests.post(url=url, data=json.dumps(data),
headers=headers)
return self._process_response(response)
def _process_response(self, response):
text = json.loads(response.text) if response.text else {}
if 'error' in text:
raise Auth0Error(status_code=text['error'],
error_code=text['error'],
message=text['error_description'])
return text
Add .get() method to AuthenticationBaseimport json
import requests
from ..exceptions import Auth0Error
class AuthenticationBase(object):
def post(self, url, data={}, headers={}):
response = requests.post(url=url, data=json.dumps(data),
headers=headers)
return self._process_response(response)
def get(self, url, params={}, headers={}):
return requests.get(url=url, params=params, headers=headers).text
def _process_response(self, response):
text = json.loads(response.text) if response.text else {}
if 'error' in text:
raise Auth0Error(status_code=text['error'],
error_code=text['error'],
message=text['error_description'])
return text
|
<commit_before>import json
import requests
from ..exceptions import Auth0Error
class AuthenticationBase(object):
def post(self, url, data={}, headers={}):
response = requests.post(url=url, data=json.dumps(data),
headers=headers)
return self._process_response(response)
def _process_response(self, response):
text = json.loads(response.text) if response.text else {}
if 'error' in text:
raise Auth0Error(status_code=text['error'],
error_code=text['error'],
message=text['error_description'])
return text
<commit_msg>Add .get() method to AuthenticationBase<commit_after>import json
import requests
from ..exceptions import Auth0Error
class AuthenticationBase(object):
def post(self, url, data={}, headers={}):
response = requests.post(url=url, data=json.dumps(data),
headers=headers)
return self._process_response(response)
def get(self, url, params={}, headers={}):
return requests.get(url=url, params=params, headers=headers).text
def _process_response(self, response):
text = json.loads(response.text) if response.text else {}
if 'error' in text:
raise Auth0Error(status_code=text['error'],
error_code=text['error'],
message=text['error_description'])
return text
|
d2e88ec95f3a4b2ac01b47154d675996cbed23d3
|
split_dataset.py
|
split_dataset.py
|
import os
import numpy as np
data_dir = "data/dataset/"
jpg_filenames = list(filter(lambda x: x[-3:] == "jpg", os.listdir(data_dir)))
# Randomly select the test dataset
test_percentage = 0.1
n_test = int(round(len(jpg_filenames) * test_percentage))
if n_test == 0: n_test = 1
# Randomly select the images for testing
test_indexes = np.random.choice(len(jpg_filenames), n_test, replace=False)
test_indexes = test_indexes.astype(int)
jpg_filenames_copy = jpg_filenames[:]
with open("test.txt", "w") as f:
for index in test_indexes:
# Write filename
f.write(data_dir + jpg_filenames[index] + "\n")
# Remove from copy list
jpg_filenames_copy.pop(index)
# Write from the copy list
with open("train.txt", "w") as f:
for filename in jpg_filenames_copy:
f.write(data_dir + filename + "\n")
|
import os
import numpy as np
data_dir = "data/dataset/"
jpg_filenames = list(filter(lambda x: x[-3:] == "jpg", os.listdir(data_dir)))
# Randomly select the test dataset
test_percentage = 0.1
n_test = int(round(len(jpg_filenames) * test_percentage))
if n_test == 0: n_test = 1
# Randomly select the images for testing
test_indexes = np.random.choice(len(jpg_filenames), n_test, replace=False)
test_indexes = test_indexes.astype(int)
jpg_filenames_copy = list(jpg_filenames)
with open("test.txt", "w") as f:
for index in test_indexes:
print(index, len(jpg_filenames_copy), len(jpg_filenames))
# Write filename
f.write(data_dir + jpg_filenames[index] + "\n")
# Remove from copy list
jpg_filenames_copy.pop(index)
# Write from the copy list
with open("train.txt", "w") as f:
for filename in jpg_filenames_copy:
f.write(data_dir + filename + "\n")
|
Add verbose print on split dataset script
|
Add verbose print on split dataset script
|
Python
|
mit
|
SetaSouto/license-plate-detection
|
import os
import numpy as np
data_dir = "data/dataset/"
jpg_filenames = list(filter(lambda x: x[-3:] == "jpg", os.listdir(data_dir)))
# Randomly select the test dataset
test_percentage = 0.1
n_test = int(round(len(jpg_filenames) * test_percentage))
if n_test == 0: n_test = 1
# Randomly select the images for testing
test_indexes = np.random.choice(len(jpg_filenames), n_test, replace=False)
test_indexes = test_indexes.astype(int)
jpg_filenames_copy = jpg_filenames[:]
with open("test.txt", "w") as f:
for index in test_indexes:
# Write filename
f.write(data_dir + jpg_filenames[index] + "\n")
# Remove from copy list
jpg_filenames_copy.pop(index)
# Write from the copy list
with open("train.txt", "w") as f:
for filename in jpg_filenames_copy:
f.write(data_dir + filename + "\n")
Add verbose print on split dataset script
|
import os
import numpy as np
data_dir = "data/dataset/"
jpg_filenames = list(filter(lambda x: x[-3:] == "jpg", os.listdir(data_dir)))
# Randomly select the test dataset
test_percentage = 0.1
n_test = int(round(len(jpg_filenames) * test_percentage))
if n_test == 0: n_test = 1
# Randomly select the images for testing
test_indexes = np.random.choice(len(jpg_filenames), n_test, replace=False)
test_indexes = test_indexes.astype(int)
jpg_filenames_copy = list(jpg_filenames)
with open("test.txt", "w") as f:
for index in test_indexes:
print(index, len(jpg_filenames_copy), len(jpg_filenames))
# Write filename
f.write(data_dir + jpg_filenames[index] + "\n")
# Remove from copy list
jpg_filenames_copy.pop(index)
# Write from the copy list
with open("train.txt", "w") as f:
for filename in jpg_filenames_copy:
f.write(data_dir + filename + "\n")
|
<commit_before>import os
import numpy as np
data_dir = "data/dataset/"
jpg_filenames = list(filter(lambda x: x[-3:] == "jpg", os.listdir(data_dir)))
# Randomly select the test dataset
test_percentage = 0.1
n_test = int(round(len(jpg_filenames) * test_percentage))
if n_test == 0: n_test = 1
# Randomly select the images for testing
test_indexes = np.random.choice(len(jpg_filenames), n_test, replace=False)
test_indexes = test_indexes.astype(int)
jpg_filenames_copy = jpg_filenames[:]
with open("test.txt", "w") as f:
for index in test_indexes:
# Write filename
f.write(data_dir + jpg_filenames[index] + "\n")
# Remove from copy list
jpg_filenames_copy.pop(index)
# Write from the copy list
with open("train.txt", "w") as f:
for filename in jpg_filenames_copy:
f.write(data_dir + filename + "\n")
<commit_msg>Add verbose print on split dataset script<commit_after>
|
import os
import numpy as np
data_dir = "data/dataset/"
jpg_filenames = list(filter(lambda x: x[-3:] == "jpg", os.listdir(data_dir)))
# Randomly select the test dataset
test_percentage = 0.1
n_test = int(round(len(jpg_filenames) * test_percentage))
if n_test == 0: n_test = 1
# Randomly select the images for testing
test_indexes = np.random.choice(len(jpg_filenames), n_test, replace=False)
test_indexes = test_indexes.astype(int)
jpg_filenames_copy = list(jpg_filenames)
with open("test.txt", "w") as f:
for index in test_indexes:
print(index, len(jpg_filenames_copy), len(jpg_filenames))
# Write filename
f.write(data_dir + jpg_filenames[index] + "\n")
# Remove from copy list
jpg_filenames_copy.pop(index)
# Write from the copy list
with open("train.txt", "w") as f:
for filename in jpg_filenames_copy:
f.write(data_dir + filename + "\n")
|
import os
import numpy as np
data_dir = "data/dataset/"
jpg_filenames = list(filter(lambda x: x[-3:] == "jpg", os.listdir(data_dir)))
# Randomly select the test dataset
test_percentage = 0.1
n_test = int(round(len(jpg_filenames) * test_percentage))
if n_test == 0: n_test = 1
# Randomly select the images for testing
test_indexes = np.random.choice(len(jpg_filenames), n_test, replace=False)
test_indexes = test_indexes.astype(int)
jpg_filenames_copy = jpg_filenames[:]
with open("test.txt", "w") as f:
for index in test_indexes:
# Write filename
f.write(data_dir + jpg_filenames[index] + "\n")
# Remove from copy list
jpg_filenames_copy.pop(index)
# Write from the copy list
with open("train.txt", "w") as f:
for filename in jpg_filenames_copy:
f.write(data_dir + filename + "\n")
Add verbose print on split dataset scriptimport os
import numpy as np
data_dir = "data/dataset/"
jpg_filenames = list(filter(lambda x: x[-3:] == "jpg", os.listdir(data_dir)))
# Randomly select the test dataset
test_percentage = 0.1
n_test = int(round(len(jpg_filenames) * test_percentage))
if n_test == 0: n_test = 1
# Randomly select the images for testing
test_indexes = np.random.choice(len(jpg_filenames), n_test, replace=False)
test_indexes = test_indexes.astype(int)
jpg_filenames_copy = list(jpg_filenames)
with open("test.txt", "w") as f:
for index in test_indexes:
print(index, len(jpg_filenames_copy), len(jpg_filenames))
# Write filename
f.write(data_dir + jpg_filenames[index] + "\n")
# Remove from copy list
jpg_filenames_copy.pop(index)
# Write from the copy list
with open("train.txt", "w") as f:
for filename in jpg_filenames_copy:
f.write(data_dir + filename + "\n")
|
<commit_before>import os
import numpy as np
data_dir = "data/dataset/"
jpg_filenames = list(filter(lambda x: x[-3:] == "jpg", os.listdir(data_dir)))
# Randomly select the test dataset
test_percentage = 0.1
n_test = int(round(len(jpg_filenames) * test_percentage))
if n_test == 0: n_test = 1
# Randomly select the images for testing
test_indexes = np.random.choice(len(jpg_filenames), n_test, replace=False)
test_indexes = test_indexes.astype(int)
jpg_filenames_copy = jpg_filenames[:]
with open("test.txt", "w") as f:
for index in test_indexes:
# Write filename
f.write(data_dir + jpg_filenames[index] + "\n")
# Remove from copy list
jpg_filenames_copy.pop(index)
# Write from the copy list
with open("train.txt", "w") as f:
for filename in jpg_filenames_copy:
f.write(data_dir + filename + "\n")
<commit_msg>Add verbose print on split dataset script<commit_after>import os
import numpy as np
data_dir = "data/dataset/"
jpg_filenames = list(filter(lambda x: x[-3:] == "jpg", os.listdir(data_dir)))
# Randomly select the test dataset
test_percentage = 0.1
n_test = int(round(len(jpg_filenames) * test_percentage))
if n_test == 0: n_test = 1
# Randomly select the images for testing
test_indexes = np.random.choice(len(jpg_filenames), n_test, replace=False)
test_indexes = test_indexes.astype(int)
jpg_filenames_copy = list(jpg_filenames)
with open("test.txt", "w") as f:
for index in test_indexes:
print(index, len(jpg_filenames_copy), len(jpg_filenames))
# Write filename
f.write(data_dir + jpg_filenames[index] + "\n")
# Remove from copy list
jpg_filenames_copy.pop(index)
# Write from the copy list
with open("train.txt", "w") as f:
for filename in jpg_filenames_copy:
f.write(data_dir + filename + "\n")
|
edab226942fbab75aa66e16d5814b1c38c0e8507
|
2048/policy.py
|
2048/policy.py
|
import tensorflow as tf
class EpsilonGreedyPolicy:
def __init__(self, env, dqn, epsilon_max, epsilon_min, epsilon_decay):
self.env = env
self.dqn = dqn
self.epsilon_max = epsilon_max
self.epsilon_min = epsilon_min
self.epsilon_decay = epsilon_decay
def take_action(self, state, step):
explore_probability = self.epsilon_min + (self.epsilon_max - self.epsilon_min) * tf.math.exp(
-self.epsilon_decay * tf.cast(step, tf.float32))
if explore_probability > tf.random.uniform(shape=()):
return tf.constant(self.env.action_space.sample(), dtype=tf.int32), explore_probability
state = tf.reshape(state, (1, *state.shape, -1))
q_preds = self.dqn(state)[0]
return tf.argmax(q_preds, output_type=tf.int32), explore_probability
|
import tensorflow as tf
class EpsilonGreedyPolicy:
def __init__(self, env, dqn, epsilon_max, epsilon_min, epsilon_decay):
self.env = env
self.dqn = dqn
self.epsilon_max = epsilon_max
self.epsilon_min = epsilon_min
self.epsilon_decay = epsilon_decay
def take_action(self, state, step):
explore_probability = self.epsilon_min + (self.epsilon_max - self.epsilon_min) * tf.math.exp(
-self.epsilon_decay * tf.cast(step, tf.float32))
if explore_probability > tf.random.uniform(shape=()):
return tf.constant(self.env.action_space.sample(), dtype=tf.int32), explore_probability
state = tf.expand_dims(state, axis=0)
q_preds = self.dqn(state)[0]
return tf.argmax(q_preds, output_type=tf.int32), explore_probability
|
Fix error in state shape in EGP
|
[2048] Fix error in state shape in EGP
|
Python
|
mit
|
akshaykurmi/reinforcement-learning
|
import tensorflow as tf
class EpsilonGreedyPolicy:
def __init__(self, env, dqn, epsilon_max, epsilon_min, epsilon_decay):
self.env = env
self.dqn = dqn
self.epsilon_max = epsilon_max
self.epsilon_min = epsilon_min
self.epsilon_decay = epsilon_decay
def take_action(self, state, step):
explore_probability = self.epsilon_min + (self.epsilon_max - self.epsilon_min) * tf.math.exp(
-self.epsilon_decay * tf.cast(step, tf.float32))
if explore_probability > tf.random.uniform(shape=()):
return tf.constant(self.env.action_space.sample(), dtype=tf.int32), explore_probability
state = tf.reshape(state, (1, *state.shape, -1))
q_preds = self.dqn(state)[0]
return tf.argmax(q_preds, output_type=tf.int32), explore_probability
[2048] Fix error in state shape in EGP
|
import tensorflow as tf
class EpsilonGreedyPolicy:
def __init__(self, env, dqn, epsilon_max, epsilon_min, epsilon_decay):
self.env = env
self.dqn = dqn
self.epsilon_max = epsilon_max
self.epsilon_min = epsilon_min
self.epsilon_decay = epsilon_decay
def take_action(self, state, step):
explore_probability = self.epsilon_min + (self.epsilon_max - self.epsilon_min) * tf.math.exp(
-self.epsilon_decay * tf.cast(step, tf.float32))
if explore_probability > tf.random.uniform(shape=()):
return tf.constant(self.env.action_space.sample(), dtype=tf.int32), explore_probability
state = tf.expand_dims(state, axis=0)
q_preds = self.dqn(state)[0]
return tf.argmax(q_preds, output_type=tf.int32), explore_probability
|
<commit_before>import tensorflow as tf
class EpsilonGreedyPolicy:
def __init__(self, env, dqn, epsilon_max, epsilon_min, epsilon_decay):
self.env = env
self.dqn = dqn
self.epsilon_max = epsilon_max
self.epsilon_min = epsilon_min
self.epsilon_decay = epsilon_decay
def take_action(self, state, step):
explore_probability = self.epsilon_min + (self.epsilon_max - self.epsilon_min) * tf.math.exp(
-self.epsilon_decay * tf.cast(step, tf.float32))
if explore_probability > tf.random.uniform(shape=()):
return tf.constant(self.env.action_space.sample(), dtype=tf.int32), explore_probability
state = tf.reshape(state, (1, *state.shape, -1))
q_preds = self.dqn(state)[0]
return tf.argmax(q_preds, output_type=tf.int32), explore_probability
<commit_msg>[2048] Fix error in state shape in EGP<commit_after>
|
import tensorflow as tf
class EpsilonGreedyPolicy:
def __init__(self, env, dqn, epsilon_max, epsilon_min, epsilon_decay):
self.env = env
self.dqn = dqn
self.epsilon_max = epsilon_max
self.epsilon_min = epsilon_min
self.epsilon_decay = epsilon_decay
def take_action(self, state, step):
explore_probability = self.epsilon_min + (self.epsilon_max - self.epsilon_min) * tf.math.exp(
-self.epsilon_decay * tf.cast(step, tf.float32))
if explore_probability > tf.random.uniform(shape=()):
return tf.constant(self.env.action_space.sample(), dtype=tf.int32), explore_probability
state = tf.expand_dims(state, axis=0)
q_preds = self.dqn(state)[0]
return tf.argmax(q_preds, output_type=tf.int32), explore_probability
|
import tensorflow as tf
class EpsilonGreedyPolicy:
def __init__(self, env, dqn, epsilon_max, epsilon_min, epsilon_decay):
self.env = env
self.dqn = dqn
self.epsilon_max = epsilon_max
self.epsilon_min = epsilon_min
self.epsilon_decay = epsilon_decay
def take_action(self, state, step):
explore_probability = self.epsilon_min + (self.epsilon_max - self.epsilon_min) * tf.math.exp(
-self.epsilon_decay * tf.cast(step, tf.float32))
if explore_probability > tf.random.uniform(shape=()):
return tf.constant(self.env.action_space.sample(), dtype=tf.int32), explore_probability
state = tf.reshape(state, (1, *state.shape, -1))
q_preds = self.dqn(state)[0]
return tf.argmax(q_preds, output_type=tf.int32), explore_probability
[2048] Fix error in state shape in EGPimport tensorflow as tf
class EpsilonGreedyPolicy:
def __init__(self, env, dqn, epsilon_max, epsilon_min, epsilon_decay):
self.env = env
self.dqn = dqn
self.epsilon_max = epsilon_max
self.epsilon_min = epsilon_min
self.epsilon_decay = epsilon_decay
def take_action(self, state, step):
explore_probability = self.epsilon_min + (self.epsilon_max - self.epsilon_min) * tf.math.exp(
-self.epsilon_decay * tf.cast(step, tf.float32))
if explore_probability > tf.random.uniform(shape=()):
return tf.constant(self.env.action_space.sample(), dtype=tf.int32), explore_probability
state = tf.expand_dims(state, axis=0)
q_preds = self.dqn(state)[0]
return tf.argmax(q_preds, output_type=tf.int32), explore_probability
|
<commit_before>import tensorflow as tf
class EpsilonGreedyPolicy:
def __init__(self, env, dqn, epsilon_max, epsilon_min, epsilon_decay):
self.env = env
self.dqn = dqn
self.epsilon_max = epsilon_max
self.epsilon_min = epsilon_min
self.epsilon_decay = epsilon_decay
def take_action(self, state, step):
explore_probability = self.epsilon_min + (self.epsilon_max - self.epsilon_min) * tf.math.exp(
-self.epsilon_decay * tf.cast(step, tf.float32))
if explore_probability > tf.random.uniform(shape=()):
return tf.constant(self.env.action_space.sample(), dtype=tf.int32), explore_probability
state = tf.reshape(state, (1, *state.shape, -1))
q_preds = self.dqn(state)[0]
return tf.argmax(q_preds, output_type=tf.int32), explore_probability
<commit_msg>[2048] Fix error in state shape in EGP<commit_after>import tensorflow as tf
class EpsilonGreedyPolicy:
def __init__(self, env, dqn, epsilon_max, epsilon_min, epsilon_decay):
self.env = env
self.dqn = dqn
self.epsilon_max = epsilon_max
self.epsilon_min = epsilon_min
self.epsilon_decay = epsilon_decay
def take_action(self, state, step):
explore_probability = self.epsilon_min + (self.epsilon_max - self.epsilon_min) * tf.math.exp(
-self.epsilon_decay * tf.cast(step, tf.float32))
if explore_probability > tf.random.uniform(shape=()):
return tf.constant(self.env.action_space.sample(), dtype=tf.int32), explore_probability
state = tf.expand_dims(state, axis=0)
q_preds = self.dqn(state)[0]
return tf.argmax(q_preds, output_type=tf.int32), explore_probability
|
ee623ec4511c4aa7d93384b8a935144cd52621ae
|
test.py
|
test.py
|
import subprocess
import unittest
class CompareErrorMessages(unittest.TestCase):
def test_missing_file_return_code_the_same_as_ls(self):
args = ['./lss.sh', 'foo']
ret = subprocess.call(args)
args2 = ['ls', 'foo']
ret2 = subprocess.call(args2)
self.assertEqual(ret == 0, ret2 == 0)
def get_output(self, args):
try:
msg = subprocess.check_output(args, stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as e:
msg = e.output
return msg
def test_missing_file_message_code_the_same_as_ls(self):
args = ['./lss.sh', 'foo']
msg = self.get_output(args)
args2 = ['ls', 'foo']
msg2 = self.get_output(args2)
self.assertEqual(msg, msg2)
|
import os
import subprocess
import unittest
class CompareErrorMessages(unittest.TestCase):
def test_missing_file_return_code_the_same_as_ls(self):
DEVNULL = open(os.devnull, 'wb')
args = ['./lss.sh', 'foo']
ret = subprocess.call(args, stderr=DEVNULL)
args2 = ['ls', 'foo']
ret2 = subprocess.call(args2, stderr=DEVNULL)
self.assertEqual(ret == 0, ret2 == 0)
def get_output(self, args):
try:
msg = subprocess.check_output(args, stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as e:
msg = e.output
return msg
def test_missing_file_message_code_the_same_as_ls(self):
args = ['./lss.sh', 'foo']
msg = self.get_output(args)
args2 = ['ls', 'foo']
msg2 = self.get_output(args2)
self.assertEqual(msg, msg2)
|
Discard output when checking return code.
|
Discard output when checking return code.
|
Python
|
bsd-3-clause
|
jwg4/les,jwg4/les
|
import subprocess
import unittest
class CompareErrorMessages(unittest.TestCase):
def test_missing_file_return_code_the_same_as_ls(self):
args = ['./lss.sh', 'foo']
ret = subprocess.call(args)
args2 = ['ls', 'foo']
ret2 = subprocess.call(args2)
self.assertEqual(ret == 0, ret2 == 0)
def get_output(self, args):
try:
msg = subprocess.check_output(args, stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as e:
msg = e.output
return msg
def test_missing_file_message_code_the_same_as_ls(self):
args = ['./lss.sh', 'foo']
msg = self.get_output(args)
args2 = ['ls', 'foo']
msg2 = self.get_output(args2)
self.assertEqual(msg, msg2)
Discard output when checking return code.
|
import os
import subprocess
import unittest
class CompareErrorMessages(unittest.TestCase):
def test_missing_file_return_code_the_same_as_ls(self):
DEVNULL = open(os.devnull, 'wb')
args = ['./lss.sh', 'foo']
ret = subprocess.call(args, stderr=DEVNULL)
args2 = ['ls', 'foo']
ret2 = subprocess.call(args2, stderr=DEVNULL)
self.assertEqual(ret == 0, ret2 == 0)
def get_output(self, args):
try:
msg = subprocess.check_output(args, stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as e:
msg = e.output
return msg
def test_missing_file_message_code_the_same_as_ls(self):
args = ['./lss.sh', 'foo']
msg = self.get_output(args)
args2 = ['ls', 'foo']
msg2 = self.get_output(args2)
self.assertEqual(msg, msg2)
|
<commit_before>import subprocess
import unittest
class CompareErrorMessages(unittest.TestCase):
def test_missing_file_return_code_the_same_as_ls(self):
args = ['./lss.sh', 'foo']
ret = subprocess.call(args)
args2 = ['ls', 'foo']
ret2 = subprocess.call(args2)
self.assertEqual(ret == 0, ret2 == 0)
def get_output(self, args):
try:
msg = subprocess.check_output(args, stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as e:
msg = e.output
return msg
def test_missing_file_message_code_the_same_as_ls(self):
args = ['./lss.sh', 'foo']
msg = self.get_output(args)
args2 = ['ls', 'foo']
msg2 = self.get_output(args2)
self.assertEqual(msg, msg2)
<commit_msg>Discard output when checking return code.<commit_after>
|
import os
import subprocess
import unittest
class CompareErrorMessages(unittest.TestCase):
def test_missing_file_return_code_the_same_as_ls(self):
DEVNULL = open(os.devnull, 'wb')
args = ['./lss.sh', 'foo']
ret = subprocess.call(args, stderr=DEVNULL)
args2 = ['ls', 'foo']
ret2 = subprocess.call(args2, stderr=DEVNULL)
self.assertEqual(ret == 0, ret2 == 0)
def get_output(self, args):
try:
msg = subprocess.check_output(args, stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as e:
msg = e.output
return msg
def test_missing_file_message_code_the_same_as_ls(self):
args = ['./lss.sh', 'foo']
msg = self.get_output(args)
args2 = ['ls', 'foo']
msg2 = self.get_output(args2)
self.assertEqual(msg, msg2)
|
import subprocess
import unittest
class CompareErrorMessages(unittest.TestCase):
def test_missing_file_return_code_the_same_as_ls(self):
args = ['./lss.sh', 'foo']
ret = subprocess.call(args)
args2 = ['ls', 'foo']
ret2 = subprocess.call(args2)
self.assertEqual(ret == 0, ret2 == 0)
def get_output(self, args):
try:
msg = subprocess.check_output(args, stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as e:
msg = e.output
return msg
def test_missing_file_message_code_the_same_as_ls(self):
args = ['./lss.sh', 'foo']
msg = self.get_output(args)
args2 = ['ls', 'foo']
msg2 = self.get_output(args2)
self.assertEqual(msg, msg2)
Discard output when checking return code.import os
import subprocess
import unittest
class CompareErrorMessages(unittest.TestCase):
def test_missing_file_return_code_the_same_as_ls(self):
DEVNULL = open(os.devnull, 'wb')
args = ['./lss.sh', 'foo']
ret = subprocess.call(args, stderr=DEVNULL)
args2 = ['ls', 'foo']
ret2 = subprocess.call(args2, stderr=DEVNULL)
self.assertEqual(ret == 0, ret2 == 0)
def get_output(self, args):
try:
msg = subprocess.check_output(args, stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as e:
msg = e.output
return msg
def test_missing_file_message_code_the_same_as_ls(self):
args = ['./lss.sh', 'foo']
msg = self.get_output(args)
args2 = ['ls', 'foo']
msg2 = self.get_output(args2)
self.assertEqual(msg, msg2)
|
<commit_before>import subprocess
import unittest
class CompareErrorMessages(unittest.TestCase):
def test_missing_file_return_code_the_same_as_ls(self):
args = ['./lss.sh', 'foo']
ret = subprocess.call(args)
args2 = ['ls', 'foo']
ret2 = subprocess.call(args2)
self.assertEqual(ret == 0, ret2 == 0)
def get_output(self, args):
try:
msg = subprocess.check_output(args, stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as e:
msg = e.output
return msg
def test_missing_file_message_code_the_same_as_ls(self):
args = ['./lss.sh', 'foo']
msg = self.get_output(args)
args2 = ['ls', 'foo']
msg2 = self.get_output(args2)
self.assertEqual(msg, msg2)
<commit_msg>Discard output when checking return code.<commit_after>import os
import subprocess
import unittest
class CompareErrorMessages(unittest.TestCase):
def test_missing_file_return_code_the_same_as_ls(self):
DEVNULL = open(os.devnull, 'wb')
args = ['./lss.sh', 'foo']
ret = subprocess.call(args, stderr=DEVNULL)
args2 = ['ls', 'foo']
ret2 = subprocess.call(args2, stderr=DEVNULL)
self.assertEqual(ret == 0, ret2 == 0)
def get_output(self, args):
try:
msg = subprocess.check_output(args, stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as e:
msg = e.output
return msg
def test_missing_file_message_code_the_same_as_ls(self):
args = ['./lss.sh', 'foo']
msg = self.get_output(args)
args2 = ['ls', 'foo']
msg2 = self.get_output(args2)
self.assertEqual(msg, msg2)
|
fcd7f4b8f9bc7889d8857eea176e5bd90819107d
|
test.py
|
test.py
|
import unittest
import imp
rollbar_agent = imp.load_source('rollbar-agent', './rollbar-agent')
class FakeScanner:
def __init__(self, config):
self.config = config
class TestDefaultMessageStartParserUsage(unittest.TestCase):
app = {'name': 'pyramid',
'config': {
'log_format.default': 'pyramid',
'log_format.patterns': 'celery*.log celery_process',
'min_log_level': 'INFO'
}
}
def test_process_log_debug_with_format_name(self):
# check if self.default_parser uses valid format name provided in the config
config = {'_formats': {'pyramid': {'name': 'pyramid'}}}
scanner = FakeScanner(config)
new_processor = rollbar_agent.LogFileProcessor(scanner, self.app)
self.assertEqual('pyramid', new_processor.default_parser['name'])
def test_process_log_debug_without_format_name(self):
# check if self.default_parser can access _default_message_start_parser if format name not provided in config
config = {'_formats': {}}
scanner = FakeScanner(config)
new_processor = rollbar_agent.LogFileProcessor(scanner, self.app)
self.assertEqual('default parser', new_processor.default_parser['name'])
if __name__ == '__main__':
unittest.main()
|
import sys
import types
import unittest
if sys.version_info >= (3, 4):
from importlib.machinery import SourceFileLoader
loader = SourceFileLoader('rollbar-agent', './rollbar-agent')
rollbar_agent = types.ModuleType(loader.name)
loader.exec_module(rollbar_agent)
else:
import imp
rollbar_agent = imp.load_source('rollbar-agent', './rollbar-agent')
class FakeScanner:
def __init__(self, config):
self.config = config
class TestDefaultMessageStartParserUsage(unittest.TestCase):
app = {'name': 'pyramid',
'config': {
'log_format.default': 'pyramid',
'log_format.patterns': 'celery*.log celery_process',
'min_log_level': 'INFO'
}
}
def test_process_log_debug_with_format_name(self):
# check if self.default_parser uses valid format name provided in the config
config = {'_formats': {'pyramid': {'name': 'pyramid'}}}
scanner = FakeScanner(config)
new_processor = rollbar_agent.LogFileProcessor(scanner, self.app)
self.assertEqual('pyramid', new_processor.default_parser['name'])
def test_process_log_debug_without_format_name(self):
# check if self.default_parser can access _default_message_start_parser if format name not provided in config
config = {'_formats': {}}
scanner = FakeScanner(config)
new_processor = rollbar_agent.LogFileProcessor(scanner, self.app)
self.assertEqual('default parser', new_processor.default_parser['name'])
if __name__ == '__main__':
unittest.main()
|
Update module loader for Python 3.4+
|
Update module loader for Python 3.4+
|
Python
|
mit
|
rollbar/rollbar-agent,rollbar/rollbar-agent
|
import unittest
import imp
rollbar_agent = imp.load_source('rollbar-agent', './rollbar-agent')
class FakeScanner:
def __init__(self, config):
self.config = config
class TestDefaultMessageStartParserUsage(unittest.TestCase):
app = {'name': 'pyramid',
'config': {
'log_format.default': 'pyramid',
'log_format.patterns': 'celery*.log celery_process',
'min_log_level': 'INFO'
}
}
def test_process_log_debug_with_format_name(self):
# check if self.default_parser uses valid format name provided in the config
config = {'_formats': {'pyramid': {'name': 'pyramid'}}}
scanner = FakeScanner(config)
new_processor = rollbar_agent.LogFileProcessor(scanner, self.app)
self.assertEqual('pyramid', new_processor.default_parser['name'])
def test_process_log_debug_without_format_name(self):
# check if self.default_parser can access _default_message_start_parser if format name not provided in config
config = {'_formats': {}}
scanner = FakeScanner(config)
new_processor = rollbar_agent.LogFileProcessor(scanner, self.app)
self.assertEqual('default parser', new_processor.default_parser['name'])
if __name__ == '__main__':
unittest.main()
Update module loader for Python 3.4+
|
import sys
import types
import unittest
if sys.version_info >= (3, 4):
from importlib.machinery import SourceFileLoader
loader = SourceFileLoader('rollbar-agent', './rollbar-agent')
rollbar_agent = types.ModuleType(loader.name)
loader.exec_module(rollbar_agent)
else:
import imp
rollbar_agent = imp.load_source('rollbar-agent', './rollbar-agent')
class FakeScanner:
def __init__(self, config):
self.config = config
class TestDefaultMessageStartParserUsage(unittest.TestCase):
app = {'name': 'pyramid',
'config': {
'log_format.default': 'pyramid',
'log_format.patterns': 'celery*.log celery_process',
'min_log_level': 'INFO'
}
}
def test_process_log_debug_with_format_name(self):
# check if self.default_parser uses valid format name provided in the config
config = {'_formats': {'pyramid': {'name': 'pyramid'}}}
scanner = FakeScanner(config)
new_processor = rollbar_agent.LogFileProcessor(scanner, self.app)
self.assertEqual('pyramid', new_processor.default_parser['name'])
def test_process_log_debug_without_format_name(self):
# check if self.default_parser can access _default_message_start_parser if format name not provided in config
config = {'_formats': {}}
scanner = FakeScanner(config)
new_processor = rollbar_agent.LogFileProcessor(scanner, self.app)
self.assertEqual('default parser', new_processor.default_parser['name'])
if __name__ == '__main__':
unittest.main()
|
<commit_before>import unittest
import imp
rollbar_agent = imp.load_source('rollbar-agent', './rollbar-agent')
class FakeScanner:
def __init__(self, config):
self.config = config
class TestDefaultMessageStartParserUsage(unittest.TestCase):
app = {'name': 'pyramid',
'config': {
'log_format.default': 'pyramid',
'log_format.patterns': 'celery*.log celery_process',
'min_log_level': 'INFO'
}
}
def test_process_log_debug_with_format_name(self):
# check if self.default_parser uses valid format name provided in the config
config = {'_formats': {'pyramid': {'name': 'pyramid'}}}
scanner = FakeScanner(config)
new_processor = rollbar_agent.LogFileProcessor(scanner, self.app)
self.assertEqual('pyramid', new_processor.default_parser['name'])
def test_process_log_debug_without_format_name(self):
# check if self.default_parser can access _default_message_start_parser if format name not provided in config
config = {'_formats': {}}
scanner = FakeScanner(config)
new_processor = rollbar_agent.LogFileProcessor(scanner, self.app)
self.assertEqual('default parser', new_processor.default_parser['name'])
if __name__ == '__main__':
unittest.main()
<commit_msg>Update module loader for Python 3.4+<commit_after>
|
import sys
import types
import unittest
if sys.version_info >= (3, 4):
from importlib.machinery import SourceFileLoader
loader = SourceFileLoader('rollbar-agent', './rollbar-agent')
rollbar_agent = types.ModuleType(loader.name)
loader.exec_module(rollbar_agent)
else:
import imp
rollbar_agent = imp.load_source('rollbar-agent', './rollbar-agent')
class FakeScanner:
def __init__(self, config):
self.config = config
class TestDefaultMessageStartParserUsage(unittest.TestCase):
app = {'name': 'pyramid',
'config': {
'log_format.default': 'pyramid',
'log_format.patterns': 'celery*.log celery_process',
'min_log_level': 'INFO'
}
}
def test_process_log_debug_with_format_name(self):
# check if self.default_parser uses valid format name provided in the config
config = {'_formats': {'pyramid': {'name': 'pyramid'}}}
scanner = FakeScanner(config)
new_processor = rollbar_agent.LogFileProcessor(scanner, self.app)
self.assertEqual('pyramid', new_processor.default_parser['name'])
def test_process_log_debug_without_format_name(self):
# check if self.default_parser can access _default_message_start_parser if format name not provided in config
config = {'_formats': {}}
scanner = FakeScanner(config)
new_processor = rollbar_agent.LogFileProcessor(scanner, self.app)
self.assertEqual('default parser', new_processor.default_parser['name'])
if __name__ == '__main__':
unittest.main()
|
import unittest
import imp
rollbar_agent = imp.load_source('rollbar-agent', './rollbar-agent')
class FakeScanner:
def __init__(self, config):
self.config = config
class TestDefaultMessageStartParserUsage(unittest.TestCase):
app = {'name': 'pyramid',
'config': {
'log_format.default': 'pyramid',
'log_format.patterns': 'celery*.log celery_process',
'min_log_level': 'INFO'
}
}
def test_process_log_debug_with_format_name(self):
# check if self.default_parser uses valid format name provided in the config
config = {'_formats': {'pyramid': {'name': 'pyramid'}}}
scanner = FakeScanner(config)
new_processor = rollbar_agent.LogFileProcessor(scanner, self.app)
self.assertEqual('pyramid', new_processor.default_parser['name'])
def test_process_log_debug_without_format_name(self):
# check if self.default_parser can access _default_message_start_parser if format name not provided in config
config = {'_formats': {}}
scanner = FakeScanner(config)
new_processor = rollbar_agent.LogFileProcessor(scanner, self.app)
self.assertEqual('default parser', new_processor.default_parser['name'])
if __name__ == '__main__':
unittest.main()
Update module loader for Python 3.4+import sys
import types
import unittest
if sys.version_info >= (3, 4):
from importlib.machinery import SourceFileLoader
loader = SourceFileLoader('rollbar-agent', './rollbar-agent')
rollbar_agent = types.ModuleType(loader.name)
loader.exec_module(rollbar_agent)
else:
import imp
rollbar_agent = imp.load_source('rollbar-agent', './rollbar-agent')
class FakeScanner:
def __init__(self, config):
self.config = config
class TestDefaultMessageStartParserUsage(unittest.TestCase):
app = {'name': 'pyramid',
'config': {
'log_format.default': 'pyramid',
'log_format.patterns': 'celery*.log celery_process',
'min_log_level': 'INFO'
}
}
def test_process_log_debug_with_format_name(self):
# check if self.default_parser uses valid format name provided in the config
config = {'_formats': {'pyramid': {'name': 'pyramid'}}}
scanner = FakeScanner(config)
new_processor = rollbar_agent.LogFileProcessor(scanner, self.app)
self.assertEqual('pyramid', new_processor.default_parser['name'])
def test_process_log_debug_without_format_name(self):
# check if self.default_parser can access _default_message_start_parser if format name not provided in config
config = {'_formats': {}}
scanner = FakeScanner(config)
new_processor = rollbar_agent.LogFileProcessor(scanner, self.app)
self.assertEqual('default parser', new_processor.default_parser['name'])
if __name__ == '__main__':
unittest.main()
|
<commit_before>import unittest
import imp
rollbar_agent = imp.load_source('rollbar-agent', './rollbar-agent')
class FakeScanner:
def __init__(self, config):
self.config = config
class TestDefaultMessageStartParserUsage(unittest.TestCase):
app = {'name': 'pyramid',
'config': {
'log_format.default': 'pyramid',
'log_format.patterns': 'celery*.log celery_process',
'min_log_level': 'INFO'
}
}
def test_process_log_debug_with_format_name(self):
# check if self.default_parser uses valid format name provided in the config
config = {'_formats': {'pyramid': {'name': 'pyramid'}}}
scanner = FakeScanner(config)
new_processor = rollbar_agent.LogFileProcessor(scanner, self.app)
self.assertEqual('pyramid', new_processor.default_parser['name'])
def test_process_log_debug_without_format_name(self):
# check if self.default_parser can access _default_message_start_parser if format name not provided in config
config = {'_formats': {}}
scanner = FakeScanner(config)
new_processor = rollbar_agent.LogFileProcessor(scanner, self.app)
self.assertEqual('default parser', new_processor.default_parser['name'])
if __name__ == '__main__':
unittest.main()
<commit_msg>Update module loader for Python 3.4+<commit_after>import sys
import types
import unittest
if sys.version_info >= (3, 4):
from importlib.machinery import SourceFileLoader
loader = SourceFileLoader('rollbar-agent', './rollbar-agent')
rollbar_agent = types.ModuleType(loader.name)
loader.exec_module(rollbar_agent)
else:
import imp
rollbar_agent = imp.load_source('rollbar-agent', './rollbar-agent')
class FakeScanner:
def __init__(self, config):
self.config = config
class TestDefaultMessageStartParserUsage(unittest.TestCase):
app = {'name': 'pyramid',
'config': {
'log_format.default': 'pyramid',
'log_format.patterns': 'celery*.log celery_process',
'min_log_level': 'INFO'
}
}
def test_process_log_debug_with_format_name(self):
# check if self.default_parser uses valid format name provided in the config
config = {'_formats': {'pyramid': {'name': 'pyramid'}}}
scanner = FakeScanner(config)
new_processor = rollbar_agent.LogFileProcessor(scanner, self.app)
self.assertEqual('pyramid', new_processor.default_parser['name'])
def test_process_log_debug_without_format_name(self):
# check if self.default_parser can access _default_message_start_parser if format name not provided in config
config = {'_formats': {}}
scanner = FakeScanner(config)
new_processor = rollbar_agent.LogFileProcessor(scanner, self.app)
self.assertEqual('default parser', new_processor.default_parser['name'])
if __name__ == '__main__':
unittest.main()
|
5dae59bc17f0f8a0ef97bbc461eb18c0ea725bc9
|
config-example.py
|
config-example.py
|
# Copy this file to config.py and change the settings. Don't forget to specify your own SECRET_KEY.
# The app name will be used in several places.
APP_NAME = 'Yelp Love'
APP_BASE_URL = 'https://PROJECT_ID.appspot.com/'
LOVE_SENDER_EMAIL = 'Yelp Love <love@PROJECT_ID.appspot.com>'
# Flask's secret key, used to encrypt the session cookie.
# Set this to any random string and make sure not to share this!
SECRET_KEY = 'YOUR_SECRET_HERE'
# Use default theme
THEME = 'default'
# Every employee needs a reference to a Google Account. This reference is based on the users
# Google Account email address and created when employee data is imported: we take the *username*
# and this DOMAIN
DOMAIN = 'example.com'
# Name of the S3 bucket used to import employee data from a file named employees.json
# Check out /import/employees.json.example to see how this file should look like.
S3_BUCKET = 'employees'
|
# Copy this file to config.py and change the settings. Don't forget to specify your own SECRET_KEY.
# The app name will be used in several places.
APP_NAME = 'Yelp Love'
APP_BASE_URL = 'https://PROJECT_ID.appspot.com/'
LOVE_SENDER_EMAIL = 'Yelp Love <love@PROJECT_ID.appspotmail.com>'
# Flask's secret key, used to encrypt the session cookie.
# Set this to any random string and make sure not to share this!
SECRET_KEY = 'YOUR_SECRET_HERE'
# Use default theme
THEME = 'default'
# Every employee needs a reference to a Google Account. This reference is based on the users
# Google Account email address and created when employee data is imported: we take the *username*
# and this DOMAIN
DOMAIN = 'example.com'
# Name of the S3 bucket used to import employee data from a file named employees.json
# Check out /import/employees.json.example to see how this file should look like.
S3_BUCKET = 'employees'
|
Use appspotmail.com instead of appspot.com for email sender
|
Use appspotmail.com instead of appspot.com for email sender
|
Python
|
mit
|
Yelp/love,Yelp/love,Yelp/love
|
# Copy this file to config.py and change the settings. Don't forget to specify your own SECRET_KEY.
# The app name will be used in several places.
APP_NAME = 'Yelp Love'
APP_BASE_URL = 'https://PROJECT_ID.appspot.com/'
LOVE_SENDER_EMAIL = 'Yelp Love <love@PROJECT_ID.appspot.com>'
# Flask's secret key, used to encrypt the session cookie.
# Set this to any random string and make sure not to share this!
SECRET_KEY = 'YOUR_SECRET_HERE'
# Use default theme
THEME = 'default'
# Every employee needs a reference to a Google Account. This reference is based on the users
# Google Account email address and created when employee data is imported: we take the *username*
# and this DOMAIN
DOMAIN = 'example.com'
# Name of the S3 bucket used to import employee data from a file named employees.json
# Check out /import/employees.json.example to see how this file should look like.
S3_BUCKET = 'employees'
Use appspotmail.com instead of appspot.com for email sender
|
# Copy this file to config.py and change the settings. Don't forget to specify your own SECRET_KEY.
# The app name will be used in several places.
APP_NAME = 'Yelp Love'
APP_BASE_URL = 'https://PROJECT_ID.appspot.com/'
LOVE_SENDER_EMAIL = 'Yelp Love <love@PROJECT_ID.appspotmail.com>'
# Flask's secret key, used to encrypt the session cookie.
# Set this to any random string and make sure not to share this!
SECRET_KEY = 'YOUR_SECRET_HERE'
# Use default theme
THEME = 'default'
# Every employee needs a reference to a Google Account. This reference is based on the users
# Google Account email address and created when employee data is imported: we take the *username*
# and this DOMAIN
DOMAIN = 'example.com'
# Name of the S3 bucket used to import employee data from a file named employees.json
# Check out /import/employees.json.example to see how this file should look like.
S3_BUCKET = 'employees'
|
<commit_before># Copy this file to config.py and change the settings. Don't forget to specify your own SECRET_KEY.
# The app name will be used in several places.
APP_NAME = 'Yelp Love'
APP_BASE_URL = 'https://PROJECT_ID.appspot.com/'
LOVE_SENDER_EMAIL = 'Yelp Love <love@PROJECT_ID.appspot.com>'
# Flask's secret key, used to encrypt the session cookie.
# Set this to any random string and make sure not to share this!
SECRET_KEY = 'YOUR_SECRET_HERE'
# Use default theme
THEME = 'default'
# Every employee needs a reference to a Google Account. This reference is based on the users
# Google Account email address and created when employee data is imported: we take the *username*
# and this DOMAIN
DOMAIN = 'example.com'
# Name of the S3 bucket used to import employee data from a file named employees.json
# Check out /import/employees.json.example to see how this file should look like.
S3_BUCKET = 'employees'
<commit_msg>Use appspotmail.com instead of appspot.com for email sender<commit_after>
|
# Copy this file to config.py and change the settings. Don't forget to specify your own SECRET_KEY.
# The app name will be used in several places.
APP_NAME = 'Yelp Love'
APP_BASE_URL = 'https://PROJECT_ID.appspot.com/'
LOVE_SENDER_EMAIL = 'Yelp Love <love@PROJECT_ID.appspotmail.com>'
# Flask's secret key, used to encrypt the session cookie.
# Set this to any random string and make sure not to share this!
SECRET_KEY = 'YOUR_SECRET_HERE'
# Use default theme
THEME = 'default'
# Every employee needs a reference to a Google Account. This reference is based on the users
# Google Account email address and created when employee data is imported: we take the *username*
# and this DOMAIN
DOMAIN = 'example.com'
# Name of the S3 bucket used to import employee data from a file named employees.json
# Check out /import/employees.json.example to see how this file should look like.
S3_BUCKET = 'employees'
|
# Copy this file to config.py and change the settings. Don't forget to specify your own SECRET_KEY.
# The app name will be used in several places.
APP_NAME = 'Yelp Love'
APP_BASE_URL = 'https://PROJECT_ID.appspot.com/'
LOVE_SENDER_EMAIL = 'Yelp Love <love@PROJECT_ID.appspot.com>'
# Flask's secret key, used to encrypt the session cookie.
# Set this to any random string and make sure not to share this!
SECRET_KEY = 'YOUR_SECRET_HERE'
# Use default theme
THEME = 'default'
# Every employee needs a reference to a Google Account. This reference is based on the users
# Google Account email address and created when employee data is imported: we take the *username*
# and this DOMAIN
DOMAIN = 'example.com'
# Name of the S3 bucket used to import employee data from a file named employees.json
# Check out /import/employees.json.example to see how this file should look like.
S3_BUCKET = 'employees'
Use appspotmail.com instead of appspot.com for email sender# Copy this file to config.py and change the settings. Don't forget to specify your own SECRET_KEY.
# The app name will be used in several places.
APP_NAME = 'Yelp Love'
APP_BASE_URL = 'https://PROJECT_ID.appspot.com/'
LOVE_SENDER_EMAIL = 'Yelp Love <love@PROJECT_ID.appspotmail.com>'
# Flask's secret key, used to encrypt the session cookie.
# Set this to any random string and make sure not to share this!
SECRET_KEY = 'YOUR_SECRET_HERE'
# Use default theme
THEME = 'default'
# Every employee needs a reference to a Google Account. This reference is based on the users
# Google Account email address and created when employee data is imported: we take the *username*
# and this DOMAIN
DOMAIN = 'example.com'
# Name of the S3 bucket used to import employee data from a file named employees.json
# Check out /import/employees.json.example to see how this file should look like.
S3_BUCKET = 'employees'
|
<commit_before># Copy this file to config.py and change the settings. Don't forget to specify your own SECRET_KEY.
# The app name will be used in several places.
APP_NAME = 'Yelp Love'
APP_BASE_URL = 'https://PROJECT_ID.appspot.com/'
LOVE_SENDER_EMAIL = 'Yelp Love <love@PROJECT_ID.appspot.com>'
# Flask's secret key, used to encrypt the session cookie.
# Set this to any random string and make sure not to share this!
SECRET_KEY = 'YOUR_SECRET_HERE'
# Use default theme
THEME = 'default'
# Every employee needs a reference to a Google Account. This reference is based on the users
# Google Account email address and created when employee data is imported: we take the *username*
# and this DOMAIN
DOMAIN = 'example.com'
# Name of the S3 bucket used to import employee data from a file named employees.json
# Check out /import/employees.json.example to see how this file should look like.
S3_BUCKET = 'employees'
<commit_msg>Use appspotmail.com instead of appspot.com for email sender<commit_after># Copy this file to config.py and change the settings. Don't forget to specify your own SECRET_KEY.
# The app name will be used in several places.
APP_NAME = 'Yelp Love'
APP_BASE_URL = 'https://PROJECT_ID.appspot.com/'
LOVE_SENDER_EMAIL = 'Yelp Love <love@PROJECT_ID.appspotmail.com>'
# Flask's secret key, used to encrypt the session cookie.
# Set this to any random string and make sure not to share this!
SECRET_KEY = 'YOUR_SECRET_HERE'
# Use default theme
THEME = 'default'
# Every employee needs a reference to a Google Account. This reference is based on the users
# Google Account email address and created when employee data is imported: we take the *username*
# and this DOMAIN
DOMAIN = 'example.com'
# Name of the S3 bucket used to import employee data from a file named employees.json
# Check out /import/employees.json.example to see how this file should look like.
S3_BUCKET = 'employees'
|
d0d6e2cf02fc5a5b8b1f7070629dad5d22760ca0
|
x10d.py
|
x10d.py
|
#!/usr/bin/env python
from daemon import Daemon, SerialDispatcher
from serial import Serial
import api
from threading import Thread
import sys
def callback(event):
if event:
print(str(event))
def listen(daemon):
while True:
house, unit, act = input().split()
unit = int(unit)
if act.upper() == "ON":
daemon.on(house, unit)
elif act.upper() == "OFF":
daemon.off(house, unit)
def main(args):
serial_port = args[1]
baud = 9600
s = Serial(serial_port, baud)
dispatcher = SerialDispatcher(s)
daemon = Daemon(dispatcher)
daemon.subscribe(callback)
daemon_thread = Thread(target=daemon.listen, name="daemon-listener")
daemon_thread.start()
api_thread = Thread(target=api.run_api, args=(daemon,), name="web-api", daemon=True)
api_thread.start()
user_thread = Thread(target=listen, args=(daemon,), name="user-listener")
user_thread.start()
user_thread.join()
daemon_thread.join()
api_thread.join()
s.close()
if __name__ == "__main__":
# TODO: Parse arguments for things
main(sys.argv)
|
#!/usr/bin/env python
from daemon import Daemon, SerialDispatcher
from serial import Serial
import api
from threading import Thread
import sys
def callback(event):
if event:
print(str(event))
def listen(daemon):
while True:
house, unit, act = input().split()
unit = int(unit)
if act.upper() == "ON":
daemon.on(house, unit)
elif act.upper() == "OFF":
daemon.off(house, unit)
def main(args):
serial_port = args[1]
baud = 9600
s = Serial(serial_port, baud)
dispatcher = SerialDispatcher(s)
daemon = Daemon(dispatcher)
daemon.subscribe(callback)
daemon_thread = Thread(target=daemon.listen, name="daemon-listener", daemon=True)
daemon_thread.start()
api_thread = Thread(target=api.run_api, args=(daemon,), name="web-api", daemon=True)
api_thread.start()
user_thread = Thread(target=listen, args=(daemon,), name="user-listener")
user_thread.start()
user_thread.join()
daemon_thread.join()
api_thread.join()
s.close()
if __name__ == "__main__":
# TODO: Parse arguments for things
main(sys.argv)
|
Make it easier to kill the daemon
|
Make it easier to kill the daemon
|
Python
|
unlicense
|
umbc-hackafe/x10-controller
|
#!/usr/bin/env python
from daemon import Daemon, SerialDispatcher
from serial import Serial
import api
from threading import Thread
import sys
def callback(event):
if event:
print(str(event))
def listen(daemon):
while True:
house, unit, act = input().split()
unit = int(unit)
if act.upper() == "ON":
daemon.on(house, unit)
elif act.upper() == "OFF":
daemon.off(house, unit)
def main(args):
serial_port = args[1]
baud = 9600
s = Serial(serial_port, baud)
dispatcher = SerialDispatcher(s)
daemon = Daemon(dispatcher)
daemon.subscribe(callback)
daemon_thread = Thread(target=daemon.listen, name="daemon-listener")
daemon_thread.start()
api_thread = Thread(target=api.run_api, args=(daemon,), name="web-api", daemon=True)
api_thread.start()
user_thread = Thread(target=listen, args=(daemon,), name="user-listener")
user_thread.start()
user_thread.join()
daemon_thread.join()
api_thread.join()
s.close()
if __name__ == "__main__":
# TODO: Parse arguments for things
main(sys.argv)
Make it easier to kill the daemon
|
#!/usr/bin/env python
from daemon import Daemon, SerialDispatcher
from serial import Serial
import api
from threading import Thread
import sys
def callback(event):
if event:
print(str(event))
def listen(daemon):
while True:
house, unit, act = input().split()
unit = int(unit)
if act.upper() == "ON":
daemon.on(house, unit)
elif act.upper() == "OFF":
daemon.off(house, unit)
def main(args):
serial_port = args[1]
baud = 9600
s = Serial(serial_port, baud)
dispatcher = SerialDispatcher(s)
daemon = Daemon(dispatcher)
daemon.subscribe(callback)
daemon_thread = Thread(target=daemon.listen, name="daemon-listener", daemon=True)
daemon_thread.start()
api_thread = Thread(target=api.run_api, args=(daemon,), name="web-api", daemon=True)
api_thread.start()
user_thread = Thread(target=listen, args=(daemon,), name="user-listener")
user_thread.start()
user_thread.join()
daemon_thread.join()
api_thread.join()
s.close()
if __name__ == "__main__":
# TODO: Parse arguments for things
main(sys.argv)
|
<commit_before>#!/usr/bin/env python
from daemon import Daemon, SerialDispatcher
from serial import Serial
import api
from threading import Thread
import sys
def callback(event):
if event:
print(str(event))
def listen(daemon):
while True:
house, unit, act = input().split()
unit = int(unit)
if act.upper() == "ON":
daemon.on(house, unit)
elif act.upper() == "OFF":
daemon.off(house, unit)
def main(args):
serial_port = args[1]
baud = 9600
s = Serial(serial_port, baud)
dispatcher = SerialDispatcher(s)
daemon = Daemon(dispatcher)
daemon.subscribe(callback)
daemon_thread = Thread(target=daemon.listen, name="daemon-listener")
daemon_thread.start()
api_thread = Thread(target=api.run_api, args=(daemon,), name="web-api", daemon=True)
api_thread.start()
user_thread = Thread(target=listen, args=(daemon,), name="user-listener")
user_thread.start()
user_thread.join()
daemon_thread.join()
api_thread.join()
s.close()
if __name__ == "__main__":
# TODO: Parse arguments for things
main(sys.argv)
<commit_msg>Make it easier to kill the daemon<commit_after>
|
#!/usr/bin/env python
from daemon import Daemon, SerialDispatcher
from serial import Serial
import api
from threading import Thread
import sys
def callback(event):
if event:
print(str(event))
def listen(daemon):
while True:
house, unit, act = input().split()
unit = int(unit)
if act.upper() == "ON":
daemon.on(house, unit)
elif act.upper() == "OFF":
daemon.off(house, unit)
def main(args):
serial_port = args[1]
baud = 9600
s = Serial(serial_port, baud)
dispatcher = SerialDispatcher(s)
daemon = Daemon(dispatcher)
daemon.subscribe(callback)
daemon_thread = Thread(target=daemon.listen, name="daemon-listener", daemon=True)
daemon_thread.start()
api_thread = Thread(target=api.run_api, args=(daemon,), name="web-api", daemon=True)
api_thread.start()
user_thread = Thread(target=listen, args=(daemon,), name="user-listener")
user_thread.start()
user_thread.join()
daemon_thread.join()
api_thread.join()
s.close()
if __name__ == "__main__":
# TODO: Parse arguments for things
main(sys.argv)
|
#!/usr/bin/env python
from daemon import Daemon, SerialDispatcher
from serial import Serial
import api
from threading import Thread
import sys
def callback(event):
if event:
print(str(event))
def listen(daemon):
while True:
house, unit, act = input().split()
unit = int(unit)
if act.upper() == "ON":
daemon.on(house, unit)
elif act.upper() == "OFF":
daemon.off(house, unit)
def main(args):
serial_port = args[1]
baud = 9600
s = Serial(serial_port, baud)
dispatcher = SerialDispatcher(s)
daemon = Daemon(dispatcher)
daemon.subscribe(callback)
daemon_thread = Thread(target=daemon.listen, name="daemon-listener")
daemon_thread.start()
api_thread = Thread(target=api.run_api, args=(daemon,), name="web-api", daemon=True)
api_thread.start()
user_thread = Thread(target=listen, args=(daemon,), name="user-listener")
user_thread.start()
user_thread.join()
daemon_thread.join()
api_thread.join()
s.close()
if __name__ == "__main__":
# TODO: Parse arguments for things
main(sys.argv)
Make it easier to kill the daemon#!/usr/bin/env python
from daemon import Daemon, SerialDispatcher
from serial import Serial
import api
from threading import Thread
import sys
def callback(event):
if event:
print(str(event))
def listen(daemon):
while True:
house, unit, act = input().split()
unit = int(unit)
if act.upper() == "ON":
daemon.on(house, unit)
elif act.upper() == "OFF":
daemon.off(house, unit)
def main(args):
serial_port = args[1]
baud = 9600
s = Serial(serial_port, baud)
dispatcher = SerialDispatcher(s)
daemon = Daemon(dispatcher)
daemon.subscribe(callback)
daemon_thread = Thread(target=daemon.listen, name="daemon-listener", daemon=True)
daemon_thread.start()
api_thread = Thread(target=api.run_api, args=(daemon,), name="web-api", daemon=True)
api_thread.start()
user_thread = Thread(target=listen, args=(daemon,), name="user-listener")
user_thread.start()
user_thread.join()
daemon_thread.join()
api_thread.join()
s.close()
if __name__ == "__main__":
# TODO: Parse arguments for things
main(sys.argv)
|
<commit_before>#!/usr/bin/env python
from daemon import Daemon, SerialDispatcher
from serial import Serial
import api
from threading import Thread
import sys
def callback(event):
if event:
print(str(event))
def listen(daemon):
while True:
house, unit, act = input().split()
unit = int(unit)
if act.upper() == "ON":
daemon.on(house, unit)
elif act.upper() == "OFF":
daemon.off(house, unit)
def main(args):
serial_port = args[1]
baud = 9600
s = Serial(serial_port, baud)
dispatcher = SerialDispatcher(s)
daemon = Daemon(dispatcher)
daemon.subscribe(callback)
daemon_thread = Thread(target=daemon.listen, name="daemon-listener")
daemon_thread.start()
api_thread = Thread(target=api.run_api, args=(daemon,), name="web-api", daemon=True)
api_thread.start()
user_thread = Thread(target=listen, args=(daemon,), name="user-listener")
user_thread.start()
user_thread.join()
daemon_thread.join()
api_thread.join()
s.close()
if __name__ == "__main__":
# TODO: Parse arguments for things
main(sys.argv)
<commit_msg>Make it easier to kill the daemon<commit_after>#!/usr/bin/env python
from daemon import Daemon, SerialDispatcher
from serial import Serial
import api
from threading import Thread
import sys
def callback(event):
if event:
print(str(event))
def listen(daemon):
while True:
house, unit, act = input().split()
unit = int(unit)
if act.upper() == "ON":
daemon.on(house, unit)
elif act.upper() == "OFF":
daemon.off(house, unit)
def main(args):
serial_port = args[1]
baud = 9600
s = Serial(serial_port, baud)
dispatcher = SerialDispatcher(s)
daemon = Daemon(dispatcher)
daemon.subscribe(callback)
daemon_thread = Thread(target=daemon.listen, name="daemon-listener", daemon=True)
daemon_thread.start()
api_thread = Thread(target=api.run_api, args=(daemon,), name="web-api", daemon=True)
api_thread.start()
user_thread = Thread(target=listen, args=(daemon,), name="user-listener")
user_thread.start()
user_thread.join()
daemon_thread.join()
api_thread.join()
s.close()
if __name__ == "__main__":
# TODO: Parse arguments for things
main(sys.argv)
|
7c09e2df765b466b65570116fe8d0cc5f42d30dd
|
indra/sources/phosphoELM/api.py
|
indra/sources/phosphoELM/api.py
|
import csv
ppelm_s3_key = ''
def process_from_dump(fname=None, delimiter='\t'):
if fname is None:
# ToDo Get from S3
return []
else:
with open(fname, 'r') as f:
csv_reader = csv.reader(f.readlines(), delimiter=delimiter)
ppelm_json = _get_json_from_entry_rows(csv_reader)
return ppelm_json
def _get_json_from_entry_rows(row_iter):
ppelm_json = []
columns = next(row_iter)
for entry in row_iter:
row_dict = {columns[n]: entry[n]
for n in range(len(columns))}
ppelm_json.append(row_dict)
return ppelm_json
|
import csv
from .processor import PhosphoELMPRocessor
s3_bucket = 'bigmech'
ppelm_s3_key = ''
def process_from_dump(fname=None, delimiter='\t'):
if fname is None:
# ToDo Get from S3
return []
else:
with open(fname, 'r') as f:
csv_reader = csv.reader(f.readlines(), delimiter=delimiter)
ppelm_json = _get_json_from_entry_rows(csv_reader)
return PhosphoELMPRocessor(file_dump_json=ppelm_json)
def _get_json_from_entry_rows(row_iter):
ppelm_json = []
columns = next(row_iter)
for entry in row_iter:
row_dict = {columns[n]: entry[n]
for n in range(len(columns))}
ppelm_json.append(row_dict)
return ppelm_json
|
Return processor w processed statements
|
Return processor w processed statements
|
Python
|
bsd-2-clause
|
johnbachman/belpy,bgyori/indra,bgyori/indra,sorgerlab/indra,sorgerlab/belpy,bgyori/indra,sorgerlab/indra,sorgerlab/indra,johnbachman/indra,sorgerlab/belpy,johnbachman/belpy,johnbachman/indra,sorgerlab/belpy,johnbachman/indra,johnbachman/belpy
|
import csv
ppelm_s3_key = ''
def process_from_dump(fname=None, delimiter='\t'):
if fname is None:
# ToDo Get from S3
return []
else:
with open(fname, 'r') as f:
csv_reader = csv.reader(f.readlines(), delimiter=delimiter)
ppelm_json = _get_json_from_entry_rows(csv_reader)
return ppelm_json
def _get_json_from_entry_rows(row_iter):
ppelm_json = []
columns = next(row_iter)
for entry in row_iter:
row_dict = {columns[n]: entry[n]
for n in range(len(columns))}
ppelm_json.append(row_dict)
return ppelm_json
Return processor w processed statements
|
import csv
from .processor import PhosphoELMPRocessor
s3_bucket = 'bigmech'
ppelm_s3_key = ''
def process_from_dump(fname=None, delimiter='\t'):
if fname is None:
# ToDo Get from S3
return []
else:
with open(fname, 'r') as f:
csv_reader = csv.reader(f.readlines(), delimiter=delimiter)
ppelm_json = _get_json_from_entry_rows(csv_reader)
return PhosphoELMPRocessor(file_dump_json=ppelm_json)
def _get_json_from_entry_rows(row_iter):
ppelm_json = []
columns = next(row_iter)
for entry in row_iter:
row_dict = {columns[n]: entry[n]
for n in range(len(columns))}
ppelm_json.append(row_dict)
return ppelm_json
|
<commit_before>import csv
ppelm_s3_key = ''
def process_from_dump(fname=None, delimiter='\t'):
if fname is None:
# ToDo Get from S3
return []
else:
with open(fname, 'r') as f:
csv_reader = csv.reader(f.readlines(), delimiter=delimiter)
ppelm_json = _get_json_from_entry_rows(csv_reader)
return ppelm_json
def _get_json_from_entry_rows(row_iter):
ppelm_json = []
columns = next(row_iter)
for entry in row_iter:
row_dict = {columns[n]: entry[n]
for n in range(len(columns))}
ppelm_json.append(row_dict)
return ppelm_json
<commit_msg>Return processor w processed statements<commit_after>
|
import csv
from .processor import PhosphoELMPRocessor
s3_bucket = 'bigmech'
ppelm_s3_key = ''
def process_from_dump(fname=None, delimiter='\t'):
if fname is None:
# ToDo Get from S3
return []
else:
with open(fname, 'r') as f:
csv_reader = csv.reader(f.readlines(), delimiter=delimiter)
ppelm_json = _get_json_from_entry_rows(csv_reader)
return PhosphoELMPRocessor(file_dump_json=ppelm_json)
def _get_json_from_entry_rows(row_iter):
ppelm_json = []
columns = next(row_iter)
for entry in row_iter:
row_dict = {columns[n]: entry[n]
for n in range(len(columns))}
ppelm_json.append(row_dict)
return ppelm_json
|
import csv
ppelm_s3_key = ''
def process_from_dump(fname=None, delimiter='\t'):
if fname is None:
# ToDo Get from S3
return []
else:
with open(fname, 'r') as f:
csv_reader = csv.reader(f.readlines(), delimiter=delimiter)
ppelm_json = _get_json_from_entry_rows(csv_reader)
return ppelm_json
def _get_json_from_entry_rows(row_iter):
ppelm_json = []
columns = next(row_iter)
for entry in row_iter:
row_dict = {columns[n]: entry[n]
for n in range(len(columns))}
ppelm_json.append(row_dict)
return ppelm_json
Return processor w processed statementsimport csv
from .processor import PhosphoELMPRocessor
s3_bucket = 'bigmech'
ppelm_s3_key = ''
def process_from_dump(fname=None, delimiter='\t'):
if fname is None:
# ToDo Get from S3
return []
else:
with open(fname, 'r') as f:
csv_reader = csv.reader(f.readlines(), delimiter=delimiter)
ppelm_json = _get_json_from_entry_rows(csv_reader)
return PhosphoELMPRocessor(file_dump_json=ppelm_json)
def _get_json_from_entry_rows(row_iter):
ppelm_json = []
columns = next(row_iter)
for entry in row_iter:
row_dict = {columns[n]: entry[n]
for n in range(len(columns))}
ppelm_json.append(row_dict)
return ppelm_json
|
<commit_before>import csv
ppelm_s3_key = ''
def process_from_dump(fname=None, delimiter='\t'):
if fname is None:
# ToDo Get from S3
return []
else:
with open(fname, 'r') as f:
csv_reader = csv.reader(f.readlines(), delimiter=delimiter)
ppelm_json = _get_json_from_entry_rows(csv_reader)
return ppelm_json
def _get_json_from_entry_rows(row_iter):
ppelm_json = []
columns = next(row_iter)
for entry in row_iter:
row_dict = {columns[n]: entry[n]
for n in range(len(columns))}
ppelm_json.append(row_dict)
return ppelm_json
<commit_msg>Return processor w processed statements<commit_after>import csv
from .processor import PhosphoELMPRocessor
s3_bucket = 'bigmech'
ppelm_s3_key = ''
def process_from_dump(fname=None, delimiter='\t'):
if fname is None:
# ToDo Get from S3
return []
else:
with open(fname, 'r') as f:
csv_reader = csv.reader(f.readlines(), delimiter=delimiter)
ppelm_json = _get_json_from_entry_rows(csv_reader)
return PhosphoELMPRocessor(file_dump_json=ppelm_json)
def _get_json_from_entry_rows(row_iter):
ppelm_json = []
columns = next(row_iter)
for entry in row_iter:
row_dict = {columns[n]: entry[n]
for n in range(len(columns))}
ppelm_json.append(row_dict)
return ppelm_json
|
0858d7ded502ee7d1a31d8df767bf1b52648e32e
|
issues_hel/tests/test_import.py
|
issues_hel/tests/test_import.py
|
import json
import os
import pytest
from issues.models import Issue
from issues.sync.down import update_local_issue
@pytest.mark.django_db
def test_import_taskful_georeport():
with open(os.path.join(os.path.dirname(__file__), "taskful_request.json"), "r") as infp:
data = json.load(infp)
issue, created = update_local_issue(data, 'import')
assert created
assert issue.tasks.count() == 7
issue, created = update_local_issue(data, 'import')
assert not created
assert issue.tasks.count() == 7
|
import json
import os
import pytest
from issues.sync.down import update_local_issue
@pytest.mark.django_db
def test_import_taskful_georeport():
with open(os.path.join(os.path.dirname(__file__), "taskful_request.json"), "r", encoding="utf8") as infp:
data = json.load(infp)
issue, created = update_local_issue(data, 'import')
assert created
assert issue.tasks.count() == 7
issue, created = update_local_issue(data, 'import')
assert not created
assert issue.tasks.count() == 7
|
Fix failing import test (how did this ever work?)
|
Fix failing import test (how did this ever work?)
|
Python
|
mit
|
6aika/issue-reporting,6aika/issue-reporting,6aika/issue-reporting
|
import json
import os
import pytest
from issues.models import Issue
from issues.sync.down import update_local_issue
@pytest.mark.django_db
def test_import_taskful_georeport():
with open(os.path.join(os.path.dirname(__file__), "taskful_request.json"), "r") as infp:
data = json.load(infp)
issue, created = update_local_issue(data, 'import')
assert created
assert issue.tasks.count() == 7
issue, created = update_local_issue(data, 'import')
assert not created
assert issue.tasks.count() == 7
Fix failing import test (how did this ever work?)
|
import json
import os
import pytest
from issues.sync.down import update_local_issue
@pytest.mark.django_db
def test_import_taskful_georeport():
with open(os.path.join(os.path.dirname(__file__), "taskful_request.json"), "r", encoding="utf8") as infp:
data = json.load(infp)
issue, created = update_local_issue(data, 'import')
assert created
assert issue.tasks.count() == 7
issue, created = update_local_issue(data, 'import')
assert not created
assert issue.tasks.count() == 7
|
<commit_before>import json
import os
import pytest
from issues.models import Issue
from issues.sync.down import update_local_issue
@pytest.mark.django_db
def test_import_taskful_georeport():
with open(os.path.join(os.path.dirname(__file__), "taskful_request.json"), "r") as infp:
data = json.load(infp)
issue, created = update_local_issue(data, 'import')
assert created
assert issue.tasks.count() == 7
issue, created = update_local_issue(data, 'import')
assert not created
assert issue.tasks.count() == 7
<commit_msg>Fix failing import test (how did this ever work?)<commit_after>
|
import json
import os
import pytest
from issues.sync.down import update_local_issue
@pytest.mark.django_db
def test_import_taskful_georeport():
with open(os.path.join(os.path.dirname(__file__), "taskful_request.json"), "r", encoding="utf8") as infp:
data = json.load(infp)
issue, created = update_local_issue(data, 'import')
assert created
assert issue.tasks.count() == 7
issue, created = update_local_issue(data, 'import')
assert not created
assert issue.tasks.count() == 7
|
import json
import os
import pytest
from issues.models import Issue
from issues.sync.down import update_local_issue
@pytest.mark.django_db
def test_import_taskful_georeport():
with open(os.path.join(os.path.dirname(__file__), "taskful_request.json"), "r") as infp:
data = json.load(infp)
issue, created = update_local_issue(data, 'import')
assert created
assert issue.tasks.count() == 7
issue, created = update_local_issue(data, 'import')
assert not created
assert issue.tasks.count() == 7
Fix failing import test (how did this ever work?)import json
import os
import pytest
from issues.sync.down import update_local_issue
@pytest.mark.django_db
def test_import_taskful_georeport():
with open(os.path.join(os.path.dirname(__file__), "taskful_request.json"), "r", encoding="utf8") as infp:
data = json.load(infp)
issue, created = update_local_issue(data, 'import')
assert created
assert issue.tasks.count() == 7
issue, created = update_local_issue(data, 'import')
assert not created
assert issue.tasks.count() == 7
|
<commit_before>import json
import os
import pytest
from issues.models import Issue
from issues.sync.down import update_local_issue
@pytest.mark.django_db
def test_import_taskful_georeport():
with open(os.path.join(os.path.dirname(__file__), "taskful_request.json"), "r") as infp:
data = json.load(infp)
issue, created = update_local_issue(data, 'import')
assert created
assert issue.tasks.count() == 7
issue, created = update_local_issue(data, 'import')
assert not created
assert issue.tasks.count() == 7
<commit_msg>Fix failing import test (how did this ever work?)<commit_after>import json
import os
import pytest
from issues.sync.down import update_local_issue
@pytest.mark.django_db
def test_import_taskful_georeport():
with open(os.path.join(os.path.dirname(__file__), "taskful_request.json"), "r", encoding="utf8") as infp:
data = json.load(infp)
issue, created = update_local_issue(data, 'import')
assert created
assert issue.tasks.count() == 7
issue, created = update_local_issue(data, 'import')
assert not created
assert issue.tasks.count() == 7
|
604e7d15c3072682ba3327c1ef6333d6bb0c768b
|
astropy/io/misc/asdf/__init__.py
|
astropy/io/misc/asdf/__init__.py
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-
"""
The **asdf** subpackage contains code that is used to serialize astropy types
so that they can be represented and stored using the Advanced Scientific Data
Format (ASDF). This subpackage defines classes, referred to as **tags**, that
implement the logic for serialization and deserialization.
ASDF makes use of abstract data type definitons called **schemas**. The tags
provided here are simply specific implementations of particular schemas.
Currently astropy only implements tags for a subset of schemas that are defined
externally by the ASDF Standard. However, it is likely that astropy will
eventually define schemas of its own.
Astropy currently has no ability to read or write ASDF files itself. In order
to process ASDF files it is necessary to make use of the standalone **asdf**
package. Users should never need to refer to tag implementations directly.
Their presence should be entirely transparent when processing ASDF files.
If both **asdf** and **astropy** are installed, no futher configuration is
required in order to process ASDF files. The **asdf** package has been designed
to automatically detect the presence of the tags defined by **astropy**.
Documentation on the ASDF Standard can be found `here
<https://asdf-standard.readthedocs.io>`__. Documentation on the ASDF Python
module can be found `here <https://asdf.readthedocs.io>`__.
"""
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-
"""
The **asdf** subpackage contains code that is used to serialize astropy types
so that they can be represented and stored using the Advanced Scientific Data
Format (ASDF). This subpackage defines classes, referred to as **tags**, that
implement the logic for serialization and deserialization.
ASDF makes use of abstract data type definitons called **schemas**. The tag
classes provided here are specific implementations of particular schemas. Some
of the tags in Astropy (e.g., those related to transforms) implement schemas
that are defined by the ASDF Standard. In other cases, both the tags and
schemas are defined within Astropy (e.g., those related to many of the
coordinate frames).
Astropy currently has no ability to read or write ASDF files itself. In order
to process ASDF files it is necessary to make use of the standalone **asdf**
package. Users should never need to refer to tag implementations directly.
Their presence should be entirely transparent when processing ASDF files.
If both **asdf** and **astropy** are installed, no futher configuration is
required in order to process ASDF files. The **asdf** package has been designed
to automatically detect the presence of the tags defined by **astropy**.
Documentation on the ASDF Standard can be found `here
<https://asdf-standard.readthedocs.io>`__. Documentation on the ASDF Python
module can be found `here <https://asdf.readthedocs.io>`__.
"""
|
Update ASDF-related docs to reflect presence of schemas [docs only]
|
Update ASDF-related docs to reflect presence of schemas [docs only]
|
Python
|
bsd-3-clause
|
lpsinger/astropy,StuartLittlefair/astropy,pllim/astropy,StuartLittlefair/astropy,larrybradley/astropy,saimn/astropy,mhvk/astropy,astropy/astropy,mhvk/astropy,lpsinger/astropy,StuartLittlefair/astropy,bsipocz/astropy,stargaser/astropy,dhomeier/astropy,astropy/astropy,pllim/astropy,StuartLittlefair/astropy,aleksandr-bakanov/astropy,lpsinger/astropy,saimn/astropy,larrybradley/astropy,dhomeier/astropy,mhvk/astropy,larrybradley/astropy,stargaser/astropy,mhvk/astropy,dhomeier/astropy,saimn/astropy,aleksandr-bakanov/astropy,dhomeier/astropy,mhvk/astropy,bsipocz/astropy,MSeifert04/astropy,saimn/astropy,aleksandr-bakanov/astropy,stargaser/astropy,aleksandr-bakanov/astropy,astropy/astropy,larrybradley/astropy,astropy/astropy,MSeifert04/astropy,bsipocz/astropy,stargaser/astropy,saimn/astropy,bsipocz/astropy,pllim/astropy,astropy/astropy,StuartLittlefair/astropy,lpsinger/astropy,pllim/astropy,lpsinger/astropy,pllim/astropy,MSeifert04/astropy,dhomeier/astropy,larrybradley/astropy,MSeifert04/astropy
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-
"""
The **asdf** subpackage contains code that is used to serialize astropy types
so that they can be represented and stored using the Advanced Scientific Data
Format (ASDF). This subpackage defines classes, referred to as **tags**, that
implement the logic for serialization and deserialization.
ASDF makes use of abstract data type definitons called **schemas**. The tags
provided here are simply specific implementations of particular schemas.
Currently astropy only implements tags for a subset of schemas that are defined
externally by the ASDF Standard. However, it is likely that astropy will
eventually define schemas of its own.
Astropy currently has no ability to read or write ASDF files itself. In order
to process ASDF files it is necessary to make use of the standalone **asdf**
package. Users should never need to refer to tag implementations directly.
Their presence should be entirely transparent when processing ASDF files.
If both **asdf** and **astropy** are installed, no futher configuration is
required in order to process ASDF files. The **asdf** package has been designed
to automatically detect the presence of the tags defined by **astropy**.
Documentation on the ASDF Standard can be found `here
<https://asdf-standard.readthedocs.io>`__. Documentation on the ASDF Python
module can be found `here <https://asdf.readthedocs.io>`__.
"""
Update ASDF-related docs to reflect presence of schemas [docs only]
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-
"""
The **asdf** subpackage contains code that is used to serialize astropy types
so that they can be represented and stored using the Advanced Scientific Data
Format (ASDF). This subpackage defines classes, referred to as **tags**, that
implement the logic for serialization and deserialization.
ASDF makes use of abstract data type definitons called **schemas**. The tag
classes provided here are specific implementations of particular schemas. Some
of the tags in Astropy (e.g., those related to transforms) implement schemas
that are defined by the ASDF Standard. In other cases, both the tags and
schemas are defined within Astropy (e.g., those related to many of the
coordinate frames).
Astropy currently has no ability to read or write ASDF files itself. In order
to process ASDF files it is necessary to make use of the standalone **asdf**
package. Users should never need to refer to tag implementations directly.
Their presence should be entirely transparent when processing ASDF files.
If both **asdf** and **astropy** are installed, no futher configuration is
required in order to process ASDF files. The **asdf** package has been designed
to automatically detect the presence of the tags defined by **astropy**.
Documentation on the ASDF Standard can be found `here
<https://asdf-standard.readthedocs.io>`__. Documentation on the ASDF Python
module can be found `here <https://asdf.readthedocs.io>`__.
"""
|
<commit_before># Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-
"""
The **asdf** subpackage contains code that is used to serialize astropy types
so that they can be represented and stored using the Advanced Scientific Data
Format (ASDF). This subpackage defines classes, referred to as **tags**, that
implement the logic for serialization and deserialization.
ASDF makes use of abstract data type definitons called **schemas**. The tags
provided here are simply specific implementations of particular schemas.
Currently astropy only implements tags for a subset of schemas that are defined
externally by the ASDF Standard. However, it is likely that astropy will
eventually define schemas of its own.
Astropy currently has no ability to read or write ASDF files itself. In order
to process ASDF files it is necessary to make use of the standalone **asdf**
package. Users should never need to refer to tag implementations directly.
Their presence should be entirely transparent when processing ASDF files.
If both **asdf** and **astropy** are installed, no futher configuration is
required in order to process ASDF files. The **asdf** package has been designed
to automatically detect the presence of the tags defined by **astropy**.
Documentation on the ASDF Standard can be found `here
<https://asdf-standard.readthedocs.io>`__. Documentation on the ASDF Python
module can be found `here <https://asdf.readthedocs.io>`__.
"""
<commit_msg>Update ASDF-related docs to reflect presence of schemas [docs only]<commit_after>
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-
"""
The **asdf** subpackage contains code that is used to serialize astropy types
so that they can be represented and stored using the Advanced Scientific Data
Format (ASDF). This subpackage defines classes, referred to as **tags**, that
implement the logic for serialization and deserialization.
ASDF makes use of abstract data type definitons called **schemas**. The tag
classes provided here are specific implementations of particular schemas. Some
of the tags in Astropy (e.g., those related to transforms) implement schemas
that are defined by the ASDF Standard. In other cases, both the tags and
schemas are defined within Astropy (e.g., those related to many of the
coordinate frames).
Astropy currently has no ability to read or write ASDF files itself. In order
to process ASDF files it is necessary to make use of the standalone **asdf**
package. Users should never need to refer to tag implementations directly.
Their presence should be entirely transparent when processing ASDF files.
If both **asdf** and **astropy** are installed, no futher configuration is
required in order to process ASDF files. The **asdf** package has been designed
to automatically detect the presence of the tags defined by **astropy**.
Documentation on the ASDF Standard can be found `here
<https://asdf-standard.readthedocs.io>`__. Documentation on the ASDF Python
module can be found `here <https://asdf.readthedocs.io>`__.
"""
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-
"""
The **asdf** subpackage contains code that is used to serialize astropy types
so that they can be represented and stored using the Advanced Scientific Data
Format (ASDF). This subpackage defines classes, referred to as **tags**, that
implement the logic for serialization and deserialization.
ASDF makes use of abstract data type definitons called **schemas**. The tags
provided here are simply specific implementations of particular schemas.
Currently astropy only implements tags for a subset of schemas that are defined
externally by the ASDF Standard. However, it is likely that astropy will
eventually define schemas of its own.
Astropy currently has no ability to read or write ASDF files itself. In order
to process ASDF files it is necessary to make use of the standalone **asdf**
package. Users should never need to refer to tag implementations directly.
Their presence should be entirely transparent when processing ASDF files.
If both **asdf** and **astropy** are installed, no futher configuration is
required in order to process ASDF files. The **asdf** package has been designed
to automatically detect the presence of the tags defined by **astropy**.
Documentation on the ASDF Standard can be found `here
<https://asdf-standard.readthedocs.io>`__. Documentation on the ASDF Python
module can be found `here <https://asdf.readthedocs.io>`__.
"""
Update ASDF-related docs to reflect presence of schemas [docs only]# Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-
"""
The **asdf** subpackage contains code that is used to serialize astropy types
so that they can be represented and stored using the Advanced Scientific Data
Format (ASDF). This subpackage defines classes, referred to as **tags**, that
implement the logic for serialization and deserialization.
ASDF makes use of abstract data type definitons called **schemas**. The tag
classes provided here are specific implementations of particular schemas. Some
of the tags in Astropy (e.g., those related to transforms) implement schemas
that are defined by the ASDF Standard. In other cases, both the tags and
schemas are defined within Astropy (e.g., those related to many of the
coordinate frames).
Astropy currently has no ability to read or write ASDF files itself. In order
to process ASDF files it is necessary to make use of the standalone **asdf**
package. Users should never need to refer to tag implementations directly.
Their presence should be entirely transparent when processing ASDF files.
If both **asdf** and **astropy** are installed, no futher configuration is
required in order to process ASDF files. The **asdf** package has been designed
to automatically detect the presence of the tags defined by **astropy**.
Documentation on the ASDF Standard can be found `here
<https://asdf-standard.readthedocs.io>`__. Documentation on the ASDF Python
module can be found `here <https://asdf.readthedocs.io>`__.
"""
|
<commit_before># Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-
"""
The **asdf** subpackage contains code that is used to serialize astropy types
so that they can be represented and stored using the Advanced Scientific Data
Format (ASDF). This subpackage defines classes, referred to as **tags**, that
implement the logic for serialization and deserialization.
ASDF makes use of abstract data type definitons called **schemas**. The tags
provided here are simply specific implementations of particular schemas.
Currently astropy only implements tags for a subset of schemas that are defined
externally by the ASDF Standard. However, it is likely that astropy will
eventually define schemas of its own.
Astropy currently has no ability to read or write ASDF files itself. In order
to process ASDF files it is necessary to make use of the standalone **asdf**
package. Users should never need to refer to tag implementations directly.
Their presence should be entirely transparent when processing ASDF files.
If both **asdf** and **astropy** are installed, no futher configuration is
required in order to process ASDF files. The **asdf** package has been designed
to automatically detect the presence of the tags defined by **astropy**.
Documentation on the ASDF Standard can be found `here
<https://asdf-standard.readthedocs.io>`__. Documentation on the ASDF Python
module can be found `here <https://asdf.readthedocs.io>`__.
"""
<commit_msg>Update ASDF-related docs to reflect presence of schemas [docs only]<commit_after># Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-
"""
The **asdf** subpackage contains code that is used to serialize astropy types
so that they can be represented and stored using the Advanced Scientific Data
Format (ASDF). This subpackage defines classes, referred to as **tags**, that
implement the logic for serialization and deserialization.
ASDF makes use of abstract data type definitons called **schemas**. The tag
classes provided here are specific implementations of particular schemas. Some
of the tags in Astropy (e.g., those related to transforms) implement schemas
that are defined by the ASDF Standard. In other cases, both the tags and
schemas are defined within Astropy (e.g., those related to many of the
coordinate frames).
Astropy currently has no ability to read or write ASDF files itself. In order
to process ASDF files it is necessary to make use of the standalone **asdf**
package. Users should never need to refer to tag implementations directly.
Their presence should be entirely transparent when processing ASDF files.
If both **asdf** and **astropy** are installed, no futher configuration is
required in order to process ASDF files. The **asdf** package has been designed
to automatically detect the presence of the tags defined by **astropy**.
Documentation on the ASDF Standard can be found `here
<https://asdf-standard.readthedocs.io>`__. Documentation on the ASDF Python
module can be found `here <https://asdf.readthedocs.io>`__.
"""
|
4b63093abbc388bc26151422991ce39553cf137f
|
neuroimaging/utils/tests/test_odict.py
|
neuroimaging/utils/tests/test_odict.py
|
"""Test file for the ordered dictionary module, odict.py."""
from neuroimaging.externals.scipy.testing import *
from neuroimaging.utils.odict import odict
class TestOdict(TestCase):
def setUp(self):
print 'setUp'
self.thedict = odict((('one', 1.0), ('two', 2.0), ('three', 3.0)))
def test_copy(self):
"""Test odict.copy method."""
print self.thedict
cpydict = self.thedict.copy()
assert cpydict == self.thedict
# test that it's a copy and not a reference
assert cpydict is not self.thedict
if __name__ == "__main__":
nose.run(argv=['', __file__])
|
"""Test file for the ordered dictionary module, odict.py."""
from neuroimaging.externals.scipy.testing import *
from neuroimaging.utils.odict import odict
class TestOdict(TestCase):
def setUp(self):
print 'setUp'
self.thedict = odict((('one', 1.0), ('two', 2.0), ('three', 3.0)))
def test_copy(self):
"""Test odict.copy method."""
print self.thedict
cpydict = self.thedict.copy()
assert cpydict == self.thedict
# test that it's a copy and not a reference
assert cpydict is not self.thedict
if __name__ == "__main__":
nose.runmodule()
|
Fix nose call so tests run in __main__.
|
BUG: Fix nose call so tests run in __main__.
|
Python
|
bsd-3-clause
|
alexis-roche/nipy,alexis-roche/nipy,arokem/nipy,nipy/nipy-labs,nipy/nireg,alexis-roche/nireg,alexis-roche/register,nipy/nipy-labs,arokem/nipy,alexis-roche/register,nipy/nireg,bthirion/nipy,bthirion/nipy,alexis-roche/nireg,arokem/nipy,bthirion/nipy,alexis-roche/register,arokem/nipy,alexis-roche/niseg,alexis-roche/nipy,alexis-roche/nipy,alexis-roche/niseg,bthirion/nipy
|
"""Test file for the ordered dictionary module, odict.py."""
from neuroimaging.externals.scipy.testing import *
from neuroimaging.utils.odict import odict
class TestOdict(TestCase):
def setUp(self):
print 'setUp'
self.thedict = odict((('one', 1.0), ('two', 2.0), ('three', 3.0)))
def test_copy(self):
"""Test odict.copy method."""
print self.thedict
cpydict = self.thedict.copy()
assert cpydict == self.thedict
# test that it's a copy and not a reference
assert cpydict is not self.thedict
if __name__ == "__main__":
nose.run(argv=['', __file__])
BUG: Fix nose call so tests run in __main__.
|
"""Test file for the ordered dictionary module, odict.py."""
from neuroimaging.externals.scipy.testing import *
from neuroimaging.utils.odict import odict
class TestOdict(TestCase):
def setUp(self):
print 'setUp'
self.thedict = odict((('one', 1.0), ('two', 2.0), ('three', 3.0)))
def test_copy(self):
"""Test odict.copy method."""
print self.thedict
cpydict = self.thedict.copy()
assert cpydict == self.thedict
# test that it's a copy and not a reference
assert cpydict is not self.thedict
if __name__ == "__main__":
nose.runmodule()
|
<commit_before>"""Test file for the ordered dictionary module, odict.py."""
from neuroimaging.externals.scipy.testing import *
from neuroimaging.utils.odict import odict
class TestOdict(TestCase):
def setUp(self):
print 'setUp'
self.thedict = odict((('one', 1.0), ('two', 2.0), ('three', 3.0)))
def test_copy(self):
"""Test odict.copy method."""
print self.thedict
cpydict = self.thedict.copy()
assert cpydict == self.thedict
# test that it's a copy and not a reference
assert cpydict is not self.thedict
if __name__ == "__main__":
nose.run(argv=['', __file__])
<commit_msg>BUG: Fix nose call so tests run in __main__.<commit_after>
|
"""Test file for the ordered dictionary module, odict.py."""
from neuroimaging.externals.scipy.testing import *
from neuroimaging.utils.odict import odict
class TestOdict(TestCase):
def setUp(self):
print 'setUp'
self.thedict = odict((('one', 1.0), ('two', 2.0), ('three', 3.0)))
def test_copy(self):
"""Test odict.copy method."""
print self.thedict
cpydict = self.thedict.copy()
assert cpydict == self.thedict
# test that it's a copy and not a reference
assert cpydict is not self.thedict
if __name__ == "__main__":
nose.runmodule()
|
"""Test file for the ordered dictionary module, odict.py."""
from neuroimaging.externals.scipy.testing import *
from neuroimaging.utils.odict import odict
class TestOdict(TestCase):
def setUp(self):
print 'setUp'
self.thedict = odict((('one', 1.0), ('two', 2.0), ('three', 3.0)))
def test_copy(self):
"""Test odict.copy method."""
print self.thedict
cpydict = self.thedict.copy()
assert cpydict == self.thedict
# test that it's a copy and not a reference
assert cpydict is not self.thedict
if __name__ == "__main__":
nose.run(argv=['', __file__])
BUG: Fix nose call so tests run in __main__."""Test file for the ordered dictionary module, odict.py."""
from neuroimaging.externals.scipy.testing import *
from neuroimaging.utils.odict import odict
class TestOdict(TestCase):
def setUp(self):
print 'setUp'
self.thedict = odict((('one', 1.0), ('two', 2.0), ('three', 3.0)))
def test_copy(self):
"""Test odict.copy method."""
print self.thedict
cpydict = self.thedict.copy()
assert cpydict == self.thedict
# test that it's a copy and not a reference
assert cpydict is not self.thedict
if __name__ == "__main__":
nose.runmodule()
|
<commit_before>"""Test file for the ordered dictionary module, odict.py."""
from neuroimaging.externals.scipy.testing import *
from neuroimaging.utils.odict import odict
class TestOdict(TestCase):
def setUp(self):
print 'setUp'
self.thedict = odict((('one', 1.0), ('two', 2.0), ('three', 3.0)))
def test_copy(self):
"""Test odict.copy method."""
print self.thedict
cpydict = self.thedict.copy()
assert cpydict == self.thedict
# test that it's a copy and not a reference
assert cpydict is not self.thedict
if __name__ == "__main__":
nose.run(argv=['', __file__])
<commit_msg>BUG: Fix nose call so tests run in __main__.<commit_after>"""Test file for the ordered dictionary module, odict.py."""
from neuroimaging.externals.scipy.testing import *
from neuroimaging.utils.odict import odict
class TestOdict(TestCase):
def setUp(self):
print 'setUp'
self.thedict = odict((('one', 1.0), ('two', 2.0), ('three', 3.0)))
def test_copy(self):
"""Test odict.copy method."""
print self.thedict
cpydict = self.thedict.copy()
assert cpydict == self.thedict
# test that it's a copy and not a reference
assert cpydict is not self.thedict
if __name__ == "__main__":
nose.runmodule()
|
b54ebe45b10e4bc645fae2004c333d78602a62e0
|
vext/__init__.py
|
vext/__init__.py
|
import logging
from os import environ
from os.path import join
from distutils.sysconfig import get_python_lib
VEXT_DEBUG_LOG = "VEXT_DEBUG_LOG"
vext_pth = join(get_python_lib(), 'vext_importer.pth')
logger = logging.getLogger("vext")
if VEXT_DEBUG_LOG in environ:
if environ.get(VEXT_DEBUG_LOG, "0") == "1":
logger.setLevel(logging.DEBUG)
else:
logger.addHandler(logging.NullHandler())
def install_importer():
logger.debug("install_importer has been moved to gatekeeper module")
from vext import gatekeeper
gatekeeper.install_importer()
|
import logging
import sys
from os import environ
from os.path import join
from distutils.sysconfig import get_python_lib
VEXT_DEBUG_LOG = "VEXT_DEBUG_LOG"
vext_pth = join(get_python_lib(), 'vext_importer.pth')
logger = logging.getLogger("vext")
if VEXT_DEBUG_LOG in environ and environ.get(VEXT_DEBUG_LOG, "0") == "1":
logger.setLevel(logging.DEBUG)
logger.addHandler(logging.StreamHandler())
else:
logger.addHandler(logging.NullHandler())
def install_importer():
# This is here so that older versions of vext don't break.
logger.warning("install_importer has been moved to gatekeeper module")
from vext import gatekeeper
gatekeeper.install_importer()
|
Change how logging works again :)
|
Change how logging works again :)
|
Python
|
mit
|
stuaxo/vext
|
import logging
from os import environ
from os.path import join
from distutils.sysconfig import get_python_lib
VEXT_DEBUG_LOG = "VEXT_DEBUG_LOG"
vext_pth = join(get_python_lib(), 'vext_importer.pth')
logger = logging.getLogger("vext")
if VEXT_DEBUG_LOG in environ:
if environ.get(VEXT_DEBUG_LOG, "0") == "1":
logger.setLevel(logging.DEBUG)
else:
logger.addHandler(logging.NullHandler())
def install_importer():
logger.debug("install_importer has been moved to gatekeeper module")
from vext import gatekeeper
gatekeeper.install_importer()
Change how logging works again :)
|
import logging
import sys
from os import environ
from os.path import join
from distutils.sysconfig import get_python_lib
VEXT_DEBUG_LOG = "VEXT_DEBUG_LOG"
vext_pth = join(get_python_lib(), 'vext_importer.pth')
logger = logging.getLogger("vext")
if VEXT_DEBUG_LOG in environ and environ.get(VEXT_DEBUG_LOG, "0") == "1":
logger.setLevel(logging.DEBUG)
logger.addHandler(logging.StreamHandler())
else:
logger.addHandler(logging.NullHandler())
def install_importer():
# This is here so that older versions of vext don't break.
logger.warning("install_importer has been moved to gatekeeper module")
from vext import gatekeeper
gatekeeper.install_importer()
|
<commit_before>import logging
from os import environ
from os.path import join
from distutils.sysconfig import get_python_lib
VEXT_DEBUG_LOG = "VEXT_DEBUG_LOG"
vext_pth = join(get_python_lib(), 'vext_importer.pth')
logger = logging.getLogger("vext")
if VEXT_DEBUG_LOG in environ:
if environ.get(VEXT_DEBUG_LOG, "0") == "1":
logger.setLevel(logging.DEBUG)
else:
logger.addHandler(logging.NullHandler())
def install_importer():
logger.debug("install_importer has been moved to gatekeeper module")
from vext import gatekeeper
gatekeeper.install_importer()
<commit_msg>Change how logging works again :)<commit_after>
|
import logging
import sys
from os import environ
from os.path import join
from distutils.sysconfig import get_python_lib
VEXT_DEBUG_LOG = "VEXT_DEBUG_LOG"
vext_pth = join(get_python_lib(), 'vext_importer.pth')
logger = logging.getLogger("vext")
if VEXT_DEBUG_LOG in environ and environ.get(VEXT_DEBUG_LOG, "0") == "1":
logger.setLevel(logging.DEBUG)
logger.addHandler(logging.StreamHandler())
else:
logger.addHandler(logging.NullHandler())
def install_importer():
# This is here so that older versions of vext don't break.
logger.warning("install_importer has been moved to gatekeeper module")
from vext import gatekeeper
gatekeeper.install_importer()
|
import logging
from os import environ
from os.path import join
from distutils.sysconfig import get_python_lib
VEXT_DEBUG_LOG = "VEXT_DEBUG_LOG"
vext_pth = join(get_python_lib(), 'vext_importer.pth')
logger = logging.getLogger("vext")
if VEXT_DEBUG_LOG in environ:
if environ.get(VEXT_DEBUG_LOG, "0") == "1":
logger.setLevel(logging.DEBUG)
else:
logger.addHandler(logging.NullHandler())
def install_importer():
logger.debug("install_importer has been moved to gatekeeper module")
from vext import gatekeeper
gatekeeper.install_importer()
Change how logging works again :)import logging
import sys
from os import environ
from os.path import join
from distutils.sysconfig import get_python_lib
VEXT_DEBUG_LOG = "VEXT_DEBUG_LOG"
vext_pth = join(get_python_lib(), 'vext_importer.pth')
logger = logging.getLogger("vext")
if VEXT_DEBUG_LOG in environ and environ.get(VEXT_DEBUG_LOG, "0") == "1":
logger.setLevel(logging.DEBUG)
logger.addHandler(logging.StreamHandler())
else:
logger.addHandler(logging.NullHandler())
def install_importer():
# This is here so that older versions of vext don't break.
logger.warning("install_importer has been moved to gatekeeper module")
from vext import gatekeeper
gatekeeper.install_importer()
|
<commit_before>import logging
from os import environ
from os.path import join
from distutils.sysconfig import get_python_lib
VEXT_DEBUG_LOG = "VEXT_DEBUG_LOG"
vext_pth = join(get_python_lib(), 'vext_importer.pth')
logger = logging.getLogger("vext")
if VEXT_DEBUG_LOG in environ:
if environ.get(VEXT_DEBUG_LOG, "0") == "1":
logger.setLevel(logging.DEBUG)
else:
logger.addHandler(logging.NullHandler())
def install_importer():
logger.debug("install_importer has been moved to gatekeeper module")
from vext import gatekeeper
gatekeeper.install_importer()
<commit_msg>Change how logging works again :)<commit_after>import logging
import sys
from os import environ
from os.path import join
from distutils.sysconfig import get_python_lib
VEXT_DEBUG_LOG = "VEXT_DEBUG_LOG"
vext_pth = join(get_python_lib(), 'vext_importer.pth')
logger = logging.getLogger("vext")
if VEXT_DEBUG_LOG in environ and environ.get(VEXT_DEBUG_LOG, "0") == "1":
logger.setLevel(logging.DEBUG)
logger.addHandler(logging.StreamHandler())
else:
logger.addHandler(logging.NullHandler())
def install_importer():
# This is here so that older versions of vext don't break.
logger.warning("install_importer has been moved to gatekeeper module")
from vext import gatekeeper
gatekeeper.install_importer()
|
8b92bc6c4a782dbb83aadb1bbfc5951dc53f53e1
|
netbox/dcim/migrations/0145_site_remove_deprecated_fields.py
|
netbox/dcim/migrations/0145_site_remove_deprecated_fields.py
|
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('dcim', '0144_fix_cable_abs_length'),
]
operations = [
migrations.RemoveField(
model_name='site',
name='asn',
),
migrations.RemoveField(
model_name='site',
name='contact_email',
),
migrations.RemoveField(
model_name='site',
name='contact_name',
),
migrations.RemoveField(
model_name='site',
name='contact_phone',
),
]
|
from django.db import migrations
from django.db.utils import DataError
def check_legacy_data(apps, schema_editor):
"""
Abort the migration if any legacy site fields still contain data.
"""
Site = apps.get_model('dcim', 'Site')
if site_count := Site.objects.exclude(asn__isnull=True).count():
raise DataError(
f"Unable to proceed with deleting asn field from Site model: Found {site_count} sites with "
f"legacy ASN data. Please ensure all legacy site ASN data has been migrated to ASN objects "
f"before proceeding."
)
if site_count := Site.objects.exclude(contact_name='', contact_phone='', contact_email='').count():
raise DataError(
f"Unable to proceed with deleting contact fields from Site model: Found {site_count} sites "
f"with legacy contact data. Please ensure all legacy site contact data has been migrated to "
f"contact objects before proceeding."
)
class Migration(migrations.Migration):
dependencies = [
('dcim', '0144_fix_cable_abs_length'),
]
operations = [
migrations.RunPython(
code=check_legacy_data,
reverse_code=migrations.RunPython.noop
),
migrations.RemoveField(
model_name='site',
name='asn',
),
migrations.RemoveField(
model_name='site',
name='contact_email',
),
migrations.RemoveField(
model_name='site',
name='contact_name',
),
migrations.RemoveField(
model_name='site',
name='contact_phone',
),
]
|
Add migration safeguard to prevent accidental destruction of data
|
Add migration safeguard to prevent accidental destruction of data
|
Python
|
apache-2.0
|
digitalocean/netbox,digitalocean/netbox,digitalocean/netbox,digitalocean/netbox
|
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('dcim', '0144_fix_cable_abs_length'),
]
operations = [
migrations.RemoveField(
model_name='site',
name='asn',
),
migrations.RemoveField(
model_name='site',
name='contact_email',
),
migrations.RemoveField(
model_name='site',
name='contact_name',
),
migrations.RemoveField(
model_name='site',
name='contact_phone',
),
]
Add migration safeguard to prevent accidental destruction of data
|
from django.db import migrations
from django.db.utils import DataError
def check_legacy_data(apps, schema_editor):
"""
Abort the migration if any legacy site fields still contain data.
"""
Site = apps.get_model('dcim', 'Site')
if site_count := Site.objects.exclude(asn__isnull=True).count():
raise DataError(
f"Unable to proceed with deleting asn field from Site model: Found {site_count} sites with "
f"legacy ASN data. Please ensure all legacy site ASN data has been migrated to ASN objects "
f"before proceeding."
)
if site_count := Site.objects.exclude(contact_name='', contact_phone='', contact_email='').count():
raise DataError(
f"Unable to proceed with deleting contact fields from Site model: Found {site_count} sites "
f"with legacy contact data. Please ensure all legacy site contact data has been migrated to "
f"contact objects before proceeding."
)
class Migration(migrations.Migration):
dependencies = [
('dcim', '0144_fix_cable_abs_length'),
]
operations = [
migrations.RunPython(
code=check_legacy_data,
reverse_code=migrations.RunPython.noop
),
migrations.RemoveField(
model_name='site',
name='asn',
),
migrations.RemoveField(
model_name='site',
name='contact_email',
),
migrations.RemoveField(
model_name='site',
name='contact_name',
),
migrations.RemoveField(
model_name='site',
name='contact_phone',
),
]
|
<commit_before>from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('dcim', '0144_fix_cable_abs_length'),
]
operations = [
migrations.RemoveField(
model_name='site',
name='asn',
),
migrations.RemoveField(
model_name='site',
name='contact_email',
),
migrations.RemoveField(
model_name='site',
name='contact_name',
),
migrations.RemoveField(
model_name='site',
name='contact_phone',
),
]
<commit_msg>Add migration safeguard to prevent accidental destruction of data<commit_after>
|
from django.db import migrations
from django.db.utils import DataError
def check_legacy_data(apps, schema_editor):
"""
Abort the migration if any legacy site fields still contain data.
"""
Site = apps.get_model('dcim', 'Site')
if site_count := Site.objects.exclude(asn__isnull=True).count():
raise DataError(
f"Unable to proceed with deleting asn field from Site model: Found {site_count} sites with "
f"legacy ASN data. Please ensure all legacy site ASN data has been migrated to ASN objects "
f"before proceeding."
)
if site_count := Site.objects.exclude(contact_name='', contact_phone='', contact_email='').count():
raise DataError(
f"Unable to proceed with deleting contact fields from Site model: Found {site_count} sites "
f"with legacy contact data. Please ensure all legacy site contact data has been migrated to "
f"contact objects before proceeding."
)
class Migration(migrations.Migration):
dependencies = [
('dcim', '0144_fix_cable_abs_length'),
]
operations = [
migrations.RunPython(
code=check_legacy_data,
reverse_code=migrations.RunPython.noop
),
migrations.RemoveField(
model_name='site',
name='asn',
),
migrations.RemoveField(
model_name='site',
name='contact_email',
),
migrations.RemoveField(
model_name='site',
name='contact_name',
),
migrations.RemoveField(
model_name='site',
name='contact_phone',
),
]
|
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('dcim', '0144_fix_cable_abs_length'),
]
operations = [
migrations.RemoveField(
model_name='site',
name='asn',
),
migrations.RemoveField(
model_name='site',
name='contact_email',
),
migrations.RemoveField(
model_name='site',
name='contact_name',
),
migrations.RemoveField(
model_name='site',
name='contact_phone',
),
]
Add migration safeguard to prevent accidental destruction of datafrom django.db import migrations
from django.db.utils import DataError
def check_legacy_data(apps, schema_editor):
"""
Abort the migration if any legacy site fields still contain data.
"""
Site = apps.get_model('dcim', 'Site')
if site_count := Site.objects.exclude(asn__isnull=True).count():
raise DataError(
f"Unable to proceed with deleting asn field from Site model: Found {site_count} sites with "
f"legacy ASN data. Please ensure all legacy site ASN data has been migrated to ASN objects "
f"before proceeding."
)
if site_count := Site.objects.exclude(contact_name='', contact_phone='', contact_email='').count():
raise DataError(
f"Unable to proceed with deleting contact fields from Site model: Found {site_count} sites "
f"with legacy contact data. Please ensure all legacy site contact data has been migrated to "
f"contact objects before proceeding."
)
class Migration(migrations.Migration):
dependencies = [
('dcim', '0144_fix_cable_abs_length'),
]
operations = [
migrations.RunPython(
code=check_legacy_data,
reverse_code=migrations.RunPython.noop
),
migrations.RemoveField(
model_name='site',
name='asn',
),
migrations.RemoveField(
model_name='site',
name='contact_email',
),
migrations.RemoveField(
model_name='site',
name='contact_name',
),
migrations.RemoveField(
model_name='site',
name='contact_phone',
),
]
|
<commit_before>from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('dcim', '0144_fix_cable_abs_length'),
]
operations = [
migrations.RemoveField(
model_name='site',
name='asn',
),
migrations.RemoveField(
model_name='site',
name='contact_email',
),
migrations.RemoveField(
model_name='site',
name='contact_name',
),
migrations.RemoveField(
model_name='site',
name='contact_phone',
),
]
<commit_msg>Add migration safeguard to prevent accidental destruction of data<commit_after>from django.db import migrations
from django.db.utils import DataError
def check_legacy_data(apps, schema_editor):
"""
Abort the migration if any legacy site fields still contain data.
"""
Site = apps.get_model('dcim', 'Site')
if site_count := Site.objects.exclude(asn__isnull=True).count():
raise DataError(
f"Unable to proceed with deleting asn field from Site model: Found {site_count} sites with "
f"legacy ASN data. Please ensure all legacy site ASN data has been migrated to ASN objects "
f"before proceeding."
)
if site_count := Site.objects.exclude(contact_name='', contact_phone='', contact_email='').count():
raise DataError(
f"Unable to proceed with deleting contact fields from Site model: Found {site_count} sites "
f"with legacy contact data. Please ensure all legacy site contact data has been migrated to "
f"contact objects before proceeding."
)
class Migration(migrations.Migration):
dependencies = [
('dcim', '0144_fix_cable_abs_length'),
]
operations = [
migrations.RunPython(
code=check_legacy_data,
reverse_code=migrations.RunPython.noop
),
migrations.RemoveField(
model_name='site',
name='asn',
),
migrations.RemoveField(
model_name='site',
name='contact_email',
),
migrations.RemoveField(
model_name='site',
name='contact_name',
),
migrations.RemoveField(
model_name='site',
name='contact_phone',
),
]
|
90eb1118c69a1b9e9785145c59a98d7c48613650
|
nlppln/commands/ls_chunk.py
|
nlppln/commands/ls_chunk.py
|
#!/usr/bin/env python
import click
import os
import json
from nlppln.utils import cwl_file
@click.command()
@click.argument('in_dir', type=click.Path(exists=True))
@click.argument('chunks', type=click.File(encoding='utf-8'))
@click.option('--name', '-n')
def ls_chunk(in_dir, chunks, name):
div = json.load(chunks)
files = div.get(name, [])
files_out = [cwl_file(os.path.abspath(os.path.join(in_dir, f)))
for f in files]
stdout_text = click.get_text_stream('stdout')
stdout_text.write(json.dumps({'out_files': files_out}))
if __name__ == '__main__':
ls_chunk()
|
#!/usr/bin/env python
import click
import os
import json
from nlppln.utils import cwl_file
@click.command()
@click.argument('in_dir', type=click.Path(exists=True))
@click.argument('chunks', type=click.File(encoding='utf-8'))
@click.argument('name')
def ls_chunk(in_dir, chunks, name):
div = json.load(chunks)
files = div.get(name, [])
files_out = [cwl_file(os.path.abspath(os.path.join(in_dir, f)))
for f in files]
stdout_text = click.get_text_stream('stdout')
stdout_text.write(json.dumps({'out_files': files_out}))
if __name__ == '__main__':
ls_chunk()
|
Make name an argument instead of an option
|
Make name an argument instead of an option
It is required to specify a chunk name.
|
Python
|
apache-2.0
|
WhatWorksWhenForWhom/nlppln,WhatWorksWhenForWhom/nlppln,WhatWorksWhenForWhom/nlppln
|
#!/usr/bin/env python
import click
import os
import json
from nlppln.utils import cwl_file
@click.command()
@click.argument('in_dir', type=click.Path(exists=True))
@click.argument('chunks', type=click.File(encoding='utf-8'))
@click.option('--name', '-n')
def ls_chunk(in_dir, chunks, name):
div = json.load(chunks)
files = div.get(name, [])
files_out = [cwl_file(os.path.abspath(os.path.join(in_dir, f)))
for f in files]
stdout_text = click.get_text_stream('stdout')
stdout_text.write(json.dumps({'out_files': files_out}))
if __name__ == '__main__':
ls_chunk()
Make name an argument instead of an option
It is required to specify a chunk name.
|
#!/usr/bin/env python
import click
import os
import json
from nlppln.utils import cwl_file
@click.command()
@click.argument('in_dir', type=click.Path(exists=True))
@click.argument('chunks', type=click.File(encoding='utf-8'))
@click.argument('name')
def ls_chunk(in_dir, chunks, name):
div = json.load(chunks)
files = div.get(name, [])
files_out = [cwl_file(os.path.abspath(os.path.join(in_dir, f)))
for f in files]
stdout_text = click.get_text_stream('stdout')
stdout_text.write(json.dumps({'out_files': files_out}))
if __name__ == '__main__':
ls_chunk()
|
<commit_before>#!/usr/bin/env python
import click
import os
import json
from nlppln.utils import cwl_file
@click.command()
@click.argument('in_dir', type=click.Path(exists=True))
@click.argument('chunks', type=click.File(encoding='utf-8'))
@click.option('--name', '-n')
def ls_chunk(in_dir, chunks, name):
div = json.load(chunks)
files = div.get(name, [])
files_out = [cwl_file(os.path.abspath(os.path.join(in_dir, f)))
for f in files]
stdout_text = click.get_text_stream('stdout')
stdout_text.write(json.dumps({'out_files': files_out}))
if __name__ == '__main__':
ls_chunk()
<commit_msg>Make name an argument instead of an option
It is required to specify a chunk name.<commit_after>
|
#!/usr/bin/env python
import click
import os
import json
from nlppln.utils import cwl_file
@click.command()
@click.argument('in_dir', type=click.Path(exists=True))
@click.argument('chunks', type=click.File(encoding='utf-8'))
@click.argument('name')
def ls_chunk(in_dir, chunks, name):
div = json.load(chunks)
files = div.get(name, [])
files_out = [cwl_file(os.path.abspath(os.path.join(in_dir, f)))
for f in files]
stdout_text = click.get_text_stream('stdout')
stdout_text.write(json.dumps({'out_files': files_out}))
if __name__ == '__main__':
ls_chunk()
|
#!/usr/bin/env python
import click
import os
import json
from nlppln.utils import cwl_file
@click.command()
@click.argument('in_dir', type=click.Path(exists=True))
@click.argument('chunks', type=click.File(encoding='utf-8'))
@click.option('--name', '-n')
def ls_chunk(in_dir, chunks, name):
div = json.load(chunks)
files = div.get(name, [])
files_out = [cwl_file(os.path.abspath(os.path.join(in_dir, f)))
for f in files]
stdout_text = click.get_text_stream('stdout')
stdout_text.write(json.dumps({'out_files': files_out}))
if __name__ == '__main__':
ls_chunk()
Make name an argument instead of an option
It is required to specify a chunk name.#!/usr/bin/env python
import click
import os
import json
from nlppln.utils import cwl_file
@click.command()
@click.argument('in_dir', type=click.Path(exists=True))
@click.argument('chunks', type=click.File(encoding='utf-8'))
@click.argument('name')
def ls_chunk(in_dir, chunks, name):
div = json.load(chunks)
files = div.get(name, [])
files_out = [cwl_file(os.path.abspath(os.path.join(in_dir, f)))
for f in files]
stdout_text = click.get_text_stream('stdout')
stdout_text.write(json.dumps({'out_files': files_out}))
if __name__ == '__main__':
ls_chunk()
|
<commit_before>#!/usr/bin/env python
import click
import os
import json
from nlppln.utils import cwl_file
@click.command()
@click.argument('in_dir', type=click.Path(exists=True))
@click.argument('chunks', type=click.File(encoding='utf-8'))
@click.option('--name', '-n')
def ls_chunk(in_dir, chunks, name):
div = json.load(chunks)
files = div.get(name, [])
files_out = [cwl_file(os.path.abspath(os.path.join(in_dir, f)))
for f in files]
stdout_text = click.get_text_stream('stdout')
stdout_text.write(json.dumps({'out_files': files_out}))
if __name__ == '__main__':
ls_chunk()
<commit_msg>Make name an argument instead of an option
It is required to specify a chunk name.<commit_after>#!/usr/bin/env python
import click
import os
import json
from nlppln.utils import cwl_file
@click.command()
@click.argument('in_dir', type=click.Path(exists=True))
@click.argument('chunks', type=click.File(encoding='utf-8'))
@click.argument('name')
def ls_chunk(in_dir, chunks, name):
div = json.load(chunks)
files = div.get(name, [])
files_out = [cwl_file(os.path.abspath(os.path.join(in_dir, f)))
for f in files]
stdout_text = click.get_text_stream('stdout')
stdout_text.write(json.dumps({'out_files': files_out}))
if __name__ == '__main__':
ls_chunk()
|
70f9d5dbb07ce7e1dfc1d167165f90b27b5dfb8f
|
oscar_sagepay/dashboard/app.py
|
oscar_sagepay/dashboard/app.py
|
from django.conf.urls import patterns, url
from django.contrib.admin.views.decorators import staff_member_required
from oscar.core.application import Application
from . import views
try:
from oscar.apps.dashboard.nav import register, Node
except ImportError:
pass
else:
# Old way of registering Dashboard nodes
node = Node('Datacash', 'sagepay-transaction-list')
register(node, 100)
class SagepayDashboard(Application):
name = None
list_view = views.Transactions
detail_view = views.Transaction
def get_urls(self):
urlpatterns = patterns('',
url(r'^transactions/$', self.list_view.as_view(),
name='sagepay-transaction-list'),
url(r'^transactions/(?P<pk>\d+)/$', self.detail_view.as_view(),
name='sagepay-transaction-detail'),
)
return self.post_process_urls(urlpatterns)
def get_url_decorator(self, url_name):
return staff_member_required
application = SagepayDashboard()
|
from django.conf.urls import patterns, url
from django.contrib.admin.views.decorators import staff_member_required
from oscar.core.application import Application
from . import views
try:
from oscar.apps.dashboard.nav import register, Node
except ImportError:
pass
else:
# Old way of registering Dashboard nodes
node = Node('Sagepay', 'sagepay-transaction-list')
register(node, 100)
class SagepayDashboard(Application):
name = None
list_view = views.Transactions
detail_view = views.Transaction
def get_urls(self):
urlpatterns = patterns('',
url(r'^transactions/$', self.list_view.as_view(),
name='sagepay-transaction-list'),
url(r'^transactions/(?P<pk>\d+)/$', self.detail_view.as_view(),
name='sagepay-transaction-detail'),
)
return self.post_process_urls(urlpatterns)
def get_url_decorator(self, url_name):
return staff_member_required
application = SagepayDashboard()
|
Fix 'Datacash' to 'Sagepay' in Dashboard
|
Fix 'Datacash' to 'Sagepay' in Dashboard
Copy-paste did it again
|
Python
|
bsd-3-clause
|
django-oscar/django-oscar-sagepay-direct
|
from django.conf.urls import patterns, url
from django.contrib.admin.views.decorators import staff_member_required
from oscar.core.application import Application
from . import views
try:
from oscar.apps.dashboard.nav import register, Node
except ImportError:
pass
else:
# Old way of registering Dashboard nodes
node = Node('Datacash', 'sagepay-transaction-list')
register(node, 100)
class SagepayDashboard(Application):
name = None
list_view = views.Transactions
detail_view = views.Transaction
def get_urls(self):
urlpatterns = patterns('',
url(r'^transactions/$', self.list_view.as_view(),
name='sagepay-transaction-list'),
url(r'^transactions/(?P<pk>\d+)/$', self.detail_view.as_view(),
name='sagepay-transaction-detail'),
)
return self.post_process_urls(urlpatterns)
def get_url_decorator(self, url_name):
return staff_member_required
application = SagepayDashboard()
Fix 'Datacash' to 'Sagepay' in Dashboard
Copy-paste did it again
|
from django.conf.urls import patterns, url
from django.contrib.admin.views.decorators import staff_member_required
from oscar.core.application import Application
from . import views
try:
from oscar.apps.dashboard.nav import register, Node
except ImportError:
pass
else:
# Old way of registering Dashboard nodes
node = Node('Sagepay', 'sagepay-transaction-list')
register(node, 100)
class SagepayDashboard(Application):
name = None
list_view = views.Transactions
detail_view = views.Transaction
def get_urls(self):
urlpatterns = patterns('',
url(r'^transactions/$', self.list_view.as_view(),
name='sagepay-transaction-list'),
url(r'^transactions/(?P<pk>\d+)/$', self.detail_view.as_view(),
name='sagepay-transaction-detail'),
)
return self.post_process_urls(urlpatterns)
def get_url_decorator(self, url_name):
return staff_member_required
application = SagepayDashboard()
|
<commit_before>from django.conf.urls import patterns, url
from django.contrib.admin.views.decorators import staff_member_required
from oscar.core.application import Application
from . import views
try:
from oscar.apps.dashboard.nav import register, Node
except ImportError:
pass
else:
# Old way of registering Dashboard nodes
node = Node('Datacash', 'sagepay-transaction-list')
register(node, 100)
class SagepayDashboard(Application):
name = None
list_view = views.Transactions
detail_view = views.Transaction
def get_urls(self):
urlpatterns = patterns('',
url(r'^transactions/$', self.list_view.as_view(),
name='sagepay-transaction-list'),
url(r'^transactions/(?P<pk>\d+)/$', self.detail_view.as_view(),
name='sagepay-transaction-detail'),
)
return self.post_process_urls(urlpatterns)
def get_url_decorator(self, url_name):
return staff_member_required
application = SagepayDashboard()
<commit_msg>Fix 'Datacash' to 'Sagepay' in Dashboard
Copy-paste did it again<commit_after>
|
from django.conf.urls import patterns, url
from django.contrib.admin.views.decorators import staff_member_required
from oscar.core.application import Application
from . import views
try:
from oscar.apps.dashboard.nav import register, Node
except ImportError:
pass
else:
# Old way of registering Dashboard nodes
node = Node('Sagepay', 'sagepay-transaction-list')
register(node, 100)
class SagepayDashboard(Application):
name = None
list_view = views.Transactions
detail_view = views.Transaction
def get_urls(self):
urlpatterns = patterns('',
url(r'^transactions/$', self.list_view.as_view(),
name='sagepay-transaction-list'),
url(r'^transactions/(?P<pk>\d+)/$', self.detail_view.as_view(),
name='sagepay-transaction-detail'),
)
return self.post_process_urls(urlpatterns)
def get_url_decorator(self, url_name):
return staff_member_required
application = SagepayDashboard()
|
from django.conf.urls import patterns, url
from django.contrib.admin.views.decorators import staff_member_required
from oscar.core.application import Application
from . import views
try:
from oscar.apps.dashboard.nav import register, Node
except ImportError:
pass
else:
# Old way of registering Dashboard nodes
node = Node('Datacash', 'sagepay-transaction-list')
register(node, 100)
class SagepayDashboard(Application):
name = None
list_view = views.Transactions
detail_view = views.Transaction
def get_urls(self):
urlpatterns = patterns('',
url(r'^transactions/$', self.list_view.as_view(),
name='sagepay-transaction-list'),
url(r'^transactions/(?P<pk>\d+)/$', self.detail_view.as_view(),
name='sagepay-transaction-detail'),
)
return self.post_process_urls(urlpatterns)
def get_url_decorator(self, url_name):
return staff_member_required
application = SagepayDashboard()
Fix 'Datacash' to 'Sagepay' in Dashboard
Copy-paste did it againfrom django.conf.urls import patterns, url
from django.contrib.admin.views.decorators import staff_member_required
from oscar.core.application import Application
from . import views
try:
from oscar.apps.dashboard.nav import register, Node
except ImportError:
pass
else:
# Old way of registering Dashboard nodes
node = Node('Sagepay', 'sagepay-transaction-list')
register(node, 100)
class SagepayDashboard(Application):
name = None
list_view = views.Transactions
detail_view = views.Transaction
def get_urls(self):
urlpatterns = patterns('',
url(r'^transactions/$', self.list_view.as_view(),
name='sagepay-transaction-list'),
url(r'^transactions/(?P<pk>\d+)/$', self.detail_view.as_view(),
name='sagepay-transaction-detail'),
)
return self.post_process_urls(urlpatterns)
def get_url_decorator(self, url_name):
return staff_member_required
application = SagepayDashboard()
|
<commit_before>from django.conf.urls import patterns, url
from django.contrib.admin.views.decorators import staff_member_required
from oscar.core.application import Application
from . import views
try:
from oscar.apps.dashboard.nav import register, Node
except ImportError:
pass
else:
# Old way of registering Dashboard nodes
node = Node('Datacash', 'sagepay-transaction-list')
register(node, 100)
class SagepayDashboard(Application):
name = None
list_view = views.Transactions
detail_view = views.Transaction
def get_urls(self):
urlpatterns = patterns('',
url(r'^transactions/$', self.list_view.as_view(),
name='sagepay-transaction-list'),
url(r'^transactions/(?P<pk>\d+)/$', self.detail_view.as_view(),
name='sagepay-transaction-detail'),
)
return self.post_process_urls(urlpatterns)
def get_url_decorator(self, url_name):
return staff_member_required
application = SagepayDashboard()
<commit_msg>Fix 'Datacash' to 'Sagepay' in Dashboard
Copy-paste did it again<commit_after>from django.conf.urls import patterns, url
from django.contrib.admin.views.decorators import staff_member_required
from oscar.core.application import Application
from . import views
try:
from oscar.apps.dashboard.nav import register, Node
except ImportError:
pass
else:
# Old way of registering Dashboard nodes
node = Node('Sagepay', 'sagepay-transaction-list')
register(node, 100)
class SagepayDashboard(Application):
name = None
list_view = views.Transactions
detail_view = views.Transaction
def get_urls(self):
urlpatterns = patterns('',
url(r'^transactions/$', self.list_view.as_view(),
name='sagepay-transaction-list'),
url(r'^transactions/(?P<pk>\d+)/$', self.detail_view.as_view(),
name='sagepay-transaction-detail'),
)
return self.post_process_urls(urlpatterns)
def get_url_decorator(self, url_name):
return staff_member_required
application = SagepayDashboard()
|
b50aee7a23c44b98b3cd6fee607cc5978a57c927
|
contrail_provisioning/config/templates/contrail_sudoers.py
|
contrail_provisioning/config/templates/contrail_sudoers.py
|
import string
template = string.Template("""
Defaults:contrail !requiretty
Cmnd_Alias CONFIGRESTART = /usr/sbin/service supervisor-config restart
contrail ALL = (root) NOPASSWD:CONFIGRESTART
""")
|
import string
template = string.Template("""
Defaults:contrail !requiretty
Cmnd_Alias CONFIGRESTART = /usr/sbin/service supervisor-config restart
Cmnd_Alias IFMAPRESTART = /usr/sbin/service ifmap restart
contrail ALL = (root) NOPASSWD:CONFIGRESTART,IFMAPRESTART
""")
|
Allow contrail user to restart ifmap without password closes-jira-bug: JCB-218958
|
Allow contrail user to restart ifmap without password
closes-jira-bug: JCB-218958
Change-Id: Id95001cf5ab455650b5b900b9b5f7bb33ccef8e3
|
Python
|
apache-2.0
|
Juniper/contrail-provisioning,Juniper/contrail-provisioning
|
import string
template = string.Template("""
Defaults:contrail !requiretty
Cmnd_Alias CONFIGRESTART = /usr/sbin/service supervisor-config restart
contrail ALL = (root) NOPASSWD:CONFIGRESTART
""")
Allow contrail user to restart ifmap without password
closes-jira-bug: JCB-218958
Change-Id: Id95001cf5ab455650b5b900b9b5f7bb33ccef8e3
|
import string
template = string.Template("""
Defaults:contrail !requiretty
Cmnd_Alias CONFIGRESTART = /usr/sbin/service supervisor-config restart
Cmnd_Alias IFMAPRESTART = /usr/sbin/service ifmap restart
contrail ALL = (root) NOPASSWD:CONFIGRESTART,IFMAPRESTART
""")
|
<commit_before>import string
template = string.Template("""
Defaults:contrail !requiretty
Cmnd_Alias CONFIGRESTART = /usr/sbin/service supervisor-config restart
contrail ALL = (root) NOPASSWD:CONFIGRESTART
""")
<commit_msg>Allow contrail user to restart ifmap without password
closes-jira-bug: JCB-218958
Change-Id: Id95001cf5ab455650b5b900b9b5f7bb33ccef8e3<commit_after>
|
import string
template = string.Template("""
Defaults:contrail !requiretty
Cmnd_Alias CONFIGRESTART = /usr/sbin/service supervisor-config restart
Cmnd_Alias IFMAPRESTART = /usr/sbin/service ifmap restart
contrail ALL = (root) NOPASSWD:CONFIGRESTART,IFMAPRESTART
""")
|
import string
template = string.Template("""
Defaults:contrail !requiretty
Cmnd_Alias CONFIGRESTART = /usr/sbin/service supervisor-config restart
contrail ALL = (root) NOPASSWD:CONFIGRESTART
""")
Allow contrail user to restart ifmap without password
closes-jira-bug: JCB-218958
Change-Id: Id95001cf5ab455650b5b900b9b5f7bb33ccef8e3import string
template = string.Template("""
Defaults:contrail !requiretty
Cmnd_Alias CONFIGRESTART = /usr/sbin/service supervisor-config restart
Cmnd_Alias IFMAPRESTART = /usr/sbin/service ifmap restart
contrail ALL = (root) NOPASSWD:CONFIGRESTART,IFMAPRESTART
""")
|
<commit_before>import string
template = string.Template("""
Defaults:contrail !requiretty
Cmnd_Alias CONFIGRESTART = /usr/sbin/service supervisor-config restart
contrail ALL = (root) NOPASSWD:CONFIGRESTART
""")
<commit_msg>Allow contrail user to restart ifmap without password
closes-jira-bug: JCB-218958
Change-Id: Id95001cf5ab455650b5b900b9b5f7bb33ccef8e3<commit_after>import string
template = string.Template("""
Defaults:contrail !requiretty
Cmnd_Alias CONFIGRESTART = /usr/sbin/service supervisor-config restart
Cmnd_Alias IFMAPRESTART = /usr/sbin/service ifmap restart
contrail ALL = (root) NOPASSWD:CONFIGRESTART,IFMAPRESTART
""")
|
249c6bbd74174b3b053fed13a58b24c8d485163a
|
src/ggrc/models/custom_attribute_value.py
|
src/ggrc/models/custom_attribute_value.py
|
# Copyright (C) 2014 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: laran@reciprocitylabs.com
# Maintained By: laran@reciprocitylabs.com
from ggrc import db
from .mixins import (
deferred, Base
)
class CustomAttributeValue(Base, db.Model):
__tablename__ = 'custom_attribute_values'
custom_attribute_id = deferred(
db.Column(
db.Integer,
db.ForeignKey('custom_attribute_definitions.id')), 'CustomAttributeValue')
attributable_id = deferred(db.Column(db.Integer), 'CustomAttributeValue')
attributable_type = deferred(db.Column(db.String), 'CustomAttributeValue')
attribute_value = deferred(db.Column(db.String), 'CustomAttributeValue')
@property
def attributable_attr(self):
return '{0}_attributable'.format(self.attributable_type)
@property
def attributable(self):
return getattr(self, self.attributable_attr)
@attributable.setter
def attributable(self, value):
self.attributable_id = value.id if value is not None else None
self.attributable_type = value.__class__.__name__ if value is not None \
else None
return setattr(self, self.attributable_attr, value)
_publish_attrs = [
'custom_attribute_id',
'attributable_id',
'attributable_type',
'attribute_value'
]
|
# Copyright (C) 2014 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: laran@reciprocitylabs.com
# Maintained By: laran@reciprocitylabs.com
from ggrc import db
from ggrc.models.mixins import Base
from ggrc.models.mixins import deferred
class CustomAttributeValue(Base, db.Model):
__tablename__ = 'custom_attribute_values'
custom_attribute_id = deferred(
db.Column(db.Integer, db.ForeignKey('custom_attribute_definitions.id')),
'CustomAttributeValue')
attributable_id = deferred(db.Column(db.Integer), 'CustomAttributeValue')
attributable_type = deferred(db.Column(db.String), 'CustomAttributeValue')
attribute_value = deferred(db.Column(db.String), 'CustomAttributeValue')
@property
def attributable_attr(self):
return '{0}_attributable'.format(self.attributable_type)
@property
def attributable(self):
return getattr(self, self.attributable_attr)
@attributable.setter
def attributable(self, value):
self.attributable_id = value.id if value is not None else None
self.attributable_type = value.__class__.__name__ if value is not None \
else None
return setattr(self, self.attributable_attr, value)
_publish_attrs = [
'custom_attribute_id',
'attributable_id',
'attributable_type',
'attribute_value'
]
|
Fix code style for custom attribute value
|
Fix code style for custom attribute value
|
Python
|
apache-2.0
|
plamut/ggrc-core,selahssea/ggrc-core,hasanalom/ggrc-core,AleksNeStu/ggrc-core,jmakov/ggrc-core,andrei-karalionak/ggrc-core,AleksNeStu/ggrc-core,andrei-karalionak/ggrc-core,j0gurt/ggrc-core,jmakov/ggrc-core,NejcZupec/ggrc-core,selahssea/ggrc-core,plamut/ggrc-core,josthkko/ggrc-core,VinnieJohns/ggrc-core,plamut/ggrc-core,j0gurt/ggrc-core,prasannav7/ggrc-core,NejcZupec/ggrc-core,hyperNURb/ggrc-core,hasanalom/ggrc-core,edofic/ggrc-core,josthkko/ggrc-core,prasannav7/ggrc-core,j0gurt/ggrc-core,jmakov/ggrc-core,edofic/ggrc-core,hyperNURb/ggrc-core,josthkko/ggrc-core,AleksNeStu/ggrc-core,hasanalom/ggrc-core,prasannav7/ggrc-core,edofic/ggrc-core,selahssea/ggrc-core,kr41/ggrc-core,AleksNeStu/ggrc-core,j0gurt/ggrc-core,andrei-karalionak/ggrc-core,kr41/ggrc-core,josthkko/ggrc-core,VinnieJohns/ggrc-core,VinnieJohns/ggrc-core,NejcZupec/ggrc-core,plamut/ggrc-core,hyperNURb/ggrc-core,andrei-karalionak/ggrc-core,prasannav7/ggrc-core,hyperNURb/ggrc-core,hasanalom/ggrc-core,jmakov/ggrc-core,VinnieJohns/ggrc-core,kr41/ggrc-core,hyperNURb/ggrc-core,jmakov/ggrc-core,edofic/ggrc-core,kr41/ggrc-core,selahssea/ggrc-core,NejcZupec/ggrc-core,hasanalom/ggrc-core
|
# Copyright (C) 2014 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: laran@reciprocitylabs.com
# Maintained By: laran@reciprocitylabs.com
from ggrc import db
from .mixins import (
deferred, Base
)
class CustomAttributeValue(Base, db.Model):
__tablename__ = 'custom_attribute_values'
custom_attribute_id = deferred(
db.Column(
db.Integer,
db.ForeignKey('custom_attribute_definitions.id')), 'CustomAttributeValue')
attributable_id = deferred(db.Column(db.Integer), 'CustomAttributeValue')
attributable_type = deferred(db.Column(db.String), 'CustomAttributeValue')
attribute_value = deferred(db.Column(db.String), 'CustomAttributeValue')
@property
def attributable_attr(self):
return '{0}_attributable'.format(self.attributable_type)
@property
def attributable(self):
return getattr(self, self.attributable_attr)
@attributable.setter
def attributable(self, value):
self.attributable_id = value.id if value is not None else None
self.attributable_type = value.__class__.__name__ if value is not None \
else None
return setattr(self, self.attributable_attr, value)
_publish_attrs = [
'custom_attribute_id',
'attributable_id',
'attributable_type',
'attribute_value'
]
Fix code style for custom attribute value
|
# Copyright (C) 2014 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: laran@reciprocitylabs.com
# Maintained By: laran@reciprocitylabs.com
from ggrc import db
from ggrc.models.mixins import Base
from ggrc.models.mixins import deferred
class CustomAttributeValue(Base, db.Model):
__tablename__ = 'custom_attribute_values'
custom_attribute_id = deferred(
db.Column(db.Integer, db.ForeignKey('custom_attribute_definitions.id')),
'CustomAttributeValue')
attributable_id = deferred(db.Column(db.Integer), 'CustomAttributeValue')
attributable_type = deferred(db.Column(db.String), 'CustomAttributeValue')
attribute_value = deferred(db.Column(db.String), 'CustomAttributeValue')
@property
def attributable_attr(self):
return '{0}_attributable'.format(self.attributable_type)
@property
def attributable(self):
return getattr(self, self.attributable_attr)
@attributable.setter
def attributable(self, value):
self.attributable_id = value.id if value is not None else None
self.attributable_type = value.__class__.__name__ if value is not None \
else None
return setattr(self, self.attributable_attr, value)
_publish_attrs = [
'custom_attribute_id',
'attributable_id',
'attributable_type',
'attribute_value'
]
|
<commit_before># Copyright (C) 2014 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: laran@reciprocitylabs.com
# Maintained By: laran@reciprocitylabs.com
from ggrc import db
from .mixins import (
deferred, Base
)
class CustomAttributeValue(Base, db.Model):
__tablename__ = 'custom_attribute_values'
custom_attribute_id = deferred(
db.Column(
db.Integer,
db.ForeignKey('custom_attribute_definitions.id')), 'CustomAttributeValue')
attributable_id = deferred(db.Column(db.Integer), 'CustomAttributeValue')
attributable_type = deferred(db.Column(db.String), 'CustomAttributeValue')
attribute_value = deferred(db.Column(db.String), 'CustomAttributeValue')
@property
def attributable_attr(self):
return '{0}_attributable'.format(self.attributable_type)
@property
def attributable(self):
return getattr(self, self.attributable_attr)
@attributable.setter
def attributable(self, value):
self.attributable_id = value.id if value is not None else None
self.attributable_type = value.__class__.__name__ if value is not None \
else None
return setattr(self, self.attributable_attr, value)
_publish_attrs = [
'custom_attribute_id',
'attributable_id',
'attributable_type',
'attribute_value'
]
<commit_msg>Fix code style for custom attribute value<commit_after>
|
# Copyright (C) 2014 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: laran@reciprocitylabs.com
# Maintained By: laran@reciprocitylabs.com
from ggrc import db
from ggrc.models.mixins import Base
from ggrc.models.mixins import deferred
class CustomAttributeValue(Base, db.Model):
__tablename__ = 'custom_attribute_values'
custom_attribute_id = deferred(
db.Column(db.Integer, db.ForeignKey('custom_attribute_definitions.id')),
'CustomAttributeValue')
attributable_id = deferred(db.Column(db.Integer), 'CustomAttributeValue')
attributable_type = deferred(db.Column(db.String), 'CustomAttributeValue')
attribute_value = deferred(db.Column(db.String), 'CustomAttributeValue')
@property
def attributable_attr(self):
return '{0}_attributable'.format(self.attributable_type)
@property
def attributable(self):
return getattr(self, self.attributable_attr)
@attributable.setter
def attributable(self, value):
self.attributable_id = value.id if value is not None else None
self.attributable_type = value.__class__.__name__ if value is not None \
else None
return setattr(self, self.attributable_attr, value)
_publish_attrs = [
'custom_attribute_id',
'attributable_id',
'attributable_type',
'attribute_value'
]
|
# Copyright (C) 2014 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: laran@reciprocitylabs.com
# Maintained By: laran@reciprocitylabs.com
from ggrc import db
from .mixins import (
deferred, Base
)
class CustomAttributeValue(Base, db.Model):
__tablename__ = 'custom_attribute_values'
custom_attribute_id = deferred(
db.Column(
db.Integer,
db.ForeignKey('custom_attribute_definitions.id')), 'CustomAttributeValue')
attributable_id = deferred(db.Column(db.Integer), 'CustomAttributeValue')
attributable_type = deferred(db.Column(db.String), 'CustomAttributeValue')
attribute_value = deferred(db.Column(db.String), 'CustomAttributeValue')
@property
def attributable_attr(self):
return '{0}_attributable'.format(self.attributable_type)
@property
def attributable(self):
return getattr(self, self.attributable_attr)
@attributable.setter
def attributable(self, value):
self.attributable_id = value.id if value is not None else None
self.attributable_type = value.__class__.__name__ if value is not None \
else None
return setattr(self, self.attributable_attr, value)
_publish_attrs = [
'custom_attribute_id',
'attributable_id',
'attributable_type',
'attribute_value'
]
Fix code style for custom attribute value# Copyright (C) 2014 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: laran@reciprocitylabs.com
# Maintained By: laran@reciprocitylabs.com
from ggrc import db
from ggrc.models.mixins import Base
from ggrc.models.mixins import deferred
class CustomAttributeValue(Base, db.Model):
__tablename__ = 'custom_attribute_values'
custom_attribute_id = deferred(
db.Column(db.Integer, db.ForeignKey('custom_attribute_definitions.id')),
'CustomAttributeValue')
attributable_id = deferred(db.Column(db.Integer), 'CustomAttributeValue')
attributable_type = deferred(db.Column(db.String), 'CustomAttributeValue')
attribute_value = deferred(db.Column(db.String), 'CustomAttributeValue')
@property
def attributable_attr(self):
return '{0}_attributable'.format(self.attributable_type)
@property
def attributable(self):
return getattr(self, self.attributable_attr)
@attributable.setter
def attributable(self, value):
self.attributable_id = value.id if value is not None else None
self.attributable_type = value.__class__.__name__ if value is not None \
else None
return setattr(self, self.attributable_attr, value)
_publish_attrs = [
'custom_attribute_id',
'attributable_id',
'attributable_type',
'attribute_value'
]
|
<commit_before># Copyright (C) 2014 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: laran@reciprocitylabs.com
# Maintained By: laran@reciprocitylabs.com
from ggrc import db
from .mixins import (
deferred, Base
)
class CustomAttributeValue(Base, db.Model):
__tablename__ = 'custom_attribute_values'
custom_attribute_id = deferred(
db.Column(
db.Integer,
db.ForeignKey('custom_attribute_definitions.id')), 'CustomAttributeValue')
attributable_id = deferred(db.Column(db.Integer), 'CustomAttributeValue')
attributable_type = deferred(db.Column(db.String), 'CustomAttributeValue')
attribute_value = deferred(db.Column(db.String), 'CustomAttributeValue')
@property
def attributable_attr(self):
return '{0}_attributable'.format(self.attributable_type)
@property
def attributable(self):
return getattr(self, self.attributable_attr)
@attributable.setter
def attributable(self, value):
self.attributable_id = value.id if value is not None else None
self.attributable_type = value.__class__.__name__ if value is not None \
else None
return setattr(self, self.attributable_attr, value)
_publish_attrs = [
'custom_attribute_id',
'attributable_id',
'attributable_type',
'attribute_value'
]
<commit_msg>Fix code style for custom attribute value<commit_after># Copyright (C) 2014 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: laran@reciprocitylabs.com
# Maintained By: laran@reciprocitylabs.com
from ggrc import db
from ggrc.models.mixins import Base
from ggrc.models.mixins import deferred
class CustomAttributeValue(Base, db.Model):
__tablename__ = 'custom_attribute_values'
custom_attribute_id = deferred(
db.Column(db.Integer, db.ForeignKey('custom_attribute_definitions.id')),
'CustomAttributeValue')
attributable_id = deferred(db.Column(db.Integer), 'CustomAttributeValue')
attributable_type = deferred(db.Column(db.String), 'CustomAttributeValue')
attribute_value = deferred(db.Column(db.String), 'CustomAttributeValue')
@property
def attributable_attr(self):
return '{0}_attributable'.format(self.attributable_type)
@property
def attributable(self):
return getattr(self, self.attributable_attr)
@attributable.setter
def attributable(self, value):
self.attributable_id = value.id if value is not None else None
self.attributable_type = value.__class__.__name__ if value is not None \
else None
return setattr(self, self.attributable_attr, value)
_publish_attrs = [
'custom_attribute_id',
'attributable_id',
'attributable_type',
'attribute_value'
]
|
e2ce9ad697cd686e91b546f6f3aa7b24b5e9266f
|
masters/master.tryserver.chromium.angle/master_site_config.py
|
masters/master.tryserver.chromium.angle/master_site_config.py
|
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""ActiveMaster definition."""
from config_bootstrap import Master
class TryServerANGLE(Master.Master4a):
project_name = 'ANGLE Try Server'
master_port = 21403
slave_port = 31403
master_port_alt = 41403
buildbot_url = 'http://build.chromium.org/p/tryserver.chromium.angle/'
gerrit_host = 'https://chromium-review.googlesource.com'
|
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""ActiveMaster definition."""
from config_bootstrap import Master
class TryServerANGLE(Master.Master4a):
project_name = 'ANGLE Try Server'
master_port = 21403
slave_port = 31403
master_port_alt = 41403
buildbot_url = 'http://build.chromium.org/p/tryserver.chromium.angle/'
gerrit_host = 'https://chromium-review.googlesource.com'
service_account_file = 'service-account-chromium-tryserver.json'
buildbucket_bucket = 'master.tryserver.chromium.linux'
|
Add buildbucket service account to Angle master.
|
Add buildbucket service account to Angle master.
BUG=577560
TBR=nodir@chromium.org
Review URL: https://codereview.chromium.org/1624703003
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@298368 0039d316-1c4b-4281-b951-d872f2087c98
|
Python
|
bsd-3-clause
|
eunchong/build,eunchong/build,eunchong/build,eunchong/build
|
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""ActiveMaster definition."""
from config_bootstrap import Master
class TryServerANGLE(Master.Master4a):
project_name = 'ANGLE Try Server'
master_port = 21403
slave_port = 31403
master_port_alt = 41403
buildbot_url = 'http://build.chromium.org/p/tryserver.chromium.angle/'
gerrit_host = 'https://chromium-review.googlesource.com'
Add buildbucket service account to Angle master.
BUG=577560
TBR=nodir@chromium.org
Review URL: https://codereview.chromium.org/1624703003
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@298368 0039d316-1c4b-4281-b951-d872f2087c98
|
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""ActiveMaster definition."""
from config_bootstrap import Master
class TryServerANGLE(Master.Master4a):
project_name = 'ANGLE Try Server'
master_port = 21403
slave_port = 31403
master_port_alt = 41403
buildbot_url = 'http://build.chromium.org/p/tryserver.chromium.angle/'
gerrit_host = 'https://chromium-review.googlesource.com'
service_account_file = 'service-account-chromium-tryserver.json'
buildbucket_bucket = 'master.tryserver.chromium.linux'
|
<commit_before># Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""ActiveMaster definition."""
from config_bootstrap import Master
class TryServerANGLE(Master.Master4a):
project_name = 'ANGLE Try Server'
master_port = 21403
slave_port = 31403
master_port_alt = 41403
buildbot_url = 'http://build.chromium.org/p/tryserver.chromium.angle/'
gerrit_host = 'https://chromium-review.googlesource.com'
<commit_msg>Add buildbucket service account to Angle master.
BUG=577560
TBR=nodir@chromium.org
Review URL: https://codereview.chromium.org/1624703003
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@298368 0039d316-1c4b-4281-b951-d872f2087c98<commit_after>
|
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""ActiveMaster definition."""
from config_bootstrap import Master
class TryServerANGLE(Master.Master4a):
project_name = 'ANGLE Try Server'
master_port = 21403
slave_port = 31403
master_port_alt = 41403
buildbot_url = 'http://build.chromium.org/p/tryserver.chromium.angle/'
gerrit_host = 'https://chromium-review.googlesource.com'
service_account_file = 'service-account-chromium-tryserver.json'
buildbucket_bucket = 'master.tryserver.chromium.linux'
|
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""ActiveMaster definition."""
from config_bootstrap import Master
class TryServerANGLE(Master.Master4a):
project_name = 'ANGLE Try Server'
master_port = 21403
slave_port = 31403
master_port_alt = 41403
buildbot_url = 'http://build.chromium.org/p/tryserver.chromium.angle/'
gerrit_host = 'https://chromium-review.googlesource.com'
Add buildbucket service account to Angle master.
BUG=577560
TBR=nodir@chromium.org
Review URL: https://codereview.chromium.org/1624703003
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@298368 0039d316-1c4b-4281-b951-d872f2087c98# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""ActiveMaster definition."""
from config_bootstrap import Master
class TryServerANGLE(Master.Master4a):
project_name = 'ANGLE Try Server'
master_port = 21403
slave_port = 31403
master_port_alt = 41403
buildbot_url = 'http://build.chromium.org/p/tryserver.chromium.angle/'
gerrit_host = 'https://chromium-review.googlesource.com'
service_account_file = 'service-account-chromium-tryserver.json'
buildbucket_bucket = 'master.tryserver.chromium.linux'
|
<commit_before># Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""ActiveMaster definition."""
from config_bootstrap import Master
class TryServerANGLE(Master.Master4a):
project_name = 'ANGLE Try Server'
master_port = 21403
slave_port = 31403
master_port_alt = 41403
buildbot_url = 'http://build.chromium.org/p/tryserver.chromium.angle/'
gerrit_host = 'https://chromium-review.googlesource.com'
<commit_msg>Add buildbucket service account to Angle master.
BUG=577560
TBR=nodir@chromium.org
Review URL: https://codereview.chromium.org/1624703003
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@298368 0039d316-1c4b-4281-b951-d872f2087c98<commit_after># Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""ActiveMaster definition."""
from config_bootstrap import Master
class TryServerANGLE(Master.Master4a):
project_name = 'ANGLE Try Server'
master_port = 21403
slave_port = 31403
master_port_alt = 41403
buildbot_url = 'http://build.chromium.org/p/tryserver.chromium.angle/'
gerrit_host = 'https://chromium-review.googlesource.com'
service_account_file = 'service-account-chromium-tryserver.json'
buildbucket_bucket = 'master.tryserver.chromium.linux'
|
eab3d891d7b0460223990642251bec4bb377543d
|
website/addons/github/tests/factories.py
|
website/addons/github/tests/factories.py
|
# -*- coding: utf-8 -*-
from factory import Sequence, SubFactory
from tests.factories import ExternalAccountFactory, ModularOdmFactory, ProjectFactory, UserFactory
from website.addons.github.model import GitHubNodeSettings, GitHubUserSettings
class GitHubAccountFactory(ExternalAccountFactory):
provider = 'github'
provider_id = Sequence(lambda n: 'id-{0}'.format(n))
oauth_key = Sequence(lambda n: 'key-{0}'.format(n))
class GitHubUserSettingsFactory(ModularOdmFactory):
FACTORY_FOR = GitHubUserSettings
owner = SubFactory(UserFactory)
class GitHubNodeSettingsFactory(ModularOdmFactory):
FACTORY_FOR = GitHubNodeSettings
owner = SubFactory(ProjectFactory)
user_settings = SubFactory(GitHubUserSettingsFactory)
repo = 'mock'
user = 'abc'
|
# -*- coding: utf-8 -*-
from factory import Sequence, SubFactory
from tests.factories import ExternalAccountFactory, ModularOdmFactory, ProjectFactory, UserFactory
from website.addons.github.model import GitHubNodeSettings, GitHubUserSettings
class GitHubAccountFactory(ExternalAccountFactory):
provider = 'github'
provider_id = Sequence(lambda n: 'id-{0}'.format(n))
oauth_key = Sequence(lambda n: 'key-{0}'.format(n))
display_name = 'abc'
class GitHubUserSettingsFactory(ModularOdmFactory):
FACTORY_FOR = GitHubUserSettings
owner = SubFactory(UserFactory)
class GitHubNodeSettingsFactory(ModularOdmFactory):
FACTORY_FOR = GitHubNodeSettings
owner = SubFactory(ProjectFactory)
user_settings = SubFactory(GitHubUserSettingsFactory)
repo = 'mock'
user = 'abc'
|
Include display_name in factory for tests
|
Include display_name in factory for tests
|
Python
|
apache-2.0
|
leb2dg/osf.io,doublebits/osf.io,DanielSBrown/osf.io,kwierman/osf.io,abought/osf.io,mluo613/osf.io,jnayak1/osf.io,cslzchen/osf.io,aaxelb/osf.io,pattisdr/osf.io,mluke93/osf.io,laurenrevere/osf.io,alexschiller/osf.io,kwierman/osf.io,wearpants/osf.io,acshi/osf.io,Nesiehr/osf.io,zachjanicki/osf.io,monikagrabowska/osf.io,monikagrabowska/osf.io,chennan47/osf.io,kch8qx/osf.io,mattclark/osf.io,acshi/osf.io,erinspace/osf.io,binoculars/osf.io,sloria/osf.io,Johnetordoff/osf.io,CenterForOpenScience/osf.io,cwisecarver/osf.io,adlius/osf.io,mluo613/osf.io,cslzchen/osf.io,chennan47/osf.io,caseyrollins/osf.io,asanfilippo7/osf.io,hmoco/osf.io,HalcyonChimera/osf.io,doublebits/osf.io,emetsger/osf.io,TomHeatwole/osf.io,emetsger/osf.io,zamattiac/osf.io,brianjgeiger/osf.io,monikagrabowska/osf.io,mfraezz/osf.io,chrisseto/osf.io,kwierman/osf.io,crcresearch/osf.io,chrisseto/osf.io,kch8qx/osf.io,asanfilippo7/osf.io,CenterForOpenScience/osf.io,abought/osf.io,laurenrevere/osf.io,doublebits/osf.io,caseyrollins/osf.io,erinspace/osf.io,chrisseto/osf.io,baylee-d/osf.io,asanfilippo7/osf.io,mfraezz/osf.io,cslzchen/osf.io,alexschiller/osf.io,jnayak1/osf.io,emetsger/osf.io,RomanZWang/osf.io,zamattiac/osf.io,alexschiller/osf.io,doublebits/osf.io,mfraezz/osf.io,kwierman/osf.io,alexschiller/osf.io,saradbowman/osf.io,pattisdr/osf.io,Nesiehr/osf.io,mluke93/osf.io,RomanZWang/osf.io,chennan47/osf.io,zachjanicki/osf.io,aaxelb/osf.io,CenterForOpenScience/osf.io,kch8qx/osf.io,crcresearch/osf.io,pattisdr/osf.io,abought/osf.io,amyshi188/osf.io,samchrisinger/osf.io,zachjanicki/osf.io,cwisecarver/osf.io,chrisseto/osf.io,hmoco/osf.io,acshi/osf.io,samchrisinger/osf.io,Nesiehr/osf.io,adlius/osf.io,kch8qx/osf.io,mluo613/osf.io,brianjgeiger/osf.io,saradbowman/osf.io,icereval/osf.io,rdhyee/osf.io,leb2dg/osf.io,zamattiac/osf.io,SSJohns/osf.io,icereval/osf.io,amyshi188/osf.io,rdhyee/osf.io,acshi/osf.io,caneruguz/osf.io,amyshi188/osf.io,DanielSBrown/osf.io,brianjgeiger/osf.io,cwisecarver/osf.io,zachjanicki/osf.io,mattclark/osf.io,jnayak1/osf.io,TomBaxter/osf.io,binoculars/osf.io,hmoco/osf.io,HalcyonChimera/osf.io,alexschiller/osf.io,samchrisinger/osf.io,SSJohns/osf.io,aaxelb/osf.io,doublebits/osf.io,TomHeatwole/osf.io,TomHeatwole/osf.io,jnayak1/osf.io,DanielSBrown/osf.io,Johnetordoff/osf.io,leb2dg/osf.io,brianjgeiger/osf.io,SSJohns/osf.io,sloria/osf.io,DanielSBrown/osf.io,mluke93/osf.io,RomanZWang/osf.io,baylee-d/osf.io,acshi/osf.io,abought/osf.io,Nesiehr/osf.io,CenterForOpenScience/osf.io,RomanZWang/osf.io,TomBaxter/osf.io,mfraezz/osf.io,monikagrabowska/osf.io,adlius/osf.io,caneruguz/osf.io,RomanZWang/osf.io,hmoco/osf.io,caseyrollins/osf.io,laurenrevere/osf.io,wearpants/osf.io,aaxelb/osf.io,wearpants/osf.io,sloria/osf.io,felliott/osf.io,TomBaxter/osf.io,rdhyee/osf.io,adlius/osf.io,Johnetordoff/osf.io,wearpants/osf.io,felliott/osf.io,emetsger/osf.io,mluo613/osf.io,crcresearch/osf.io,monikagrabowska/osf.io,mluo613/osf.io,HalcyonChimera/osf.io,HalcyonChimera/osf.io,felliott/osf.io,cslzchen/osf.io,icereval/osf.io,SSJohns/osf.io,asanfilippo7/osf.io,binoculars/osf.io,kch8qx/osf.io,TomHeatwole/osf.io,erinspace/osf.io,zamattiac/osf.io,leb2dg/osf.io,caneruguz/osf.io,Johnetordoff/osf.io,rdhyee/osf.io,mattclark/osf.io,mluke93/osf.io,samchrisinger/osf.io,felliott/osf.io,caneruguz/osf.io,baylee-d/osf.io,cwisecarver/osf.io,amyshi188/osf.io
|
# -*- coding: utf-8 -*-
from factory import Sequence, SubFactory
from tests.factories import ExternalAccountFactory, ModularOdmFactory, ProjectFactory, UserFactory
from website.addons.github.model import GitHubNodeSettings, GitHubUserSettings
class GitHubAccountFactory(ExternalAccountFactory):
provider = 'github'
provider_id = Sequence(lambda n: 'id-{0}'.format(n))
oauth_key = Sequence(lambda n: 'key-{0}'.format(n))
class GitHubUserSettingsFactory(ModularOdmFactory):
FACTORY_FOR = GitHubUserSettings
owner = SubFactory(UserFactory)
class GitHubNodeSettingsFactory(ModularOdmFactory):
FACTORY_FOR = GitHubNodeSettings
owner = SubFactory(ProjectFactory)
user_settings = SubFactory(GitHubUserSettingsFactory)
repo = 'mock'
user = 'abc'
Include display_name in factory for tests
|
# -*- coding: utf-8 -*-
from factory import Sequence, SubFactory
from tests.factories import ExternalAccountFactory, ModularOdmFactory, ProjectFactory, UserFactory
from website.addons.github.model import GitHubNodeSettings, GitHubUserSettings
class GitHubAccountFactory(ExternalAccountFactory):
provider = 'github'
provider_id = Sequence(lambda n: 'id-{0}'.format(n))
oauth_key = Sequence(lambda n: 'key-{0}'.format(n))
display_name = 'abc'
class GitHubUserSettingsFactory(ModularOdmFactory):
FACTORY_FOR = GitHubUserSettings
owner = SubFactory(UserFactory)
class GitHubNodeSettingsFactory(ModularOdmFactory):
FACTORY_FOR = GitHubNodeSettings
owner = SubFactory(ProjectFactory)
user_settings = SubFactory(GitHubUserSettingsFactory)
repo = 'mock'
user = 'abc'
|
<commit_before># -*- coding: utf-8 -*-
from factory import Sequence, SubFactory
from tests.factories import ExternalAccountFactory, ModularOdmFactory, ProjectFactory, UserFactory
from website.addons.github.model import GitHubNodeSettings, GitHubUserSettings
class GitHubAccountFactory(ExternalAccountFactory):
provider = 'github'
provider_id = Sequence(lambda n: 'id-{0}'.format(n))
oauth_key = Sequence(lambda n: 'key-{0}'.format(n))
class GitHubUserSettingsFactory(ModularOdmFactory):
FACTORY_FOR = GitHubUserSettings
owner = SubFactory(UserFactory)
class GitHubNodeSettingsFactory(ModularOdmFactory):
FACTORY_FOR = GitHubNodeSettings
owner = SubFactory(ProjectFactory)
user_settings = SubFactory(GitHubUserSettingsFactory)
repo = 'mock'
user = 'abc'
<commit_msg>Include display_name in factory for tests<commit_after>
|
# -*- coding: utf-8 -*-
from factory import Sequence, SubFactory
from tests.factories import ExternalAccountFactory, ModularOdmFactory, ProjectFactory, UserFactory
from website.addons.github.model import GitHubNodeSettings, GitHubUserSettings
class GitHubAccountFactory(ExternalAccountFactory):
provider = 'github'
provider_id = Sequence(lambda n: 'id-{0}'.format(n))
oauth_key = Sequence(lambda n: 'key-{0}'.format(n))
display_name = 'abc'
class GitHubUserSettingsFactory(ModularOdmFactory):
FACTORY_FOR = GitHubUserSettings
owner = SubFactory(UserFactory)
class GitHubNodeSettingsFactory(ModularOdmFactory):
FACTORY_FOR = GitHubNodeSettings
owner = SubFactory(ProjectFactory)
user_settings = SubFactory(GitHubUserSettingsFactory)
repo = 'mock'
user = 'abc'
|
# -*- coding: utf-8 -*-
from factory import Sequence, SubFactory
from tests.factories import ExternalAccountFactory, ModularOdmFactory, ProjectFactory, UserFactory
from website.addons.github.model import GitHubNodeSettings, GitHubUserSettings
class GitHubAccountFactory(ExternalAccountFactory):
provider = 'github'
provider_id = Sequence(lambda n: 'id-{0}'.format(n))
oauth_key = Sequence(lambda n: 'key-{0}'.format(n))
class GitHubUserSettingsFactory(ModularOdmFactory):
FACTORY_FOR = GitHubUserSettings
owner = SubFactory(UserFactory)
class GitHubNodeSettingsFactory(ModularOdmFactory):
FACTORY_FOR = GitHubNodeSettings
owner = SubFactory(ProjectFactory)
user_settings = SubFactory(GitHubUserSettingsFactory)
repo = 'mock'
user = 'abc'
Include display_name in factory for tests# -*- coding: utf-8 -*-
from factory import Sequence, SubFactory
from tests.factories import ExternalAccountFactory, ModularOdmFactory, ProjectFactory, UserFactory
from website.addons.github.model import GitHubNodeSettings, GitHubUserSettings
class GitHubAccountFactory(ExternalAccountFactory):
provider = 'github'
provider_id = Sequence(lambda n: 'id-{0}'.format(n))
oauth_key = Sequence(lambda n: 'key-{0}'.format(n))
display_name = 'abc'
class GitHubUserSettingsFactory(ModularOdmFactory):
FACTORY_FOR = GitHubUserSettings
owner = SubFactory(UserFactory)
class GitHubNodeSettingsFactory(ModularOdmFactory):
FACTORY_FOR = GitHubNodeSettings
owner = SubFactory(ProjectFactory)
user_settings = SubFactory(GitHubUserSettingsFactory)
repo = 'mock'
user = 'abc'
|
<commit_before># -*- coding: utf-8 -*-
from factory import Sequence, SubFactory
from tests.factories import ExternalAccountFactory, ModularOdmFactory, ProjectFactory, UserFactory
from website.addons.github.model import GitHubNodeSettings, GitHubUserSettings
class GitHubAccountFactory(ExternalAccountFactory):
provider = 'github'
provider_id = Sequence(lambda n: 'id-{0}'.format(n))
oauth_key = Sequence(lambda n: 'key-{0}'.format(n))
class GitHubUserSettingsFactory(ModularOdmFactory):
FACTORY_FOR = GitHubUserSettings
owner = SubFactory(UserFactory)
class GitHubNodeSettingsFactory(ModularOdmFactory):
FACTORY_FOR = GitHubNodeSettings
owner = SubFactory(ProjectFactory)
user_settings = SubFactory(GitHubUserSettingsFactory)
repo = 'mock'
user = 'abc'
<commit_msg>Include display_name in factory for tests<commit_after># -*- coding: utf-8 -*-
from factory import Sequence, SubFactory
from tests.factories import ExternalAccountFactory, ModularOdmFactory, ProjectFactory, UserFactory
from website.addons.github.model import GitHubNodeSettings, GitHubUserSettings
class GitHubAccountFactory(ExternalAccountFactory):
provider = 'github'
provider_id = Sequence(lambda n: 'id-{0}'.format(n))
oauth_key = Sequence(lambda n: 'key-{0}'.format(n))
display_name = 'abc'
class GitHubUserSettingsFactory(ModularOdmFactory):
FACTORY_FOR = GitHubUserSettings
owner = SubFactory(UserFactory)
class GitHubNodeSettingsFactory(ModularOdmFactory):
FACTORY_FOR = GitHubNodeSettings
owner = SubFactory(ProjectFactory)
user_settings = SubFactory(GitHubUserSettingsFactory)
repo = 'mock'
user = 'abc'
|
9ff005d1c3ffc82e8469f1ecf7dda2d9ebf8bb46
|
Museau/urls.py
|
Museau/urls.py
|
from django.conf.urls.defaults import patterns, include, url
from django.conf import settings
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Museau
url(r'^$', 'music.views.home', name='home'),
# Ajax requests
url(r'^ajax/(?P<action>.*)\.(?P<filetype>.*)$', 'music.ajax.do', name='ajax'),
# django-registration
url(r'^accounts/', include('registration.urls')),
url(r'^accounts/profile/$', 'music.views.back_to_home', name='redir'),
# Django Admin
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^admin/', include(admin.site.urls)),
)
|
from django.conf.urls.defaults import patterns, include, url
from django.conf import settings
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Museau
url(r'^$', 'music.views.home', name='home'),
# django-registration
url(r'^accounts/', include('registration.urls')),
url(r'^accounts/profile/$', 'music.views.back_to_home', name='redir'),
# Django Admin
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^admin/', include(admin.site.urls)),
)
|
Remove ajax urlconf since we don't use it anymore
|
Remove ajax urlconf since we don't use it anymore
|
Python
|
mit
|
folz/Museau,folz/Museau
|
from django.conf.urls.defaults import patterns, include, url
from django.conf import settings
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Museau
url(r'^$', 'music.views.home', name='home'),
# Ajax requests
url(r'^ajax/(?P<action>.*)\.(?P<filetype>.*)$', 'music.ajax.do', name='ajax'),
# django-registration
url(r'^accounts/', include('registration.urls')),
url(r'^accounts/profile/$', 'music.views.back_to_home', name='redir'),
# Django Admin
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^admin/', include(admin.site.urls)),
)
Remove ajax urlconf since we don't use it anymore
|
from django.conf.urls.defaults import patterns, include, url
from django.conf import settings
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Museau
url(r'^$', 'music.views.home', name='home'),
# django-registration
url(r'^accounts/', include('registration.urls')),
url(r'^accounts/profile/$', 'music.views.back_to_home', name='redir'),
# Django Admin
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^admin/', include(admin.site.urls)),
)
|
<commit_before>from django.conf.urls.defaults import patterns, include, url
from django.conf import settings
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Museau
url(r'^$', 'music.views.home', name='home'),
# Ajax requests
url(r'^ajax/(?P<action>.*)\.(?P<filetype>.*)$', 'music.ajax.do', name='ajax'),
# django-registration
url(r'^accounts/', include('registration.urls')),
url(r'^accounts/profile/$', 'music.views.back_to_home', name='redir'),
# Django Admin
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^admin/', include(admin.site.urls)),
)
<commit_msg>Remove ajax urlconf since we don't use it anymore<commit_after>
|
from django.conf.urls.defaults import patterns, include, url
from django.conf import settings
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Museau
url(r'^$', 'music.views.home', name='home'),
# django-registration
url(r'^accounts/', include('registration.urls')),
url(r'^accounts/profile/$', 'music.views.back_to_home', name='redir'),
# Django Admin
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^admin/', include(admin.site.urls)),
)
|
from django.conf.urls.defaults import patterns, include, url
from django.conf import settings
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Museau
url(r'^$', 'music.views.home', name='home'),
# Ajax requests
url(r'^ajax/(?P<action>.*)\.(?P<filetype>.*)$', 'music.ajax.do', name='ajax'),
# django-registration
url(r'^accounts/', include('registration.urls')),
url(r'^accounts/profile/$', 'music.views.back_to_home', name='redir'),
# Django Admin
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^admin/', include(admin.site.urls)),
)
Remove ajax urlconf since we don't use it anymorefrom django.conf.urls.defaults import patterns, include, url
from django.conf import settings
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Museau
url(r'^$', 'music.views.home', name='home'),
# django-registration
url(r'^accounts/', include('registration.urls')),
url(r'^accounts/profile/$', 'music.views.back_to_home', name='redir'),
# Django Admin
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^admin/', include(admin.site.urls)),
)
|
<commit_before>from django.conf.urls.defaults import patterns, include, url
from django.conf import settings
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Museau
url(r'^$', 'music.views.home', name='home'),
# Ajax requests
url(r'^ajax/(?P<action>.*)\.(?P<filetype>.*)$', 'music.ajax.do', name='ajax'),
# django-registration
url(r'^accounts/', include('registration.urls')),
url(r'^accounts/profile/$', 'music.views.back_to_home', name='redir'),
# Django Admin
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^admin/', include(admin.site.urls)),
)
<commit_msg>Remove ajax urlconf since we don't use it anymore<commit_after>from django.conf.urls.defaults import patterns, include, url
from django.conf import settings
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Museau
url(r'^$', 'music.views.home', name='home'),
# django-registration
url(r'^accounts/', include('registration.urls')),
url(r'^accounts/profile/$', 'music.views.back_to_home', name='redir'),
# Django Admin
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^admin/', include(admin.site.urls)),
)
|
306336d4445149cd2f0d6fa3a58b7244eafe3cd0
|
conveyor/store.py
|
conveyor/store.py
|
class BaseStore(object):
def set(self, key, value):
raise NotImplementedError
def get(self, key):
raise NotImplementedError
class InMemoryStore(BaseStore):
def __init__(self, *args, **kwargs):
super(InMemoryStore, self).__init__(*args, **kwargs)
self._data = {}
def set(self, key, value):
self._data[key] = value
def get(self, key):
return self._data[key]
|
class BaseStore(object):
def set(self, key, value):
raise NotImplementedError
def get(self, key):
raise NotImplementedError
class InMemoryStore(BaseStore):
def __init__(self, *args, **kwargs):
super(InMemoryStore, self).__init__(*args, **kwargs)
self._data = {}
def set(self, key, value):
self._data[key] = value
def get(self, key):
return self._data[key]
class RedisStore(BaseStore):
def __init__(self, connection=None, prefix=None, *args, **kwargs):
super(RedisStore, self).__init__(*args, **kwargs)
import redis
self.redis = redis.StrictRedis(**connection)
self.prefix = prefix
def set(self, key, value):
if self.prefix is not None:
key = self.prefix + key
self.redis.set(key, value)
def get(self, key):
if self.prefix is not None:
key = self.prefix + key
return self.redis.get(key)
|
Add a RedisStore to conveyor
|
Add a RedisStore to conveyor
|
Python
|
bsd-2-clause
|
crateio/carrier
|
class BaseStore(object):
def set(self, key, value):
raise NotImplementedError
def get(self, key):
raise NotImplementedError
class InMemoryStore(BaseStore):
def __init__(self, *args, **kwargs):
super(InMemoryStore, self).__init__(*args, **kwargs)
self._data = {}
def set(self, key, value):
self._data[key] = value
def get(self, key):
return self._data[key]
Add a RedisStore to conveyor
|
class BaseStore(object):
def set(self, key, value):
raise NotImplementedError
def get(self, key):
raise NotImplementedError
class InMemoryStore(BaseStore):
def __init__(self, *args, **kwargs):
super(InMemoryStore, self).__init__(*args, **kwargs)
self._data = {}
def set(self, key, value):
self._data[key] = value
def get(self, key):
return self._data[key]
class RedisStore(BaseStore):
def __init__(self, connection=None, prefix=None, *args, **kwargs):
super(RedisStore, self).__init__(*args, **kwargs)
import redis
self.redis = redis.StrictRedis(**connection)
self.prefix = prefix
def set(self, key, value):
if self.prefix is not None:
key = self.prefix + key
self.redis.set(key, value)
def get(self, key):
if self.prefix is not None:
key = self.prefix + key
return self.redis.get(key)
|
<commit_before>class BaseStore(object):
def set(self, key, value):
raise NotImplementedError
def get(self, key):
raise NotImplementedError
class InMemoryStore(BaseStore):
def __init__(self, *args, **kwargs):
super(InMemoryStore, self).__init__(*args, **kwargs)
self._data = {}
def set(self, key, value):
self._data[key] = value
def get(self, key):
return self._data[key]
<commit_msg>Add a RedisStore to conveyor<commit_after>
|
class BaseStore(object):
def set(self, key, value):
raise NotImplementedError
def get(self, key):
raise NotImplementedError
class InMemoryStore(BaseStore):
def __init__(self, *args, **kwargs):
super(InMemoryStore, self).__init__(*args, **kwargs)
self._data = {}
def set(self, key, value):
self._data[key] = value
def get(self, key):
return self._data[key]
class RedisStore(BaseStore):
def __init__(self, connection=None, prefix=None, *args, **kwargs):
super(RedisStore, self).__init__(*args, **kwargs)
import redis
self.redis = redis.StrictRedis(**connection)
self.prefix = prefix
def set(self, key, value):
if self.prefix is not None:
key = self.prefix + key
self.redis.set(key, value)
def get(self, key):
if self.prefix is not None:
key = self.prefix + key
return self.redis.get(key)
|
class BaseStore(object):
def set(self, key, value):
raise NotImplementedError
def get(self, key):
raise NotImplementedError
class InMemoryStore(BaseStore):
def __init__(self, *args, **kwargs):
super(InMemoryStore, self).__init__(*args, **kwargs)
self._data = {}
def set(self, key, value):
self._data[key] = value
def get(self, key):
return self._data[key]
Add a RedisStore to conveyorclass BaseStore(object):
def set(self, key, value):
raise NotImplementedError
def get(self, key):
raise NotImplementedError
class InMemoryStore(BaseStore):
def __init__(self, *args, **kwargs):
super(InMemoryStore, self).__init__(*args, **kwargs)
self._data = {}
def set(self, key, value):
self._data[key] = value
def get(self, key):
return self._data[key]
class RedisStore(BaseStore):
def __init__(self, connection=None, prefix=None, *args, **kwargs):
super(RedisStore, self).__init__(*args, **kwargs)
import redis
self.redis = redis.StrictRedis(**connection)
self.prefix = prefix
def set(self, key, value):
if self.prefix is not None:
key = self.prefix + key
self.redis.set(key, value)
def get(self, key):
if self.prefix is not None:
key = self.prefix + key
return self.redis.get(key)
|
<commit_before>class BaseStore(object):
def set(self, key, value):
raise NotImplementedError
def get(self, key):
raise NotImplementedError
class InMemoryStore(BaseStore):
def __init__(self, *args, **kwargs):
super(InMemoryStore, self).__init__(*args, **kwargs)
self._data = {}
def set(self, key, value):
self._data[key] = value
def get(self, key):
return self._data[key]
<commit_msg>Add a RedisStore to conveyor<commit_after>class BaseStore(object):
def set(self, key, value):
raise NotImplementedError
def get(self, key):
raise NotImplementedError
class InMemoryStore(BaseStore):
def __init__(self, *args, **kwargs):
super(InMemoryStore, self).__init__(*args, **kwargs)
self._data = {}
def set(self, key, value):
self._data[key] = value
def get(self, key):
return self._data[key]
class RedisStore(BaseStore):
def __init__(self, connection=None, prefix=None, *args, **kwargs):
super(RedisStore, self).__init__(*args, **kwargs)
import redis
self.redis = redis.StrictRedis(**connection)
self.prefix = prefix
def set(self, key, value):
if self.prefix is not None:
key = self.prefix + key
self.redis.set(key, value)
def get(self, key):
if self.prefix is not None:
key = self.prefix + key
return self.redis.get(key)
|
2bdf58c9a707c0a08c7c48d46c5b9b13db14965f
|
github/data_types/repository.py
|
github/data_types/repository.py
|
from data_types.user import User
class Repository:
"""
GitHub Repository
https://developer.github.com/v3/repos/
Attributes:
id: GitHub internal id
name: Repository short name like "codex"
full_name: Repository short full_name like "codex-team/codex"
description: Optional description
owner: Repository owner User
private : true|false
html_url: Public URL on github.com
git_url: "git://github.com/baxterthehacker/public-repo.git"
clone_url: "https://github.com/baxterthehacker/public-repo.git"
"""
def __init__(self, data):
# Internal GitHub id
self.id = data.get('id', 0)
self.name = data.get('name', '')
self.full_name = data.get('full_name', '')
self.description = data.get('description', '')
# Public link
self.html_url = data.get('html_url', '')
# Owner represented as User
self.owner = User(data['owner'])
self.private = data.get('private', 'false')
self.git_url = data.get('git_url', '')
self.clone_url = data.get('clone_url', '')
|
from data_types.user import User
class Repository:
"""
GitHub Repository
https://developer.github.com/v3/repos/
Attributes:
id: GitHub internal id
name: Repository short name like "codex"
full_name: Repository short full_name like "codex-team/codex"
description: Optional description
owner: Repository owner User
private : true|false
html_url: Public URL on github.com
git_url: "git://github.com/baxterthehacker/public-repo.git"
clone_url: "https://github.com/baxterthehacker/public-repo.git"
"""
def __init__(self, data):
# Internal GitHub id
self.id = data.get('id', 0)
self.name = data.get('name', '')
self.full_name = data.get('full_name', '')
self.description = data.get('description', '')
# Public link
self.html_url = data.get('html_url', '')
# Owner represented as User
self.owner = User(data['owner'])
self.private = data.get('private', 'false')
self.git_url = data.get('git_url', '')
self.clone_url = data.get('clone_url', '')
self.stargazers_count = data.get('stargazers_count', 0)
|
Add a new field "stargazers_count"
|
Add a new field "stargazers_count"
|
Python
|
mit
|
codex-bot/github
|
from data_types.user import User
class Repository:
"""
GitHub Repository
https://developer.github.com/v3/repos/
Attributes:
id: GitHub internal id
name: Repository short name like "codex"
full_name: Repository short full_name like "codex-team/codex"
description: Optional description
owner: Repository owner User
private : true|false
html_url: Public URL on github.com
git_url: "git://github.com/baxterthehacker/public-repo.git"
clone_url: "https://github.com/baxterthehacker/public-repo.git"
"""
def __init__(self, data):
# Internal GitHub id
self.id = data.get('id', 0)
self.name = data.get('name', '')
self.full_name = data.get('full_name', '')
self.description = data.get('description', '')
# Public link
self.html_url = data.get('html_url', '')
# Owner represented as User
self.owner = User(data['owner'])
self.private = data.get('private', 'false')
self.git_url = data.get('git_url', '')
self.clone_url = data.get('clone_url', '')
Add a new field "stargazers_count"
|
from data_types.user import User
class Repository:
"""
GitHub Repository
https://developer.github.com/v3/repos/
Attributes:
id: GitHub internal id
name: Repository short name like "codex"
full_name: Repository short full_name like "codex-team/codex"
description: Optional description
owner: Repository owner User
private : true|false
html_url: Public URL on github.com
git_url: "git://github.com/baxterthehacker/public-repo.git"
clone_url: "https://github.com/baxterthehacker/public-repo.git"
"""
def __init__(self, data):
# Internal GitHub id
self.id = data.get('id', 0)
self.name = data.get('name', '')
self.full_name = data.get('full_name', '')
self.description = data.get('description', '')
# Public link
self.html_url = data.get('html_url', '')
# Owner represented as User
self.owner = User(data['owner'])
self.private = data.get('private', 'false')
self.git_url = data.get('git_url', '')
self.clone_url = data.get('clone_url', '')
self.stargazers_count = data.get('stargazers_count', 0)
|
<commit_before>from data_types.user import User
class Repository:
"""
GitHub Repository
https://developer.github.com/v3/repos/
Attributes:
id: GitHub internal id
name: Repository short name like "codex"
full_name: Repository short full_name like "codex-team/codex"
description: Optional description
owner: Repository owner User
private : true|false
html_url: Public URL on github.com
git_url: "git://github.com/baxterthehacker/public-repo.git"
clone_url: "https://github.com/baxterthehacker/public-repo.git"
"""
def __init__(self, data):
# Internal GitHub id
self.id = data.get('id', 0)
self.name = data.get('name', '')
self.full_name = data.get('full_name', '')
self.description = data.get('description', '')
# Public link
self.html_url = data.get('html_url', '')
# Owner represented as User
self.owner = User(data['owner'])
self.private = data.get('private', 'false')
self.git_url = data.get('git_url', '')
self.clone_url = data.get('clone_url', '')
<commit_msg>Add a new field "stargazers_count"<commit_after>
|
from data_types.user import User
class Repository:
"""
GitHub Repository
https://developer.github.com/v3/repos/
Attributes:
id: GitHub internal id
name: Repository short name like "codex"
full_name: Repository short full_name like "codex-team/codex"
description: Optional description
owner: Repository owner User
private : true|false
html_url: Public URL on github.com
git_url: "git://github.com/baxterthehacker/public-repo.git"
clone_url: "https://github.com/baxterthehacker/public-repo.git"
"""
def __init__(self, data):
# Internal GitHub id
self.id = data.get('id', 0)
self.name = data.get('name', '')
self.full_name = data.get('full_name', '')
self.description = data.get('description', '')
# Public link
self.html_url = data.get('html_url', '')
# Owner represented as User
self.owner = User(data['owner'])
self.private = data.get('private', 'false')
self.git_url = data.get('git_url', '')
self.clone_url = data.get('clone_url', '')
self.stargazers_count = data.get('stargazers_count', 0)
|
from data_types.user import User
class Repository:
"""
GitHub Repository
https://developer.github.com/v3/repos/
Attributes:
id: GitHub internal id
name: Repository short name like "codex"
full_name: Repository short full_name like "codex-team/codex"
description: Optional description
owner: Repository owner User
private : true|false
html_url: Public URL on github.com
git_url: "git://github.com/baxterthehacker/public-repo.git"
clone_url: "https://github.com/baxterthehacker/public-repo.git"
"""
def __init__(self, data):
# Internal GitHub id
self.id = data.get('id', 0)
self.name = data.get('name', '')
self.full_name = data.get('full_name', '')
self.description = data.get('description', '')
# Public link
self.html_url = data.get('html_url', '')
# Owner represented as User
self.owner = User(data['owner'])
self.private = data.get('private', 'false')
self.git_url = data.get('git_url', '')
self.clone_url = data.get('clone_url', '')
Add a new field "stargazers_count"from data_types.user import User
class Repository:
"""
GitHub Repository
https://developer.github.com/v3/repos/
Attributes:
id: GitHub internal id
name: Repository short name like "codex"
full_name: Repository short full_name like "codex-team/codex"
description: Optional description
owner: Repository owner User
private : true|false
html_url: Public URL on github.com
git_url: "git://github.com/baxterthehacker/public-repo.git"
clone_url: "https://github.com/baxterthehacker/public-repo.git"
"""
def __init__(self, data):
# Internal GitHub id
self.id = data.get('id', 0)
self.name = data.get('name', '')
self.full_name = data.get('full_name', '')
self.description = data.get('description', '')
# Public link
self.html_url = data.get('html_url', '')
# Owner represented as User
self.owner = User(data['owner'])
self.private = data.get('private', 'false')
self.git_url = data.get('git_url', '')
self.clone_url = data.get('clone_url', '')
self.stargazers_count = data.get('stargazers_count', 0)
|
<commit_before>from data_types.user import User
class Repository:
"""
GitHub Repository
https://developer.github.com/v3/repos/
Attributes:
id: GitHub internal id
name: Repository short name like "codex"
full_name: Repository short full_name like "codex-team/codex"
description: Optional description
owner: Repository owner User
private : true|false
html_url: Public URL on github.com
git_url: "git://github.com/baxterthehacker/public-repo.git"
clone_url: "https://github.com/baxterthehacker/public-repo.git"
"""
def __init__(self, data):
# Internal GitHub id
self.id = data.get('id', 0)
self.name = data.get('name', '')
self.full_name = data.get('full_name', '')
self.description = data.get('description', '')
# Public link
self.html_url = data.get('html_url', '')
# Owner represented as User
self.owner = User(data['owner'])
self.private = data.get('private', 'false')
self.git_url = data.get('git_url', '')
self.clone_url = data.get('clone_url', '')
<commit_msg>Add a new field "stargazers_count"<commit_after>from data_types.user import User
class Repository:
"""
GitHub Repository
https://developer.github.com/v3/repos/
Attributes:
id: GitHub internal id
name: Repository short name like "codex"
full_name: Repository short full_name like "codex-team/codex"
description: Optional description
owner: Repository owner User
private : true|false
html_url: Public URL on github.com
git_url: "git://github.com/baxterthehacker/public-repo.git"
clone_url: "https://github.com/baxterthehacker/public-repo.git"
"""
def __init__(self, data):
# Internal GitHub id
self.id = data.get('id', 0)
self.name = data.get('name', '')
self.full_name = data.get('full_name', '')
self.description = data.get('description', '')
# Public link
self.html_url = data.get('html_url', '')
# Owner represented as User
self.owner = User(data['owner'])
self.private = data.get('private', 'false')
self.git_url = data.get('git_url', '')
self.clone_url = data.get('clone_url', '')
self.stargazers_count = data.get('stargazers_count', 0)
|
19dd85a13ef0108bd2860a658881a255f6e31613
|
debsources/app/patches/views.py
|
debsources/app/patches/views.py
|
# Copyright (C) 2015 The Debsources developers <info@sources.debian.net>.
# See the AUTHORS file at the top-level directory of this distribution and at
# https://anonscm.debian.org/gitweb/?p=qa/debsources.git;a=blob;f=AUTHORS;hb=HEAD
#
# This file is part of Debsources. Debsources is free software: you can
# redistribute it and/or modify it under the terms of the GNU Affero General
# Public License as published by the Free Software Foundation, either version 3
# of the License, or (at your option) any later version. For more information
# see the COPYING file at the top-level directory of this distribution and at
# https://anonscm.debian.org/gitweb/?p=qa/debsources.git;a=blob;f=COPYING;hb=HEAD
from __future__ import absolute_import
from ..views import GeneralView
class SummaryView(GeneralView):
def get_objects(self, path_to):
path_dict = path_to.split('/')
package = path_dict[0]
version = path_dict[1]
return dict(package=package,
version=version)
|
# Copyright (C) 2015 The Debsources developers <info@sources.debian.net>.
# See the AUTHORS file at the top-level directory of this distribution and at
# https://anonscm.debian.org/gitweb/?p=qa/debsources.git;a=blob;f=AUTHORS;hb=HEAD
#
# This file is part of Debsources. Debsources is free software: you can
# redistribute it and/or modify it under the terms of the GNU Affero General
# Public License as published by the Free Software Foundation, either version 3
# of the License, or (at your option) any later version. For more information
# see the COPYING file at the top-level directory of this distribution and at
# https://anonscm.debian.org/gitweb/?p=qa/debsources.git;a=blob;f=COPYING;hb=HEAD
from __future__ import absolute_import
from flask import request
from ..views import GeneralView
class SummaryView(GeneralView):
def get_objects(self, path_to):
path_dict = path_to.split('/')
package = path_dict[0]
version = path_dict[1]
path = '/'.join(path_dict[2:])
if version == "latest": # we search the latest available version
return self._handle_latest_version(request.endpoint,
package, path)
versions = self.handle_versions(version, package, path)
if versions:
redirect_url_parts = [package, versions[-1]]
if path:
redirect_url_parts.append(path)
redirect_url = '/'.join(redirect_url_parts)
return self._redirect_to_url(request.endpoint,
redirect_url, redirect_code=302)
return dict(package=package,
version=version)
|
Add version handling in SummaryView for the patches BP
|
Add version handling in SummaryView for the patches BP
|
Python
|
agpl-3.0
|
devoxel/debsources,vivekanand1101/debsources,vivekanand1101/debsources,zacchiro/debsources,Debian/debsources,devoxel/debsources,zacchiro/debsources,matthieucan/debsources,Debian/debsources,oorestisime/debsources,vivekanand1101/debsources,devoxel/debsources,matthieucan/debsources,devoxel/debsources,matthieucan/debsources,oorestisime/debsources,zacchiro/debsources,matthieucan/debsources,devoxel/debsources,zacchiro/debsources,Debian/debsources,matthieucan/debsources,vivekanand1101/debsources,Debian/debsources,oorestisime/debsources,oorestisime/debsources,oorestisime/debsources,Debian/debsources,vivekanand1101/debsources,zacchiro/debsources
|
# Copyright (C) 2015 The Debsources developers <info@sources.debian.net>.
# See the AUTHORS file at the top-level directory of this distribution and at
# https://anonscm.debian.org/gitweb/?p=qa/debsources.git;a=blob;f=AUTHORS;hb=HEAD
#
# This file is part of Debsources. Debsources is free software: you can
# redistribute it and/or modify it under the terms of the GNU Affero General
# Public License as published by the Free Software Foundation, either version 3
# of the License, or (at your option) any later version. For more information
# see the COPYING file at the top-level directory of this distribution and at
# https://anonscm.debian.org/gitweb/?p=qa/debsources.git;a=blob;f=COPYING;hb=HEAD
from __future__ import absolute_import
from ..views import GeneralView
class SummaryView(GeneralView):
def get_objects(self, path_to):
path_dict = path_to.split('/')
package = path_dict[0]
version = path_dict[1]
return dict(package=package,
version=version)
Add version handling in SummaryView for the patches BP
|
# Copyright (C) 2015 The Debsources developers <info@sources.debian.net>.
# See the AUTHORS file at the top-level directory of this distribution and at
# https://anonscm.debian.org/gitweb/?p=qa/debsources.git;a=blob;f=AUTHORS;hb=HEAD
#
# This file is part of Debsources. Debsources is free software: you can
# redistribute it and/or modify it under the terms of the GNU Affero General
# Public License as published by the Free Software Foundation, either version 3
# of the License, or (at your option) any later version. For more information
# see the COPYING file at the top-level directory of this distribution and at
# https://anonscm.debian.org/gitweb/?p=qa/debsources.git;a=blob;f=COPYING;hb=HEAD
from __future__ import absolute_import
from flask import request
from ..views import GeneralView
class SummaryView(GeneralView):
def get_objects(self, path_to):
path_dict = path_to.split('/')
package = path_dict[0]
version = path_dict[1]
path = '/'.join(path_dict[2:])
if version == "latest": # we search the latest available version
return self._handle_latest_version(request.endpoint,
package, path)
versions = self.handle_versions(version, package, path)
if versions:
redirect_url_parts = [package, versions[-1]]
if path:
redirect_url_parts.append(path)
redirect_url = '/'.join(redirect_url_parts)
return self._redirect_to_url(request.endpoint,
redirect_url, redirect_code=302)
return dict(package=package,
version=version)
|
<commit_before># Copyright (C) 2015 The Debsources developers <info@sources.debian.net>.
# See the AUTHORS file at the top-level directory of this distribution and at
# https://anonscm.debian.org/gitweb/?p=qa/debsources.git;a=blob;f=AUTHORS;hb=HEAD
#
# This file is part of Debsources. Debsources is free software: you can
# redistribute it and/or modify it under the terms of the GNU Affero General
# Public License as published by the Free Software Foundation, either version 3
# of the License, or (at your option) any later version. For more information
# see the COPYING file at the top-level directory of this distribution and at
# https://anonscm.debian.org/gitweb/?p=qa/debsources.git;a=blob;f=COPYING;hb=HEAD
from __future__ import absolute_import
from ..views import GeneralView
class SummaryView(GeneralView):
def get_objects(self, path_to):
path_dict = path_to.split('/')
package = path_dict[0]
version = path_dict[1]
return dict(package=package,
version=version)
<commit_msg>Add version handling in SummaryView for the patches BP<commit_after>
|
# Copyright (C) 2015 The Debsources developers <info@sources.debian.net>.
# See the AUTHORS file at the top-level directory of this distribution and at
# https://anonscm.debian.org/gitweb/?p=qa/debsources.git;a=blob;f=AUTHORS;hb=HEAD
#
# This file is part of Debsources. Debsources is free software: you can
# redistribute it and/or modify it under the terms of the GNU Affero General
# Public License as published by the Free Software Foundation, either version 3
# of the License, or (at your option) any later version. For more information
# see the COPYING file at the top-level directory of this distribution and at
# https://anonscm.debian.org/gitweb/?p=qa/debsources.git;a=blob;f=COPYING;hb=HEAD
from __future__ import absolute_import
from flask import request
from ..views import GeneralView
class SummaryView(GeneralView):
def get_objects(self, path_to):
path_dict = path_to.split('/')
package = path_dict[0]
version = path_dict[1]
path = '/'.join(path_dict[2:])
if version == "latest": # we search the latest available version
return self._handle_latest_version(request.endpoint,
package, path)
versions = self.handle_versions(version, package, path)
if versions:
redirect_url_parts = [package, versions[-1]]
if path:
redirect_url_parts.append(path)
redirect_url = '/'.join(redirect_url_parts)
return self._redirect_to_url(request.endpoint,
redirect_url, redirect_code=302)
return dict(package=package,
version=version)
|
# Copyright (C) 2015 The Debsources developers <info@sources.debian.net>.
# See the AUTHORS file at the top-level directory of this distribution and at
# https://anonscm.debian.org/gitweb/?p=qa/debsources.git;a=blob;f=AUTHORS;hb=HEAD
#
# This file is part of Debsources. Debsources is free software: you can
# redistribute it and/or modify it under the terms of the GNU Affero General
# Public License as published by the Free Software Foundation, either version 3
# of the License, or (at your option) any later version. For more information
# see the COPYING file at the top-level directory of this distribution and at
# https://anonscm.debian.org/gitweb/?p=qa/debsources.git;a=blob;f=COPYING;hb=HEAD
from __future__ import absolute_import
from ..views import GeneralView
class SummaryView(GeneralView):
def get_objects(self, path_to):
path_dict = path_to.split('/')
package = path_dict[0]
version = path_dict[1]
return dict(package=package,
version=version)
Add version handling in SummaryView for the patches BP# Copyright (C) 2015 The Debsources developers <info@sources.debian.net>.
# See the AUTHORS file at the top-level directory of this distribution and at
# https://anonscm.debian.org/gitweb/?p=qa/debsources.git;a=blob;f=AUTHORS;hb=HEAD
#
# This file is part of Debsources. Debsources is free software: you can
# redistribute it and/or modify it under the terms of the GNU Affero General
# Public License as published by the Free Software Foundation, either version 3
# of the License, or (at your option) any later version. For more information
# see the COPYING file at the top-level directory of this distribution and at
# https://anonscm.debian.org/gitweb/?p=qa/debsources.git;a=blob;f=COPYING;hb=HEAD
from __future__ import absolute_import
from flask import request
from ..views import GeneralView
class SummaryView(GeneralView):
def get_objects(self, path_to):
path_dict = path_to.split('/')
package = path_dict[0]
version = path_dict[1]
path = '/'.join(path_dict[2:])
if version == "latest": # we search the latest available version
return self._handle_latest_version(request.endpoint,
package, path)
versions = self.handle_versions(version, package, path)
if versions:
redirect_url_parts = [package, versions[-1]]
if path:
redirect_url_parts.append(path)
redirect_url = '/'.join(redirect_url_parts)
return self._redirect_to_url(request.endpoint,
redirect_url, redirect_code=302)
return dict(package=package,
version=version)
|
<commit_before># Copyright (C) 2015 The Debsources developers <info@sources.debian.net>.
# See the AUTHORS file at the top-level directory of this distribution and at
# https://anonscm.debian.org/gitweb/?p=qa/debsources.git;a=blob;f=AUTHORS;hb=HEAD
#
# This file is part of Debsources. Debsources is free software: you can
# redistribute it and/or modify it under the terms of the GNU Affero General
# Public License as published by the Free Software Foundation, either version 3
# of the License, or (at your option) any later version. For more information
# see the COPYING file at the top-level directory of this distribution and at
# https://anonscm.debian.org/gitweb/?p=qa/debsources.git;a=blob;f=COPYING;hb=HEAD
from __future__ import absolute_import
from ..views import GeneralView
class SummaryView(GeneralView):
def get_objects(self, path_to):
path_dict = path_to.split('/')
package = path_dict[0]
version = path_dict[1]
return dict(package=package,
version=version)
<commit_msg>Add version handling in SummaryView for the patches BP<commit_after># Copyright (C) 2015 The Debsources developers <info@sources.debian.net>.
# See the AUTHORS file at the top-level directory of this distribution and at
# https://anonscm.debian.org/gitweb/?p=qa/debsources.git;a=blob;f=AUTHORS;hb=HEAD
#
# This file is part of Debsources. Debsources is free software: you can
# redistribute it and/or modify it under the terms of the GNU Affero General
# Public License as published by the Free Software Foundation, either version 3
# of the License, or (at your option) any later version. For more information
# see the COPYING file at the top-level directory of this distribution and at
# https://anonscm.debian.org/gitweb/?p=qa/debsources.git;a=blob;f=COPYING;hb=HEAD
from __future__ import absolute_import
from flask import request
from ..views import GeneralView
class SummaryView(GeneralView):
def get_objects(self, path_to):
path_dict = path_to.split('/')
package = path_dict[0]
version = path_dict[1]
path = '/'.join(path_dict[2:])
if version == "latest": # we search the latest available version
return self._handle_latest_version(request.endpoint,
package, path)
versions = self.handle_versions(version, package, path)
if versions:
redirect_url_parts = [package, versions[-1]]
if path:
redirect_url_parts.append(path)
redirect_url = '/'.join(redirect_url_parts)
return self._redirect_to_url(request.endpoint,
redirect_url, redirect_code=302)
return dict(package=package,
version=version)
|
84ff58c997c163f9e3566245beb9af308a77b880
|
spyder_terminal/server/rest/term_rest.py
|
spyder_terminal/server/rest/term_rest.py
|
# -*- coding: iso-8859-15 -*-
"""Main HTTP routes request handlers."""
import tornado.web
import tornado.escape
from os import getcwd
class MainHandler(tornado.web.RequestHandler):
"""Handles creation of new terminals."""
@tornado.gen.coroutine
def post(self):
"""POST verb: Create a new terminal."""
rows = int(self.get_argument('rows', None, 23))
cols = int(self.get_argument('cols', None, 73))
cwd = self.get_cookie('cwd', default=getcwd())
self.application.logger.info('CWD: {0}'.format(cwd))
pid = yield self.application.term_manager.create_term(rows, cols, cwd)
self.write(pid)
class ResizeHandler(tornado.web.RequestHandler):
"""Handles resizing of terminals."""
@tornado.gen.coroutine
def post(self, pid):
"""POST verb: Resize a terminal."""
rows = int(self.get_argument('rows', None, 23))
cols = int(self.get_argument('cols', None, 73))
self.application.term_manager.resize_term(pid, rows, cols)
|
# -*- coding: iso-8859-15 -*-
"""Main HTTP routes request handlers."""
import tornado.web
import tornado.escape
from os import getcwd
class MainHandler(tornado.web.RequestHandler):
"""Handles creation of new terminals."""
@tornado.gen.coroutine
def post(self):
"""POST verb: Create a new terminal."""
rows = int(self.get_argument('rows', default=23))
cols = int(self.get_argument('cols', default=73))
cwd = self.get_cookie('cwd', default=getcwd())
self.application.logger.info('CWD: {0}'.format(cwd))
self.application.logger.info('Size: ({0}, {1})'.format(cols, rows))
pid = yield self.application.term_manager.create_term(rows, cols, cwd)
self.write(pid)
class ResizeHandler(tornado.web.RequestHandler):
"""Handles resizing of terminals."""
@tornado.gen.coroutine
def post(self, pid):
"""POST verb: Resize a terminal."""
rows = int(self.get_argument('rows', None, 23))
cols = int(self.get_argument('cols', None, 73))
self.application.term_manager.resize_term(pid, rows, cols)
|
Change default terminal size arguments - Add server debug log message
|
Change default terminal size arguments - Add server debug log message
|
Python
|
mit
|
spyder-ide/spyder-terminal,spyder-ide/spyder-terminal,spyder-ide/spyder-terminal,spyder-ide/spyder-terminal,andfoy/spyder-terminal,andfoy/spyder-terminal,andfoy/spyder-terminal
|
# -*- coding: iso-8859-15 -*-
"""Main HTTP routes request handlers."""
import tornado.web
import tornado.escape
from os import getcwd
class MainHandler(tornado.web.RequestHandler):
"""Handles creation of new terminals."""
@tornado.gen.coroutine
def post(self):
"""POST verb: Create a new terminal."""
rows = int(self.get_argument('rows', None, 23))
cols = int(self.get_argument('cols', None, 73))
cwd = self.get_cookie('cwd', default=getcwd())
self.application.logger.info('CWD: {0}'.format(cwd))
pid = yield self.application.term_manager.create_term(rows, cols, cwd)
self.write(pid)
class ResizeHandler(tornado.web.RequestHandler):
"""Handles resizing of terminals."""
@tornado.gen.coroutine
def post(self, pid):
"""POST verb: Resize a terminal."""
rows = int(self.get_argument('rows', None, 23))
cols = int(self.get_argument('cols', None, 73))
self.application.term_manager.resize_term(pid, rows, cols)
Change default terminal size arguments - Add server debug log message
|
# -*- coding: iso-8859-15 -*-
"""Main HTTP routes request handlers."""
import tornado.web
import tornado.escape
from os import getcwd
class MainHandler(tornado.web.RequestHandler):
"""Handles creation of new terminals."""
@tornado.gen.coroutine
def post(self):
"""POST verb: Create a new terminal."""
rows = int(self.get_argument('rows', default=23))
cols = int(self.get_argument('cols', default=73))
cwd = self.get_cookie('cwd', default=getcwd())
self.application.logger.info('CWD: {0}'.format(cwd))
self.application.logger.info('Size: ({0}, {1})'.format(cols, rows))
pid = yield self.application.term_manager.create_term(rows, cols, cwd)
self.write(pid)
class ResizeHandler(tornado.web.RequestHandler):
"""Handles resizing of terminals."""
@tornado.gen.coroutine
def post(self, pid):
"""POST verb: Resize a terminal."""
rows = int(self.get_argument('rows', None, 23))
cols = int(self.get_argument('cols', None, 73))
self.application.term_manager.resize_term(pid, rows, cols)
|
<commit_before># -*- coding: iso-8859-15 -*-
"""Main HTTP routes request handlers."""
import tornado.web
import tornado.escape
from os import getcwd
class MainHandler(tornado.web.RequestHandler):
"""Handles creation of new terminals."""
@tornado.gen.coroutine
def post(self):
"""POST verb: Create a new terminal."""
rows = int(self.get_argument('rows', None, 23))
cols = int(self.get_argument('cols', None, 73))
cwd = self.get_cookie('cwd', default=getcwd())
self.application.logger.info('CWD: {0}'.format(cwd))
pid = yield self.application.term_manager.create_term(rows, cols, cwd)
self.write(pid)
class ResizeHandler(tornado.web.RequestHandler):
"""Handles resizing of terminals."""
@tornado.gen.coroutine
def post(self, pid):
"""POST verb: Resize a terminal."""
rows = int(self.get_argument('rows', None, 23))
cols = int(self.get_argument('cols', None, 73))
self.application.term_manager.resize_term(pid, rows, cols)
<commit_msg>Change default terminal size arguments - Add server debug log message<commit_after>
|
# -*- coding: iso-8859-15 -*-
"""Main HTTP routes request handlers."""
import tornado.web
import tornado.escape
from os import getcwd
class MainHandler(tornado.web.RequestHandler):
"""Handles creation of new terminals."""
@tornado.gen.coroutine
def post(self):
"""POST verb: Create a new terminal."""
rows = int(self.get_argument('rows', default=23))
cols = int(self.get_argument('cols', default=73))
cwd = self.get_cookie('cwd', default=getcwd())
self.application.logger.info('CWD: {0}'.format(cwd))
self.application.logger.info('Size: ({0}, {1})'.format(cols, rows))
pid = yield self.application.term_manager.create_term(rows, cols, cwd)
self.write(pid)
class ResizeHandler(tornado.web.RequestHandler):
"""Handles resizing of terminals."""
@tornado.gen.coroutine
def post(self, pid):
"""POST verb: Resize a terminal."""
rows = int(self.get_argument('rows', None, 23))
cols = int(self.get_argument('cols', None, 73))
self.application.term_manager.resize_term(pid, rows, cols)
|
# -*- coding: iso-8859-15 -*-
"""Main HTTP routes request handlers."""
import tornado.web
import tornado.escape
from os import getcwd
class MainHandler(tornado.web.RequestHandler):
"""Handles creation of new terminals."""
@tornado.gen.coroutine
def post(self):
"""POST verb: Create a new terminal."""
rows = int(self.get_argument('rows', None, 23))
cols = int(self.get_argument('cols', None, 73))
cwd = self.get_cookie('cwd', default=getcwd())
self.application.logger.info('CWD: {0}'.format(cwd))
pid = yield self.application.term_manager.create_term(rows, cols, cwd)
self.write(pid)
class ResizeHandler(tornado.web.RequestHandler):
"""Handles resizing of terminals."""
@tornado.gen.coroutine
def post(self, pid):
"""POST verb: Resize a terminal."""
rows = int(self.get_argument('rows', None, 23))
cols = int(self.get_argument('cols', None, 73))
self.application.term_manager.resize_term(pid, rows, cols)
Change default terminal size arguments - Add server debug log message# -*- coding: iso-8859-15 -*-
"""Main HTTP routes request handlers."""
import tornado.web
import tornado.escape
from os import getcwd
class MainHandler(tornado.web.RequestHandler):
"""Handles creation of new terminals."""
@tornado.gen.coroutine
def post(self):
"""POST verb: Create a new terminal."""
rows = int(self.get_argument('rows', default=23))
cols = int(self.get_argument('cols', default=73))
cwd = self.get_cookie('cwd', default=getcwd())
self.application.logger.info('CWD: {0}'.format(cwd))
self.application.logger.info('Size: ({0}, {1})'.format(cols, rows))
pid = yield self.application.term_manager.create_term(rows, cols, cwd)
self.write(pid)
class ResizeHandler(tornado.web.RequestHandler):
"""Handles resizing of terminals."""
@tornado.gen.coroutine
def post(self, pid):
"""POST verb: Resize a terminal."""
rows = int(self.get_argument('rows', None, 23))
cols = int(self.get_argument('cols', None, 73))
self.application.term_manager.resize_term(pid, rows, cols)
|
<commit_before># -*- coding: iso-8859-15 -*-
"""Main HTTP routes request handlers."""
import tornado.web
import tornado.escape
from os import getcwd
class MainHandler(tornado.web.RequestHandler):
"""Handles creation of new terminals."""
@tornado.gen.coroutine
def post(self):
"""POST verb: Create a new terminal."""
rows = int(self.get_argument('rows', None, 23))
cols = int(self.get_argument('cols', None, 73))
cwd = self.get_cookie('cwd', default=getcwd())
self.application.logger.info('CWD: {0}'.format(cwd))
pid = yield self.application.term_manager.create_term(rows, cols, cwd)
self.write(pid)
class ResizeHandler(tornado.web.RequestHandler):
"""Handles resizing of terminals."""
@tornado.gen.coroutine
def post(self, pid):
"""POST verb: Resize a terminal."""
rows = int(self.get_argument('rows', None, 23))
cols = int(self.get_argument('cols', None, 73))
self.application.term_manager.resize_term(pid, rows, cols)
<commit_msg>Change default terminal size arguments - Add server debug log message<commit_after># -*- coding: iso-8859-15 -*-
"""Main HTTP routes request handlers."""
import tornado.web
import tornado.escape
from os import getcwd
class MainHandler(tornado.web.RequestHandler):
"""Handles creation of new terminals."""
@tornado.gen.coroutine
def post(self):
"""POST verb: Create a new terminal."""
rows = int(self.get_argument('rows', default=23))
cols = int(self.get_argument('cols', default=73))
cwd = self.get_cookie('cwd', default=getcwd())
self.application.logger.info('CWD: {0}'.format(cwd))
self.application.logger.info('Size: ({0}, {1})'.format(cols, rows))
pid = yield self.application.term_manager.create_term(rows, cols, cwd)
self.write(pid)
class ResizeHandler(tornado.web.RequestHandler):
"""Handles resizing of terminals."""
@tornado.gen.coroutine
def post(self, pid):
"""POST verb: Resize a terminal."""
rows = int(self.get_argument('rows', None, 23))
cols = int(self.get_argument('cols', None, 73))
self.application.term_manager.resize_term(pid, rows, cols)
|
0e69efcd3a6992d0a34d7ecb80a76c3fbc52975c
|
pikka_bird_collector/sender.py
|
pikka_bird_collector/sender.py
|
import datetime
import logging
import json
import urllib.parse
import requests
class Sender():
SERVER_SERVICES = {
'collections': '/collections'}
REQUEST_HEADERS = {
'Content-Type': 'application/json'}
def __init__(self, server_uri, logger=None):
self.server_uri = server_uri
self.logger = logger or logging.getLogger()
def send(self, collection):
url = self.__service_url('collections')
data = json.dumps(collection)
t_0 = datetime.datetime.utcnow()
self.logger.info("SENDING %s (%d b)" % (url, (len(data.encode('utf-8')))))
try:
r = requests.post(url, data=data, headers=self.REQUEST_HEADERS)
r.raise_for_status()
logger = self.logger.info
except requests.exceptions.HTTPError:
logger = self.logger.error
t = datetime.datetime.utcnow()
logger("SENT %d %s (%s s)" % (r.status_code, r.text, (t - t_0).seconds))
def __service_url(self, service):
service_path = self.SERVER_SERVICES[service]
return urllib.parse.urljoin(self.server_uri, service_path)
|
import datetime
import logging
import json
import urllib.parse
import requests
class Sender():
SERVER_SERVICES = {
'collections': '/collections'}
REQUEST_HEADERS = {
'Content-Type': 'application/json'}
def __init__(self, server_uri, logger=None):
self.server_uri = server_uri
self.logger = logger or logging.getLogger()
def send(self, collection):
url = self.__service_url('collections')
data = json.dumps(collection)
t_0 = datetime.datetime.utcnow()
self.logger.info("SENDING %s (%d b)" % (url, (len(data.encode('utf-8')))))
try:
r = requests.post(url, data=data, headers=self.REQUEST_HEADERS)
r.raise_for_status()
status = True
except requests.exceptions.HTTPError:
status = False
except requests.exceptions.ConnectionError:
self.logger.error("CONNECTION FAILED")
return False
logger = self.logger.info if status else self.logger.error
try:
text = r.text
except ValueError:
text = None
t = datetime.datetime.utcnow()
logger("SENT %d %s (%s s)" % (r.status_code, text, (t - t_0).seconds))
return status
def __service_url(self, service):
service_path = self.SERVER_SERVICES[service]
return urllib.parse.urljoin(self.server_uri, service_path)
|
Extend Sender.send() to return boolean status.
|
Extend Sender.send() to return boolean status.
|
Python
|
mit
|
tiredpixel/pikka-bird-collector-py
|
import datetime
import logging
import json
import urllib.parse
import requests
class Sender():
SERVER_SERVICES = {
'collections': '/collections'}
REQUEST_HEADERS = {
'Content-Type': 'application/json'}
def __init__(self, server_uri, logger=None):
self.server_uri = server_uri
self.logger = logger or logging.getLogger()
def send(self, collection):
url = self.__service_url('collections')
data = json.dumps(collection)
t_0 = datetime.datetime.utcnow()
self.logger.info("SENDING %s (%d b)" % (url, (len(data.encode('utf-8')))))
try:
r = requests.post(url, data=data, headers=self.REQUEST_HEADERS)
r.raise_for_status()
logger = self.logger.info
except requests.exceptions.HTTPError:
logger = self.logger.error
t = datetime.datetime.utcnow()
logger("SENT %d %s (%s s)" % (r.status_code, r.text, (t - t_0).seconds))
def __service_url(self, service):
service_path = self.SERVER_SERVICES[service]
return urllib.parse.urljoin(self.server_uri, service_path)
Extend Sender.send() to return boolean status.
|
import datetime
import logging
import json
import urllib.parse
import requests
class Sender():
SERVER_SERVICES = {
'collections': '/collections'}
REQUEST_HEADERS = {
'Content-Type': 'application/json'}
def __init__(self, server_uri, logger=None):
self.server_uri = server_uri
self.logger = logger or logging.getLogger()
def send(self, collection):
url = self.__service_url('collections')
data = json.dumps(collection)
t_0 = datetime.datetime.utcnow()
self.logger.info("SENDING %s (%d b)" % (url, (len(data.encode('utf-8')))))
try:
r = requests.post(url, data=data, headers=self.REQUEST_HEADERS)
r.raise_for_status()
status = True
except requests.exceptions.HTTPError:
status = False
except requests.exceptions.ConnectionError:
self.logger.error("CONNECTION FAILED")
return False
logger = self.logger.info if status else self.logger.error
try:
text = r.text
except ValueError:
text = None
t = datetime.datetime.utcnow()
logger("SENT %d %s (%s s)" % (r.status_code, text, (t - t_0).seconds))
return status
def __service_url(self, service):
service_path = self.SERVER_SERVICES[service]
return urllib.parse.urljoin(self.server_uri, service_path)
|
<commit_before>import datetime
import logging
import json
import urllib.parse
import requests
class Sender():
SERVER_SERVICES = {
'collections': '/collections'}
REQUEST_HEADERS = {
'Content-Type': 'application/json'}
def __init__(self, server_uri, logger=None):
self.server_uri = server_uri
self.logger = logger or logging.getLogger()
def send(self, collection):
url = self.__service_url('collections')
data = json.dumps(collection)
t_0 = datetime.datetime.utcnow()
self.logger.info("SENDING %s (%d b)" % (url, (len(data.encode('utf-8')))))
try:
r = requests.post(url, data=data, headers=self.REQUEST_HEADERS)
r.raise_for_status()
logger = self.logger.info
except requests.exceptions.HTTPError:
logger = self.logger.error
t = datetime.datetime.utcnow()
logger("SENT %d %s (%s s)" % (r.status_code, r.text, (t - t_0).seconds))
def __service_url(self, service):
service_path = self.SERVER_SERVICES[service]
return urllib.parse.urljoin(self.server_uri, service_path)
<commit_msg>Extend Sender.send() to return boolean status.<commit_after>
|
import datetime
import logging
import json
import urllib.parse
import requests
class Sender():
SERVER_SERVICES = {
'collections': '/collections'}
REQUEST_HEADERS = {
'Content-Type': 'application/json'}
def __init__(self, server_uri, logger=None):
self.server_uri = server_uri
self.logger = logger or logging.getLogger()
def send(self, collection):
url = self.__service_url('collections')
data = json.dumps(collection)
t_0 = datetime.datetime.utcnow()
self.logger.info("SENDING %s (%d b)" % (url, (len(data.encode('utf-8')))))
try:
r = requests.post(url, data=data, headers=self.REQUEST_HEADERS)
r.raise_for_status()
status = True
except requests.exceptions.HTTPError:
status = False
except requests.exceptions.ConnectionError:
self.logger.error("CONNECTION FAILED")
return False
logger = self.logger.info if status else self.logger.error
try:
text = r.text
except ValueError:
text = None
t = datetime.datetime.utcnow()
logger("SENT %d %s (%s s)" % (r.status_code, text, (t - t_0).seconds))
return status
def __service_url(self, service):
service_path = self.SERVER_SERVICES[service]
return urllib.parse.urljoin(self.server_uri, service_path)
|
import datetime
import logging
import json
import urllib.parse
import requests
class Sender():
SERVER_SERVICES = {
'collections': '/collections'}
REQUEST_HEADERS = {
'Content-Type': 'application/json'}
def __init__(self, server_uri, logger=None):
self.server_uri = server_uri
self.logger = logger or logging.getLogger()
def send(self, collection):
url = self.__service_url('collections')
data = json.dumps(collection)
t_0 = datetime.datetime.utcnow()
self.logger.info("SENDING %s (%d b)" % (url, (len(data.encode('utf-8')))))
try:
r = requests.post(url, data=data, headers=self.REQUEST_HEADERS)
r.raise_for_status()
logger = self.logger.info
except requests.exceptions.HTTPError:
logger = self.logger.error
t = datetime.datetime.utcnow()
logger("SENT %d %s (%s s)" % (r.status_code, r.text, (t - t_0).seconds))
def __service_url(self, service):
service_path = self.SERVER_SERVICES[service]
return urllib.parse.urljoin(self.server_uri, service_path)
Extend Sender.send() to return boolean status.import datetime
import logging
import json
import urllib.parse
import requests
class Sender():
SERVER_SERVICES = {
'collections': '/collections'}
REQUEST_HEADERS = {
'Content-Type': 'application/json'}
def __init__(self, server_uri, logger=None):
self.server_uri = server_uri
self.logger = logger or logging.getLogger()
def send(self, collection):
url = self.__service_url('collections')
data = json.dumps(collection)
t_0 = datetime.datetime.utcnow()
self.logger.info("SENDING %s (%d b)" % (url, (len(data.encode('utf-8')))))
try:
r = requests.post(url, data=data, headers=self.REQUEST_HEADERS)
r.raise_for_status()
status = True
except requests.exceptions.HTTPError:
status = False
except requests.exceptions.ConnectionError:
self.logger.error("CONNECTION FAILED")
return False
logger = self.logger.info if status else self.logger.error
try:
text = r.text
except ValueError:
text = None
t = datetime.datetime.utcnow()
logger("SENT %d %s (%s s)" % (r.status_code, text, (t - t_0).seconds))
return status
def __service_url(self, service):
service_path = self.SERVER_SERVICES[service]
return urllib.parse.urljoin(self.server_uri, service_path)
|
<commit_before>import datetime
import logging
import json
import urllib.parse
import requests
class Sender():
SERVER_SERVICES = {
'collections': '/collections'}
REQUEST_HEADERS = {
'Content-Type': 'application/json'}
def __init__(self, server_uri, logger=None):
self.server_uri = server_uri
self.logger = logger or logging.getLogger()
def send(self, collection):
url = self.__service_url('collections')
data = json.dumps(collection)
t_0 = datetime.datetime.utcnow()
self.logger.info("SENDING %s (%d b)" % (url, (len(data.encode('utf-8')))))
try:
r = requests.post(url, data=data, headers=self.REQUEST_HEADERS)
r.raise_for_status()
logger = self.logger.info
except requests.exceptions.HTTPError:
logger = self.logger.error
t = datetime.datetime.utcnow()
logger("SENT %d %s (%s s)" % (r.status_code, r.text, (t - t_0).seconds))
def __service_url(self, service):
service_path = self.SERVER_SERVICES[service]
return urllib.parse.urljoin(self.server_uri, service_path)
<commit_msg>Extend Sender.send() to return boolean status.<commit_after>import datetime
import logging
import json
import urllib.parse
import requests
class Sender():
SERVER_SERVICES = {
'collections': '/collections'}
REQUEST_HEADERS = {
'Content-Type': 'application/json'}
def __init__(self, server_uri, logger=None):
self.server_uri = server_uri
self.logger = logger or logging.getLogger()
def send(self, collection):
url = self.__service_url('collections')
data = json.dumps(collection)
t_0 = datetime.datetime.utcnow()
self.logger.info("SENDING %s (%d b)" % (url, (len(data.encode('utf-8')))))
try:
r = requests.post(url, data=data, headers=self.REQUEST_HEADERS)
r.raise_for_status()
status = True
except requests.exceptions.HTTPError:
status = False
except requests.exceptions.ConnectionError:
self.logger.error("CONNECTION FAILED")
return False
logger = self.logger.info if status else self.logger.error
try:
text = r.text
except ValueError:
text = None
t = datetime.datetime.utcnow()
logger("SENT %d %s (%s s)" % (r.status_code, text, (t - t_0).seconds))
return status
def __service_url(self, service):
service_path = self.SERVER_SERVICES[service]
return urllib.parse.urljoin(self.server_uri, service_path)
|
89cb0d1558e02a72048047709d9960a1f7ee265e
|
src/waldur_mastermind/marketplace_checklist/import_export_resources.py
|
src/waldur_mastermind/marketplace_checklist/import_export_resources.py
|
import tablib
from import_export import fields, resources, widgets
from . import models
CategoryResource = resources.modelresource_factory(models.Category)
QuestionResource = resources.modelresource_factory(models.Question)
class ChecklistResource(resources.ModelResource):
questions = fields.Field(column_name='questions', widget=widgets.JSONWidget(),)
category_data = fields.Field(
column_name='category_data', widget=widgets.JSONWidget()
)
def before_import_row(self, row, **kwargs):
if row.get('category_data'):
dataset = tablib.Dataset().load(row.get('category_data'), 'json')
CategoryResource().import_data(dataset)
def dehydrate_category_data(self, checklist):
if checklist.category:
dataset = CategoryResource().export(
queryset=models.Category.objects.filter(pk=checklist.category.pk)
)
return dataset.json
def dehydrate_questions(self, checklist):
dataset = QuestionResource().export(queryset=checklist.questions.all())
return dataset.json
def save_m2m(self, instance, row, using_transactions, dry_run):
super().save_m2m(instance, row, using_transactions, dry_run)
if row.get('questions'):
dataset = tablib.Dataset().load(row.get('questions'), 'json')
result = QuestionResource().import_data(dataset)
result
class Meta:
exclude = ('created', 'modified', 'uuid', 'customers')
ChecklistResource = resources.modelresource_factory(models.Checklist, ChecklistResource)
|
import tablib
from import_export import fields, resources, widgets
from . import models
CategoryResource = resources.modelresource_factory(models.Category)
QuestionResource = resources.modelresource_factory(models.Question)
class ChecklistResource(resources.ModelResource):
questions = fields.Field(column_name='questions', widget=widgets.JSONWidget(),)
category_data = fields.Field(
column_name='category_data', widget=widgets.JSONWidget()
)
def before_import_row(self, row, **kwargs):
if row.get('category_data'):
dataset = tablib.Dataset().load(row.get('category_data'), 'json')
CategoryResource().import_data(dataset)
def dehydrate_category_data(self, checklist):
if checklist.category:
dataset = CategoryResource().export(
queryset=models.Category.objects.filter(pk=checklist.category.pk)
)
return dataset.json
def dehydrate_questions(self, checklist):
dataset = QuestionResource().export(queryset=checklist.questions.all())
return dataset.json
def save_m2m(self, instance, row, using_transactions, dry_run):
super().save_m2m(instance, row, using_transactions, dry_run)
if row.get('questions'):
dataset = tablib.Dataset().load(row.get('questions'), 'json')
QuestionResource().import_data(dataset)
class Meta:
exclude = ('created', 'modified', 'uuid', 'customers')
ChecklistResource = resources.modelresource_factory(models.Checklist, ChecklistResource)
|
Remove statement without side effect.
|
Remove statement without side effect.
|
Python
|
mit
|
opennode/nodeconductor-assembly-waldur,opennode/waldur-mastermind,opennode/nodeconductor-assembly-waldur,opennode/waldur-mastermind,opennode/nodeconductor-assembly-waldur,opennode/waldur-mastermind,opennode/waldur-mastermind
|
import tablib
from import_export import fields, resources, widgets
from . import models
CategoryResource = resources.modelresource_factory(models.Category)
QuestionResource = resources.modelresource_factory(models.Question)
class ChecklistResource(resources.ModelResource):
questions = fields.Field(column_name='questions', widget=widgets.JSONWidget(),)
category_data = fields.Field(
column_name='category_data', widget=widgets.JSONWidget()
)
def before_import_row(self, row, **kwargs):
if row.get('category_data'):
dataset = tablib.Dataset().load(row.get('category_data'), 'json')
CategoryResource().import_data(dataset)
def dehydrate_category_data(self, checklist):
if checklist.category:
dataset = CategoryResource().export(
queryset=models.Category.objects.filter(pk=checklist.category.pk)
)
return dataset.json
def dehydrate_questions(self, checklist):
dataset = QuestionResource().export(queryset=checklist.questions.all())
return dataset.json
def save_m2m(self, instance, row, using_transactions, dry_run):
super().save_m2m(instance, row, using_transactions, dry_run)
if row.get('questions'):
dataset = tablib.Dataset().load(row.get('questions'), 'json')
result = QuestionResource().import_data(dataset)
result
class Meta:
exclude = ('created', 'modified', 'uuid', 'customers')
ChecklistResource = resources.modelresource_factory(models.Checklist, ChecklistResource)
Remove statement without side effect.
|
import tablib
from import_export import fields, resources, widgets
from . import models
CategoryResource = resources.modelresource_factory(models.Category)
QuestionResource = resources.modelresource_factory(models.Question)
class ChecklistResource(resources.ModelResource):
questions = fields.Field(column_name='questions', widget=widgets.JSONWidget(),)
category_data = fields.Field(
column_name='category_data', widget=widgets.JSONWidget()
)
def before_import_row(self, row, **kwargs):
if row.get('category_data'):
dataset = tablib.Dataset().load(row.get('category_data'), 'json')
CategoryResource().import_data(dataset)
def dehydrate_category_data(self, checklist):
if checklist.category:
dataset = CategoryResource().export(
queryset=models.Category.objects.filter(pk=checklist.category.pk)
)
return dataset.json
def dehydrate_questions(self, checklist):
dataset = QuestionResource().export(queryset=checklist.questions.all())
return dataset.json
def save_m2m(self, instance, row, using_transactions, dry_run):
super().save_m2m(instance, row, using_transactions, dry_run)
if row.get('questions'):
dataset = tablib.Dataset().load(row.get('questions'), 'json')
QuestionResource().import_data(dataset)
class Meta:
exclude = ('created', 'modified', 'uuid', 'customers')
ChecklistResource = resources.modelresource_factory(models.Checklist, ChecklistResource)
|
<commit_before>import tablib
from import_export import fields, resources, widgets
from . import models
CategoryResource = resources.modelresource_factory(models.Category)
QuestionResource = resources.modelresource_factory(models.Question)
class ChecklistResource(resources.ModelResource):
questions = fields.Field(column_name='questions', widget=widgets.JSONWidget(),)
category_data = fields.Field(
column_name='category_data', widget=widgets.JSONWidget()
)
def before_import_row(self, row, **kwargs):
if row.get('category_data'):
dataset = tablib.Dataset().load(row.get('category_data'), 'json')
CategoryResource().import_data(dataset)
def dehydrate_category_data(self, checklist):
if checklist.category:
dataset = CategoryResource().export(
queryset=models.Category.objects.filter(pk=checklist.category.pk)
)
return dataset.json
def dehydrate_questions(self, checklist):
dataset = QuestionResource().export(queryset=checklist.questions.all())
return dataset.json
def save_m2m(self, instance, row, using_transactions, dry_run):
super().save_m2m(instance, row, using_transactions, dry_run)
if row.get('questions'):
dataset = tablib.Dataset().load(row.get('questions'), 'json')
result = QuestionResource().import_data(dataset)
result
class Meta:
exclude = ('created', 'modified', 'uuid', 'customers')
ChecklistResource = resources.modelresource_factory(models.Checklist, ChecklistResource)
<commit_msg>Remove statement without side effect.<commit_after>
|
import tablib
from import_export import fields, resources, widgets
from . import models
CategoryResource = resources.modelresource_factory(models.Category)
QuestionResource = resources.modelresource_factory(models.Question)
class ChecklistResource(resources.ModelResource):
questions = fields.Field(column_name='questions', widget=widgets.JSONWidget(),)
category_data = fields.Field(
column_name='category_data', widget=widgets.JSONWidget()
)
def before_import_row(self, row, **kwargs):
if row.get('category_data'):
dataset = tablib.Dataset().load(row.get('category_data'), 'json')
CategoryResource().import_data(dataset)
def dehydrate_category_data(self, checklist):
if checklist.category:
dataset = CategoryResource().export(
queryset=models.Category.objects.filter(pk=checklist.category.pk)
)
return dataset.json
def dehydrate_questions(self, checklist):
dataset = QuestionResource().export(queryset=checklist.questions.all())
return dataset.json
def save_m2m(self, instance, row, using_transactions, dry_run):
super().save_m2m(instance, row, using_transactions, dry_run)
if row.get('questions'):
dataset = tablib.Dataset().load(row.get('questions'), 'json')
QuestionResource().import_data(dataset)
class Meta:
exclude = ('created', 'modified', 'uuid', 'customers')
ChecklistResource = resources.modelresource_factory(models.Checklist, ChecklistResource)
|
import tablib
from import_export import fields, resources, widgets
from . import models
CategoryResource = resources.modelresource_factory(models.Category)
QuestionResource = resources.modelresource_factory(models.Question)
class ChecklistResource(resources.ModelResource):
questions = fields.Field(column_name='questions', widget=widgets.JSONWidget(),)
category_data = fields.Field(
column_name='category_data', widget=widgets.JSONWidget()
)
def before_import_row(self, row, **kwargs):
if row.get('category_data'):
dataset = tablib.Dataset().load(row.get('category_data'), 'json')
CategoryResource().import_data(dataset)
def dehydrate_category_data(self, checklist):
if checklist.category:
dataset = CategoryResource().export(
queryset=models.Category.objects.filter(pk=checklist.category.pk)
)
return dataset.json
def dehydrate_questions(self, checklist):
dataset = QuestionResource().export(queryset=checklist.questions.all())
return dataset.json
def save_m2m(self, instance, row, using_transactions, dry_run):
super().save_m2m(instance, row, using_transactions, dry_run)
if row.get('questions'):
dataset = tablib.Dataset().load(row.get('questions'), 'json')
result = QuestionResource().import_data(dataset)
result
class Meta:
exclude = ('created', 'modified', 'uuid', 'customers')
ChecklistResource = resources.modelresource_factory(models.Checklist, ChecklistResource)
Remove statement without side effect.import tablib
from import_export import fields, resources, widgets
from . import models
CategoryResource = resources.modelresource_factory(models.Category)
QuestionResource = resources.modelresource_factory(models.Question)
class ChecklistResource(resources.ModelResource):
questions = fields.Field(column_name='questions', widget=widgets.JSONWidget(),)
category_data = fields.Field(
column_name='category_data', widget=widgets.JSONWidget()
)
def before_import_row(self, row, **kwargs):
if row.get('category_data'):
dataset = tablib.Dataset().load(row.get('category_data'), 'json')
CategoryResource().import_data(dataset)
def dehydrate_category_data(self, checklist):
if checklist.category:
dataset = CategoryResource().export(
queryset=models.Category.objects.filter(pk=checklist.category.pk)
)
return dataset.json
def dehydrate_questions(self, checklist):
dataset = QuestionResource().export(queryset=checklist.questions.all())
return dataset.json
def save_m2m(self, instance, row, using_transactions, dry_run):
super().save_m2m(instance, row, using_transactions, dry_run)
if row.get('questions'):
dataset = tablib.Dataset().load(row.get('questions'), 'json')
QuestionResource().import_data(dataset)
class Meta:
exclude = ('created', 'modified', 'uuid', 'customers')
ChecklistResource = resources.modelresource_factory(models.Checklist, ChecklistResource)
|
<commit_before>import tablib
from import_export import fields, resources, widgets
from . import models
CategoryResource = resources.modelresource_factory(models.Category)
QuestionResource = resources.modelresource_factory(models.Question)
class ChecklistResource(resources.ModelResource):
questions = fields.Field(column_name='questions', widget=widgets.JSONWidget(),)
category_data = fields.Field(
column_name='category_data', widget=widgets.JSONWidget()
)
def before_import_row(self, row, **kwargs):
if row.get('category_data'):
dataset = tablib.Dataset().load(row.get('category_data'), 'json')
CategoryResource().import_data(dataset)
def dehydrate_category_data(self, checklist):
if checklist.category:
dataset = CategoryResource().export(
queryset=models.Category.objects.filter(pk=checklist.category.pk)
)
return dataset.json
def dehydrate_questions(self, checklist):
dataset = QuestionResource().export(queryset=checklist.questions.all())
return dataset.json
def save_m2m(self, instance, row, using_transactions, dry_run):
super().save_m2m(instance, row, using_transactions, dry_run)
if row.get('questions'):
dataset = tablib.Dataset().load(row.get('questions'), 'json')
result = QuestionResource().import_data(dataset)
result
class Meta:
exclude = ('created', 'modified', 'uuid', 'customers')
ChecklistResource = resources.modelresource_factory(models.Checklist, ChecklistResource)
<commit_msg>Remove statement without side effect.<commit_after>import tablib
from import_export import fields, resources, widgets
from . import models
CategoryResource = resources.modelresource_factory(models.Category)
QuestionResource = resources.modelresource_factory(models.Question)
class ChecklistResource(resources.ModelResource):
questions = fields.Field(column_name='questions', widget=widgets.JSONWidget(),)
category_data = fields.Field(
column_name='category_data', widget=widgets.JSONWidget()
)
def before_import_row(self, row, **kwargs):
if row.get('category_data'):
dataset = tablib.Dataset().load(row.get('category_data'), 'json')
CategoryResource().import_data(dataset)
def dehydrate_category_data(self, checklist):
if checklist.category:
dataset = CategoryResource().export(
queryset=models.Category.objects.filter(pk=checklist.category.pk)
)
return dataset.json
def dehydrate_questions(self, checklist):
dataset = QuestionResource().export(queryset=checklist.questions.all())
return dataset.json
def save_m2m(self, instance, row, using_transactions, dry_run):
super().save_m2m(instance, row, using_transactions, dry_run)
if row.get('questions'):
dataset = tablib.Dataset().load(row.get('questions'), 'json')
QuestionResource().import_data(dataset)
class Meta:
exclude = ('created', 'modified', 'uuid', 'customers')
ChecklistResource = resources.modelresource_factory(models.Checklist, ChecklistResource)
|
5d463f5823baad3ea485a54719a5799d14f10a27
|
lda/__init__.py
|
lda/__init__.py
|
from __future__ import absolute_import, unicode_literals # noqa
import logging
import pbr.version
from lda.lda import LDA # noqa
__version__ = pbr.version.VersionInfo('lda').version_string()
logging.getLogger('lda').addHandler(logging.NullHandler())
|
from __future__ import absolute_import, unicode_literals # noqa
import logging
import pbr.version
from lda.lda import LDA # noqa
import lda.datasets # noqa
__version__ = pbr.version.VersionInfo('lda').version_string()
logging.getLogger('lda').addHandler(logging.NullHandler())
|
Make lda.datasets available after import lda
|
Make lda.datasets available after import lda
|
Python
|
mpl-2.0
|
hothHowler/lda,ww880412/lda,ww880412/lda,ariddell/lda,tdhopper/lda-1,tdhopper/lda-1,ariddell/lda-debian,ww880412/lda,tdhopper/lda-1,ariddell/lda,hothHowler/lda,ariddell/lda-debian,ariddell/lda,hothHowler/lda,ariddell/lda-debian
|
from __future__ import absolute_import, unicode_literals # noqa
import logging
import pbr.version
from lda.lda import LDA # noqa
__version__ = pbr.version.VersionInfo('lda').version_string()
logging.getLogger('lda').addHandler(logging.NullHandler())
Make lda.datasets available after import lda
|
from __future__ import absolute_import, unicode_literals # noqa
import logging
import pbr.version
from lda.lda import LDA # noqa
import lda.datasets # noqa
__version__ = pbr.version.VersionInfo('lda').version_string()
logging.getLogger('lda').addHandler(logging.NullHandler())
|
<commit_before>from __future__ import absolute_import, unicode_literals # noqa
import logging
import pbr.version
from lda.lda import LDA # noqa
__version__ = pbr.version.VersionInfo('lda').version_string()
logging.getLogger('lda').addHandler(logging.NullHandler())
<commit_msg>Make lda.datasets available after import lda<commit_after>
|
from __future__ import absolute_import, unicode_literals # noqa
import logging
import pbr.version
from lda.lda import LDA # noqa
import lda.datasets # noqa
__version__ = pbr.version.VersionInfo('lda').version_string()
logging.getLogger('lda').addHandler(logging.NullHandler())
|
from __future__ import absolute_import, unicode_literals # noqa
import logging
import pbr.version
from lda.lda import LDA # noqa
__version__ = pbr.version.VersionInfo('lda').version_string()
logging.getLogger('lda').addHandler(logging.NullHandler())
Make lda.datasets available after import ldafrom __future__ import absolute_import, unicode_literals # noqa
import logging
import pbr.version
from lda.lda import LDA # noqa
import lda.datasets # noqa
__version__ = pbr.version.VersionInfo('lda').version_string()
logging.getLogger('lda').addHandler(logging.NullHandler())
|
<commit_before>from __future__ import absolute_import, unicode_literals # noqa
import logging
import pbr.version
from lda.lda import LDA # noqa
__version__ = pbr.version.VersionInfo('lda').version_string()
logging.getLogger('lda').addHandler(logging.NullHandler())
<commit_msg>Make lda.datasets available after import lda<commit_after>from __future__ import absolute_import, unicode_literals # noqa
import logging
import pbr.version
from lda.lda import LDA # noqa
import lda.datasets # noqa
__version__ = pbr.version.VersionInfo('lda').version_string()
logging.getLogger('lda').addHandler(logging.NullHandler())
|
fd1783df3648cdb80b32ae41ffd1d9e1ccb23196
|
tests/ex25_tests.py
|
tests/ex25_tests.py
|
from nose.tools import *
from exercises import ex25
def test_make_ing_form_ie():
'''
Test for ie match
'''
present_verb = ex25.make_ing_form('tie')
assert_equal(third_person_form, 'tying')
def test_make_ing_form_e():
'''
Test for e match
'''
present_verb = ex25.make_ing_form('grate')
assert_equal(third_person_form, 'grating')
def test_make_ing_form_s():
'''
Test for other matches
'''
present_verb = ex25.make_ing_form('grab')
assert_equal(third_person_form, 'grabs')
|
from nose.tools import *
from exercises import ex25
def test_make_ing_form_ie():
'''
Test for ie match
'''
present_verb = ex25.make_ing_form('tie')
assert_equal(present_verb, 'tying')
def test_make_ing_form_e():
'''
Test for e match
'''
present_verb = ex25.make_ing_form('grate')
assert_equal(present_verb, 'grating')
def test_make_ing_form_s():
'''
Test for other matches
'''
present_verb = ex25.make_ing_form('grab')
assert_equal(present_verb, 'grabs')
|
Fix a copy paste fail
|
Fix a copy paste fail
|
Python
|
mit
|
gravyboat/python-exercises
|
from nose.tools import *
from exercises import ex25
def test_make_ing_form_ie():
'''
Test for ie match
'''
present_verb = ex25.make_ing_form('tie')
assert_equal(third_person_form, 'tying')
def test_make_ing_form_e():
'''
Test for e match
'''
present_verb = ex25.make_ing_form('grate')
assert_equal(third_person_form, 'grating')
def test_make_ing_form_s():
'''
Test for other matches
'''
present_verb = ex25.make_ing_form('grab')
assert_equal(third_person_form, 'grabs')
Fix a copy paste fail
|
from nose.tools import *
from exercises import ex25
def test_make_ing_form_ie():
'''
Test for ie match
'''
present_verb = ex25.make_ing_form('tie')
assert_equal(present_verb, 'tying')
def test_make_ing_form_e():
'''
Test for e match
'''
present_verb = ex25.make_ing_form('grate')
assert_equal(present_verb, 'grating')
def test_make_ing_form_s():
'''
Test for other matches
'''
present_verb = ex25.make_ing_form('grab')
assert_equal(present_verb, 'grabs')
|
<commit_before>from nose.tools import *
from exercises import ex25
def test_make_ing_form_ie():
'''
Test for ie match
'''
present_verb = ex25.make_ing_form('tie')
assert_equal(third_person_form, 'tying')
def test_make_ing_form_e():
'''
Test for e match
'''
present_verb = ex25.make_ing_form('grate')
assert_equal(third_person_form, 'grating')
def test_make_ing_form_s():
'''
Test for other matches
'''
present_verb = ex25.make_ing_form('grab')
assert_equal(third_person_form, 'grabs')
<commit_msg>Fix a copy paste fail<commit_after>
|
from nose.tools import *
from exercises import ex25
def test_make_ing_form_ie():
'''
Test for ie match
'''
present_verb = ex25.make_ing_form('tie')
assert_equal(present_verb, 'tying')
def test_make_ing_form_e():
'''
Test for e match
'''
present_verb = ex25.make_ing_form('grate')
assert_equal(present_verb, 'grating')
def test_make_ing_form_s():
'''
Test for other matches
'''
present_verb = ex25.make_ing_form('grab')
assert_equal(present_verb, 'grabs')
|
from nose.tools import *
from exercises import ex25
def test_make_ing_form_ie():
'''
Test for ie match
'''
present_verb = ex25.make_ing_form('tie')
assert_equal(third_person_form, 'tying')
def test_make_ing_form_e():
'''
Test for e match
'''
present_verb = ex25.make_ing_form('grate')
assert_equal(third_person_form, 'grating')
def test_make_ing_form_s():
'''
Test for other matches
'''
present_verb = ex25.make_ing_form('grab')
assert_equal(third_person_form, 'grabs')
Fix a copy paste failfrom nose.tools import *
from exercises import ex25
def test_make_ing_form_ie():
'''
Test for ie match
'''
present_verb = ex25.make_ing_form('tie')
assert_equal(present_verb, 'tying')
def test_make_ing_form_e():
'''
Test for e match
'''
present_verb = ex25.make_ing_form('grate')
assert_equal(present_verb, 'grating')
def test_make_ing_form_s():
'''
Test for other matches
'''
present_verb = ex25.make_ing_form('grab')
assert_equal(present_verb, 'grabs')
|
<commit_before>from nose.tools import *
from exercises import ex25
def test_make_ing_form_ie():
'''
Test for ie match
'''
present_verb = ex25.make_ing_form('tie')
assert_equal(third_person_form, 'tying')
def test_make_ing_form_e():
'''
Test for e match
'''
present_verb = ex25.make_ing_form('grate')
assert_equal(third_person_form, 'grating')
def test_make_ing_form_s():
'''
Test for other matches
'''
present_verb = ex25.make_ing_form('grab')
assert_equal(third_person_form, 'grabs')
<commit_msg>Fix a copy paste fail<commit_after>from nose.tools import *
from exercises import ex25
def test_make_ing_form_ie():
'''
Test for ie match
'''
present_verb = ex25.make_ing_form('tie')
assert_equal(present_verb, 'tying')
def test_make_ing_form_e():
'''
Test for e match
'''
present_verb = ex25.make_ing_form('grate')
assert_equal(present_verb, 'grating')
def test_make_ing_form_s():
'''
Test for other matches
'''
present_verb = ex25.make_ing_form('grab')
assert_equal(present_verb, 'grabs')
|
3492cadef1b3cd204f23e47f17722abcabce435b
|
test/win/gyptest-link-pdb.py
|
test/win/gyptest-link-pdb.py
|
#!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that the 'Profile' attribute in VCLinker is extracted properly.
"""
import TestGyp
import os
import sys
if sys.platform == 'win32':
test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
CHDIR = 'linker-flags'
test.run_gyp('program-database.gyp', chdir=CHDIR)
test.build('program-database.gyp', test.ALL, chdir=CHDIR)
def FindFile(pdb):
full_path = test.built_file_path(pdb, chdir=CHDIR)
return os.path.isfile(full_path)
# Verify the specified PDB is created when ProgramDatabaseFile
# is provided.
if not FindFile('name_set.pdb'):
test.fail_test()
else:
test.pass_test()
|
#!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that the 'Profile' attribute in VCLinker is extracted properly.
"""
import TestGyp
import os
import sys
if sys.platform == 'win32':
test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
CHDIR = 'linker-flags'
test.run_gyp('program-database.gyp', chdir=CHDIR)
test.build('program-database.gyp', test.ALL, chdir=CHDIR)
def FindFile(pdb):
full_path = test.built_file_path(pdb, chdir=CHDIR)
return os.path.isfile(full_path)
# Verify the specified PDB is created when ProgramDatabaseFile
# is provided.
if not FindFile('name_set.pdb'):
test.fail_test()
else:
test.pass_test()
|
Insert empty line at to fix patch.
|
Insert empty line at to fix patch.
gyptest-link-pdb.py was checked in without a blank line. This appears
to cause a patch issue with the try bots. This CL is only a whitespace
change to attempt to fix that problem.
SEE:
patching file test/win/gyptest-link-pdb.py
Hunk #1 FAILED at 26.
1 out of 1 hunk FAILED -- saving rejects to file test/win/gyptest-link-pdb.py.rej
===================================================================
--- test/win/gyptest-link-pdb.py (revision 1530)
+++ test/win/gyptest-link-pdb.py (working copy)
@@ -26,7 +26,9 @@
# Verify the specified PDB is created when ProgramDatabaseFile
# is provided.
- if not FindFile('name_set.pdb'):
+ if not FindFile('name_outdir.pdb'):
test.fail_test()
- else:
- test.pass_test()
\ No newline at end of file
+ if not FindFile('name_proddir.pdb'):
+ test.fail_test()
+
+ test.pass_test()
Index: test/win/linker-flags/program-database.gyp
TBR=bradnelson@chromium.org
Review URL: https://codereview.chromium.org/11368061
|
Python
|
bsd-3-clause
|
old8xp/gyp_from_google,old8xp/gyp_from_google,old8xp/gyp_from_google,old8xp/gyp_from_google,old8xp/gyp_from_google
|
#!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that the 'Profile' attribute in VCLinker is extracted properly.
"""
import TestGyp
import os
import sys
if sys.platform == 'win32':
test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
CHDIR = 'linker-flags'
test.run_gyp('program-database.gyp', chdir=CHDIR)
test.build('program-database.gyp', test.ALL, chdir=CHDIR)
def FindFile(pdb):
full_path = test.built_file_path(pdb, chdir=CHDIR)
return os.path.isfile(full_path)
# Verify the specified PDB is created when ProgramDatabaseFile
# is provided.
if not FindFile('name_set.pdb'):
test.fail_test()
else:
test.pass_test()Insert empty line at to fix patch.
gyptest-link-pdb.py was checked in without a blank line. This appears
to cause a patch issue with the try bots. This CL is only a whitespace
change to attempt to fix that problem.
SEE:
patching file test/win/gyptest-link-pdb.py
Hunk #1 FAILED at 26.
1 out of 1 hunk FAILED -- saving rejects to file test/win/gyptest-link-pdb.py.rej
===================================================================
--- test/win/gyptest-link-pdb.py (revision 1530)
+++ test/win/gyptest-link-pdb.py (working copy)
@@ -26,7 +26,9 @@
# Verify the specified PDB is created when ProgramDatabaseFile
# is provided.
- if not FindFile('name_set.pdb'):
+ if not FindFile('name_outdir.pdb'):
test.fail_test()
- else:
- test.pass_test()
\ No newline at end of file
+ if not FindFile('name_proddir.pdb'):
+ test.fail_test()
+
+ test.pass_test()
Index: test/win/linker-flags/program-database.gyp
TBR=bradnelson@chromium.org
Review URL: https://codereview.chromium.org/11368061
|
#!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that the 'Profile' attribute in VCLinker is extracted properly.
"""
import TestGyp
import os
import sys
if sys.platform == 'win32':
test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
CHDIR = 'linker-flags'
test.run_gyp('program-database.gyp', chdir=CHDIR)
test.build('program-database.gyp', test.ALL, chdir=CHDIR)
def FindFile(pdb):
full_path = test.built_file_path(pdb, chdir=CHDIR)
return os.path.isfile(full_path)
# Verify the specified PDB is created when ProgramDatabaseFile
# is provided.
if not FindFile('name_set.pdb'):
test.fail_test()
else:
test.pass_test()
|
<commit_before>#!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that the 'Profile' attribute in VCLinker is extracted properly.
"""
import TestGyp
import os
import sys
if sys.platform == 'win32':
test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
CHDIR = 'linker-flags'
test.run_gyp('program-database.gyp', chdir=CHDIR)
test.build('program-database.gyp', test.ALL, chdir=CHDIR)
def FindFile(pdb):
full_path = test.built_file_path(pdb, chdir=CHDIR)
return os.path.isfile(full_path)
# Verify the specified PDB is created when ProgramDatabaseFile
# is provided.
if not FindFile('name_set.pdb'):
test.fail_test()
else:
test.pass_test()<commit_msg>Insert empty line at to fix patch.
gyptest-link-pdb.py was checked in without a blank line. This appears
to cause a patch issue with the try bots. This CL is only a whitespace
change to attempt to fix that problem.
SEE:
patching file test/win/gyptest-link-pdb.py
Hunk #1 FAILED at 26.
1 out of 1 hunk FAILED -- saving rejects to file test/win/gyptest-link-pdb.py.rej
===================================================================
--- test/win/gyptest-link-pdb.py (revision 1530)
+++ test/win/gyptest-link-pdb.py (working copy)
@@ -26,7 +26,9 @@
# Verify the specified PDB is created when ProgramDatabaseFile
# is provided.
- if not FindFile('name_set.pdb'):
+ if not FindFile('name_outdir.pdb'):
test.fail_test()
- else:
- test.pass_test()
\ No newline at end of file
+ if not FindFile('name_proddir.pdb'):
+ test.fail_test()
+
+ test.pass_test()
Index: test/win/linker-flags/program-database.gyp
TBR=bradnelson@chromium.org
Review URL: https://codereview.chromium.org/11368061<commit_after>
|
#!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that the 'Profile' attribute in VCLinker is extracted properly.
"""
import TestGyp
import os
import sys
if sys.platform == 'win32':
test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
CHDIR = 'linker-flags'
test.run_gyp('program-database.gyp', chdir=CHDIR)
test.build('program-database.gyp', test.ALL, chdir=CHDIR)
def FindFile(pdb):
full_path = test.built_file_path(pdb, chdir=CHDIR)
return os.path.isfile(full_path)
# Verify the specified PDB is created when ProgramDatabaseFile
# is provided.
if not FindFile('name_set.pdb'):
test.fail_test()
else:
test.pass_test()
|
#!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that the 'Profile' attribute in VCLinker is extracted properly.
"""
import TestGyp
import os
import sys
if sys.platform == 'win32':
test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
CHDIR = 'linker-flags'
test.run_gyp('program-database.gyp', chdir=CHDIR)
test.build('program-database.gyp', test.ALL, chdir=CHDIR)
def FindFile(pdb):
full_path = test.built_file_path(pdb, chdir=CHDIR)
return os.path.isfile(full_path)
# Verify the specified PDB is created when ProgramDatabaseFile
# is provided.
if not FindFile('name_set.pdb'):
test.fail_test()
else:
test.pass_test()Insert empty line at to fix patch.
gyptest-link-pdb.py was checked in without a blank line. This appears
to cause a patch issue with the try bots. This CL is only a whitespace
change to attempt to fix that problem.
SEE:
patching file test/win/gyptest-link-pdb.py
Hunk #1 FAILED at 26.
1 out of 1 hunk FAILED -- saving rejects to file test/win/gyptest-link-pdb.py.rej
===================================================================
--- test/win/gyptest-link-pdb.py (revision 1530)
+++ test/win/gyptest-link-pdb.py (working copy)
@@ -26,7 +26,9 @@
# Verify the specified PDB is created when ProgramDatabaseFile
# is provided.
- if not FindFile('name_set.pdb'):
+ if not FindFile('name_outdir.pdb'):
test.fail_test()
- else:
- test.pass_test()
\ No newline at end of file
+ if not FindFile('name_proddir.pdb'):
+ test.fail_test()
+
+ test.pass_test()
Index: test/win/linker-flags/program-database.gyp
TBR=bradnelson@chromium.org
Review URL: https://codereview.chromium.org/11368061#!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that the 'Profile' attribute in VCLinker is extracted properly.
"""
import TestGyp
import os
import sys
if sys.platform == 'win32':
test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
CHDIR = 'linker-flags'
test.run_gyp('program-database.gyp', chdir=CHDIR)
test.build('program-database.gyp', test.ALL, chdir=CHDIR)
def FindFile(pdb):
full_path = test.built_file_path(pdb, chdir=CHDIR)
return os.path.isfile(full_path)
# Verify the specified PDB is created when ProgramDatabaseFile
# is provided.
if not FindFile('name_set.pdb'):
test.fail_test()
else:
test.pass_test()
|
<commit_before>#!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that the 'Profile' attribute in VCLinker is extracted properly.
"""
import TestGyp
import os
import sys
if sys.platform == 'win32':
test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
CHDIR = 'linker-flags'
test.run_gyp('program-database.gyp', chdir=CHDIR)
test.build('program-database.gyp', test.ALL, chdir=CHDIR)
def FindFile(pdb):
full_path = test.built_file_path(pdb, chdir=CHDIR)
return os.path.isfile(full_path)
# Verify the specified PDB is created when ProgramDatabaseFile
# is provided.
if not FindFile('name_set.pdb'):
test.fail_test()
else:
test.pass_test()<commit_msg>Insert empty line at to fix patch.
gyptest-link-pdb.py was checked in without a blank line. This appears
to cause a patch issue with the try bots. This CL is only a whitespace
change to attempt to fix that problem.
SEE:
patching file test/win/gyptest-link-pdb.py
Hunk #1 FAILED at 26.
1 out of 1 hunk FAILED -- saving rejects to file test/win/gyptest-link-pdb.py.rej
===================================================================
--- test/win/gyptest-link-pdb.py (revision 1530)
+++ test/win/gyptest-link-pdb.py (working copy)
@@ -26,7 +26,9 @@
# Verify the specified PDB is created when ProgramDatabaseFile
# is provided.
- if not FindFile('name_set.pdb'):
+ if not FindFile('name_outdir.pdb'):
test.fail_test()
- else:
- test.pass_test()
\ No newline at end of file
+ if not FindFile('name_proddir.pdb'):
+ test.fail_test()
+
+ test.pass_test()
Index: test/win/linker-flags/program-database.gyp
TBR=bradnelson@chromium.org
Review URL: https://codereview.chromium.org/11368061<commit_after>#!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that the 'Profile' attribute in VCLinker is extracted properly.
"""
import TestGyp
import os
import sys
if sys.platform == 'win32':
test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
CHDIR = 'linker-flags'
test.run_gyp('program-database.gyp', chdir=CHDIR)
test.build('program-database.gyp', test.ALL, chdir=CHDIR)
def FindFile(pdb):
full_path = test.built_file_path(pdb, chdir=CHDIR)
return os.path.isfile(full_path)
# Verify the specified PDB is created when ProgramDatabaseFile
# is provided.
if not FindFile('name_set.pdb'):
test.fail_test()
else:
test.pass_test()
|
7c1cce47e2a3cd8743e5e7d7795e9f5014d5f6ec
|
tests/test_utils.py
|
tests/test_utils.py
|
from tinydb.utils import LRUCache
def test_lru_cache():
cache = LRUCache(capacity=3)
cache["a"] = 1
cache["b"] = 2
cache["c"] = 3
_ = cache["a"] # move to front in lru queue
cache["d"] = 4 # move oldest item out of lru queue
assert cache.lru == ["c", "a", "d"]
def test_lru_cache_set_multiple():
cache = LRUCache(capacity=3)
cache["a"] = 1
cache["a"] = 2
cache["a"] = 3
cache["a"] = 4
assert cache.lru == ["a"]
def test_lru_cache_delete():
cache = LRUCache(capacity=3)
cache["a"] = 1
cache["b"] = 2
del cache["a"]
assert cache.lru == ["b"]
def test_lru_cache_clear():
cache = LRUCache(capacity=3)
cache["a"] = 1
cache["b"] = 2
cache.clear()
assert cache.lru == []
|
from tinydb.utils import LRUCache
def test_lru_cache():
cache = LRUCache(capacity=3)
cache["a"] = 1
cache["b"] = 2
cache["c"] = 3
_ = cache["a"] # move to front in lru queue
cache["d"] = 4 # move oldest item out of lru queue
assert cache.lru == ["c", "a", "d"]
def test_lru_cache_set_multiple():
cache = LRUCache(capacity=3)
cache["a"] = 1
cache["a"] = 2
cache["a"] = 3
cache["a"] = 4
assert cache.lru == ["a"]
def test_lru_cache_get():
cache = LRUCache(capacity=3)
cache["a"] = 1
cache["b"] = 1
cache["c"] = 1
cache.get("a")
cache["d"] = 4
assert cache.lru == ["c", "a", "d"]
def test_lru_cache_delete():
cache = LRUCache(capacity=3)
cache["a"] = 1
cache["b"] = 2
del cache["a"]
assert cache.lru == ["b"]
def test_lru_cache_clear():
cache = LRUCache(capacity=3)
cache["a"] = 1
cache["b"] = 2
cache.clear()
assert cache.lru == []
def test_lru_cache_unlimited():
cache = LRUCache()
for i in xrange(100):
cache[i] = i
assert len(cache.lru) == 100
|
Improve test coverage once again...
|
Improve test coverage once again...
|
Python
|
mit
|
cagnosolutions/tinydb,raquel-ucl/tinydb,ivankravets/tinydb,Callwoola/tinydb,msiemens/tinydb
|
from tinydb.utils import LRUCache
def test_lru_cache():
cache = LRUCache(capacity=3)
cache["a"] = 1
cache["b"] = 2
cache["c"] = 3
_ = cache["a"] # move to front in lru queue
cache["d"] = 4 # move oldest item out of lru queue
assert cache.lru == ["c", "a", "d"]
def test_lru_cache_set_multiple():
cache = LRUCache(capacity=3)
cache["a"] = 1
cache["a"] = 2
cache["a"] = 3
cache["a"] = 4
assert cache.lru == ["a"]
def test_lru_cache_delete():
cache = LRUCache(capacity=3)
cache["a"] = 1
cache["b"] = 2
del cache["a"]
assert cache.lru == ["b"]
def test_lru_cache_clear():
cache = LRUCache(capacity=3)
cache["a"] = 1
cache["b"] = 2
cache.clear()
assert cache.lru == []
Improve test coverage once again...
|
from tinydb.utils import LRUCache
def test_lru_cache():
cache = LRUCache(capacity=3)
cache["a"] = 1
cache["b"] = 2
cache["c"] = 3
_ = cache["a"] # move to front in lru queue
cache["d"] = 4 # move oldest item out of lru queue
assert cache.lru == ["c", "a", "d"]
def test_lru_cache_set_multiple():
cache = LRUCache(capacity=3)
cache["a"] = 1
cache["a"] = 2
cache["a"] = 3
cache["a"] = 4
assert cache.lru == ["a"]
def test_lru_cache_get():
cache = LRUCache(capacity=3)
cache["a"] = 1
cache["b"] = 1
cache["c"] = 1
cache.get("a")
cache["d"] = 4
assert cache.lru == ["c", "a", "d"]
def test_lru_cache_delete():
cache = LRUCache(capacity=3)
cache["a"] = 1
cache["b"] = 2
del cache["a"]
assert cache.lru == ["b"]
def test_lru_cache_clear():
cache = LRUCache(capacity=3)
cache["a"] = 1
cache["b"] = 2
cache.clear()
assert cache.lru == []
def test_lru_cache_unlimited():
cache = LRUCache()
for i in xrange(100):
cache[i] = i
assert len(cache.lru) == 100
|
<commit_before>from tinydb.utils import LRUCache
def test_lru_cache():
cache = LRUCache(capacity=3)
cache["a"] = 1
cache["b"] = 2
cache["c"] = 3
_ = cache["a"] # move to front in lru queue
cache["d"] = 4 # move oldest item out of lru queue
assert cache.lru == ["c", "a", "d"]
def test_lru_cache_set_multiple():
cache = LRUCache(capacity=3)
cache["a"] = 1
cache["a"] = 2
cache["a"] = 3
cache["a"] = 4
assert cache.lru == ["a"]
def test_lru_cache_delete():
cache = LRUCache(capacity=3)
cache["a"] = 1
cache["b"] = 2
del cache["a"]
assert cache.lru == ["b"]
def test_lru_cache_clear():
cache = LRUCache(capacity=3)
cache["a"] = 1
cache["b"] = 2
cache.clear()
assert cache.lru == []
<commit_msg>Improve test coverage once again...<commit_after>
|
from tinydb.utils import LRUCache
def test_lru_cache():
cache = LRUCache(capacity=3)
cache["a"] = 1
cache["b"] = 2
cache["c"] = 3
_ = cache["a"] # move to front in lru queue
cache["d"] = 4 # move oldest item out of lru queue
assert cache.lru == ["c", "a", "d"]
def test_lru_cache_set_multiple():
cache = LRUCache(capacity=3)
cache["a"] = 1
cache["a"] = 2
cache["a"] = 3
cache["a"] = 4
assert cache.lru == ["a"]
def test_lru_cache_get():
cache = LRUCache(capacity=3)
cache["a"] = 1
cache["b"] = 1
cache["c"] = 1
cache.get("a")
cache["d"] = 4
assert cache.lru == ["c", "a", "d"]
def test_lru_cache_delete():
cache = LRUCache(capacity=3)
cache["a"] = 1
cache["b"] = 2
del cache["a"]
assert cache.lru == ["b"]
def test_lru_cache_clear():
cache = LRUCache(capacity=3)
cache["a"] = 1
cache["b"] = 2
cache.clear()
assert cache.lru == []
def test_lru_cache_unlimited():
cache = LRUCache()
for i in xrange(100):
cache[i] = i
assert len(cache.lru) == 100
|
from tinydb.utils import LRUCache
def test_lru_cache():
cache = LRUCache(capacity=3)
cache["a"] = 1
cache["b"] = 2
cache["c"] = 3
_ = cache["a"] # move to front in lru queue
cache["d"] = 4 # move oldest item out of lru queue
assert cache.lru == ["c", "a", "d"]
def test_lru_cache_set_multiple():
cache = LRUCache(capacity=3)
cache["a"] = 1
cache["a"] = 2
cache["a"] = 3
cache["a"] = 4
assert cache.lru == ["a"]
def test_lru_cache_delete():
cache = LRUCache(capacity=3)
cache["a"] = 1
cache["b"] = 2
del cache["a"]
assert cache.lru == ["b"]
def test_lru_cache_clear():
cache = LRUCache(capacity=3)
cache["a"] = 1
cache["b"] = 2
cache.clear()
assert cache.lru == []
Improve test coverage once again...from tinydb.utils import LRUCache
def test_lru_cache():
cache = LRUCache(capacity=3)
cache["a"] = 1
cache["b"] = 2
cache["c"] = 3
_ = cache["a"] # move to front in lru queue
cache["d"] = 4 # move oldest item out of lru queue
assert cache.lru == ["c", "a", "d"]
def test_lru_cache_set_multiple():
cache = LRUCache(capacity=3)
cache["a"] = 1
cache["a"] = 2
cache["a"] = 3
cache["a"] = 4
assert cache.lru == ["a"]
def test_lru_cache_get():
cache = LRUCache(capacity=3)
cache["a"] = 1
cache["b"] = 1
cache["c"] = 1
cache.get("a")
cache["d"] = 4
assert cache.lru == ["c", "a", "d"]
def test_lru_cache_delete():
cache = LRUCache(capacity=3)
cache["a"] = 1
cache["b"] = 2
del cache["a"]
assert cache.lru == ["b"]
def test_lru_cache_clear():
cache = LRUCache(capacity=3)
cache["a"] = 1
cache["b"] = 2
cache.clear()
assert cache.lru == []
def test_lru_cache_unlimited():
cache = LRUCache()
for i in xrange(100):
cache[i] = i
assert len(cache.lru) == 100
|
<commit_before>from tinydb.utils import LRUCache
def test_lru_cache():
cache = LRUCache(capacity=3)
cache["a"] = 1
cache["b"] = 2
cache["c"] = 3
_ = cache["a"] # move to front in lru queue
cache["d"] = 4 # move oldest item out of lru queue
assert cache.lru == ["c", "a", "d"]
def test_lru_cache_set_multiple():
cache = LRUCache(capacity=3)
cache["a"] = 1
cache["a"] = 2
cache["a"] = 3
cache["a"] = 4
assert cache.lru == ["a"]
def test_lru_cache_delete():
cache = LRUCache(capacity=3)
cache["a"] = 1
cache["b"] = 2
del cache["a"]
assert cache.lru == ["b"]
def test_lru_cache_clear():
cache = LRUCache(capacity=3)
cache["a"] = 1
cache["b"] = 2
cache.clear()
assert cache.lru == []
<commit_msg>Improve test coverage once again...<commit_after>from tinydb.utils import LRUCache
def test_lru_cache():
cache = LRUCache(capacity=3)
cache["a"] = 1
cache["b"] = 2
cache["c"] = 3
_ = cache["a"] # move to front in lru queue
cache["d"] = 4 # move oldest item out of lru queue
assert cache.lru == ["c", "a", "d"]
def test_lru_cache_set_multiple():
cache = LRUCache(capacity=3)
cache["a"] = 1
cache["a"] = 2
cache["a"] = 3
cache["a"] = 4
assert cache.lru == ["a"]
def test_lru_cache_get():
cache = LRUCache(capacity=3)
cache["a"] = 1
cache["b"] = 1
cache["c"] = 1
cache.get("a")
cache["d"] = 4
assert cache.lru == ["c", "a", "d"]
def test_lru_cache_delete():
cache = LRUCache(capacity=3)
cache["a"] = 1
cache["b"] = 2
del cache["a"]
assert cache.lru == ["b"]
def test_lru_cache_clear():
cache = LRUCache(capacity=3)
cache["a"] = 1
cache["b"] = 2
cache.clear()
assert cache.lru == []
def test_lru_cache_unlimited():
cache = LRUCache()
for i in xrange(100):
cache[i] = i
assert len(cache.lru) == 100
|
4e20731050c4b9f5a27693427e73ade62af0012e
|
web/impact/impact/v1/helpers/matching_criterion_helper.py
|
web/impact/impact/v1/helpers/matching_criterion_helper.py
|
# MIT License
# Copyright (c) 2017 MassChallenge, Inc.
from django.db.models import Q
from impact.v1.helpers.criterion_helper import CriterionHelper
class MatchingCriterionHelper(CriterionHelper):
def __init__(self, subject):
super().__init__(subject)
self._app_ids_to_targets = {}
self._target_counts = {}
def app_count(self, apps, option_name):
return self.target_counts(apps).get(option_name, 0)
def refine_feedbacks(self, feedbacks, target, refinement):
if not target:
return None
query = Q(**{refinement: target})
return feedbacks.filter(query)
def find_app_ids(self, feedbacks, apps, target):
result = []
if feedbacks:
app_map = self.app_ids_to_targets(apps)
for app_id in feedbacks.values_list("application_id", flat=True):
if app_id in app_map and app_map[app_id] == target.id:
result.append(app_id)
return result
def app_ids_to_targets(self, apps):
if not self._app_ids_to_targets:
self.calc_app_ids_to_targets(apps)
return self._app_ids_to_targets
def target_counts(self, apps):
if not self._app_ids_to_targets:
self.calc_app_ids_to_targets(apps)
return self._target_counts
|
# MIT License
# Copyright (c) 2017 MassChallenge, Inc.
from django.db.models import Q
from impact.v1.helpers.criterion_helper import CriterionHelper
class MatchingCriterionHelper(CriterionHelper):
def __init__(self, subject):
super().__init__(subject)
self._app_ids_to_targets = {}
self._target_counts = {}
def app_count(self, apps, option_name):
return self.target_counts(apps).get(option_name, 0)
def refine_feedbacks(self, feedbacks, target, refinement):
if not target:
return None
query = Q(**{refinement: target})
return feedbacks.filter(query)
def find_app_ids(self, feedbacks, apps, target):
if not feedbacks:
return []
result = []
app_map = self.app_ids_to_targets(apps)
return [app_id for app_id in
feedbacks.values_list("application_id", flat=True)
if app_id in app_map and app_map[app_id] == target.id]
def app_ids_to_targets(self, apps):
if not self._app_ids_to_targets:
self.calc_app_ids_to_targets(apps)
return self._app_ids_to_targets
def target_counts(self, apps):
if not self._app_ids_to_targets:
self.calc_app_ids_to_targets(apps)
return self._target_counts
|
Address CodeClimate cognitive complexity concern
|
[AC-5625] Address CodeClimate cognitive complexity concern
|
Python
|
mit
|
masschallenge/impact-api,masschallenge/impact-api,masschallenge/impact-api,masschallenge/impact-api
|
# MIT License
# Copyright (c) 2017 MassChallenge, Inc.
from django.db.models import Q
from impact.v1.helpers.criterion_helper import CriterionHelper
class MatchingCriterionHelper(CriterionHelper):
def __init__(self, subject):
super().__init__(subject)
self._app_ids_to_targets = {}
self._target_counts = {}
def app_count(self, apps, option_name):
return self.target_counts(apps).get(option_name, 0)
def refine_feedbacks(self, feedbacks, target, refinement):
if not target:
return None
query = Q(**{refinement: target})
return feedbacks.filter(query)
def find_app_ids(self, feedbacks, apps, target):
result = []
if feedbacks:
app_map = self.app_ids_to_targets(apps)
for app_id in feedbacks.values_list("application_id", flat=True):
if app_id in app_map and app_map[app_id] == target.id:
result.append(app_id)
return result
def app_ids_to_targets(self, apps):
if not self._app_ids_to_targets:
self.calc_app_ids_to_targets(apps)
return self._app_ids_to_targets
def target_counts(self, apps):
if not self._app_ids_to_targets:
self.calc_app_ids_to_targets(apps)
return self._target_counts
[AC-5625] Address CodeClimate cognitive complexity concern
|
# MIT License
# Copyright (c) 2017 MassChallenge, Inc.
from django.db.models import Q
from impact.v1.helpers.criterion_helper import CriterionHelper
class MatchingCriterionHelper(CriterionHelper):
def __init__(self, subject):
super().__init__(subject)
self._app_ids_to_targets = {}
self._target_counts = {}
def app_count(self, apps, option_name):
return self.target_counts(apps).get(option_name, 0)
def refine_feedbacks(self, feedbacks, target, refinement):
if not target:
return None
query = Q(**{refinement: target})
return feedbacks.filter(query)
def find_app_ids(self, feedbacks, apps, target):
if not feedbacks:
return []
result = []
app_map = self.app_ids_to_targets(apps)
return [app_id for app_id in
feedbacks.values_list("application_id", flat=True)
if app_id in app_map and app_map[app_id] == target.id]
def app_ids_to_targets(self, apps):
if not self._app_ids_to_targets:
self.calc_app_ids_to_targets(apps)
return self._app_ids_to_targets
def target_counts(self, apps):
if not self._app_ids_to_targets:
self.calc_app_ids_to_targets(apps)
return self._target_counts
|
<commit_before># MIT License
# Copyright (c) 2017 MassChallenge, Inc.
from django.db.models import Q
from impact.v1.helpers.criterion_helper import CriterionHelper
class MatchingCriterionHelper(CriterionHelper):
def __init__(self, subject):
super().__init__(subject)
self._app_ids_to_targets = {}
self._target_counts = {}
def app_count(self, apps, option_name):
return self.target_counts(apps).get(option_name, 0)
def refine_feedbacks(self, feedbacks, target, refinement):
if not target:
return None
query = Q(**{refinement: target})
return feedbacks.filter(query)
def find_app_ids(self, feedbacks, apps, target):
result = []
if feedbacks:
app_map = self.app_ids_to_targets(apps)
for app_id in feedbacks.values_list("application_id", flat=True):
if app_id in app_map and app_map[app_id] == target.id:
result.append(app_id)
return result
def app_ids_to_targets(self, apps):
if not self._app_ids_to_targets:
self.calc_app_ids_to_targets(apps)
return self._app_ids_to_targets
def target_counts(self, apps):
if not self._app_ids_to_targets:
self.calc_app_ids_to_targets(apps)
return self._target_counts
<commit_msg>[AC-5625] Address CodeClimate cognitive complexity concern<commit_after>
|
# MIT License
# Copyright (c) 2017 MassChallenge, Inc.
from django.db.models import Q
from impact.v1.helpers.criterion_helper import CriterionHelper
class MatchingCriterionHelper(CriterionHelper):
def __init__(self, subject):
super().__init__(subject)
self._app_ids_to_targets = {}
self._target_counts = {}
def app_count(self, apps, option_name):
return self.target_counts(apps).get(option_name, 0)
def refine_feedbacks(self, feedbacks, target, refinement):
if not target:
return None
query = Q(**{refinement: target})
return feedbacks.filter(query)
def find_app_ids(self, feedbacks, apps, target):
if not feedbacks:
return []
result = []
app_map = self.app_ids_to_targets(apps)
return [app_id for app_id in
feedbacks.values_list("application_id", flat=True)
if app_id in app_map and app_map[app_id] == target.id]
def app_ids_to_targets(self, apps):
if not self._app_ids_to_targets:
self.calc_app_ids_to_targets(apps)
return self._app_ids_to_targets
def target_counts(self, apps):
if not self._app_ids_to_targets:
self.calc_app_ids_to_targets(apps)
return self._target_counts
|
# MIT License
# Copyright (c) 2017 MassChallenge, Inc.
from django.db.models import Q
from impact.v1.helpers.criterion_helper import CriterionHelper
class MatchingCriterionHelper(CriterionHelper):
def __init__(self, subject):
super().__init__(subject)
self._app_ids_to_targets = {}
self._target_counts = {}
def app_count(self, apps, option_name):
return self.target_counts(apps).get(option_name, 0)
def refine_feedbacks(self, feedbacks, target, refinement):
if not target:
return None
query = Q(**{refinement: target})
return feedbacks.filter(query)
def find_app_ids(self, feedbacks, apps, target):
result = []
if feedbacks:
app_map = self.app_ids_to_targets(apps)
for app_id in feedbacks.values_list("application_id", flat=True):
if app_id in app_map and app_map[app_id] == target.id:
result.append(app_id)
return result
def app_ids_to_targets(self, apps):
if not self._app_ids_to_targets:
self.calc_app_ids_to_targets(apps)
return self._app_ids_to_targets
def target_counts(self, apps):
if not self._app_ids_to_targets:
self.calc_app_ids_to_targets(apps)
return self._target_counts
[AC-5625] Address CodeClimate cognitive complexity concern# MIT License
# Copyright (c) 2017 MassChallenge, Inc.
from django.db.models import Q
from impact.v1.helpers.criterion_helper import CriterionHelper
class MatchingCriterionHelper(CriterionHelper):
def __init__(self, subject):
super().__init__(subject)
self._app_ids_to_targets = {}
self._target_counts = {}
def app_count(self, apps, option_name):
return self.target_counts(apps).get(option_name, 0)
def refine_feedbacks(self, feedbacks, target, refinement):
if not target:
return None
query = Q(**{refinement: target})
return feedbacks.filter(query)
def find_app_ids(self, feedbacks, apps, target):
if not feedbacks:
return []
result = []
app_map = self.app_ids_to_targets(apps)
return [app_id for app_id in
feedbacks.values_list("application_id", flat=True)
if app_id in app_map and app_map[app_id] == target.id]
def app_ids_to_targets(self, apps):
if not self._app_ids_to_targets:
self.calc_app_ids_to_targets(apps)
return self._app_ids_to_targets
def target_counts(self, apps):
if not self._app_ids_to_targets:
self.calc_app_ids_to_targets(apps)
return self._target_counts
|
<commit_before># MIT License
# Copyright (c) 2017 MassChallenge, Inc.
from django.db.models import Q
from impact.v1.helpers.criterion_helper import CriterionHelper
class MatchingCriterionHelper(CriterionHelper):
def __init__(self, subject):
super().__init__(subject)
self._app_ids_to_targets = {}
self._target_counts = {}
def app_count(self, apps, option_name):
return self.target_counts(apps).get(option_name, 0)
def refine_feedbacks(self, feedbacks, target, refinement):
if not target:
return None
query = Q(**{refinement: target})
return feedbacks.filter(query)
def find_app_ids(self, feedbacks, apps, target):
result = []
if feedbacks:
app_map = self.app_ids_to_targets(apps)
for app_id in feedbacks.values_list("application_id", flat=True):
if app_id in app_map and app_map[app_id] == target.id:
result.append(app_id)
return result
def app_ids_to_targets(self, apps):
if not self._app_ids_to_targets:
self.calc_app_ids_to_targets(apps)
return self._app_ids_to_targets
def target_counts(self, apps):
if not self._app_ids_to_targets:
self.calc_app_ids_to_targets(apps)
return self._target_counts
<commit_msg>[AC-5625] Address CodeClimate cognitive complexity concern<commit_after># MIT License
# Copyright (c) 2017 MassChallenge, Inc.
from django.db.models import Q
from impact.v1.helpers.criterion_helper import CriterionHelper
class MatchingCriterionHelper(CriterionHelper):
def __init__(self, subject):
super().__init__(subject)
self._app_ids_to_targets = {}
self._target_counts = {}
def app_count(self, apps, option_name):
return self.target_counts(apps).get(option_name, 0)
def refine_feedbacks(self, feedbacks, target, refinement):
if not target:
return None
query = Q(**{refinement: target})
return feedbacks.filter(query)
def find_app_ids(self, feedbacks, apps, target):
if not feedbacks:
return []
result = []
app_map = self.app_ids_to_targets(apps)
return [app_id for app_id in
feedbacks.values_list("application_id", flat=True)
if app_id in app_map and app_map[app_id] == target.id]
def app_ids_to_targets(self, apps):
if not self._app_ids_to_targets:
self.calc_app_ids_to_targets(apps)
return self._app_ids_to_targets
def target_counts(self, apps):
if not self._app_ids_to_targets:
self.calc_app_ids_to_targets(apps)
return self._target_counts
|
378ef98e78894024aaed18d55543c755c7095df4
|
src/InventoryManagement/Item/models.py
|
src/InventoryManagement/Item/models.py
|
from django.db import models
# Create your models here.
|
from django.db import models
# Create your models here.
# Model of an Item
class Item(models.Model):
item_name = models.CharField(max_lenght=100)
objects = ItemManager()
class ItemManager(models.Manager):
def create_item(self, item_name):
item = self.create(item_name=item_name)
|
Add name field and object manager
|
Add name field and object manager
|
Python
|
apache-2.0
|
Hekaton/InventoryManagement
|
from django.db import models
# Create your models here.
Add name field and object manager
|
from django.db import models
# Create your models here.
# Model of an Item
class Item(models.Model):
item_name = models.CharField(max_lenght=100)
objects = ItemManager()
class ItemManager(models.Manager):
def create_item(self, item_name):
item = self.create(item_name=item_name)
|
<commit_before>from django.db import models
# Create your models here.
<commit_msg>Add name field and object manager<commit_after>
|
from django.db import models
# Create your models here.
# Model of an Item
class Item(models.Model):
item_name = models.CharField(max_lenght=100)
objects = ItemManager()
class ItemManager(models.Manager):
def create_item(self, item_name):
item = self.create(item_name=item_name)
|
from django.db import models
# Create your models here.
Add name field and object managerfrom django.db import models
# Create your models here.
# Model of an Item
class Item(models.Model):
item_name = models.CharField(max_lenght=100)
objects = ItemManager()
class ItemManager(models.Manager):
def create_item(self, item_name):
item = self.create(item_name=item_name)
|
<commit_before>from django.db import models
# Create your models here.
<commit_msg>Add name field and object manager<commit_after>from django.db import models
# Create your models here.
# Model of an Item
class Item(models.Model):
item_name = models.CharField(max_lenght=100)
objects = ItemManager()
class ItemManager(models.Manager):
def create_item(self, item_name):
item = self.create(item_name=item_name)
|
09ee458e4e1968dae21b0becca44e0d3356027d6
|
pyluos/tools/scratch/genext.py
|
pyluos/tools/scratch/genext.py
|
import os
import re
from jinja2 import Template
DEFAULT_TEMPLATE = os.path.join(os.path.dirname(__file__), 'extension.tpl.js')
supported_modules = [
'button',
'dynamixel',
'l0_dc_motor',
'l0_gpio',
'l0_servo',
'led',
'potard',
]
def find_modules(state, type):
return [
m['alias']
for m in state['modules']
if m['type'] == type
]
def find_xl320(state, dxl):
dxl = next(filter(lambda mod: mod['alias'] == dxl, state['modules']))
motors = [m for m in dxl if re.match(r'm[0-9]+', m)]
return motors
def generate_extension(name,
robot,
host,
port,
template=DEFAULT_TEMPLATE):
context = {}
context = {
'name': name,
'host': host,
'port': port,
}
context.update({
type: find_modules(robot._state, type)
for type in supported_modules
})
if context['dynamixel']:
# TODO: This should be done for every dxl controller!
context['xl_320'] = find_xl320(robot._state, context['dynamixel'][0])
with open(template) as f:
tpl = Template(f.read())
ext = tpl.render(**context)
return ext
|
import os
import re
from jinja2 import Template
DEFAULT_TEMPLATE = os.path.join(os.path.dirname(__file__), 'extension.tpl.js')
supported_modules = [
'button',
'dynamixel',
'l0_dc_motor',
'l0_gpio',
'l0_servo',
'led',
'potard',
]
def find_modules(state, type):
return [
str(m['alias'])
for m in state['modules']
if m['type'] == type
]
def find_xl320(state, dxl):
dxl = next(filter(lambda mod: mod['alias'] == dxl, state['modules']))
motors = [m for m in dxl if re.match(r'm[0-9]+', m)]
return motors
def generate_extension(name,
robot,
host,
port,
template=DEFAULT_TEMPLATE):
context = {}
context = {
'name': name,
'host': host,
'port': port,
}
context.update({
type: find_modules(robot._state, type)
for type in supported_modules
})
if context['dynamixel']:
# TODO: This should be done for every dxl controller!
context['xl_320'] = find_xl320(robot._state, context['dynamixel'][0])
with open(template) as f:
tpl = Template(f.read())
ext = tpl.render(**context)
return ext
|
Fix an issue in python 2 for the scratch extension.
|
Fix an issue in python 2 for the scratch extension.
|
Python
|
mit
|
pollen/pyrobus
|
import os
import re
from jinja2 import Template
DEFAULT_TEMPLATE = os.path.join(os.path.dirname(__file__), 'extension.tpl.js')
supported_modules = [
'button',
'dynamixel',
'l0_dc_motor',
'l0_gpio',
'l0_servo',
'led',
'potard',
]
def find_modules(state, type):
return [
m['alias']
for m in state['modules']
if m['type'] == type
]
def find_xl320(state, dxl):
dxl = next(filter(lambda mod: mod['alias'] == dxl, state['modules']))
motors = [m for m in dxl if re.match(r'm[0-9]+', m)]
return motors
def generate_extension(name,
robot,
host,
port,
template=DEFAULT_TEMPLATE):
context = {}
context = {
'name': name,
'host': host,
'port': port,
}
context.update({
type: find_modules(robot._state, type)
for type in supported_modules
})
if context['dynamixel']:
# TODO: This should be done for every dxl controller!
context['xl_320'] = find_xl320(robot._state, context['dynamixel'][0])
with open(template) as f:
tpl = Template(f.read())
ext = tpl.render(**context)
return ext
Fix an issue in python 2 for the scratch extension.
|
import os
import re
from jinja2 import Template
DEFAULT_TEMPLATE = os.path.join(os.path.dirname(__file__), 'extension.tpl.js')
supported_modules = [
'button',
'dynamixel',
'l0_dc_motor',
'l0_gpio',
'l0_servo',
'led',
'potard',
]
def find_modules(state, type):
return [
str(m['alias'])
for m in state['modules']
if m['type'] == type
]
def find_xl320(state, dxl):
dxl = next(filter(lambda mod: mod['alias'] == dxl, state['modules']))
motors = [m for m in dxl if re.match(r'm[0-9]+', m)]
return motors
def generate_extension(name,
robot,
host,
port,
template=DEFAULT_TEMPLATE):
context = {}
context = {
'name': name,
'host': host,
'port': port,
}
context.update({
type: find_modules(robot._state, type)
for type in supported_modules
})
if context['dynamixel']:
# TODO: This should be done for every dxl controller!
context['xl_320'] = find_xl320(robot._state, context['dynamixel'][0])
with open(template) as f:
tpl = Template(f.read())
ext = tpl.render(**context)
return ext
|
<commit_before>import os
import re
from jinja2 import Template
DEFAULT_TEMPLATE = os.path.join(os.path.dirname(__file__), 'extension.tpl.js')
supported_modules = [
'button',
'dynamixel',
'l0_dc_motor',
'l0_gpio',
'l0_servo',
'led',
'potard',
]
def find_modules(state, type):
return [
m['alias']
for m in state['modules']
if m['type'] == type
]
def find_xl320(state, dxl):
dxl = next(filter(lambda mod: mod['alias'] == dxl, state['modules']))
motors = [m for m in dxl if re.match(r'm[0-9]+', m)]
return motors
def generate_extension(name,
robot,
host,
port,
template=DEFAULT_TEMPLATE):
context = {}
context = {
'name': name,
'host': host,
'port': port,
}
context.update({
type: find_modules(robot._state, type)
for type in supported_modules
})
if context['dynamixel']:
# TODO: This should be done for every dxl controller!
context['xl_320'] = find_xl320(robot._state, context['dynamixel'][0])
with open(template) as f:
tpl = Template(f.read())
ext = tpl.render(**context)
return ext
<commit_msg>Fix an issue in python 2 for the scratch extension.<commit_after>
|
import os
import re
from jinja2 import Template
DEFAULT_TEMPLATE = os.path.join(os.path.dirname(__file__), 'extension.tpl.js')
supported_modules = [
'button',
'dynamixel',
'l0_dc_motor',
'l0_gpio',
'l0_servo',
'led',
'potard',
]
def find_modules(state, type):
return [
str(m['alias'])
for m in state['modules']
if m['type'] == type
]
def find_xl320(state, dxl):
dxl = next(filter(lambda mod: mod['alias'] == dxl, state['modules']))
motors = [m for m in dxl if re.match(r'm[0-9]+', m)]
return motors
def generate_extension(name,
robot,
host,
port,
template=DEFAULT_TEMPLATE):
context = {}
context = {
'name': name,
'host': host,
'port': port,
}
context.update({
type: find_modules(robot._state, type)
for type in supported_modules
})
if context['dynamixel']:
# TODO: This should be done for every dxl controller!
context['xl_320'] = find_xl320(robot._state, context['dynamixel'][0])
with open(template) as f:
tpl = Template(f.read())
ext = tpl.render(**context)
return ext
|
import os
import re
from jinja2 import Template
DEFAULT_TEMPLATE = os.path.join(os.path.dirname(__file__), 'extension.tpl.js')
supported_modules = [
'button',
'dynamixel',
'l0_dc_motor',
'l0_gpio',
'l0_servo',
'led',
'potard',
]
def find_modules(state, type):
return [
m['alias']
for m in state['modules']
if m['type'] == type
]
def find_xl320(state, dxl):
dxl = next(filter(lambda mod: mod['alias'] == dxl, state['modules']))
motors = [m for m in dxl if re.match(r'm[0-9]+', m)]
return motors
def generate_extension(name,
robot,
host,
port,
template=DEFAULT_TEMPLATE):
context = {}
context = {
'name': name,
'host': host,
'port': port,
}
context.update({
type: find_modules(robot._state, type)
for type in supported_modules
})
if context['dynamixel']:
# TODO: This should be done for every dxl controller!
context['xl_320'] = find_xl320(robot._state, context['dynamixel'][0])
with open(template) as f:
tpl = Template(f.read())
ext = tpl.render(**context)
return ext
Fix an issue in python 2 for the scratch extension.import os
import re
from jinja2 import Template
DEFAULT_TEMPLATE = os.path.join(os.path.dirname(__file__), 'extension.tpl.js')
supported_modules = [
'button',
'dynamixel',
'l0_dc_motor',
'l0_gpio',
'l0_servo',
'led',
'potard',
]
def find_modules(state, type):
return [
str(m['alias'])
for m in state['modules']
if m['type'] == type
]
def find_xl320(state, dxl):
dxl = next(filter(lambda mod: mod['alias'] == dxl, state['modules']))
motors = [m for m in dxl if re.match(r'm[0-9]+', m)]
return motors
def generate_extension(name,
robot,
host,
port,
template=DEFAULT_TEMPLATE):
context = {}
context = {
'name': name,
'host': host,
'port': port,
}
context.update({
type: find_modules(robot._state, type)
for type in supported_modules
})
if context['dynamixel']:
# TODO: This should be done for every dxl controller!
context['xl_320'] = find_xl320(robot._state, context['dynamixel'][0])
with open(template) as f:
tpl = Template(f.read())
ext = tpl.render(**context)
return ext
|
<commit_before>import os
import re
from jinja2 import Template
DEFAULT_TEMPLATE = os.path.join(os.path.dirname(__file__), 'extension.tpl.js')
supported_modules = [
'button',
'dynamixel',
'l0_dc_motor',
'l0_gpio',
'l0_servo',
'led',
'potard',
]
def find_modules(state, type):
return [
m['alias']
for m in state['modules']
if m['type'] == type
]
def find_xl320(state, dxl):
dxl = next(filter(lambda mod: mod['alias'] == dxl, state['modules']))
motors = [m for m in dxl if re.match(r'm[0-9]+', m)]
return motors
def generate_extension(name,
robot,
host,
port,
template=DEFAULT_TEMPLATE):
context = {}
context = {
'name': name,
'host': host,
'port': port,
}
context.update({
type: find_modules(robot._state, type)
for type in supported_modules
})
if context['dynamixel']:
# TODO: This should be done for every dxl controller!
context['xl_320'] = find_xl320(robot._state, context['dynamixel'][0])
with open(template) as f:
tpl = Template(f.read())
ext = tpl.render(**context)
return ext
<commit_msg>Fix an issue in python 2 for the scratch extension.<commit_after>import os
import re
from jinja2 import Template
DEFAULT_TEMPLATE = os.path.join(os.path.dirname(__file__), 'extension.tpl.js')
supported_modules = [
'button',
'dynamixel',
'l0_dc_motor',
'l0_gpio',
'l0_servo',
'led',
'potard',
]
def find_modules(state, type):
return [
str(m['alias'])
for m in state['modules']
if m['type'] == type
]
def find_xl320(state, dxl):
dxl = next(filter(lambda mod: mod['alias'] == dxl, state['modules']))
motors = [m for m in dxl if re.match(r'm[0-9]+', m)]
return motors
def generate_extension(name,
robot,
host,
port,
template=DEFAULT_TEMPLATE):
context = {}
context = {
'name': name,
'host': host,
'port': port,
}
context.update({
type: find_modules(robot._state, type)
for type in supported_modules
})
if context['dynamixel']:
# TODO: This should be done for every dxl controller!
context['xl_320'] = find_xl320(robot._state, context['dynamixel'][0])
with open(template) as f:
tpl = Template(f.read())
ext = tpl.render(**context)
return ext
|
22548b3d45b13361fe1df9af8897e38c61bad894
|
setup.py
|
setup.py
|
import os
import sys
import re
from setuptools import setup, find_packages
v = open(os.path.join(os.path.dirname(__file__), 'dogpile', 'cache', '__init__.py'))
VERSION = re.compile(r".*__version__ = '(.*?)'", re.S).match(v.read()).group(1)
v.close()
readme = os.path.join(os.path.dirname(__file__), 'README.rst')
setup(name='dogpile.cache',
version=VERSION,
description="A caching front-end based on the Dogpile lock.",
long_description=open(readme).read(),
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
keywords='caching',
author='Mike Bayer',
author_email='mike_mp@zzzcomputing.com',
url='http://bitbucket.org/zzzeek/dogpile.cache',
license='BSD',
packages=find_packages('.', exclude=['ez_setup', 'tests*']),
namespace_packages=['dogpile'],
entry_points="""
[mako.cache]
dogpile = dogpile.cache.plugins.mako:MakoPlugin
""",
zip_safe=False,
install_requires=['dogpile.core>=0.4.1'],
test_suite='nose.collector',
tests_require=['nose', 'mock'],
)
|
import os
import sys
import re
from setuptools import setup, find_packages
v = open(os.path.join(os.path.dirname(__file__), 'dogpile', 'cache', '__init__.py'))
VERSION = re.compile(r".*__version__ = '(.*?)'", re.S).match(v.read()).group(1)
v.close()
readme = os.path.join(os.path.dirname(__file__), 'README.rst')
setup(name='dogpile.cache',
version=VERSION,
description="A caching front-end based on the Dogpile lock.",
long_description=open(readme).read(),
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
keywords='caching',
author='Mike Bayer',
author_email='mike_mp@zzzcomputing.com',
url='http://bitbucket.org/zzzeek/dogpile.cache',
license='BSD',
packages=find_packages('.', exclude=['ez_setup', 'tests*']),
namespace_packages=['dogpile'],
entry_points="""
[mako.cache]
dogpile.cache = dogpile.cache.plugins.mako_cache:MakoPlugin
""",
zip_safe=False,
install_requires=['dogpile.core>=0.4.1'],
test_suite='nose.collector',
tests_require=['nose', 'mock'],
)
|
Fix entry point for Mako.
|
Fix entry point for Mako.
|
Python
|
bsd-3-clause
|
thruflo/dogpile.cache,thruflo/dogpile.cache
|
import os
import sys
import re
from setuptools import setup, find_packages
v = open(os.path.join(os.path.dirname(__file__), 'dogpile', 'cache', '__init__.py'))
VERSION = re.compile(r".*__version__ = '(.*?)'", re.S).match(v.read()).group(1)
v.close()
readme = os.path.join(os.path.dirname(__file__), 'README.rst')
setup(name='dogpile.cache',
version=VERSION,
description="A caching front-end based on the Dogpile lock.",
long_description=open(readme).read(),
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
keywords='caching',
author='Mike Bayer',
author_email='mike_mp@zzzcomputing.com',
url='http://bitbucket.org/zzzeek/dogpile.cache',
license='BSD',
packages=find_packages('.', exclude=['ez_setup', 'tests*']),
namespace_packages=['dogpile'],
entry_points="""
[mako.cache]
dogpile = dogpile.cache.plugins.mako:MakoPlugin
""",
zip_safe=False,
install_requires=['dogpile.core>=0.4.1'],
test_suite='nose.collector',
tests_require=['nose', 'mock'],
)
Fix entry point for Mako.
|
import os
import sys
import re
from setuptools import setup, find_packages
v = open(os.path.join(os.path.dirname(__file__), 'dogpile', 'cache', '__init__.py'))
VERSION = re.compile(r".*__version__ = '(.*?)'", re.S).match(v.read()).group(1)
v.close()
readme = os.path.join(os.path.dirname(__file__), 'README.rst')
setup(name='dogpile.cache',
version=VERSION,
description="A caching front-end based on the Dogpile lock.",
long_description=open(readme).read(),
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
keywords='caching',
author='Mike Bayer',
author_email='mike_mp@zzzcomputing.com',
url='http://bitbucket.org/zzzeek/dogpile.cache',
license='BSD',
packages=find_packages('.', exclude=['ez_setup', 'tests*']),
namespace_packages=['dogpile'],
entry_points="""
[mako.cache]
dogpile.cache = dogpile.cache.plugins.mako_cache:MakoPlugin
""",
zip_safe=False,
install_requires=['dogpile.core>=0.4.1'],
test_suite='nose.collector',
tests_require=['nose', 'mock'],
)
|
<commit_before>import os
import sys
import re
from setuptools import setup, find_packages
v = open(os.path.join(os.path.dirname(__file__), 'dogpile', 'cache', '__init__.py'))
VERSION = re.compile(r".*__version__ = '(.*?)'", re.S).match(v.read()).group(1)
v.close()
readme = os.path.join(os.path.dirname(__file__), 'README.rst')
setup(name='dogpile.cache',
version=VERSION,
description="A caching front-end based on the Dogpile lock.",
long_description=open(readme).read(),
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
keywords='caching',
author='Mike Bayer',
author_email='mike_mp@zzzcomputing.com',
url='http://bitbucket.org/zzzeek/dogpile.cache',
license='BSD',
packages=find_packages('.', exclude=['ez_setup', 'tests*']),
namespace_packages=['dogpile'],
entry_points="""
[mako.cache]
dogpile = dogpile.cache.plugins.mako:MakoPlugin
""",
zip_safe=False,
install_requires=['dogpile.core>=0.4.1'],
test_suite='nose.collector',
tests_require=['nose', 'mock'],
)
<commit_msg>Fix entry point for Mako.<commit_after>
|
import os
import sys
import re
from setuptools import setup, find_packages
v = open(os.path.join(os.path.dirname(__file__), 'dogpile', 'cache', '__init__.py'))
VERSION = re.compile(r".*__version__ = '(.*?)'", re.S).match(v.read()).group(1)
v.close()
readme = os.path.join(os.path.dirname(__file__), 'README.rst')
setup(name='dogpile.cache',
version=VERSION,
description="A caching front-end based on the Dogpile lock.",
long_description=open(readme).read(),
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
keywords='caching',
author='Mike Bayer',
author_email='mike_mp@zzzcomputing.com',
url='http://bitbucket.org/zzzeek/dogpile.cache',
license='BSD',
packages=find_packages('.', exclude=['ez_setup', 'tests*']),
namespace_packages=['dogpile'],
entry_points="""
[mako.cache]
dogpile.cache = dogpile.cache.plugins.mako_cache:MakoPlugin
""",
zip_safe=False,
install_requires=['dogpile.core>=0.4.1'],
test_suite='nose.collector',
tests_require=['nose', 'mock'],
)
|
import os
import sys
import re
from setuptools import setup, find_packages
v = open(os.path.join(os.path.dirname(__file__), 'dogpile', 'cache', '__init__.py'))
VERSION = re.compile(r".*__version__ = '(.*?)'", re.S).match(v.read()).group(1)
v.close()
readme = os.path.join(os.path.dirname(__file__), 'README.rst')
setup(name='dogpile.cache',
version=VERSION,
description="A caching front-end based on the Dogpile lock.",
long_description=open(readme).read(),
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
keywords='caching',
author='Mike Bayer',
author_email='mike_mp@zzzcomputing.com',
url='http://bitbucket.org/zzzeek/dogpile.cache',
license='BSD',
packages=find_packages('.', exclude=['ez_setup', 'tests*']),
namespace_packages=['dogpile'],
entry_points="""
[mako.cache]
dogpile = dogpile.cache.plugins.mako:MakoPlugin
""",
zip_safe=False,
install_requires=['dogpile.core>=0.4.1'],
test_suite='nose.collector',
tests_require=['nose', 'mock'],
)
Fix entry point for Mako.import os
import sys
import re
from setuptools import setup, find_packages
v = open(os.path.join(os.path.dirname(__file__), 'dogpile', 'cache', '__init__.py'))
VERSION = re.compile(r".*__version__ = '(.*?)'", re.S).match(v.read()).group(1)
v.close()
readme = os.path.join(os.path.dirname(__file__), 'README.rst')
setup(name='dogpile.cache',
version=VERSION,
description="A caching front-end based on the Dogpile lock.",
long_description=open(readme).read(),
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
keywords='caching',
author='Mike Bayer',
author_email='mike_mp@zzzcomputing.com',
url='http://bitbucket.org/zzzeek/dogpile.cache',
license='BSD',
packages=find_packages('.', exclude=['ez_setup', 'tests*']),
namespace_packages=['dogpile'],
entry_points="""
[mako.cache]
dogpile.cache = dogpile.cache.plugins.mako_cache:MakoPlugin
""",
zip_safe=False,
install_requires=['dogpile.core>=0.4.1'],
test_suite='nose.collector',
tests_require=['nose', 'mock'],
)
|
<commit_before>import os
import sys
import re
from setuptools import setup, find_packages
v = open(os.path.join(os.path.dirname(__file__), 'dogpile', 'cache', '__init__.py'))
VERSION = re.compile(r".*__version__ = '(.*?)'", re.S).match(v.read()).group(1)
v.close()
readme = os.path.join(os.path.dirname(__file__), 'README.rst')
setup(name='dogpile.cache',
version=VERSION,
description="A caching front-end based on the Dogpile lock.",
long_description=open(readme).read(),
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
keywords='caching',
author='Mike Bayer',
author_email='mike_mp@zzzcomputing.com',
url='http://bitbucket.org/zzzeek/dogpile.cache',
license='BSD',
packages=find_packages('.', exclude=['ez_setup', 'tests*']),
namespace_packages=['dogpile'],
entry_points="""
[mako.cache]
dogpile = dogpile.cache.plugins.mako:MakoPlugin
""",
zip_safe=False,
install_requires=['dogpile.core>=0.4.1'],
test_suite='nose.collector',
tests_require=['nose', 'mock'],
)
<commit_msg>Fix entry point for Mako.<commit_after>import os
import sys
import re
from setuptools import setup, find_packages
v = open(os.path.join(os.path.dirname(__file__), 'dogpile', 'cache', '__init__.py'))
VERSION = re.compile(r".*__version__ = '(.*?)'", re.S).match(v.read()).group(1)
v.close()
readme = os.path.join(os.path.dirname(__file__), 'README.rst')
setup(name='dogpile.cache',
version=VERSION,
description="A caching front-end based on the Dogpile lock.",
long_description=open(readme).read(),
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
keywords='caching',
author='Mike Bayer',
author_email='mike_mp@zzzcomputing.com',
url='http://bitbucket.org/zzzeek/dogpile.cache',
license='BSD',
packages=find_packages('.', exclude=['ez_setup', 'tests*']),
namespace_packages=['dogpile'],
entry_points="""
[mako.cache]
dogpile.cache = dogpile.cache.plugins.mako_cache:MakoPlugin
""",
zip_safe=False,
install_requires=['dogpile.core>=0.4.1'],
test_suite='nose.collector',
tests_require=['nose', 'mock'],
)
|
298c187737c1cb1207ce67fdfc47ca214b341006
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from setuptools import setup
from PyFileMaker import __version__
setup(
name='PyFileMaker',
version=__version__,
description='Python Object Wrapper for FileMaker Server XML Interface',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Database :: Database Engines/Servers',
'Topic :: Software Development :: Libraries :: Python Modules',
],
keywords=['FileMaker'],
author='Klokan Petr Pridal, Pieter Claerhout, Marcin Kawa',
author_email='klokan@klokan.cz, pieter@yellowduck.be, kawa.macin@gmail.com',
url='https://github.com/aeguana/PyFileMaker',
download_url='https://github.com/aeguana/PyFileMaker/releases',
license='http://www.opensource.org/licenses/bsd-license.php',
platforms = ['any'],
packages=['PyFileMaker'],
install_requires=['requests'],
)
|
#!/usr/bin/env python
from setuptools import setup
setup(
name='PyFileMaker',
version="3.3",
description='Python Object Wrapper for FileMaker Server XML Interface',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Database :: Database Engines/Servers',
'Topic :: Software Development :: Libraries :: Python Modules',
],
keywords=['FileMaker'],
author='Klokan Petr Pridal, Pieter Claerhout, Marcin Kawa',
author_email='klokan@klokan.cz, pieter@yellowduck.be, kawa.macin@gmail.com',
url='https://github.com/aeguana/PyFileMaker',
download_url='https://github.com/aeguana/PyFileMaker/releases',
license='http://www.opensource.org/licenses/bsd-license.php',
platforms = ['any'],
packages=['PyFileMaker'],
install_requires=['requests'],
)
|
Remove package import to prevent missing dependencies error
|
[fix_installation] Remove package import to prevent missing dependencies error
|
Python
|
bsd-3-clause
|
aeguana/PyFileMaker
|
#!/usr/bin/env python
from setuptools import setup
from PyFileMaker import __version__
setup(
name='PyFileMaker',
version=__version__,
description='Python Object Wrapper for FileMaker Server XML Interface',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Database :: Database Engines/Servers',
'Topic :: Software Development :: Libraries :: Python Modules',
],
keywords=['FileMaker'],
author='Klokan Petr Pridal, Pieter Claerhout, Marcin Kawa',
author_email='klokan@klokan.cz, pieter@yellowduck.be, kawa.macin@gmail.com',
url='https://github.com/aeguana/PyFileMaker',
download_url='https://github.com/aeguana/PyFileMaker/releases',
license='http://www.opensource.org/licenses/bsd-license.php',
platforms = ['any'],
packages=['PyFileMaker'],
install_requires=['requests'],
)
[fix_installation] Remove package import to prevent missing dependencies error
|
#!/usr/bin/env python
from setuptools import setup
setup(
name='PyFileMaker',
version="3.3",
description='Python Object Wrapper for FileMaker Server XML Interface',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Database :: Database Engines/Servers',
'Topic :: Software Development :: Libraries :: Python Modules',
],
keywords=['FileMaker'],
author='Klokan Petr Pridal, Pieter Claerhout, Marcin Kawa',
author_email='klokan@klokan.cz, pieter@yellowduck.be, kawa.macin@gmail.com',
url='https://github.com/aeguana/PyFileMaker',
download_url='https://github.com/aeguana/PyFileMaker/releases',
license='http://www.opensource.org/licenses/bsd-license.php',
platforms = ['any'],
packages=['PyFileMaker'],
install_requires=['requests'],
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup
from PyFileMaker import __version__
setup(
name='PyFileMaker',
version=__version__,
description='Python Object Wrapper for FileMaker Server XML Interface',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Database :: Database Engines/Servers',
'Topic :: Software Development :: Libraries :: Python Modules',
],
keywords=['FileMaker'],
author='Klokan Petr Pridal, Pieter Claerhout, Marcin Kawa',
author_email='klokan@klokan.cz, pieter@yellowduck.be, kawa.macin@gmail.com',
url='https://github.com/aeguana/PyFileMaker',
download_url='https://github.com/aeguana/PyFileMaker/releases',
license='http://www.opensource.org/licenses/bsd-license.php',
platforms = ['any'],
packages=['PyFileMaker'],
install_requires=['requests'],
)
<commit_msg>[fix_installation] Remove package import to prevent missing dependencies error<commit_after>
|
#!/usr/bin/env python
from setuptools import setup
setup(
name='PyFileMaker',
version="3.3",
description='Python Object Wrapper for FileMaker Server XML Interface',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Database :: Database Engines/Servers',
'Topic :: Software Development :: Libraries :: Python Modules',
],
keywords=['FileMaker'],
author='Klokan Petr Pridal, Pieter Claerhout, Marcin Kawa',
author_email='klokan@klokan.cz, pieter@yellowduck.be, kawa.macin@gmail.com',
url='https://github.com/aeguana/PyFileMaker',
download_url='https://github.com/aeguana/PyFileMaker/releases',
license='http://www.opensource.org/licenses/bsd-license.php',
platforms = ['any'],
packages=['PyFileMaker'],
install_requires=['requests'],
)
|
#!/usr/bin/env python
from setuptools import setup
from PyFileMaker import __version__
setup(
name='PyFileMaker',
version=__version__,
description='Python Object Wrapper for FileMaker Server XML Interface',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Database :: Database Engines/Servers',
'Topic :: Software Development :: Libraries :: Python Modules',
],
keywords=['FileMaker'],
author='Klokan Petr Pridal, Pieter Claerhout, Marcin Kawa',
author_email='klokan@klokan.cz, pieter@yellowduck.be, kawa.macin@gmail.com',
url='https://github.com/aeguana/PyFileMaker',
download_url='https://github.com/aeguana/PyFileMaker/releases',
license='http://www.opensource.org/licenses/bsd-license.php',
platforms = ['any'],
packages=['PyFileMaker'],
install_requires=['requests'],
)
[fix_installation] Remove package import to prevent missing dependencies error#!/usr/bin/env python
from setuptools import setup
setup(
name='PyFileMaker',
version="3.3",
description='Python Object Wrapper for FileMaker Server XML Interface',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Database :: Database Engines/Servers',
'Topic :: Software Development :: Libraries :: Python Modules',
],
keywords=['FileMaker'],
author='Klokan Petr Pridal, Pieter Claerhout, Marcin Kawa',
author_email='klokan@klokan.cz, pieter@yellowduck.be, kawa.macin@gmail.com',
url='https://github.com/aeguana/PyFileMaker',
download_url='https://github.com/aeguana/PyFileMaker/releases',
license='http://www.opensource.org/licenses/bsd-license.php',
platforms = ['any'],
packages=['PyFileMaker'],
install_requires=['requests'],
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup
from PyFileMaker import __version__
setup(
name='PyFileMaker',
version=__version__,
description='Python Object Wrapper for FileMaker Server XML Interface',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Database :: Database Engines/Servers',
'Topic :: Software Development :: Libraries :: Python Modules',
],
keywords=['FileMaker'],
author='Klokan Petr Pridal, Pieter Claerhout, Marcin Kawa',
author_email='klokan@klokan.cz, pieter@yellowduck.be, kawa.macin@gmail.com',
url='https://github.com/aeguana/PyFileMaker',
download_url='https://github.com/aeguana/PyFileMaker/releases',
license='http://www.opensource.org/licenses/bsd-license.php',
platforms = ['any'],
packages=['PyFileMaker'],
install_requires=['requests'],
)
<commit_msg>[fix_installation] Remove package import to prevent missing dependencies error<commit_after>#!/usr/bin/env python
from setuptools import setup
setup(
name='PyFileMaker',
version="3.3",
description='Python Object Wrapper for FileMaker Server XML Interface',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Database :: Database Engines/Servers',
'Topic :: Software Development :: Libraries :: Python Modules',
],
keywords=['FileMaker'],
author='Klokan Petr Pridal, Pieter Claerhout, Marcin Kawa',
author_email='klokan@klokan.cz, pieter@yellowduck.be, kawa.macin@gmail.com',
url='https://github.com/aeguana/PyFileMaker',
download_url='https://github.com/aeguana/PyFileMaker/releases',
license='http://www.opensource.org/licenses/bsd-license.php',
platforms = ['any'],
packages=['PyFileMaker'],
install_requires=['requests'],
)
|
4e7399261abf6999ef248e7487c116830c1b0bf4
|
app/receipt.py
|
app/receipt.py
|
from app import settings
import logging
from structlog import wrap_logger
import base64
import os
from jinja2 import Environment, FileSystemLoader
env = Environment(loader=FileSystemLoader('%s/templates/' % os.path.dirname(__file__)))
logger = wrap_logger(logging.getLogger(__name__))
def get_receipt_endpoint(decrypted_json):
try:
statistical_unit_id = decrypted_json['metadata']['ru_ref']
exercise_sid = decrypted_json['collection']['exercise_sid']
except KeyError as e:
logger.error("Unable to get required data from json", exception=repr(e))
return None
host = settings.RECEIPT_HOST
path = settings.RECEIPT_PATH
logger.debug("RECEIPT|HOST/PATH: %s/%s" % (host, path))
uri = path + "/" + statistical_unit_id + "/collectionexercises/" + exercise_sid + "/receipts"
endpoint = host + "/" + uri
logger.debug("RECEIPT|ENDPOINT: %s" % endpoint)
return endpoint
def get_receipt_xml(decrypted_json):
try:
template = env.get_template('receipt.xml.tmpl')
output = template.render(survey=decrypted_json)
return output
except Exception as e:
logger.error("Unable to render xml receipt", exception=repr(e))
return None
def get_receipt_headers():
headers = {}
auth = settings.RECEIPT_USER + ":" + settings.RECEIPT_PASS
encoded = base64.b64encode(bytes(auth, 'utf-8'))
headers['Authorization'] = "Basic " + str(encoded)
headers['Content-Type'] = "application/vnd.collections+xml"
return headers
|
from app import settings
import logging
from structlog import wrap_logger
import base64
import os
from jinja2 import Environment, FileSystemLoader
env = Environment(loader=FileSystemLoader('%s/templates/' % os.path.dirname(__file__)))
logger = wrap_logger(logging.getLogger(__name__))
def get_receipt_endpoint(decrypted_json):
try:
statistical_unit_id = decrypted_json['metadata']['ru_ref']
exercise_sid = decrypted_json['collection']['exercise_sid']
except KeyError as e:
logger.error("Unable to get required data from json", exception=repr(e))
return None
host = settings.RECEIPT_HOST
path = settings.RECEIPT_PATH
logger.debug("RECEIPT|HOST/PATH: %s/%s" % (host, path))
uri = path + "/" + statistical_unit_id + "/collectionexercises/" + exercise_sid + "/receipts"
endpoint = host + "/" + uri
logger.debug("RECEIPT|ENDPOINT: %s" % endpoint)
return endpoint
def get_receipt_xml(decrypted_json):
try:
template = env.get_template('receipt.xml.tmpl')
output = template.render(survey=decrypted_json)
return output
except Exception as e:
logger.error("Unable to render xml receipt", exception=repr(e))
return None
def get_receipt_headers():
headers = {}
headers['Content-Type'] = "application/vnd.collections+xml"
return headers
|
Remove broken authorization header code
|
Remove broken authorization header code
The authorization header generation code in receipt.py was setting
the authorization header to a byte stream rather than a string
(b'...'). As requests provides a way to generate basic auth headers
using the auth param it makes more sense to use that.
|
Python
|
mit
|
ONSdigital/Perkin,ONSdigital/Perkin,ONSdigital/Perkin
|
from app import settings
import logging
from structlog import wrap_logger
import base64
import os
from jinja2 import Environment, FileSystemLoader
env = Environment(loader=FileSystemLoader('%s/templates/' % os.path.dirname(__file__)))
logger = wrap_logger(logging.getLogger(__name__))
def get_receipt_endpoint(decrypted_json):
try:
statistical_unit_id = decrypted_json['metadata']['ru_ref']
exercise_sid = decrypted_json['collection']['exercise_sid']
except KeyError as e:
logger.error("Unable to get required data from json", exception=repr(e))
return None
host = settings.RECEIPT_HOST
path = settings.RECEIPT_PATH
logger.debug("RECEIPT|HOST/PATH: %s/%s" % (host, path))
uri = path + "/" + statistical_unit_id + "/collectionexercises/" + exercise_sid + "/receipts"
endpoint = host + "/" + uri
logger.debug("RECEIPT|ENDPOINT: %s" % endpoint)
return endpoint
def get_receipt_xml(decrypted_json):
try:
template = env.get_template('receipt.xml.tmpl')
output = template.render(survey=decrypted_json)
return output
except Exception as e:
logger.error("Unable to render xml receipt", exception=repr(e))
return None
def get_receipt_headers():
headers = {}
auth = settings.RECEIPT_USER + ":" + settings.RECEIPT_PASS
encoded = base64.b64encode(bytes(auth, 'utf-8'))
headers['Authorization'] = "Basic " + str(encoded)
headers['Content-Type'] = "application/vnd.collections+xml"
return headers
Remove broken authorization header code
The authorization header generation code in receipt.py was setting
the authorization header to a byte stream rather than a string
(b'...'). As requests provides a way to generate basic auth headers
using the auth param it makes more sense to use that.
|
from app import settings
import logging
from structlog import wrap_logger
import base64
import os
from jinja2 import Environment, FileSystemLoader
env = Environment(loader=FileSystemLoader('%s/templates/' % os.path.dirname(__file__)))
logger = wrap_logger(logging.getLogger(__name__))
def get_receipt_endpoint(decrypted_json):
try:
statistical_unit_id = decrypted_json['metadata']['ru_ref']
exercise_sid = decrypted_json['collection']['exercise_sid']
except KeyError as e:
logger.error("Unable to get required data from json", exception=repr(e))
return None
host = settings.RECEIPT_HOST
path = settings.RECEIPT_PATH
logger.debug("RECEIPT|HOST/PATH: %s/%s" % (host, path))
uri = path + "/" + statistical_unit_id + "/collectionexercises/" + exercise_sid + "/receipts"
endpoint = host + "/" + uri
logger.debug("RECEIPT|ENDPOINT: %s" % endpoint)
return endpoint
def get_receipt_xml(decrypted_json):
try:
template = env.get_template('receipt.xml.tmpl')
output = template.render(survey=decrypted_json)
return output
except Exception as e:
logger.error("Unable to render xml receipt", exception=repr(e))
return None
def get_receipt_headers():
headers = {}
headers['Content-Type'] = "application/vnd.collections+xml"
return headers
|
<commit_before>from app import settings
import logging
from structlog import wrap_logger
import base64
import os
from jinja2 import Environment, FileSystemLoader
env = Environment(loader=FileSystemLoader('%s/templates/' % os.path.dirname(__file__)))
logger = wrap_logger(logging.getLogger(__name__))
def get_receipt_endpoint(decrypted_json):
try:
statistical_unit_id = decrypted_json['metadata']['ru_ref']
exercise_sid = decrypted_json['collection']['exercise_sid']
except KeyError as e:
logger.error("Unable to get required data from json", exception=repr(e))
return None
host = settings.RECEIPT_HOST
path = settings.RECEIPT_PATH
logger.debug("RECEIPT|HOST/PATH: %s/%s" % (host, path))
uri = path + "/" + statistical_unit_id + "/collectionexercises/" + exercise_sid + "/receipts"
endpoint = host + "/" + uri
logger.debug("RECEIPT|ENDPOINT: %s" % endpoint)
return endpoint
def get_receipt_xml(decrypted_json):
try:
template = env.get_template('receipt.xml.tmpl')
output = template.render(survey=decrypted_json)
return output
except Exception as e:
logger.error("Unable to render xml receipt", exception=repr(e))
return None
def get_receipt_headers():
headers = {}
auth = settings.RECEIPT_USER + ":" + settings.RECEIPT_PASS
encoded = base64.b64encode(bytes(auth, 'utf-8'))
headers['Authorization'] = "Basic " + str(encoded)
headers['Content-Type'] = "application/vnd.collections+xml"
return headers
<commit_msg>Remove broken authorization header code
The authorization header generation code in receipt.py was setting
the authorization header to a byte stream rather than a string
(b'...'). As requests provides a way to generate basic auth headers
using the auth param it makes more sense to use that.<commit_after>
|
from app import settings
import logging
from structlog import wrap_logger
import base64
import os
from jinja2 import Environment, FileSystemLoader
env = Environment(loader=FileSystemLoader('%s/templates/' % os.path.dirname(__file__)))
logger = wrap_logger(logging.getLogger(__name__))
def get_receipt_endpoint(decrypted_json):
try:
statistical_unit_id = decrypted_json['metadata']['ru_ref']
exercise_sid = decrypted_json['collection']['exercise_sid']
except KeyError as e:
logger.error("Unable to get required data from json", exception=repr(e))
return None
host = settings.RECEIPT_HOST
path = settings.RECEIPT_PATH
logger.debug("RECEIPT|HOST/PATH: %s/%s" % (host, path))
uri = path + "/" + statistical_unit_id + "/collectionexercises/" + exercise_sid + "/receipts"
endpoint = host + "/" + uri
logger.debug("RECEIPT|ENDPOINT: %s" % endpoint)
return endpoint
def get_receipt_xml(decrypted_json):
try:
template = env.get_template('receipt.xml.tmpl')
output = template.render(survey=decrypted_json)
return output
except Exception as e:
logger.error("Unable to render xml receipt", exception=repr(e))
return None
def get_receipt_headers():
headers = {}
headers['Content-Type'] = "application/vnd.collections+xml"
return headers
|
from app import settings
import logging
from structlog import wrap_logger
import base64
import os
from jinja2 import Environment, FileSystemLoader
env = Environment(loader=FileSystemLoader('%s/templates/' % os.path.dirname(__file__)))
logger = wrap_logger(logging.getLogger(__name__))
def get_receipt_endpoint(decrypted_json):
try:
statistical_unit_id = decrypted_json['metadata']['ru_ref']
exercise_sid = decrypted_json['collection']['exercise_sid']
except KeyError as e:
logger.error("Unable to get required data from json", exception=repr(e))
return None
host = settings.RECEIPT_HOST
path = settings.RECEIPT_PATH
logger.debug("RECEIPT|HOST/PATH: %s/%s" % (host, path))
uri = path + "/" + statistical_unit_id + "/collectionexercises/" + exercise_sid + "/receipts"
endpoint = host + "/" + uri
logger.debug("RECEIPT|ENDPOINT: %s" % endpoint)
return endpoint
def get_receipt_xml(decrypted_json):
try:
template = env.get_template('receipt.xml.tmpl')
output = template.render(survey=decrypted_json)
return output
except Exception as e:
logger.error("Unable to render xml receipt", exception=repr(e))
return None
def get_receipt_headers():
headers = {}
auth = settings.RECEIPT_USER + ":" + settings.RECEIPT_PASS
encoded = base64.b64encode(bytes(auth, 'utf-8'))
headers['Authorization'] = "Basic " + str(encoded)
headers['Content-Type'] = "application/vnd.collections+xml"
return headers
Remove broken authorization header code
The authorization header generation code in receipt.py was setting
the authorization header to a byte stream rather than a string
(b'...'). As requests provides a way to generate basic auth headers
using the auth param it makes more sense to use that.from app import settings
import logging
from structlog import wrap_logger
import base64
import os
from jinja2 import Environment, FileSystemLoader
env = Environment(loader=FileSystemLoader('%s/templates/' % os.path.dirname(__file__)))
logger = wrap_logger(logging.getLogger(__name__))
def get_receipt_endpoint(decrypted_json):
try:
statistical_unit_id = decrypted_json['metadata']['ru_ref']
exercise_sid = decrypted_json['collection']['exercise_sid']
except KeyError as e:
logger.error("Unable to get required data from json", exception=repr(e))
return None
host = settings.RECEIPT_HOST
path = settings.RECEIPT_PATH
logger.debug("RECEIPT|HOST/PATH: %s/%s" % (host, path))
uri = path + "/" + statistical_unit_id + "/collectionexercises/" + exercise_sid + "/receipts"
endpoint = host + "/" + uri
logger.debug("RECEIPT|ENDPOINT: %s" % endpoint)
return endpoint
def get_receipt_xml(decrypted_json):
try:
template = env.get_template('receipt.xml.tmpl')
output = template.render(survey=decrypted_json)
return output
except Exception as e:
logger.error("Unable to render xml receipt", exception=repr(e))
return None
def get_receipt_headers():
headers = {}
headers['Content-Type'] = "application/vnd.collections+xml"
return headers
|
<commit_before>from app import settings
import logging
from structlog import wrap_logger
import base64
import os
from jinja2 import Environment, FileSystemLoader
env = Environment(loader=FileSystemLoader('%s/templates/' % os.path.dirname(__file__)))
logger = wrap_logger(logging.getLogger(__name__))
def get_receipt_endpoint(decrypted_json):
try:
statistical_unit_id = decrypted_json['metadata']['ru_ref']
exercise_sid = decrypted_json['collection']['exercise_sid']
except KeyError as e:
logger.error("Unable to get required data from json", exception=repr(e))
return None
host = settings.RECEIPT_HOST
path = settings.RECEIPT_PATH
logger.debug("RECEIPT|HOST/PATH: %s/%s" % (host, path))
uri = path + "/" + statistical_unit_id + "/collectionexercises/" + exercise_sid + "/receipts"
endpoint = host + "/" + uri
logger.debug("RECEIPT|ENDPOINT: %s" % endpoint)
return endpoint
def get_receipt_xml(decrypted_json):
try:
template = env.get_template('receipt.xml.tmpl')
output = template.render(survey=decrypted_json)
return output
except Exception as e:
logger.error("Unable to render xml receipt", exception=repr(e))
return None
def get_receipt_headers():
headers = {}
auth = settings.RECEIPT_USER + ":" + settings.RECEIPT_PASS
encoded = base64.b64encode(bytes(auth, 'utf-8'))
headers['Authorization'] = "Basic " + str(encoded)
headers['Content-Type'] = "application/vnd.collections+xml"
return headers
<commit_msg>Remove broken authorization header code
The authorization header generation code in receipt.py was setting
the authorization header to a byte stream rather than a string
(b'...'). As requests provides a way to generate basic auth headers
using the auth param it makes more sense to use that.<commit_after>from app import settings
import logging
from structlog import wrap_logger
import base64
import os
from jinja2 import Environment, FileSystemLoader
env = Environment(loader=FileSystemLoader('%s/templates/' % os.path.dirname(__file__)))
logger = wrap_logger(logging.getLogger(__name__))
def get_receipt_endpoint(decrypted_json):
try:
statistical_unit_id = decrypted_json['metadata']['ru_ref']
exercise_sid = decrypted_json['collection']['exercise_sid']
except KeyError as e:
logger.error("Unable to get required data from json", exception=repr(e))
return None
host = settings.RECEIPT_HOST
path = settings.RECEIPT_PATH
logger.debug("RECEIPT|HOST/PATH: %s/%s" % (host, path))
uri = path + "/" + statistical_unit_id + "/collectionexercises/" + exercise_sid + "/receipts"
endpoint = host + "/" + uri
logger.debug("RECEIPT|ENDPOINT: %s" % endpoint)
return endpoint
def get_receipt_xml(decrypted_json):
try:
template = env.get_template('receipt.xml.tmpl')
output = template.render(survey=decrypted_json)
return output
except Exception as e:
logger.error("Unable to render xml receipt", exception=repr(e))
return None
def get_receipt_headers():
headers = {}
headers['Content-Type'] = "application/vnd.collections+xml"
return headers
|
fccf3df85eb79ea7f270e454f5bb9eda162985f9
|
test_api_project/test_api_project/autocomplete_light_registry.py
|
test_api_project/test_api_project/autocomplete_light_registry.py
|
import autocomplete_light
from cities_light.contrib.autocomplete_light_restframework import RemoteCountryChannel, RemoteCityChannel
from cities_light.models import City, Country
class RemoteCountryChannel(RemoteCountryChannel):
source_url = 'http://localhost:8000/cities_light/country/'
class RemoteCityChannel(RemoteCityChannel):
source_url = 'http://localhost:8000/cities_light/city/'
autocomplete_light.register(Country, RemoteCountryChannel)
autocomplete_light.register(City, RemoteCityChannel)
|
import autocomplete_light
from cities_light.contrib.autocomplete_light_restframework import RemoteCountryChannel, RemoteCityChannel
from cities_light.models import City, Country
autocomplete_light.register(Country, RemoteCountryChannel,
source_url = 'http://localhost:8000/cities_light/country/')
autocomplete_light.register(City, RemoteCityChannel,
source_url = 'http://localhost:8000/cities_light/city/')
|
Update example to match current register signature, avoids subclassing
|
Update example to match current register signature, avoids subclassing
|
Python
|
mit
|
jonashaag/django-autocomplete-light,Visgean/django-autocomplete-light,dsanders11/django-autocomplete-light,Eraldo/django-autocomplete-light,jonashaag/django-autocomplete-light,dsanders11/django-autocomplete-light,luzfcb/django-autocomplete-light,spookylukey/django-autocomplete-light,Eraldo/django-autocomplete-light,blueyed/django-autocomplete-light,Perkville/django-autocomplete-light,jonashaag/django-autocomplete-light,Visgean/django-autocomplete-light,Eraldo/django-autocomplete-light,blueyed/django-autocomplete-light,shubhamdipt/django-autocomplete-light,luzfcb/django-autocomplete-light,dsanders11/django-autocomplete-light,yourlabs/django-autocomplete-light,yourlabs/django-autocomplete-light,shubhamdipt/django-autocomplete-light,Perkville/django-autocomplete-light,luzfcb/django-autocomplete-light,dsanders11/django-autocomplete-light,Visgean/django-autocomplete-light,luzfcb/django-autocomplete-light,yourlabs/django-autocomplete-light,blueyed/django-autocomplete-light,Perkville/django-autocomplete-light,Perkville/django-autocomplete-light,shubhamdipt/django-autocomplete-light,Eraldo/django-autocomplete-light,spookylukey/django-autocomplete-light,Visgean/django-autocomplete-light,spookylukey/django-autocomplete-light,yourlabs/django-autocomplete-light,shubhamdipt/django-autocomplete-light
|
import autocomplete_light
from cities_light.contrib.autocomplete_light_restframework import RemoteCountryChannel, RemoteCityChannel
from cities_light.models import City, Country
class RemoteCountryChannel(RemoteCountryChannel):
source_url = 'http://localhost:8000/cities_light/country/'
class RemoteCityChannel(RemoteCityChannel):
source_url = 'http://localhost:8000/cities_light/city/'
autocomplete_light.register(Country, RemoteCountryChannel)
autocomplete_light.register(City, RemoteCityChannel)
Update example to match current register signature, avoids subclassing
|
import autocomplete_light
from cities_light.contrib.autocomplete_light_restframework import RemoteCountryChannel, RemoteCityChannel
from cities_light.models import City, Country
autocomplete_light.register(Country, RemoteCountryChannel,
source_url = 'http://localhost:8000/cities_light/country/')
autocomplete_light.register(City, RemoteCityChannel,
source_url = 'http://localhost:8000/cities_light/city/')
|
<commit_before>import autocomplete_light
from cities_light.contrib.autocomplete_light_restframework import RemoteCountryChannel, RemoteCityChannel
from cities_light.models import City, Country
class RemoteCountryChannel(RemoteCountryChannel):
source_url = 'http://localhost:8000/cities_light/country/'
class RemoteCityChannel(RemoteCityChannel):
source_url = 'http://localhost:8000/cities_light/city/'
autocomplete_light.register(Country, RemoteCountryChannel)
autocomplete_light.register(City, RemoteCityChannel)
<commit_msg>Update example to match current register signature, avoids subclassing<commit_after>
|
import autocomplete_light
from cities_light.contrib.autocomplete_light_restframework import RemoteCountryChannel, RemoteCityChannel
from cities_light.models import City, Country
autocomplete_light.register(Country, RemoteCountryChannel,
source_url = 'http://localhost:8000/cities_light/country/')
autocomplete_light.register(City, RemoteCityChannel,
source_url = 'http://localhost:8000/cities_light/city/')
|
import autocomplete_light
from cities_light.contrib.autocomplete_light_restframework import RemoteCountryChannel, RemoteCityChannel
from cities_light.models import City, Country
class RemoteCountryChannel(RemoteCountryChannel):
source_url = 'http://localhost:8000/cities_light/country/'
class RemoteCityChannel(RemoteCityChannel):
source_url = 'http://localhost:8000/cities_light/city/'
autocomplete_light.register(Country, RemoteCountryChannel)
autocomplete_light.register(City, RemoteCityChannel)
Update example to match current register signature, avoids subclassingimport autocomplete_light
from cities_light.contrib.autocomplete_light_restframework import RemoteCountryChannel, RemoteCityChannel
from cities_light.models import City, Country
autocomplete_light.register(Country, RemoteCountryChannel,
source_url = 'http://localhost:8000/cities_light/country/')
autocomplete_light.register(City, RemoteCityChannel,
source_url = 'http://localhost:8000/cities_light/city/')
|
<commit_before>import autocomplete_light
from cities_light.contrib.autocomplete_light_restframework import RemoteCountryChannel, RemoteCityChannel
from cities_light.models import City, Country
class RemoteCountryChannel(RemoteCountryChannel):
source_url = 'http://localhost:8000/cities_light/country/'
class RemoteCityChannel(RemoteCityChannel):
source_url = 'http://localhost:8000/cities_light/city/'
autocomplete_light.register(Country, RemoteCountryChannel)
autocomplete_light.register(City, RemoteCityChannel)
<commit_msg>Update example to match current register signature, avoids subclassing<commit_after>import autocomplete_light
from cities_light.contrib.autocomplete_light_restframework import RemoteCountryChannel, RemoteCityChannel
from cities_light.models import City, Country
autocomplete_light.register(Country, RemoteCountryChannel,
source_url = 'http://localhost:8000/cities_light/country/')
autocomplete_light.register(City, RemoteCityChannel,
source_url = 'http://localhost:8000/cities_light/city/')
|
6807bd35ee61eb7ff456ba03e432888ba9a241dc
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from setuptools import setup
"""
If you only want to install some of the scripts, you can manually copy them
to the /bin/ or /usr/bin directory and make them executable (chmod +x).
You can also comment out lines in the 'scripts' list below:
"""
scripts = [
'bin/ansi-colormap',
'bin/icanhazip',
'bin/wifipassword',
'bin/mint-post-install',
'bin/whoamiv'
]
config = {
'name': 'Linux Addons',
'author': 'Curtis Mattoon',
'author_email': 'cmattoon@cmattoon.com',
'scripts': scripts,
'packages': [
'lxlib'
]
}
setup(**config)
|
#!/usr/bin/env python
from setuptools import setup
"""
If you only want to install some of the scripts, you can manually copy them
to the /bin/ or /usr/bin directory and make them executable (chmod +x).
You can also comment out lines in the 'scripts' list below:
"""
scripts = [
'bin/ansi-colormap',
'bin/icanhazip',
'bin/wifipassword',
'bin/mint-post-install',
'bin/whoamiv'
'bin/hostmgr'
]
config = {
'name': 'Linux Addons',
'author': 'Curtis Mattoon',
'author_email': 'cmattoon@cmattoon.com',
'scripts': scripts,
'packages': [
'lxlib'
]
}
setup(**config)
|
Add hostmgr. Currently sorts/cleans /etc/hosts
|
Add hostmgr. Currently sorts/cleans /etc/hosts
|
Python
|
mit
|
cmattoon/linux-addons,cmattoon/linux-addons
|
#!/usr/bin/env python
from setuptools import setup
"""
If you only want to install some of the scripts, you can manually copy them
to the /bin/ or /usr/bin directory and make them executable (chmod +x).
You can also comment out lines in the 'scripts' list below:
"""
scripts = [
'bin/ansi-colormap',
'bin/icanhazip',
'bin/wifipassword',
'bin/mint-post-install',
'bin/whoamiv'
]
config = {
'name': 'Linux Addons',
'author': 'Curtis Mattoon',
'author_email': 'cmattoon@cmattoon.com',
'scripts': scripts,
'packages': [
'lxlib'
]
}
setup(**config)
Add hostmgr. Currently sorts/cleans /etc/hosts
|
#!/usr/bin/env python
from setuptools import setup
"""
If you only want to install some of the scripts, you can manually copy them
to the /bin/ or /usr/bin directory and make them executable (chmod +x).
You can also comment out lines in the 'scripts' list below:
"""
scripts = [
'bin/ansi-colormap',
'bin/icanhazip',
'bin/wifipassword',
'bin/mint-post-install',
'bin/whoamiv'
'bin/hostmgr'
]
config = {
'name': 'Linux Addons',
'author': 'Curtis Mattoon',
'author_email': 'cmattoon@cmattoon.com',
'scripts': scripts,
'packages': [
'lxlib'
]
}
setup(**config)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup
"""
If you only want to install some of the scripts, you can manually copy them
to the /bin/ or /usr/bin directory and make them executable (chmod +x).
You can also comment out lines in the 'scripts' list below:
"""
scripts = [
'bin/ansi-colormap',
'bin/icanhazip',
'bin/wifipassword',
'bin/mint-post-install',
'bin/whoamiv'
]
config = {
'name': 'Linux Addons',
'author': 'Curtis Mattoon',
'author_email': 'cmattoon@cmattoon.com',
'scripts': scripts,
'packages': [
'lxlib'
]
}
setup(**config)
<commit_msg>Add hostmgr. Currently sorts/cleans /etc/hosts<commit_after>
|
#!/usr/bin/env python
from setuptools import setup
"""
If you only want to install some of the scripts, you can manually copy them
to the /bin/ or /usr/bin directory and make them executable (chmod +x).
You can also comment out lines in the 'scripts' list below:
"""
scripts = [
'bin/ansi-colormap',
'bin/icanhazip',
'bin/wifipassword',
'bin/mint-post-install',
'bin/whoamiv'
'bin/hostmgr'
]
config = {
'name': 'Linux Addons',
'author': 'Curtis Mattoon',
'author_email': 'cmattoon@cmattoon.com',
'scripts': scripts,
'packages': [
'lxlib'
]
}
setup(**config)
|
#!/usr/bin/env python
from setuptools import setup
"""
If you only want to install some of the scripts, you can manually copy them
to the /bin/ or /usr/bin directory and make them executable (chmod +x).
You can also comment out lines in the 'scripts' list below:
"""
scripts = [
'bin/ansi-colormap',
'bin/icanhazip',
'bin/wifipassword',
'bin/mint-post-install',
'bin/whoamiv'
]
config = {
'name': 'Linux Addons',
'author': 'Curtis Mattoon',
'author_email': 'cmattoon@cmattoon.com',
'scripts': scripts,
'packages': [
'lxlib'
]
}
setup(**config)
Add hostmgr. Currently sorts/cleans /etc/hosts#!/usr/bin/env python
from setuptools import setup
"""
If you only want to install some of the scripts, you can manually copy them
to the /bin/ or /usr/bin directory and make them executable (chmod +x).
You can also comment out lines in the 'scripts' list below:
"""
scripts = [
'bin/ansi-colormap',
'bin/icanhazip',
'bin/wifipassword',
'bin/mint-post-install',
'bin/whoamiv'
'bin/hostmgr'
]
config = {
'name': 'Linux Addons',
'author': 'Curtis Mattoon',
'author_email': 'cmattoon@cmattoon.com',
'scripts': scripts,
'packages': [
'lxlib'
]
}
setup(**config)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup
"""
If you only want to install some of the scripts, you can manually copy them
to the /bin/ or /usr/bin directory and make them executable (chmod +x).
You can also comment out lines in the 'scripts' list below:
"""
scripts = [
'bin/ansi-colormap',
'bin/icanhazip',
'bin/wifipassword',
'bin/mint-post-install',
'bin/whoamiv'
]
config = {
'name': 'Linux Addons',
'author': 'Curtis Mattoon',
'author_email': 'cmattoon@cmattoon.com',
'scripts': scripts,
'packages': [
'lxlib'
]
}
setup(**config)
<commit_msg>Add hostmgr. Currently sorts/cleans /etc/hosts<commit_after>#!/usr/bin/env python
from setuptools import setup
"""
If you only want to install some of the scripts, you can manually copy them
to the /bin/ or /usr/bin directory and make them executable (chmod +x).
You can also comment out lines in the 'scripts' list below:
"""
scripts = [
'bin/ansi-colormap',
'bin/icanhazip',
'bin/wifipassword',
'bin/mint-post-install',
'bin/whoamiv'
'bin/hostmgr'
]
config = {
'name': 'Linux Addons',
'author': 'Curtis Mattoon',
'author_email': 'cmattoon@cmattoon.com',
'scripts': scripts,
'packages': [
'lxlib'
]
}
setup(**config)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.