Dataset columns (name, feature type, observed range):

commit          stringlengths   40 to 40
old_file        stringlengths   4 to 150
new_file        stringlengths   4 to 150
old_contents    stringlengths   0 to 3.26k
new_contents    stringlengths   1 to 4.43k
subject         stringlengths   15 to 501
message         stringlengths   15 to 4.06k
lang            stringclasses   4 values
license         stringclasses   13 values
repos           stringlengths   5 to 91.5k
diff            stringlengths   0 to 4.35k
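Each record that follows pairs a commit hash with the old and new contents of a single file, the commit subject and message, the language, the license, the repositories containing the commit, and a unified diff. As a minimal sketch of how rows with this schema might be consumed, the snippet below assumes the records have been exported to a local JSON Lines file named commits.jsonl (the file and its name are assumptions for illustration, not part of any documented tooling for this dataset) and tallies the Python rows by license:

```python
import json
from collections import Counter

def iter_records(path):
    """Yield one dict per line of a JSON Lines export whose keys match the
    columns above: commit, old_file, new_file, old_contents, new_contents,
    subject, message, lang, license, repos, and diff."""
    with open(path, encoding="utf-8") as fh:
        for line in fh:
            line = line.strip()
            if line:
                yield json.loads(line)

if __name__ == "__main__":
    # "commits.jsonl" is a hypothetical local export of rows like the ones shown below.
    licenses = Counter()
    for record in iter_records("commits.jsonl"):
        if record.get("lang") == "Python":
            licenses[record.get("license", "unknown")] += 1
            print(record["commit"][:8], record["subject"])
    print(licenses)
```

Streaming the file line by line keeps memory use flat even when individual cells such as old_contents or repos run to tens of kilobytes, which the length statistics above suggest they can.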
727f221767c662e95585f54e06c0c8b4e4a77d88
smartfile/exceptions.py
smartfile/exceptions.py
from requests.exceptions import ConnectionError class SmartFileException(Exception): pass class SmartFileConnException(SmartFileException): """ Exception for issues regarding a request. """ def __init__(self, exc, *args, **kwargs): self.exc = exc if isinstance(exc, ConnectionError): self.detail = exc.message.strerror else: self.detail = '{0}: {1}'.format(exc.__class__, exc) super(SmartFileConnException, self).__init__(*args, **kwargs) def __str__(self): return self.detail class SmartFileResponseException(SmartFileException): """ Exception for issues regarding a response. """ def __init__(self, response, *args, **kwargs): self.response = response self.status_code = response.status_code self.detail = response.json.get('detail', 'Check response for errors') super(SmartFileResponseException, self).__init__(*args, **kwargs) def __str__(self): return 'Response {0}: {1}'.format(self.status_code, self.detail)
from requests.exceptions import ConnectionError class SmartFileException(Exception): pass class SmartFileConnException(SmartFileException): """ Exception for issues regarding a request. """ def __init__(self, exc, *args, **kwargs): self.exc = exc if isinstance(exc, ConnectionError): self.detail = exc.message.strerror else: self.detail = u'{0}: {1}'.format(exc.__class__, exc) super(SmartFileConnException, self).__init__(*args, **kwargs) def __str__(self): return self.detail class SmartFileResponseException(SmartFileException): """ Exception for issues regarding a response. """ def __init__(self, response, *args, **kwargs): self.response = response self.status_code = response.status_code if not response.json or not 'detail' in response.json: self.detail = u'Check response for errors' else: self.detail = response.json['detail'] super(SmartFileResponseException, self).__init__(*args, **kwargs) def __str__(self): return 'Response {0}: {1}'.format(self.status_code, self.detail)
Handle responses without JSON or detail field
Handle responses without JSON or detail field Check the response for JSON and a detail field before trying to access them within SmartFileResponseException. This could occur if the server returns a 500.
Python
mit
smartfile/client-python
--- +++ @@ -12,7 +12,7 @@ if isinstance(exc, ConnectionError): self.detail = exc.message.strerror else: - self.detail = '{0}: {1}'.format(exc.__class__, exc) + self.detail = u'{0}: {1}'.format(exc.__class__, exc) super(SmartFileConnException, self).__init__(*args, **kwargs) def __str__(self): @@ -24,7 +24,10 @@ def __init__(self, response, *args, **kwargs): self.response = response self.status_code = response.status_code - self.detail = response.json.get('detail', 'Check response for errors') + if not response.json or not 'detail' in response.json: + self.detail = u'Check response for errors' + else: + self.detail = response.json['detail'] super(SmartFileResponseException, self).__init__(*args, **kwargs) def __str__(self):
1364470725a55232556d8856be7eb910f3376fb3
permuta/misc/union_find.py
permuta/misc/union_find.py
class UnionFind(object): """A collection of distjoint sets.""" def __init__(self, n = 0): """Creates a collection of n disjoint unit sets.""" self.p = [-1]*n self.leaders = set( i for i in range(n) ) def find(self, x): """Return the identifier of a representative element for the set containing the element with identifier x.""" if self.p[x] < 0: return x self.p[x] = self.find(self.p[x]) return self.p[x] def size(self, x): """Return the number of elements in the set containing the element with identifier x.""" return -self.p[self.find(x)] def unite(self, x, y): """Unite the two sets containing the elements with identifiers x and y, respectively.""" x = self.find(x) y = self.find(y) if x == y: return if self.size(x) > self.size(y): x,y = y,x self.p[y] += self.p[x] self.p[x] = y self.leaders.remove(x) def add(self): """Add a unit set containing a new element to the collection, and return the identifier of the new element.""" nid = len(self.p) self.p.append(nid) return nid
class UnionFind(object): """A collection of distjoint sets.""" def __init__(self, n = 0): """Creates a collection of n disjoint unit sets.""" self.p = [-1]*n self.leaders = set( i for i in range(n) ) def find(self, x): """Return the identifier of a representative element for the set containing the element with identifier x.""" if self.p[x] < 0: return x self.p[x] = self.find(self.p[x]) return self.p[x] def size(self, x): """Return the number of elements in the set containing the element with identifier x.""" return -self.p[self.find(x)] def unite(self, x, y): """Unite the two sets containing the elements with identifiers x and y, respectively.""" x = self.find(x) y = self.find(y) if x == y: return False if self.size(x) > self.size(y): x,y = y,x self.p[y] += self.p[x] self.p[x] = y self.leaders.remove(x) return True def add(self): """Add a unit set containing a new element to the collection, and return the identifier of the new element.""" nid = len(self.p) self.p.append(nid) return nid
Make UnionFind.unite return whether the operation was successful
Make UnionFind.unite return whether the operation was successful
Python
bsd-3-clause
PermutaTriangle/Permuta
--- +++ @@ -26,12 +26,13 @@ x = self.find(x) y = self.find(y) if x == y: - return + return False if self.size(x) > self.size(y): x,y = y,x self.p[y] += self.p[x] self.p[x] = y self.leaders.remove(x) + return True def add(self): """Add a unit set containing a new element to the collection, and
c90e9798881630c2f956e08901f5bd35d948df6d
salt_observer/management/commands/fetchpackages.py
salt_observer/management/commands/fetchpackages.py
from django.core.management.base import BaseCommand from django.utils import timezone from salt_observer.models import Minion from . import ApiCommand import json class Command(ApiCommand, BaseCommand): help = 'Fetch and save new data from all servers' def save_packages(self, api): print('Fetching packages ...') packages = api.get_server_module_data('pkg.list_pkgs') print('Fetching upgrades ...') upgrades = api.get_server_module_data('pkg.list_upgrades') for minion_fqdn, minion_packages in packages.items(): print('Handling {}'.format(minion_fqdn)) minion = Minion.objects.filter(fqdn=minion_fqdn).first() minion_data = json.loads(minion.data) minion_package_data = {} for minion_package_name, minion_package_version in minion_packages.items(): if type(upgrades.get(minion_fqdn, {})) != dict: del upgrades[minion_fqdn] minion_package_data.update({ minion_package_name: { 'version': minion_package_version, 'latest_version': upgrades.get(minion_fqdn, {}).get(minion_package_name, '') } }) minion_data['packages'] = minion_package_data minion.data = json.dumps(minion_data) minion.save() def handle(self, *args, **kwargs): api = super().handle(*args, **kwargs) self.save_packages(api) api.logout()
from django.core.management.base import BaseCommand from django.utils import timezone from salt_observer.models import Minion from . import ApiCommand import json class Command(ApiCommand, BaseCommand): help = 'Fetch and save new data from all servers' def save_packages(self, api): packages = api.get_server_module_data('pkg.list_pkgs') upgrades = api.get_server_module_data('pkg.list_upgrades') for minion_fqdn, minion_packages in packages.items(): minion = Minion.objects.filter(fqdn=minion_fqdn).first() minion_data = json.loads(minion.data) minion_package_data = {} for minion_package_name, minion_package_version in minion_packages.items(): if type(upgrades.get(minion_fqdn, {})) != dict: del upgrades[minion_fqdn] minion_package_data.update({ minion_package_name: { 'version': minion_package_version, 'latest_version': upgrades.get(minion_fqdn, {}).get(minion_package_name, '') } }) minion_data['packages'] = minion_package_data minion.data = json.dumps(minion_data) minion.save() def handle(self, *args, **kwargs): api = super().handle(*args, **kwargs) self.save_packages(api) api.logout()
Remove unwanted prints in management command
Remove unwanted prints in management command
Python
mit
hs-hannover/salt-observer,hs-hannover/salt-observer,hs-hannover/salt-observer
--- +++ @@ -11,14 +11,10 @@ help = 'Fetch and save new data from all servers' def save_packages(self, api): - print('Fetching packages ...') packages = api.get_server_module_data('pkg.list_pkgs') - print('Fetching upgrades ...') upgrades = api.get_server_module_data('pkg.list_upgrades') for minion_fqdn, minion_packages in packages.items(): - - print('Handling {}'.format(minion_fqdn)) minion = Minion.objects.filter(fqdn=minion_fqdn).first() minion_data = json.loads(minion.data)
fb488a27b16943709327c7a9f31c26aba418055f
lyricist/bsopener.py
lyricist/bsopener.py
from bs4 import BeautifulSoup from urllib.request import urlopen from urllib.request import Request from .const import constant class BSOpener(object): """ A wrapper arround urllib and BeautifulSoup used a helper for url requests """ # TODO: make this class a singleton class _Const(): """ Contains the constants used in BSOpener class """ @constant def HEADERS(): """ Headers to send with all url requests """ return {"Accept":"text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8", "User-Agent":"Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/47.0.2526.111 Safari/537.36"} def __init__(self): self.CONST = self._Const() def bsopen(self, url, headers=None): if headers is None: headers=self.CONST.HEADERS req = Request(url=url, headers=headers) return BeautifulSoup(urlopen(req), "html.parser")
from bs4 import BeautifulSoup from urllib.request import urlopen from urllib.request import Request from .const import constant class BSOpener(object): """ A wrapper arround urllib and BeautifulSoup used a helper for url requests """ # TODO: make this class a singleton class _Const(): """ Contains the constants used in BSOpener class """ @constant def HEADERS(): """ Headers to send with all url requests """ return {"Accept":"text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8", "User-Agent":"Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/47.0.2526.111 Safari/537.36"} def __init__(self): self.CONST = self._Const() def bsopen(self, url, headers=None): if headers is None: headers=self.CONST.HEADERS req = Request(url=url, headers=headers) try: html = urlopen(req) except urllib.error.HTTPError: print("WARNING: exception during opening url: " + url) return None return BeautifulSoup(html, "html.parser")
Add handler for case of HTTP errors case.
Add handler for case of HTTP errors case. Added a try/catch to url requests to handle errors.
Python
mit
iluxonchik/lyricist
--- +++ @@ -24,4 +24,10 @@ headers=self.CONST.HEADERS req = Request(url=url, headers=headers) - return BeautifulSoup(urlopen(req), "html.parser") + try: + html = urlopen(req) + except urllib.error.HTTPError: + print("WARNING: exception during opening url: " + url) + return None + + return BeautifulSoup(html, "html.parser")
a39a91b406be3965addc613021f0c94007c42cf6
matchzoo/__init__.py
matchzoo/__init__.py
from pathlib import Path USER_DIR = Path.expanduser(Path('~')).joinpath('.matchzoo') if not USER_DIR.exists(): USER_DIR.mkdir() USER_DATA_DIR = USER_DIR.joinpath('datasets') if not USER_DATA_DIR.exists(): USER_DATA_DIR.mkdir() from .logger import logger from .version import __version__ from .utils import * from . import processor_units from .processor_units import chain_transform, ProcessorUnit from .data_pack import DataPack from .data_pack import pack from .data_pack import load_data_pack from .data_pack import build_unit_from_data_pack from .data_pack import build_vocab_unit from .data_generator import DataGenerator from .data_generator import PairDataGenerator from .data_generator import DynamicDataGenerator from . import tasks from . import metrics from . import losses from . import engine from . import preprocessors from . import models from . import embedding from . import datasets from . import auto from .engine import load_model from .engine import load_preprocessor
from pathlib import Path USER_DIR = Path.expanduser(Path('~')).joinpath('.matchzoo') if not USER_DIR.exists(): USER_DIR.mkdir() USER_DATA_DIR = USER_DIR.joinpath('datasets') if not USER_DATA_DIR.exists(): USER_DATA_DIR.mkdir() from .logger import logger from .version import __version__ from .utils import * from . import processor_units from .processor_units import chain_transform, ProcessorUnit from .data_pack import DataPack from .data_pack import pack from .data_pack import load_data_pack from .data_pack import build_unit_from_data_pack from .data_pack import build_vocab_unit from .data_generator import DataGenerator from .data_generator import PairDataGenerator from .data_generator import DynamicDataGenerator from . import tasks from . import metrics from . import losses from . import engine from . import preprocessors from . import models from . import embedding from . import datasets from . import auto from .engine import load_model from .engine import load_preprocessor from .engine import callbacks
Add callbacks to matchzoo root scope.
Add callbacks to matchzoo root scope.
Python
apache-2.0
faneshion/MatchZoo,faneshion/MatchZoo
--- +++ @@ -36,3 +36,4 @@ from .engine import load_model from .engine import load_preprocessor +from .engine import callbacks
67c58e3941491d276318daf568354f1e17ef3892
omics/gsa/__init__.py
omics/gsa/__init__.py
"""Gene Set Analysis Module """ from GeneSetCollection import GeneSetCollection
"""Gene Set Analysis Module """ from GeneSetCollection import GeneSetCollect def enrichment(gene_list, gene_set, background, alternative="two-sided", verbose=True): """Gene set enrichment analysis by Fisher Exact Test. gene_list : query gene list gene_set : predefined gene set background : background gene set alternative: {'two-sided', 'less', 'greater'}, optional verbose : print results or not Return: odds ratio (prior), p-value. See http://docs.scipy.org/doc/scipy/reference/generated/scipy.stats.fisher_exact.html To-do: to support a whole genome for default background? """ from scipy.stats import fisher_exact from math import log10 L = set(gene_list) & set(background) S = set(gene_set) & set(background) a = len(L & S) b = len(L) - a c = len(S) - a d = len(background) - (a + b + c) oddsratio, p_value = fisher_exact([[a, b], [c, d]], alternative) if verbose: print "2x2 contingency table:" print "\t%d\t%d" % (a, b) print "\t%d\t%d" % (c, d) print "odds ratio:\t%f" % oddsratio print "%s P-val:\t%g" % (alternative, p_value) print "-log(P-val):\t%f" % -log10(p_value) return oddsratio, p_value
Add enrichment function in gsa module
Add enrichment function in gsa module
Python
mit
choyichen/omics
--- +++ @@ -1,3 +1,35 @@ """Gene Set Analysis Module """ -from GeneSetCollection import GeneSetCollection +from GeneSetCollection import GeneSetCollect + +def enrichment(gene_list, gene_set, background, alternative="two-sided", verbose=True): + """Gene set enrichment analysis by Fisher Exact Test. + + gene_list : query gene list + gene_set : predefined gene set + background : background gene set + alternative: {'two-sided', 'less', 'greater'}, optional + verbose : print results or not + + Return: odds ratio (prior), p-value. + + See http://docs.scipy.org/doc/scipy/reference/generated/scipy.stats.fisher_exact.html + To-do: to support a whole genome for default background? + """ + from scipy.stats import fisher_exact + from math import log10 + L = set(gene_list) & set(background) + S = set(gene_set) & set(background) + a = len(L & S) + b = len(L) - a + c = len(S) - a + d = len(background) - (a + b + c) + oddsratio, p_value = fisher_exact([[a, b], [c, d]], alternative) + if verbose: + print "2x2 contingency table:" + print "\t%d\t%d" % (a, b) + print "\t%d\t%d" % (c, d) + print "odds ratio:\t%f" % oddsratio + print "%s P-val:\t%g" % (alternative, p_value) + print "-log(P-val):\t%f" % -log10(p_value) + return oddsratio, p_value
7818f9aa2d66ab0f4a99f731ecfb03e711e9ad6c
utils/send_messages.py
utils/send_messages.py
from django.conf import settings import requests def send_message_android(destination, title, message): headers = { 'Authorization': 'key=' + settings.FIREBASE_SERVER_KEY, 'Content - Type': 'application/json' } payload = { "to": destination, "notification": {"title": title, "text": message} } request = requests.post( settings.FIREBASE_API_URL, json=payload, headers=headers ) print request.text
"""Push notification service send_message_android and send_message_ios are the same, but this is intentional, in order to support any future different conditions for both platforms, different keys or addtional parameters shit happens sometimes ROFL! """ from django.conf import settings from constance import config import requests def send_message_android(destination, message, title=config.TITLE_PUSH_NOTIFICATION): headers = { 'Authorization': 'key=' + settings.FIREBASE_SERVER_KEY, 'Content - Type': 'application/json' } payload = { "to": destination, "notification": {"title": title, "text": message} } request = requests.post( settings.FIREBASE_API_URL, json=payload, headers=headers ) print request.text def send_message_ios(destination, message, title=config.TITLE_PUSH_NOTIFICATION): headers = { 'Authorization': 'key=' + settings.FIREBASE_SERVER_KEY, 'Content - Type': 'application/json' } payload = { "to": destination, "notification": {"title": title, "text": message} } request = requests.post( settings.FIREBASE_API_URL, json=payload, headers=headers ) print request.text def send_push_notification(user, message): try: devices = user.employeedevice_set.all() if devices[0].android_device: send_message_android(devices[0].android_device, message) if devices[0].ios_device: send_message_ios(devices[0].ios_device, message) return True except: return False
Add send_push_notification function and separate android and ios functions
Add send_push_notification function and separate android and ios functions
Python
apache-2.0
belatrix/BackendAllStars
--- +++ @@ -1,8 +1,17 @@ +"""Push notification service + +send_message_android and send_message_ios are the same, but this is intentional, in order +to support any future different conditions for both platforms, different keys or addtional parameters +shit happens sometimes ROFL! + +""" + from django.conf import settings +from constance import config import requests -def send_message_android(destination, title, message): +def send_message_android(destination, message, title=config.TITLE_PUSH_NOTIFICATION): headers = { 'Authorization': 'key=' + settings.FIREBASE_SERVER_KEY, 'Content - Type': 'application/json' @@ -17,3 +26,32 @@ headers=headers ) print request.text + + +def send_message_ios(destination, message, title=config.TITLE_PUSH_NOTIFICATION): + headers = { + 'Authorization': 'key=' + settings.FIREBASE_SERVER_KEY, + 'Content - Type': 'application/json' + } + payload = { + "to": destination, + "notification": {"title": title, "text": message} + } + request = requests.post( + settings.FIREBASE_API_URL, + json=payload, + headers=headers + ) + print request.text + + +def send_push_notification(user, message): + try: + devices = user.employeedevice_set.all() + if devices[0].android_device: + send_message_android(devices[0].android_device, message) + if devices[0].ios_device: + send_message_ios(devices[0].ios_device, message) + return True + except: + return False
fc6b3df720ac05b715ae6478367f79e834c47c26
pi_broadcast_service/rabbit.py
pi_broadcast_service/rabbit.py
import json import pika class Publisher(object): def __init__(self, rabbit_url, exchange): self._rabbit_url = rabbit_url self._exchange = exchange self._connection = pika.BlockingConnection(pika.URLParameters(self._rabbit_url)) self._channel = self._connection.channel() def send(self, routing_key, message): self._channel.basic_publish( exchange=self._exchange, routing_key=routing_key, body=json.dumps(message)) def stop(self): self._connection.close()
import json import pika class Publisher(object): def __init__(self, rabbit_url, exchange): self._rabbit_url = rabbit_url self._exchange = exchange self._connection = pika.BlockingConnection(pika.URLParameters(self._rabbit_url)) self._channel = self._connection.channel() self._channel.exchange_declare(exchange=self._exchange, type='direct') def send(self, routing_key, message): self._channel.basic_publish( exchange=self._exchange, routing_key=routing_key, body=json.dumps(message)) def stop(self): self._connection.close()
Make sure the exchange is there first
Make sure the exchange is there first
Python
mit
projectweekend/Pi-Broadcast-Service
--- +++ @@ -9,6 +9,7 @@ self._exchange = exchange self._connection = pika.BlockingConnection(pika.URLParameters(self._rabbit_url)) self._channel = self._connection.channel() + self._channel.exchange_declare(exchange=self._exchange, type='direct') def send(self, routing_key, message): self._channel.basic_publish(
02c74c5235b8ad821786213a3bcf5f824162454d
flax/linen/combinators.py
flax/linen/combinators.py
"""Combinators of modules, such as a Sequential.""" from typing import Callable, Sequence from flax.linen.module import Module class Sequential(Module): """Applies a linear chain of Modules. Meant to be used only for the simple case of fusing together callables where the input of a particular module/op is the output of the previous one. Modules will be applied in the order that they are passed in the constructor. The apply() method of Sequential accepts any input and forwards it to the first module it contains. It chains the output sequentially to the input of the next module and returns the output of the final module. Example usage:: class Foo(nn.Module): feature_sizes: Sequence[int] @nn.compact def __call__(self, x): return nn.Sequential([nn.Dense(layer_size, name=f'layers_{idx}') for idx, layer_size in enumerate(self.feature_sizes)])(x) """ layers: Sequence[Callable] def __call__(self, *args, **kwargs): if not self.layers: raise ValueError(f'Empty Sequential module {self.name}.') outputs = self.layers[0](*args, **kwargs) for layer in self.layers[1:]: outputs = layer(outputs) return outputs
"""Combinators of modules, such as a Sequential.""" from typing import Callable, Sequence from flax.linen.module import Module class Sequential(Module): """Applies a linear chain of Modules. Meant to be used only for the simple case of fusing together callables where the input of a particular module/op is the output of the previous one. Modules will be applied in the order that they are passed in the constructor. The apply() method of Sequential accepts any input and forwards it to the first module it contains. It chains the output sequentially to the input of the next module and returns the output of the final module. Example usage:: class Foo(nn.Module): feature_sizes: Sequence[int] @nn.compact def __call__(self, x): return nn.Sequential([nn.Dense(4), nn.relu, nn.Dense(2), nn.log_softmax])(x) """ layers: Sequence[Callable] def __call__(self, *args, **kwargs): if not self.layers: raise ValueError(f'Empty Sequential module {self.name}.') outputs = self.layers[0](*args, **kwargs) for layer in self.layers[1:]: outputs = layer(outputs) return outputs
Include activations in Sequential example.
Include activations in Sequential example.
Python
apache-2.0
google/flax,google/flax
--- +++ @@ -19,13 +19,14 @@ Example usage:: class Foo(nn.Module): - feature_sizes: Sequence[int] + feature_sizes: Sequence[int] - @nn.compact - def __call__(self, x): - return nn.Sequential([nn.Dense(layer_size, name=f'layers_{idx}') - for idx, layer_size - in enumerate(self.feature_sizes)])(x) + @nn.compact + def __call__(self, x): + return nn.Sequential([nn.Dense(4), + nn.relu, + nn.Dense(2), + nn.log_softmax])(x) """ layers: Sequence[Callable]
0e6de6bc5890d6028a7115a62289bf26b0dd043b
hydra_agent/actions/server_conf.py
hydra_agent/actions/server_conf.py
# ============================== # Copyright 2011 Whamcloud, Inc. # ============================== from hydra_agent.plugins import AgentPlugin def set_server_conf(args = None): import simplejson as json data = json.loads(args.args) from hydra_agent.store import AgentStore AgentStore.set_server_conf(data) def remove_server_conf(args = None): from hydra_agent.store import AgentStore AgentStore.remove_server_conf() class ServerConfPlugin(AgentPlugin): def register_commands(self, parser): p = parser.add_parser("set-server-conf", help="set server config params") p.add_argument("--args", required=True, help="config params to be set") p.set_defaults(func=set_server_conf) p = parser.add_parser("remove-server-conf", help="unset server config params") p.set_defaults(func=remove_server_conf)
# ============================== # Copyright 2011 Whamcloud, Inc. # ============================== from hydra_agent.plugins import AgentPlugin def _validate_conf(server_conf): from hydra_agent.main_loop import MainLoop result = MainLoop()._send_update(server_conf['url'], server_conf['token'], None, {}) if result == None: from socket import getfqdn raise RuntimeError("Cannot contact server URL %s from %s" % (server_conf['url'], getfqdn())) def set_server_conf(args = None): import simplejson as json server_conf = json.loads(args.args) _validate_conf(server_conf) from hydra_agent.store import AgentStore AgentStore.set_server_conf(server_conf) def remove_server_conf(args = None): from hydra_agent.store import AgentStore AgentStore.remove_server_conf() class ServerConfPlugin(AgentPlugin): def register_commands(self, parser): p = parser.add_parser("set-server-conf", help="set server config params") p.add_argument("--args", required=True, help="config params to be set") p.set_defaults(func=set_server_conf) p = parser.add_parser("remove-server-conf", help="unset server config params") p.set_defaults(func=remove_server_conf)
Test HTTP access to server before saving server config
Test HTTP access to server before saving server config
Python
mit
intel-hpdd/intel-manager-for-lustre,intel-hpdd/intel-manager-for-lustre,intel-hpdd/intel-manager-for-lustre
--- +++ @@ -4,12 +4,21 @@ from hydra_agent.plugins import AgentPlugin +def _validate_conf(server_conf): + from hydra_agent.main_loop import MainLoop + result = MainLoop()._send_update(server_conf['url'], server_conf['token'], None, {}) + if result == None: + from socket import getfqdn + raise RuntimeError("Cannot contact server URL %s from %s" % (server_conf['url'], getfqdn())) def set_server_conf(args = None): import simplejson as json - data = json.loads(args.args) + server_conf = json.loads(args.args) + + _validate_conf(server_conf) + from hydra_agent.store import AgentStore - AgentStore.set_server_conf(data) + AgentStore.set_server_conf(server_conf) def remove_server_conf(args = None):
60edf2f1534e02a6da9aa715662a0e4ea8922191
mk/get_config_dir.py
mk/get_config_dir.py
#!/usr/bin/env python import os component = os.getenv("COMPONENT") if component == "ocaml": print "/repos/xen-dist-ocaml.hg" if component == "api-libs": print "/repos/xen-api-libs-rpm-buildroot"
#!/usr/bin/env python import os component = os.getenv("COMPONENT") if component == "ocaml": print "/repos/xen-dist-ocaml.hg" if component == "api-libs": print "/repos/xen-api-libs-specs"
Change name of config repo for api-libs component
Change name of config repo for api-libs component Signed-off-by: Jon Ludlam <e7e3380887a8f95cc9dc4f0d51dedc7e849a287a@eu.citrix.com>
Python
lgpl-2.1
simonjbeaumont/planex,jonludlam/planex,euanh/planex-cleanhistory,jonludlam/planex,djs55/planex,djs55/planex,djs55/planex,simonjbeaumont/planex,jonludlam/planex,euanh/planex-cleanhistory,euanh/planex-cleanhistory,simonjbeaumont/planex
--- +++ @@ -7,5 +7,5 @@ if component == "ocaml": print "/repos/xen-dist-ocaml.hg" if component == "api-libs": - print "/repos/xen-api-libs-rpm-buildroot" + print "/repos/xen-api-libs-specs"
0043fe9c8de4d8341afbcea388f472a50017de2c
jiradoc/__main__.py
jiradoc/__main__.py
# ------------------------------------------------------------ # __main__.py # # The main program which expects a jiradoc formatted file to # be passed in as a cmdline option. It reads the file and # parses its content to Story objects. # ------------------------------------------------------------ import argparse import pkg_resources from jiradoc.parser.parser import parser def main(args=None): argparser = argparse.ArgumentParser(description='The JIRAdoc parser') test_file = pkg_resources.resource_filename(__name__, 'data/test.jiradoc') argparser.add_argument('-f', dest='file', default=test_file, help='The jiradoc formatted file') args = argparser.parse_args() with open(args.file) as f: content = f.read() stories = parser.parse(content) for story in stories: print story if __name__ == "__main__": main()
# ------------------------------------------------------------ # __main__.py # # The main program # ------------------------------------------------------------ import argparse import os import pkg_resources import sys from jiradoc.parser.parser import parser as jiradoc_parser def main(args=None): parser = argparse.ArgumentParser(description='A tool that parses a JIRAdoc formatted file and returns a list of ' 'story objects') test_file = pkg_resources.resource_filename(__name__, 'data/test.jiradoc') parser.add_argument('-f', dest='file', default=test_file, help='A .jiradoc file containing sub-tasks to JIRA stories') args = parser.parse_args() filename, ext = os.path.splitext(args.file) if ext != '.jiradoc': print 'Invalid file extension: ' + ext print 'The only valid extension is .jiradoc' sys.exit(1) with open(args.file) as f: content = f.read() stories = jiradoc_parser.parse(content) for story in stories: print story if __name__ == "__main__": main()
Validate that the input file ends with .jiradoc
Validate that the input file ends with .jiradoc
Python
mit
lucianovdveekens/jiradoc
--- +++ @@ -1,27 +1,35 @@ # ------------------------------------------------------------ # __main__.py # -# The main program which expects a jiradoc formatted file to -# be passed in as a cmdline option. It reads the file and -# parses its content to Story objects. +# The main program # ------------------------------------------------------------ import argparse +import os import pkg_resources +import sys -from jiradoc.parser.parser import parser +from jiradoc.parser.parser import parser as jiradoc_parser def main(args=None): - argparser = argparse.ArgumentParser(description='The JIRAdoc parser') + parser = argparse.ArgumentParser(description='A tool that parses a JIRAdoc formatted file and returns a list of ' + 'story objects') test_file = pkg_resources.resource_filename(__name__, 'data/test.jiradoc') - argparser.add_argument('-f', dest='file', default=test_file, help='The jiradoc formatted file') - args = argparser.parse_args() + parser.add_argument('-f', dest='file', default=test_file, + help='A .jiradoc file containing sub-tasks to JIRA stories') + args = parser.parse_args() + + filename, ext = os.path.splitext(args.file) + if ext != '.jiradoc': + print 'Invalid file extension: ' + ext + print 'The only valid extension is .jiradoc' + sys.exit(1) with open(args.file) as f: content = f.read() - stories = parser.parse(content) + stories = jiradoc_parser.parse(content) for story in stories: print story
af2c9647e64ad0e0575b191e35e38f8bf23ed6f8
config.py
config.py
### # Copyright (c) 2012-2013, spline # All rights reserved. # # ### import supybot.conf as conf import supybot.registry as registry from supybot.i18n import PluginInternationalization, internationalizeDocstring _ = PluginInternationalization('NFL') def configure(advanced): # This will be called by supybot to configure this module. advanced is # a bool that specifies whether the user identified himself as an advanced # user or not. You should effect your configuration by manipulating the # registry as appropriate. from supybot.questions import expect, anything, something, yn conf.registerPlugin('NFL', True) NFL = conf.registerPlugin('NFL') conf.registerGlobalValue(NFL, 'logURLs', registry.Boolean(True, """Should we log all URL calls?""")) # vim:set shiftwidth=4 tabstop=4 expandtab textwidth=250:
### # Copyright (c) 2012-2013, spline # All rights reserved. # # ### import supybot.conf as conf import supybot.registry as registry def configure(advanced): # This will be called by supybot to configure this module. advanced is # a bool that specifies whether the user identified himself as an advanced # user or not. You should effect your configuration by manipulating the # registry as appropriate. from supybot.questions import expect, anything, something, yn conf.registerPlugin('NFL', True) NFL = conf.registerPlugin('NFL') conf.registerGlobalValue(NFL, 'logURLs', registry.Boolean(True, """Should we log all URL calls?""")) # vim:set shiftwidth=4 tabstop=4 expandtab textwidth=250:
Remove internationalization because its not needed.
Remove internationalization because its not needed.
Python
mit
cottongin/NFL,fasteddie2/NFL,reticulatingspline/NFL
--- +++ @@ -7,9 +7,6 @@ import supybot.conf as conf import supybot.registry as registry -from supybot.i18n import PluginInternationalization, internationalizeDocstring - -_ = PluginInternationalization('NFL') def configure(advanced): # This will be called by supybot to configure this module. advanced is
91c35078c7a8aad153d9aabe0b02fc3c48cfc76a
hesiod.py
hesiod.py
#!/usr/bin/env python from _hesiod import bind, resolve
#!/usr/bin/env python """ Present both functional and object-oriented interfaces for executing lookups in Hesiod, Project Athena's service name resolution protocol. """ from _hesiod import bind, resolve from pwd import struct_passwd class HesiodParseError(Exception): pass class Lookup(object): """ A Generic Hesiod lookup """ def __init__(self, hes_name, hes_type): self.results = resolve(hes_name, hes_type) self.parseRecords() def parseRecords(self): pass class FilsysLookup(Lookup): def __init__(self, name): Lookup.__init__(self, name, 'filsys') def parseRecords(self): Lookup.parseRecords(self) self.filsys = [] self.multiRecords = (len(self.results) > 1) for result in self.results: priority = 0 if self.multiRecords: result, priority = result.rsplit(" ", 1) priority = int(priority) parts = result.split(" ") type = parts[0] if type == 'AFS': self.filsys.append(dict(type=type, location=parts[1], mode=parts[2], mountpoint=parts[3], priority=priority)) elif type == 'NFS': self.filsys.append(dict(type=type, remote_location=parts[1], server=parts[2], mode=parts[3], mountpoint=parts[4], priority=priority)) elif type == 'ERR': self.filsys.append(dict(type=type, message=parts[1], priority=priority)) elif type == 'UFS': self.filsys.append(dict(type=type, device=parts[1], mode=parts[2], mountpoint=parts[3], priority=priority)) elif type == 'LOC': self.filsys.append(dict(type=type, location=parts[1], mode=parts[2], mountpoint=parts[3], priority=priority)) else: raise HesiodParseError('Unknown filsys type: %s' % type) class PasswdLookup(Lookup): def __init__(self, name): Lookup.__init__(self, name, 'passwd') def parseRecords(self): self.passwd = struct_passwd(self.results[0].split(':')) class UidLookup(PasswdLookup): def __init__(self, uid): Lookup.__init__(self, uid, 'uid') __all__ = ['bind', 'resolve', 'Lookup', 'FilsysLookup', 'PasswdLookup', 'UidLookup', 'HesiodParseError']
Add object-oriented-style lookups for filsys, passwd, and uid lookups
Add object-oriented-style lookups for filsys, passwd, and uid lookups The filsys entry parsing code is taken from pyHesiodFS and was explicitly relicensed under the MIT license by Quentin Smith <quentin@mit.edu>
Python
mit
ebroder/python-hesiod
--- +++ @@ -1,3 +1,89 @@ #!/usr/bin/env python +""" +Present both functional and object-oriented interfaces for executing +lookups in Hesiod, Project Athena's service name resolution protocol. +""" + from _hesiod import bind, resolve + +from pwd import struct_passwd + +class HesiodParseError(Exception): + pass + +class Lookup(object): + """ + A Generic Hesiod lookup + """ + def __init__(self, hes_name, hes_type): + self.results = resolve(hes_name, hes_type) + self.parseRecords() + + def parseRecords(self): + pass + +class FilsysLookup(Lookup): + def __init__(self, name): + Lookup.__init__(self, name, 'filsys') + + def parseRecords(self): + Lookup.parseRecords(self) + + self.filsys = [] + self.multiRecords = (len(self.results) > 1) + + for result in self.results: + priority = 0 + if self.multiRecords: + result, priority = result.rsplit(" ", 1) + priority = int(priority) + + parts = result.split(" ") + type = parts[0] + if type == 'AFS': + self.filsys.append(dict(type=type, + location=parts[1], + mode=parts[2], + mountpoint=parts[3], + priority=priority)) + elif type == 'NFS': + self.filsys.append(dict(type=type, + remote_location=parts[1], + server=parts[2], + mode=parts[3], + mountpoint=parts[4], + priority=priority)) + elif type == 'ERR': + self.filsys.append(dict(type=type, + message=parts[1], + priority=priority)) + elif type == 'UFS': + self.filsys.append(dict(type=type, + device=parts[1], + mode=parts[2], + mountpoint=parts[3], + priority=priority)) + elif type == 'LOC': + self.filsys.append(dict(type=type, + location=parts[1], + mode=parts[2], + mountpoint=parts[3], + priority=priority)) + else: + raise HesiodParseError('Unknown filsys type: %s' % type) + +class PasswdLookup(Lookup): + def __init__(self, name): + Lookup.__init__(self, name, 'passwd') + + def parseRecords(self): + self.passwd = struct_passwd(self.results[0].split(':')) + +class UidLookup(PasswdLookup): + def __init__(self, uid): + Lookup.__init__(self, uid, 'uid') + +__all__ = ['bind', 'resolve', + 'Lookup', 'FilsysLookup', 'PasswdLookup', 'UidLookup', + 'HesiodParseError']
637c19a54c6bee656ba3effd9316ad3d5587a963
src/dynmen/__init__.py
src/dynmen/__init__.py
# -*- coding: utf-8 -*- """ dynmen - A simple python interface to dynamic menus like dmenu or rofi import dynmen menu = dynmen.Menu(['dmenu', '-fn', 'Sans-30']) output = menu({'a': 1, 'b': 2, 'c': 3}) You can make the menu non-blocking by setting: menu.process_mode = 'futures' Please see the repository for more examples: https://github.com/frostidaho/dynmen """ from .menu import Menu, MenuError, MenuResult del menu def new_dmenu(**kwargs): from .dmenu import DMenu return DMenu(**kwargs) def new_rofi(**kwargs): from .rofi import Rofi return Rofi(**kwargs)
# -*- coding: utf-8 -*- """ dynmen - A simple python interface to dynamic menus like dmenu or rofi import dynmen menu = dynmen.Menu(['dmenu', '-fn', 'Sans-30']) output = menu({'a': 1, 'b': 2, 'c': 3}) You can make the menu non-blocking by setting: menu.process_mode = 'futures' Please see the repository for more examples: https://github.com/frostidaho/dynmen """ from .menu import Menu, MenuError, MenuResult del menu def new_dmenu(**kwargs): """Create an instance of dynmen.dmenu.DMenu(**kwargs) The keyword arguments set the corresponding attribute on the DMenu instance. """ from .dmenu import DMenu return DMenu(**kwargs) def new_rofi(**kwargs): """Create an instance of dynmen.rofi.Rofi(**kwargs) The keyword arguments set the corresponding attribute on the Rofi instance. """ from .rofi import Rofi return Rofi(**kwargs)
Add docstrings to dynmen.new_dmenu and dynmen.new_rofi
Add docstrings to dynmen.new_dmenu and dynmen.new_rofi
Python
mit
frostidaho/dynmen
--- +++ @@ -15,12 +15,20 @@ from .menu import Menu, MenuError, MenuResult del menu +def new_dmenu(**kwargs): + """Create an instance of dynmen.dmenu.DMenu(**kwargs) -def new_dmenu(**kwargs): + The keyword arguments set the corresponding attribute + on the DMenu instance. + """ from .dmenu import DMenu return DMenu(**kwargs) +def new_rofi(**kwargs): + """Create an instance of dynmen.rofi.Rofi(**kwargs) -def new_rofi(**kwargs): + The keyword arguments set the corresponding attribute + on the Rofi instance. + """ from .rofi import Rofi return Rofi(**kwargs)
968ef4bfb57743328587f9f693a7c531e20cbce0
go_cli/tests/test_main.py
go_cli/tests/test_main.py
""" Tests for go_cli.main. """ from unittest import TestCase from click.testing import CliRunner from go_cli.main import cli class TestCli(TestCase): def test_help(self): runner = CliRunner() result = runner.invoke(cli, ['--help']) self.assertEqual(result.exit_code, 0) self.assertTrue("Vumi Go command line utility." in result.output) self.assertTrue("send Send messages via an HTTP API (nostream)..." in result.output) def test_version(self): runner = CliRunner() result = runner.invoke(cli, ['--version']) self.assertEqual(result.exit_code, 0) self.assertTrue("go-cli, version " in result.output)
""" Tests for go_cli.main. """ from unittest import TestCase from click.testing import CliRunner from go_cli.main import cli class TestCli(TestCase): def test_help(self): runner = CliRunner() result = runner.invoke(cli, ['--help']) self.assertEqual(result.exit_code, 0) self.assertTrue("Vumi Go command line utility." in result.output) self.assertTrue( "export-contacts Export contacts from the contacts API." in result.output) self.assertTrue( "send Send messages via an HTTP API (nostream)..." in result.output) def test_version(self): runner = CliRunner() result = runner.invoke(cli, ['--version']) self.assertEqual(result.exit_code, 0) self.assertTrue("go-cli, version " in result.output)
Check that export-contacts is included in the top-level command.
Check that export-contacts is included in the top-level command.
Python
bsd-3-clause
praekelt/go-cli,praekelt/go-cli
--- +++ @@ -13,8 +13,12 @@ result = runner.invoke(cli, ['--help']) self.assertEqual(result.exit_code, 0) self.assertTrue("Vumi Go command line utility." in result.output) - self.assertTrue("send Send messages via an HTTP API (nostream)..." - in result.output) + self.assertTrue( + "export-contacts Export contacts from the contacts API." + in result.output) + self.assertTrue( + "send Send messages via an HTTP API (nostream)..." + in result.output) def test_version(self): runner = CliRunner()
c3b743b6dc757db4f2e063af8fa9ad6b228b3dcb
savate/buffer_event.py
savate/buffer_event.py
# -*- coding: utf-8 -*- import errno import collections from savate import writev # FIXME: should this be a method of BufferEvent below ? # FIXME: handle Python2.x/Python3k compat here def buffer_slice(buff, offset, size): return buffer(buff, offset, size) class BufferOutputHandler(object): def __init__(self, sock, initial_buffer_queue = ()): self.sock = sock self.ready = True self.buffer_queue = collections.deque(initial_buffer_queue) def add_buffer(self, buff): self.buffer_queue.append(buff) def empty(self): return len(self.buffer_queue) == 0 def flush(self): self.ready = True total_sent_bytes = 0 try: while self.buffer_queue: sent_bytes = writev.writev(self.sock.fileno(), self.buffer_queue) total_sent_bytes += sent_bytes while (self.buffer_queue and sent_bytes and len(self.buffer_queue[0]) <= sent_bytes): sent_bytes -= len(self.buffer_queue.popleft()) if sent_bytes: # One of the buffers was partially sent self.buffer_queue[0] = self.buffer_queue[0][sent_bytes:] except IOError, exc: if exc.errno == errno.EAGAIN: self.ready = False else: raise return total_sent_bytes
# -*- coding: utf-8 -*- import errno import collections from savate import writev # FIXME: should this be a method of BufferEvent below ? # FIXME: handle Python2.x/Python3k compat here def buffer_slice(buff, offset, size): return buffer(buff, offset, size) class BufferOutputHandler(object): def __init__(self, sock, initial_buffer_queue = ()): self.sock = sock self.ready = True self.buffer_queue = collections.deque(initial_buffer_queue) def add_buffer(self, buff): self.buffer_queue.append(buff) def empty(self): return len(self.buffer_queue) == 0 def flush(self): self.ready = True total_sent_bytes = 0 try: while self.buffer_queue: sent_bytes = self.sock.send(self.buffer_queue[0]) total_sent_bytes += sent_bytes if sent_bytes < len(self.buffer_queue[0]): # One of the buffers was partially sent self.buffer_queue[0] = self.buffer_queue[0][sent_bytes:] else: self.buffer_queue.popleft() except IOError, exc: if exc.errno == errno.EAGAIN: self.ready = False else: raise return total_sent_bytes
Use regular send() instead of our writev()
Use regular send() instead of our writev() Buffering is apparently more rewarding performance-wise than using zero-copy vectorised I/O ala writev(), hence there's no point to keep using our homemade write() extension.
Python
agpl-3.0
noirbee/savate,noirbee/savate
--- +++ @@ -27,15 +27,13 @@ total_sent_bytes = 0 try: while self.buffer_queue: - sent_bytes = writev.writev(self.sock.fileno(), - self.buffer_queue) + sent_bytes = self.sock.send(self.buffer_queue[0]) total_sent_bytes += sent_bytes - while (self.buffer_queue and sent_bytes and - len(self.buffer_queue[0]) <= sent_bytes): - sent_bytes -= len(self.buffer_queue.popleft()) - if sent_bytes: + if sent_bytes < len(self.buffer_queue[0]): # One of the buffers was partially sent self.buffer_queue[0] = self.buffer_queue[0][sent_bytes:] + else: + self.buffer_queue.popleft() except IOError, exc: if exc.errno == errno.EAGAIN: self.ready = False
ea9273ba54dc502327bcca8b233e8d338aaa0d43
pombola/south_africa/management/commands/south_africa_create_new_parties_for_election_2019.py
pombola/south_africa/management/commands/south_africa_create_new_parties_for_election_2019.py
import unicodecsv from django.core.management.base import BaseCommand, CommandError from pombola.core.models import Organisation, OrganisationKind parties_csv = "pombola/south_africa/data/elections/2019/parties.csv" class Command(BaseCommand): help = "Creates new parties for the 2019 elections" def handle(self, *args, **options): with open(parties_csv, "rb") as csvfile: csv = unicodecsv.DictReader(csvfile) for row in csv: party_slug = row["slug"] party_name = row["name"] party_kind = OrganisationKind.objects.get(slug="party") party, created = Organisation.objects.get_or_create( slug=party_slug, name=party_name, kind=party_kind ) if created: print("Created new party: {}".format(party)) else: print("Party already exists: {}".format(party))
import unicodecsv from django.core.management.base import BaseCommand, CommandError from pombola.core.models import Organisation, OrganisationKind parties_csv = "pombola/south_africa/data/elections/2019/parties.csv" class Command(BaseCommand): help = "Creates new parties for the 2019 elections" def handle(self, *args, **options): with open(parties_csv, "rb") as csvfile: csv = unicodecsv.DictReader(csvfile) for row in csv: party_slug = row["slug"] party_name = row["name"] party_kind = OrganisationKind.objects.get(slug="party") party, created = Organisation.objects.get_or_create( slug=party_slug, kind=party_kind, defaults={"name": party_name} ) if created: print("Created new party: {}".format(party)) else: print("Party already exists: {}".format(party))
Move party name to defaults argument
[ZA] Move party name to defaults argument Some party names have changed, but the slug remains the same, so to avoid errors move the name to the defaults sections so it's not checked for existing parties.
Python
agpl-3.0
mysociety/pombola,mysociety/pombola,mysociety/pombola,mysociety/pombola,mysociety/pombola,mysociety/pombola
--- +++ @@ -19,7 +19,7 @@ party_name = row["name"] party_kind = OrganisationKind.objects.get(slug="party") party, created = Organisation.objects.get_or_create( - slug=party_slug, name=party_name, kind=party_kind + slug=party_slug, kind=party_kind, defaults={"name": party_name} ) if created: print("Created new party: {}".format(party))
d51fcb604f9e4a0f9b7d4178d4c85209594afbde
dataset/types.py
dataset/types.py
from datetime import datetime, date from sqlalchemy import Integer, UnicodeText, Float, BigInteger from sqlalchemy import Boolean, Date, DateTime, Unicode from sqlalchemy.types import TypeEngine class Types(object): """A holder class for easy access to SQLAlchemy type names.""" integer = Integer string = Unicode text = UnicodeText float = Float bigint = BigInteger boolean = Boolean date = Date datetime = DateTime def guess(cls, sample): """Given a single sample, guess the column type for the field. If the sample is an instance of an SQLAlchemy type, the type will be used instead. """ if isinstance(sample, TypeEngine): return sample if isinstance(sample, bool): return cls.boolean elif isinstance(sample, int): return cls.bigint elif isinstance(sample, float): return cls.float elif isinstance(sample, datetime): return cls.datetime elif isinstance(sample, date): return cls.date return cls.text
from datetime import datetime, date from sqlalchemy import Integer, UnicodeText, Float, BigInteger from sqlalchemy import Boolean, Date, DateTime, Unicode from sqlalchemy.types import TypeEngine class Types(object): """A holder class for easy access to SQLAlchemy type names.""" integer = Integer string = Unicode text = UnicodeText float = Float bigint = BigInteger boolean = Boolean date = Date datetime = DateTime def guess(self, sample): """Given a single sample, guess the column type for the field. If the sample is an instance of an SQLAlchemy type, the type will be used instead. """ if isinstance(sample, TypeEngine): return sample if isinstance(sample, bool): return self.boolean elif isinstance(sample, int): return self.bigint elif isinstance(sample, float): return self.float elif isinstance(sample, datetime): return self.datetime elif isinstance(sample, date): return self.date return self.text
Replace `cls` argument with `self` Not sure if this was originally intended to be a `@classmethod` but it's now written and called as a method bound to an instance of the class.
Replace `cls` argument with `self` Not sure if this was originally intended to be a `@classmethod` but it's now written and called as a method bound to an instance of the class.
Python
mit
pudo/dataset
--- +++ @@ -16,7 +16,7 @@ date = Date datetime = DateTime - def guess(cls, sample): + def guess(self, sample): """Given a single sample, guess the column type for the field. If the sample is an instance of an SQLAlchemy type, the type will be @@ -25,13 +25,13 @@ if isinstance(sample, TypeEngine): return sample if isinstance(sample, bool): - return cls.boolean + return self.boolean elif isinstance(sample, int): - return cls.bigint + return self.bigint elif isinstance(sample, float): - return cls.float + return self.float elif isinstance(sample, datetime): - return cls.datetime + return self.datetime elif isinstance(sample, date): - return cls.date - return cls.text + return self.date + return self.text
4e63db0d699eeb7a313708f82c129637222e1014
src/penn_chime/utils.py
src/penn_chime/utils.py
"""Utils.""" from base64 import b64encode import pandas as pd def dataframe_to_base64(df: pd.DataFrame) -> str: """Converts a dataframe to a base64-encoded CSV representation of that data. This is useful for building datauris for use to download the data in the browser. Arguments: df: The dataframe to convert """ csv = df.to_csv(index=False) b64 = b64encode(csv.encode()).decode() return b64 def excel_to_base64(str_excel_filename) -> str: data = open(str_excel_filename, 'rb').read() b64 = b64encode(data).decode() return b64
"""Utils.""" from base64 import b64encode import pandas as pd def dataframe_to_base64(df: pd.DataFrame) -> str: """Converts a dataframe into csv base64-encoded data. This is useful for building datauris for use to download the data in the browser. Arguments: df: The dataframe to convert """ csv = df.to_csv(index=False) b64 = b64encode(csv.encode()).decode() return b64 def excel_to_base64(filename: str) -> str: """Converts an excel document into base64-encoded data.""" with open(filename, 'rb') as fin: return b64encode(fin.read()).decode()
Update excel_to_base64 to always close file handles
Update excel_to_base64 to always close file handles
Python
mit
CodeForPhilly/chime,CodeForPhilly/chime,CodeForPhilly/chime
--- +++ @@ -6,7 +6,7 @@ def dataframe_to_base64(df: pd.DataFrame) -> str: - """Converts a dataframe to a base64-encoded CSV representation of that data. + """Converts a dataframe into csv base64-encoded data. This is useful for building datauris for use to download the data in the browser. @@ -18,7 +18,7 @@ return b64 -def excel_to_base64(str_excel_filename) -> str: - data = open(str_excel_filename, 'rb').read() - b64 = b64encode(data).decode() - return b64 +def excel_to_base64(filename: str) -> str: + """Converts an excel document into base64-encoded data.""" + with open(filename, 'rb') as fin: + return b64encode(fin.read()).decode()
e14ca7e0a71e558ae9d8327248012d8109f9e0c5
needlestack/base.py
needlestack/base.py
# -*- coding: utf-8 -*- from __future__ import unicode_literals, absolute_import class SearchBackend(object): pass class Field(object): """ Base class for any field. """ name = None def __init__(self, **kwargs) self.options = kwargs def set_name(self, name): self.name = name
# -*- coding: utf-8 -*- from __future__ import unicode_literals, absolute_import class SearchBackend(object): pass class Field(object): """ Base class for any field. """ name = None def __init__(self, **kwargs) for name, value in kwargs.items(): setattr(self, name, value) def set_name(self, name): self.name = name @classmethod def from_python(cls, value): """ Method for adapt document value from python to backend specific format. """ return value @classmethod def to_python(cls, value): """ Method for adapt backend specific format to native python format. """ return value
Add type addaptation methods for generic field class.
Add type addaptation methods for generic field class.
Python
bsd-3-clause
niwinz/needlestack
--- +++ @@ -15,7 +15,24 @@ name = None def __init__(self, **kwargs) - self.options = kwargs + for name, value in kwargs.items(): + setattr(self, name, value) def set_name(self, name): self.name = name + + @classmethod + def from_python(cls, value): + """ + Method for adapt document value from python + to backend specific format. + """ + return value + + @classmethod + def to_python(cls, value): + """ + Method for adapt backend specific format to + native python format. + """ + return value
b0961ac53a75ff3745d2297de80748f90c823827
pajbot/migration/db.py
pajbot/migration/db.py
from contextlib import contextmanager class DatabaseMigratable: def __init__(self, conn): self.conn = conn @contextmanager def create_resource(self): with self.conn.cursor() as cursor: # begins a transaction automatically cursor.execute("CREATE TABLE IF NOT EXISTS schema_version(revision_id INTEGER)") yield cursor def get_current_revision(self, cursor): cursor.execute("SELECT revision_id FROM schema_version") row = cursor.fetchone() if row is not None: return row[0] else: return None def set_revision(self, cursor, id): cursor.execute("DELETE FROM schema_version") cursor.execute("INSERT INTO schema_version(revision_id) VALUES (%s)", (id,))
from contextlib import contextmanager class DatabaseMigratable: def __init__(self, conn): self.conn = conn @contextmanager def create_resource(self): with self.conn.cursor() as cursor: # begins a transaction automatically cursor.execute("CREATE TABLE IF NOT EXISTS schema_version(revision_id INT NOT NULL)") yield cursor def get_current_revision(self, cursor): cursor.execute("SELECT revision_id FROM schema_version") row = cursor.fetchone() if row is not None: return row[0] else: return None def set_revision(self, cursor, id): cursor.execute("DELETE FROM schema_version") cursor.execute("INSERT INTO schema_version(revision_id) VALUES (%s)", (id,))
Change definition of schema_version table
Change definition of schema_version table
Python
mit
pajlada/tyggbot,pajlada/tyggbot,pajlada/tyggbot,pajlada/tyggbot,pajlada/pajbot,pajlada/pajbot,pajlada/pajbot,pajlada/pajbot
--- +++ @@ -10,7 +10,7 @@ with self.conn.cursor() as cursor: # begins a transaction automatically - cursor.execute("CREATE TABLE IF NOT EXISTS schema_version(revision_id INTEGER)") + cursor.execute("CREATE TABLE IF NOT EXISTS schema_version(revision_id INT NOT NULL)") yield cursor
6f6d1a7146020121a98ba6bb6cfa718686fc2a8d
ffflash/lib/api.py
ffflash/lib/api.py
from datetime import datetime from pprint import pformat from re import search as re_search from re import sub as re_sub class FFApi: def __init__(self, content): self.c = content def pull(self, *fields): c = self.c for f in fields: if isinstance(c, dict) and f in c.keys(): if f == fields[-1]: return c[f] c = c[f] def push(self, value, *fields): c = self.c for f in fields: if isinstance(c, dict) and f in c.keys(): if f == fields[-1]: c[f] = value c = c[f] def timestamp(self): if self.pull('state', 'lastchange') is not None: self.push(api_timestamp(), 'state', 'lastchange') def show(self): return pformat(self.c) def api_timestamp(dt=None): if not dt: dt = datetime.now() return dt.isoformat('T') def api_descr(rx, replace, text): match = ( False if not (rx and text) else re_search(rx, text) ) if match and replace: text = re_sub(rx, replace, text) return text
from datetime import datetime from pprint import pformat from re import search as re_search from re import sub as re_sub class FFApi: def __init__(self, content): self.c = content def pull(self, *fields): c = self.c for f in fields: if isinstance(c, dict) and f in c.keys(): if f == fields[-1]: return c[f] c = c[f] def push(self, value, *fields): c = self.c for f in fields: if isinstance(c, dict) and f in c.keys(): if f == fields[-1]: c[f] = value c = c[f] def timestamp(self): if self.pull('state', 'lastchange') is not None: self.push(api_timestamp(), 'state', 'lastchange') def show(self): return pformat(self.c) def api_timestamp(dt=None): if not dt: dt = datetime.now() return dt.isoformat('T') def api_descr(rx, replace, text): match = ( False if not (rx and text) else re_search(rx, text) ) if match and replace: text = re_sub(rx, replace, text) return text
Fix double indentation error in FFApi class
Fix double indentation error in FFApi class
Python
bsd-3-clause
spookey/ffflash,spookey/ffflash
--- +++ @@ -5,31 +5,31 @@ class FFApi: - def __init__(self, content): - self.c = content + def __init__(self, content): + self.c = content - def pull(self, *fields): - c = self.c - for f in fields: - if isinstance(c, dict) and f in c.keys(): - if f == fields[-1]: - return c[f] - c = c[f] + def pull(self, *fields): + c = self.c + for f in fields: + if isinstance(c, dict) and f in c.keys(): + if f == fields[-1]: + return c[f] + c = c[f] - def push(self, value, *fields): - c = self.c - for f in fields: - if isinstance(c, dict) and f in c.keys(): - if f == fields[-1]: - c[f] = value - c = c[f] + def push(self, value, *fields): + c = self.c + for f in fields: + if isinstance(c, dict) and f in c.keys(): + if f == fields[-1]: + c[f] = value + c = c[f] - def timestamp(self): - if self.pull('state', 'lastchange') is not None: - self.push(api_timestamp(), 'state', 'lastchange') + def timestamp(self): + if self.pull('state', 'lastchange') is not None: + self.push(api_timestamp(), 'state', 'lastchange') - def show(self): - return pformat(self.c) + def show(self): + return pformat(self.c) def api_timestamp(dt=None):
786d1ee8f58c95ab3d27d06bda2ec593cadef48e
apps/videos/tests/__init__.py
apps/videos/tests/__init__.py
from apps.videos.tests.celery_tasks import * from apps.videos.tests.downloads import * from apps.videos.tests.feeds import * from apps.videos.tests.following import * from apps.videos.tests.forms import * from apps.videos.tests.metadata import * from apps.videos.tests.models import * from apps.videos.tests.rpc import * from apps.videos.tests.syncing import * from apps.videos.tests.template_tags import * from apps.videos.tests.uploads import * from apps.videos.tests.video_types import * from apps.videos.tests.video_urls import * from apps.videos.tests.views import *
from apps.videos.tests.celery_tasks import * from apps.videos.tests.downloads import * from apps.videos.tests.feeds import * from apps.videos.tests.following import * from apps.videos.tests.forms import * from apps.videos.tests.metadata import * from apps.videos.tests.models import * from apps.videos.tests.rpc import * from apps.videos.tests.template_tags import * from apps.videos.tests.uploads import * from apps.videos.tests.video_types import * from apps.videos.tests.video_urls import * from apps.videos.tests.views import *
Remove the syncing tests from the list.
Remove the syncing tests from the list.
Python
agpl-3.0
ReachingOut/unisubs,ofer43211/unisubs,ofer43211/unisubs,ReachingOut/unisubs,ujdhesa/unisubs,norayr/unisubs,wevoice/wesub,norayr/unisubs,eloquence/unisubs,wevoice/wesub,pculture/unisubs,ofer43211/unisubs,eloquence/unisubs,wevoice/wesub,norayr/unisubs,ujdhesa/unisubs,pculture/unisubs,eloquence/unisubs,ujdhesa/unisubs,wevoice/wesub,ReachingOut/unisubs,pculture/unisubs,ofer43211/unisubs,pculture/unisubs,ReachingOut/unisubs,ujdhesa/unisubs,eloquence/unisubs,norayr/unisubs
--- +++ @@ -6,7 +6,6 @@ from apps.videos.tests.metadata import * from apps.videos.tests.models import * from apps.videos.tests.rpc import * -from apps.videos.tests.syncing import * from apps.videos.tests.template_tags import * from apps.videos.tests.uploads import * from apps.videos.tests.video_types import *
980aaa340a03353742d03be2844d4ceb829715c0
numpy/core/__init__.py
numpy/core/__init__.py
from info import __doc__ from numpy.version import version as __version__ import multiarray import umath import numerictypes as nt multiarray.set_typeDict(nt.sctypeDict) import _sort from numeric import * from fromnumeric import * from defmatrix import * import ma import defchararray as char import records as rec from records import * from memmap import * from defchararray import * import scalarmath del nt from fromnumeric import amax as max, amin as min, \ round_ as round from numeric import absolute as abs __all__ = ['char','rec','memmap','ma'] __all__ += numeric.__all__ __all__ += fromnumeric.__all__ __all__ += defmatrix.__all__ __all__ += rec.__all__ __all__ += char.__all__ def test(level=1, verbosity=1): from numpy.testing import NumpyTest return NumpyTest().test(level, verbosity)
from info import __doc__ from numpy.version import version as __version__ import multiarray import umath import _internal # for freeze programs import numerictypes as nt multiarray.set_typeDict(nt.sctypeDict) import _sort from numeric import * from fromnumeric import * from defmatrix import * import ma import defchararray as char import records as rec from records import * from memmap import * from defchararray import * import scalarmath del nt from fromnumeric import amax as max, amin as min, \ round_ as round from numeric import absolute as abs __all__ = ['char','rec','memmap','ma'] __all__ += numeric.__all__ __all__ += fromnumeric.__all__ __all__ += defmatrix.__all__ __all__ += rec.__all__ __all__ += char.__all__ def test(level=1, verbosity=1): from numpy.testing import NumpyTest return NumpyTest().test(level, verbosity)
Add a dummy import statement so that freeze programs pick up _internal.p
Add a dummy import statement so that freeze programs pick up _internal.p git-svn-id: 77a43f9646713b91fea7788fad5dfbf67e151ece@3807 94b884b6-d6fd-0310-90d3-974f1d3f35e1
Python
bsd-3-clause
teoliphant/numpy-refactor,chadnetzer/numpy-gaurdro,illume/numpy3k,teoliphant/numpy-refactor,illume/numpy3k,efiring/numpy-work,teoliphant/numpy-refactor,jasonmccampbell/numpy-refactor-sprint,Ademan/NumPy-GSoC,chadnetzer/numpy-gaurdro,efiring/numpy-work,teoliphant/numpy-refactor,Ademan/NumPy-GSoC,Ademan/NumPy-GSoC,jasonmccampbell/numpy-refactor-sprint,illume/numpy3k,illume/numpy3k,jasonmccampbell/numpy-refactor-sprint,chadnetzer/numpy-gaurdro,efiring/numpy-work,jasonmccampbell/numpy-refactor-sprint,teoliphant/numpy-refactor,chadnetzer/numpy-gaurdro,Ademan/NumPy-GSoC,efiring/numpy-work
--- +++ @@ -4,6 +4,7 @@ import multiarray import umath +import _internal # for freeze programs import numerictypes as nt multiarray.set_typeDict(nt.sctypeDict) import _sort
4aa979273bfe698459d9a8066ee41741c3a68e3d
megamix/__init__.py
megamix/__init__.py
# Module of Gaussian Mixture models for python __all__ = ['batch','online']
# Module of Gaussian Mixture models for python import megamix.batch import megamix.online __all__ = ['batch','online']
Solve minor import problems with submodules
Solve minor import problems with submodules
Python
apache-2.0
14thibea/megamix
--- +++ @@ -1,3 +1,5 @@ # Module of Gaussian Mixture models for python +import megamix.batch +import megamix.online __all__ = ['batch','online']
525441540306cb2ac385b0f633681e24587117dc
gdx2py/__init__.py
gdx2py/__init__.py
# -*- coding: utf-8 -*- """ Created on Wed Feb 24 11:01:10 2016 @author: ererkka """ __version__ = '1.2.2'
# -*- coding: utf-8 -*- """ Created on Wed Feb 24 11:01:10 2016 @author: ererkka """ __version__ = '1.2.2' from .gdxfile import GdxFile
Revert "Fixed bug with imports"
Revert "Fixed bug with imports" This reverts commit 7b63b268a002a0ceb0b7bb2c6c57237bf6d5f25b.
Python
mit
ererkka/GDX2py
--- +++ @@ -6,3 +6,5 @@ """ __version__ = '1.2.2' + +from .gdxfile import GdxFile
b63ee83574605e79075ee834d2418cd58b722bc6
docs/tests.py
docs/tests.py
from django.test import Client, TestCase from django.core.urlresolvers import reverse import views class DocsTestCase(TestCase): def setUp(self): self.client = Client() def test_index(self): response = self.client.get(reverse(views.index)) self.assertEqual(response.status_code, 200) def test_common_errors(self): response = self.client.get(reverse(views.common_errors)) self.assertEqual(response.status_code, 200) def test_one_liners(self): response = self.client.get(reverse(views.one_liners)) self.assertEqual(response.status_code, 200) def test_technical(self): response = self.client.get(reverse(views.technical)) self.assertEqual(response.status_code, 200)
from django.test import Client, TestCase from django.core.urlresolvers import reverse import views class DocsTestCase(TestCase): def setUp(self): self.client = Client() def test_index(self): response = self.client.get(reverse(views.index)) self.assertEqual(response.status_code, 200) def test_common_errors(self): response = self.client.get(reverse(views.common_errors)) self.assertEqual(response.status_code, 200) def test_one_liners(self): response = self.client.get(reverse(views.one_liners)) self.assertEqual(response.status_code, 200) def test_technical(self): response = self.client.get(reverse(views.technical)) self.assertEqual(response.status_code, 200) def test_resources(self): response = self.client.get(reverse(views.resources)) self.assertEqual(response.status_code, 200)
Add test case for resources docs page
Add test case for resources docs page
Python
mit
crcollins/chemtools-webapp,crcollins/chemtools-webapp,crcollins/chemtools-webapp,crcollins/chemtools-webapp,crcollins/chemtools-webapp
--- +++ @@ -23,3 +23,7 @@ def test_technical(self): response = self.client.get(reverse(views.technical)) self.assertEqual(response.status_code, 200) + + def test_resources(self): + response = self.client.get(reverse(views.resources)) + self.assertEqual(response.status_code, 200)
56f7646413de01cb45825b30586231cf3ecbd43c
tests/test_flask_get.py
tests/test_flask_get.py
import unittest from flask import Flask from flask.ext.autodoc import Autodoc class TestAutodocWithFlask(unittest.TestCase): def setUp(self): self.app = Flask(__name__) self.autodoc = Autodoc(self.app) @self.app.route('/') @self.autodoc.doc() def index(): """Returns a hello world message""" return 'Hello World!' self.client = self.app.test_client() def test_html(self): @self.app.route('/docs') def html_docs(): return self.autodoc.html() response = self.client.get('/docs') self.assertEqual(response.status_code, 200) def test_json(self): @self.app.route('/docs') def json_docs(): return self.autodoc.json() response = self.client.get('/docs') self.assertEqual(response.status_code, 200)
import json import unittest from flask import Flask from flask.ext.autodoc import Autodoc class TestAutodocWithFlask(unittest.TestCase): def setUp(self): self.app = Flask(__name__) self.autodoc = Autodoc(self.app) @self.app.route('/') @self.autodoc.doc() def index(): """Returns a hello world message""" return 'Hello World!' self.client = self.app.test_client() def test_html(self): @self.app.route('/docs') def html_docs(): return self.autodoc.html() response = self.client.get('/docs') self.assertEqual(response.status_code, 200) def test_json(self): @self.app.route('/docs') def json_docs(): return self.autodoc.json() response = self.client.get('/docs') self.assertEqual(response.status_code, 200) data = json.loads(response.data) self.assertIn('endpoints', data)
Make this test a bit harder.
Make this test a bit harder.
Python
mit
jwg4/flask-autodoc,jwg4/flask-autodoc
--- +++ @@ -1,3 +1,4 @@ +import json import unittest from flask import Flask @@ -32,3 +33,6 @@ response = self.client.get('/docs') self.assertEqual(response.status_code, 200) + + data = json.loads(response.data) + self.assertIn('endpoints', data)
1ac385de638854cad1095a3f81a63ecf45fa60ae
organizer/models.py
organizer/models.py
from django.db import models # Model Field Reference # https://docs.djangoproject.com/en/1.8/ref/models/fields/ class Tag(models.Model): name = models.CharField( max_length=31, unique=True) slug = models.SlugField( max_length=31, unique=True, help_text='A label for URL config.') class Meta: ordering = ['name'] def __str__(self): return self.name class Startup(models.Model): name = models.CharField( max_length=31, db_index=True) slug = models.SlugField( max_length=31, unique=True, help_text='A label for URL config.') description = models.TextField() founded_date = models.DateField( 'date founded') contact = models.EmailField() website = models.URLField(max_length=255) tags = models.ManyToManyField(Tag) class Meta: ordering = ['name'] get_latest_by = 'founded_date' def __str__(self): return self.name class NewsLink(models.Model): title = models.CharField(max_length=63) pub_date = models.DateField('date published') link = models.URLField(max_length=255) startup = models.ForeignKey(Startup) def __str__(self): return "{}: {}".format( self.startup, self.title)
from django.db import models # Model Field Reference # https://docs.djangoproject.com/en/1.8/ref/models/fields/ class Tag(models.Model): name = models.CharField( max_length=31, unique=True) slug = models.SlugField( max_length=31, unique=True, help_text='A label for URL config.') class Meta: ordering = ['name'] def __str__(self): return self.name class Startup(models.Model): name = models.CharField( max_length=31, db_index=True) slug = models.SlugField( max_length=31, unique=True, help_text='A label for URL config.') description = models.TextField() founded_date = models.DateField( 'date founded') contact = models.EmailField() website = models.URLField(max_length=255) tags = models.ManyToManyField(Tag) class Meta: ordering = ['name'] get_latest_by = 'founded_date' def __str__(self): return self.name class NewsLink(models.Model): title = models.CharField(max_length=63) pub_date = models.DateField('date published') link = models.URLField(max_length=255) startup = models.ForeignKey(Startup) class Meta: verbose_name = 'news article' ordering = ['-pub_date'] get_latest_by = 'pub_date' def __str__(self): return "{}: {}".format( self.startup, self.title)
Declare Meta class in NewsLink model.
Ch03: Declare Meta class in NewsLink model. [skip ci]
Python
bsd-2-clause
jambonrose/DjangoUnleashed-1.8,jambonrose/DjangoUnleashed-1.8
--- +++ @@ -48,6 +48,11 @@ link = models.URLField(max_length=255) startup = models.ForeignKey(Startup) + class Meta: + verbose_name = 'news article' + ordering = ['-pub_date'] + get_latest_by = 'pub_date' + def __str__(self): return "{}: {}".format( self.startup, self.title)
bf09097e66c02764d81516a4161ca607ccbd0bb5
lfs/other_translations.py
lfs/other_translations.py
def _(): return 1 _(u'Price excludes tax') _(u'Price includes tax') _(u'Street') _(u'Left') _(u'Right') _(u'Average Rating') _(u'Zip Code') _(u'Last Order Number') _(u'Format') _(u"Cart Price") _(u"Combined Length and Girth")
def _(): return 1 _(u'Price excludes tax') _(u'Price includes tax') _(u'Street / No') _(u'Left') _(u'Right') _(u'Average Rating') _(u'Zip Code') _(u'Last Order Number') _(u'Format') _(u"Cart Price") _(u"Combined Length and Girth")
Add house number to street label within address form
Add house number to street label within address form
Python
bsd-3-clause
diefenbach/django-lfs,diefenbach/django-lfs,diefenbach/django-lfs
--- +++ @@ -4,7 +4,7 @@ _(u'Price excludes tax') _(u'Price includes tax') -_(u'Street') +_(u'Street / No') _(u'Left') _(u'Right') _(u'Average Rating')
f317f946f9f17cbaf162b12c8770908abba5bbeb
linter.py
linter.py
# # linter.py # Linter for SublimeLinter3, a code checking framework for Sublime Text 3 # # Written by NotSqrt # Copyright (c) 2013 NotSqrt # # License: MIT # """This module exports the Cppcheck plugin class.""" from SublimeLinter.lint import Linter, util class Cppcheck(Linter): """Provides an interface to cppcheck.""" syntax = 'c++' cmd = ('cppcheck', '--template=gcc', '--inline-suppr', '--quiet', '*', '@') regex = r'^.+:(?P<line>\d+):\s+(?P<message>.+)' error_stream = util.STREAM_BOTH # linting errors are on stderr, exceptions like "file not found" on stdout tempfile_suffix = 'cpp' defaults = { '--std=,+': [], # example ['c99', 'c89'] '--enable=,': 'style', } inline_settings = 'std' inline_overrides = 'enable' comment_re = r'\s*/[/*]'
# # linter.py # Linter for SublimeLinter3, a code checking framework for Sublime Text 3 # # Written by NotSqrt # Copyright (c) 2013 NotSqrt # # License: MIT # """This module exports the Cppcheck plugin class.""" from SublimeLinter.lint import Linter, util class Cppcheck(Linter): """Provides an interface to cppcheck.""" syntax = 'c++' cmd = ('cppcheck', '--template=gcc', '--inline-suppr', '--quiet', '*', '@') regex = r'^.+:(?P<line>\d+):\s+((?P<error>error)|(?P<warning>warning|style|performance|portability|information)):\s+(?P<message>.+)' error_stream = util.STREAM_BOTH # linting errors are on stderr, exceptions like "file not found" on stdout tempfile_suffix = 'cpp' defaults = { '--std=,+': [], # example ['c99', 'c89'] '--enable=,': 'style', } inline_settings = 'std' inline_overrides = 'enable' comment_re = r'\s*/[/*]'
Change the regex variable so that it distinguishes warnings from errors.
Change the regex variable so that it distinguishes warnings from errors.
Python
mit
SublimeLinter/SublimeLinter-cppcheck,ftoulemon/SublimeLinter-cppcheck
--- +++ @@ -19,7 +19,7 @@ syntax = 'c++' cmd = ('cppcheck', '--template=gcc', '--inline-suppr', '--quiet', '*', '@') - regex = r'^.+:(?P<line>\d+):\s+(?P<message>.+)' + regex = r'^.+:(?P<line>\d+):\s+((?P<error>error)|(?P<warning>warning|style|performance|portability|information)):\s+(?P<message>.+)' error_stream = util.STREAM_BOTH # linting errors are on stderr, exceptions like "file not found" on stdout tempfile_suffix = 'cpp' defaults = {
f8c440d849a794298f119735ebe10290fc66f2ec
mopidy/__main__.py
mopidy/__main__.py
import asyncore import logging import multiprocessing import os import sys sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '../'))) from mopidy import settings, SettingsError from mopidy.process import CoreProcess from mopidy.utils import get_class logger = logging.getLogger('mopidy.main') def main(): _setup_logging(2) core_queue = multiprocessing.Queue() core = CoreProcess(core_queue) core.start() get_class(settings.SERVER)(core_queue) asyncore.loop() def _setup_logging(verbosity_level): if verbosity_level == 0: level = logging.WARNING elif verbosity_level == 2: level = logging.DEBUG else: level = logging.INFO logging.basicConfig( format=settings.CONSOLE_LOG_FORMAT, level=level, ) if __name__ == '__main__': try: main() except KeyboardInterrupt: sys.exit('\nInterrupted by user') except SettingsError, e: sys.exit('%s' % e)
import asyncore import logging import multiprocessing import optparse import os import sys sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '../'))) from mopidy import settings, SettingsError from mopidy.process import CoreProcess from mopidy.utils import get_class logger = logging.getLogger('mopidy.main') def main(): options, args = _parse_options() _setup_logging(options.verbosity_level) core_queue = multiprocessing.Queue() core = CoreProcess(core_queue) core.start() get_class(settings.SERVER)(core_queue) asyncore.loop() def _parse_options(): parser = optparse.OptionParser() parser.add_option('-q', '--quiet', action='store_const', const=0, dest='verbosity_level', help='less output (warning level)') parser.add_option('-v', '--verbose', action='store_const', const=2, dest='verbosity_level', help='more output (debug level)') return parser.parse_args() def _setup_logging(verbosity_level): if verbosity_level == 0: level = logging.WARNING elif verbosity_level == 2: level = logging.DEBUG else: level = logging.INFO logging.basicConfig( format=settings.CONSOLE_LOG_FORMAT, level=level, ) if __name__ == '__main__': try: main() except KeyboardInterrupt: sys.exit('\nInterrupted by user') except SettingsError, e: sys.exit('%s' % e)
Add -v and -q options for more or less output
Add -v and -q options for more or less output
Python
apache-2.0
jodal/mopidy,tkem/mopidy,jodal/mopidy,swak/mopidy,pacificIT/mopidy,tkem/mopidy,adamcik/mopidy,jcass77/mopidy,jmarsik/mopidy,adamcik/mopidy,ali/mopidy,jmarsik/mopidy,pacificIT/mopidy,swak/mopidy,jmarsik/mopidy,hkariti/mopidy,quartz55/mopidy,rawdlite/mopidy,mokieyue/mopidy,ZenithDK/mopidy,bencevans/mopidy,SuperStarPL/mopidy,quartz55/mopidy,ali/mopidy,pacificIT/mopidy,bacontext/mopidy,mopidy/mopidy,swak/mopidy,priestd09/mopidy,bencevans/mopidy,mokieyue/mopidy,SuperStarPL/mopidy,ali/mopidy,pacificIT/mopidy,jcass77/mopidy,swak/mopidy,hkariti/mopidy,diandiankan/mopidy,mokieyue/mopidy,hkariti/mopidy,ali/mopidy,abarisain/mopidy,quartz55/mopidy,kingosticks/mopidy,mopidy/mopidy,glogiotatidis/mopidy,jodal/mopidy,mopidy/mopidy,ZenithDK/mopidy,adamcik/mopidy,ZenithDK/mopidy,SuperStarPL/mopidy,vrs01/mopidy,priestd09/mopidy,woutervanwijk/mopidy,vrs01/mopidy,jcass77/mopidy,dbrgn/mopidy,glogiotatidis/mopidy,tkem/mopidy,bencevans/mopidy,bencevans/mopidy,ZenithDK/mopidy,SuperStarPL/mopidy,vrs01/mopidy,rawdlite/mopidy,abarisain/mopidy,glogiotatidis/mopidy,vrs01/mopidy,rawdlite/mopidy,glogiotatidis/mopidy,bacontext/mopidy,dbrgn/mopidy,bacontext/mopidy,diandiankan/mopidy,kingosticks/mopidy,dbrgn/mopidy,woutervanwijk/mopidy,bacontext/mopidy,priestd09/mopidy,kingosticks/mopidy,diandiankan/mopidy,liamw9534/mopidy,liamw9534/mopidy,jmarsik/mopidy,mokieyue/mopidy,tkem/mopidy,dbrgn/mopidy,diandiankan/mopidy,rawdlite/mopidy,hkariti/mopidy,quartz55/mopidy
--- +++ @@ -1,6 +1,7 @@ import asyncore import logging import multiprocessing +import optparse import os import sys @@ -14,12 +15,23 @@ logger = logging.getLogger('mopidy.main') def main(): - _setup_logging(2) + options, args = _parse_options() + _setup_logging(options.verbosity_level) core_queue = multiprocessing.Queue() core = CoreProcess(core_queue) core.start() get_class(settings.SERVER)(core_queue) asyncore.loop() + +def _parse_options(): + parser = optparse.OptionParser() + parser.add_option('-q', '--quiet', + action='store_const', const=0, dest='verbosity_level', + help='less output (warning level)') + parser.add_option('-v', '--verbose', + action='store_const', const=2, dest='verbosity_level', + help='more output (debug level)') + return parser.parse_args() def _setup_logging(verbosity_level): if verbosity_level == 0:
a521c4a4a55437452a4a7d006ec8faea0521ea05
capstone/rl/learners/sarsa.py
capstone/rl/learners/sarsa.py
from ..learner import Learner from ..policies import RandomPolicy from ..value_functions import TabularQ from ...utils import check_random_state class Sarsa(Learner): def __init__(self, env, policy=None, learning_rate=0.1, discount_factor=0.99, n_episodes=1000, verbose=True, random_state=None): super(Sarsa, self).__init__(env, n_episodes=n_episodes, verbose=verbose) self.policy = policy self.learning_rate = learning_rate self.discount_factor = discount_factor self.random_state = check_random_state(random_state) self.policy = policy or RandomPolicy(env.actions, self.random_state) self.qf = TabularQ(self.random_state) ########### # Learner # ########### def episode(self): state = self.env.cur_state() action = self.policy.action(state) while not self.env.is_terminal(): reward, next_state = self.env.do_action(action) next_action = self.policy.action(next_state) target = reward + (self.discount_factor * self.qf[next_state, next_action]) td_error = target - self.qf[state, action] self.qf[state, action] += self.learning_rate * td_error state, action = next_state, next_action
from ..learner import Learner from ..policies import RandomPolicy from ..value_functions import TabularQ from ...utils import check_random_state class Sarsa(Learner): def __init__(self, env, policy=None, learning_rate=0.1, discount_factor=0.99, n_episodes=1000, verbose=True, random_state=None): super(Sarsa, self).__init__(env, n_episodes=n_episodes, verbose=verbose) self.policy = policy self.learning_rate = learning_rate self.discount_factor = discount_factor self.random_state = check_random_state(random_state) self.policy = policy or RandomPolicy(env.actions, random_state=self.random_state) self.qf = TabularQ(random_state=self.random_state) ########### # Learner # ########### def episode(self): state = self.env.cur_state() action = self.policy.action(state) while not self.env.is_terminal(): reward, next_state = self.env.do_action(action) next_action = self.policy.action(next_state) target = reward + (self.discount_factor * self.qf[next_state, next_action]) td_error = target - self.qf[state, action] self.qf[state, action] += self.learning_rate * td_error state, action = next_state, next_action
Create tabular q-function with kwarg random_state
Create tabular q-function with kwarg random_state
Python
mit
davidrobles/mlnd-capstone-code
--- +++ @@ -13,8 +13,8 @@ self.learning_rate = learning_rate self.discount_factor = discount_factor self.random_state = check_random_state(random_state) - self.policy = policy or RandomPolicy(env.actions, self.random_state) - self.qf = TabularQ(self.random_state) + self.policy = policy or RandomPolicy(env.actions, random_state=self.random_state) + self.qf = TabularQ(random_state=self.random_state) ########### # Learner #
b388722fcc70d2787b91b5a4492cb9659cea7a42
parsl/providers/torque/template.py
parsl/providers/torque/template.py
template_string = '''#!/bin/bash #PBS -S /bin/bash #PBS -N ${jobname} #PBS -m n #PBS -k eo #PBS -l walltime=$walltime #PBS -l nodes=${nodes_per_block}:ppn=${tasks_per_node} #PBS -o ${submit_script_dir}/${jobname}.submit.stdout #PBS -e ${submit_script_dir}/${jobname}.submit.stderr ${scheduler_options} ${worker_init} export JOBNAME="${jobname}" ${user_script} '''
template_string = '''#!/bin/bash #PBS -S /bin/bash #PBS -N ${jobname} #PBS -m n #PBS -l walltime=$walltime #PBS -l nodes=${nodes_per_block}:ppn=${tasks_per_node} #PBS -o ${submit_script_dir}/${jobname}.submit.stdout #PBS -e ${submit_script_dir}/${jobname}.submit.stderr ${scheduler_options} ${worker_init} export JOBNAME="${jobname}" ${user_script} '''
Remove line which was preventing stdout redirect
Remove line which was preventing stdout redirect I think this line tries to send stdout and stderr to the same file. I'm not really sure why removing it causes the later specification of the stdout and stderr lines to be respected, but we don't want them in the same file in any case. Fixes #647.
Python
apache-2.0
Parsl/parsl,Parsl/parsl,Parsl/parsl,swift-lang/swift-e-lab,swift-lang/swift-e-lab,Parsl/parsl
--- +++ @@ -3,7 +3,6 @@ #PBS -S /bin/bash #PBS -N ${jobname} #PBS -m n -#PBS -k eo #PBS -l walltime=$walltime #PBS -l nodes=${nodes_per_block}:ppn=${tasks_per_node} #PBS -o ${submit_script_dir}/${jobname}.submit.stdout
3b83e983f1e7cd3a0866109c66dd91903db87fa1
goodtablesio/integrations/github/tasks/repos.py
goodtablesio/integrations/github/tasks/repos.py
import datetime from goodtablesio.models.user import User from goodtablesio.models.source import Source from goodtablesio.services import database from goodtablesio.celery_app import celery_app from goodtablesio.integrations.github.utils.repos import iter_repos_by_token @celery_app.task(name='goodtablesio.github.sync_user_repos') def sync_user_repos(user_id, token): """Sync user repositories. """ user = database['session'].query(User).get(user_id) for repo_data in iter_repos_by_token(token): repo = database['session'].query(Source).filter( Source.conf['github_id'].astext == repo_data['conf']['github_id'] ).one_or_none() if repo is None: repo = Source(**repo_data) database['session'].add(repo) repo.active = repo_data['active'] repo.updated = datetime.datetime.utcnow(), repo.users.append(user) database['session'].commit()
import datetime from goodtablesio.models.user import User from goodtablesio.services import database from goodtablesio.celery_app import celery_app from goodtablesio.integrations.github.models.repo import GithubRepo from goodtablesio.integrations.github.utils.repos import iter_repos_by_token @celery_app.task(name='goodtablesio.github.sync_user_repos') def sync_user_repos(user_id, token): """Sync user repositories. """ user = database['session'].query(User).get(user_id) for repo_data in iter_repos_by_token(token): repo = database['session'].query(GithubRepo).filter( GithubRepo.conf['github_id'].astext == repo_data['conf']['github_id'] ).one_or_none() if repo is None: repo = GithubRepo(**repo_data) database['session'].add(repo) repo.active = repo_data['active'] repo.updated = datetime.datetime.utcnow(), repo.users.append(user) database['session'].commit()
Use own model class on github task
Use own model class on github task
Python
agpl-3.0
frictionlessdata/goodtables.io,frictionlessdata/goodtables.io,frictionlessdata/goodtables.io,frictionlessdata/goodtables.io
--- +++ @@ -1,8 +1,8 @@ import datetime from goodtablesio.models.user import User -from goodtablesio.models.source import Source from goodtablesio.services import database from goodtablesio.celery_app import celery_app +from goodtablesio.integrations.github.models.repo import GithubRepo from goodtablesio.integrations.github.utils.repos import iter_repos_by_token @@ -13,11 +13,11 @@ user = database['session'].query(User).get(user_id) for repo_data in iter_repos_by_token(token): - repo = database['session'].query(Source).filter( - Source.conf['github_id'].astext == repo_data['conf']['github_id'] + repo = database['session'].query(GithubRepo).filter( + GithubRepo.conf['github_id'].astext == repo_data['conf']['github_id'] ).one_or_none() if repo is None: - repo = Source(**repo_data) + repo = GithubRepo(**repo_data) database['session'].add(repo) repo.active = repo_data['active'] repo.updated = datetime.datetime.utcnow(),
274a1b43a57f838f078d11ef71803d46f8fd34bf
perfect-numbers/perfect_numbers.py
perfect-numbers/perfect_numbers.py
def divisor_generator(n): big_factors = {1} for i in range(2, n): if i in big_factors: break if n % i == 0: big_factors.add(n // i) yield i yield from big_factors def is_perfect(n): return n == sum(divisor_generator(n))
from math import sqrt, ceil def divisor_generator(n): yield 1 for i in range(2, ceil(sqrt(n))): if n % i == 0: yield i yield n // i def is_perfect(n): return n == sum(divisor_generator(n))
Refactor to give faster solution
Refactor to give faster solution
Python
agpl-3.0
CubicComet/exercism-python-solutions
--- +++ @@ -1,12 +1,12 @@ +from math import sqrt, ceil + + def divisor_generator(n): - big_factors = {1} - for i in range(2, n): - if i in big_factors: - break + yield 1 + for i in range(2, ceil(sqrt(n))): if n % i == 0: - big_factors.add(n // i) yield i - yield from big_factors + yield n // i def is_perfect(n):
9fe93f19c38ea1e41ce7bfe5aca8ec9327b195b5
test/test_reqmodules.py
test/test_reqmodules.py
import tserver, sys, md5, math from disco import Disco, result_iterator def data_gen(path): return path[1:] + "\n" def fun_map(e, params): k = str(int(math.ceil(float(e))) ** 2) return [(md5.new(k).hexdigest(), "")] tserver.run_server(data_gen) disco = Disco(sys.argv[1]) inputs = [1, 485, 3245] job = disco.new_job(name = "test_reqmodules", nr_reduces = 1, input = tserver.makeurl(inputs), map = fun_map, required_modules = ["math", "md5"], sort = False) res = list(result_iterator(job.wait())) if len(res) != len(inputs): raise Exception("Too few results: Got: %d Should be %d" % (len(res), len(inputs))) cor = map(lambda x: md5.new(str(int(math.ceil(x)) ** 2)).hexdigest(), inputs) for k, v in res: if k not in cor: raise Exception("Invalid answer: %s" % k) cor.remove(k) job.purge() print "ok"
import tserver, sys, base64, math from disco import Disco, result_iterator def data_gen(path): return path[1:] + "\n" def fun_map(e, params): k = str(int(math.ceil(float(e))) ** 2) return [(base64.encodestring(k), "")] tserver.run_server(data_gen) disco = Disco(sys.argv[1]) inputs = [1, 485, 3245] job = disco.new_job(name = "test_reqmodules", nr_reduces = 1, input = tserver.makeurl(inputs), map = fun_map, sort = False) res = list(result_iterator(job.wait())) if len(res) != len(inputs): raise Exception("Too few results: Got: %d Should be %d" % (len(res), len(inputs))) cor = map(lambda x: base64.encodestring(str(int(math.ceil(x)) ** 2)), inputs) for k, v in res: if k not in cor: raise Exception("Invalid answer: %s" % k) cor.remove(k) job.purge() print "ok"
Use the base64 module for testing rather than md5 which is deprecated in python2.6
Use the base64 module for testing rather than md5 which is deprecated in python2.6
Python
bsd-3-clause
ErikDubbelboer/disco,beni55/disco,ErikDubbelboer/disco,beni55/disco,pombredanne/disco,scrapinghub/disco,scrapinghub/disco,pombredanne/disco,ErikDubbelboer/disco,seabirdzh/disco,pooya/disco,ErikDubbelboer/disco,discoproject/disco,pombredanne/disco,mwilliams3/disco,simudream/disco,discoproject/disco,mwilliams3/disco,ktkt2009/disco,beni55/disco,seabirdzh/disco,mozilla/disco,pombredanne/disco,ktkt2009/disco,pooya/disco,ktkt2009/disco,seabirdzh/disco,pavlobaron/disco_playground,beni55/disco,mozilla/disco,oldmantaiter/disco,simudream/disco,beni55/disco,mwilliams3/disco,oldmantaiter/disco,ErikDubbelboer/disco,pavlobaron/disco_playground,scrapinghub/disco,discoproject/disco,ktkt2009/disco,mwilliams3/disco,seabirdzh/disco,mozilla/disco,mwilliams3/disco,pooya/disco,oldmantaiter/disco,seabirdzh/disco,simudream/disco,oldmantaiter/disco,simudream/disco,discoproject/disco,ktkt2009/disco,oldmantaiter/disco,pombredanne/disco,pavlobaron/disco_playground,scrapinghub/disco,simudream/disco,mozilla/disco,discoproject/disco,pavlobaron/disco_playground,pooya/disco
--- +++ @@ -1,5 +1,5 @@ -import tserver, sys, md5, math +import tserver, sys, base64, math from disco import Disco, result_iterator def data_gen(path): @@ -7,7 +7,7 @@ def fun_map(e, params): k = str(int(math.ceil(float(e))) ** 2) - return [(md5.new(k).hexdigest(), "")] + return [(base64.encodestring(k), "")] tserver.run_server(data_gen) disco = Disco(sys.argv[1]) @@ -17,7 +17,6 @@ nr_reduces = 1, input = tserver.makeurl(inputs), map = fun_map, - required_modules = ["math", "md5"], sort = False) res = list(result_iterator(job.wait())) @@ -25,7 +24,7 @@ raise Exception("Too few results: Got: %d Should be %d" % (len(res), len(inputs))) -cor = map(lambda x: md5.new(str(int(math.ceil(x)) ** 2)).hexdigest(), inputs) +cor = map(lambda x: base64.encodestring(str(int(math.ceil(x)) ** 2)), inputs) for k, v in res: if k not in cor:
49a2502043e1d7ad5f3907779be7815a39ad85c7
awx/main/models/activity_stream.py
awx/main/models/activity_stream.py
# Copyright (c) 2013 AnsibleWorks, Inc. # All Rights Reserved. from django.db import models class ActivityStream(models.Model): ''' Model used to describe activity stream (audit) events ''' OPERATION_CHOICES = [ ('create', _('Entity Created')), ('update', _("Entity Updated")), ('delete', _("Entity Deleted")), ('associate', _("Entity Associated with another Entity")), ('disaassociate', _("Entity was Disassociated with another Entity")) ] user = models.ForeignKey('auth.User', null=True, on_delete=models.SET_NULL) operation = models.CharField(max_length=9, choices=OPERATION_CHOICES) timestamp = models.DateTimeField(auto_now_add=True) changes = models.TextField(blank=True) object1_id = models.PositiveIntegerField(db_index=True) object1_type = models.TextField() object2_id = models.PositiveIntegerField(db_index=True) object2_type = models.TextField() object_relationship_type = models.TextField()
# Copyright (c) 2013 AnsibleWorks, Inc. # All Rights Reserved. from django.db import models from django.utils.translation import ugettext_lazy as _ class ActivityStream(models.Model): ''' Model used to describe activity stream (audit) events ''' class Meta: app_label = 'main' OPERATION_CHOICES = [ ('create', _('Entity Created')), ('update', _("Entity Updated")), ('delete', _("Entity Deleted")), ('associate', _("Entity Associated with another Entity")), ('disaassociate', _("Entity was Disassociated with another Entity")) ] user = models.ForeignKey('auth.User', null=True, on_delete=models.SET_NULL, related_name='activity_stream') operation = models.CharField(max_length=9, choices=OPERATION_CHOICES) timestamp = models.DateTimeField(auto_now_add=True) changes = models.TextField(blank=True) object1_id = models.PositiveIntegerField(db_index=True) object1_type = models.TextField() object2_id = models.PositiveIntegerField(db_index=True, null=True) object2_type = models.TextField(null=True, blank=True) object_relationship_type = models.TextField(blank=True)
Fix up some issues with supporting schema migration
Fix up some issues with supporting schema migration
Python
apache-2.0
wwitzel3/awx,wwitzel3/awx,snahelou/awx,snahelou/awx,snahelou/awx,wwitzel3/awx,wwitzel3/awx,snahelou/awx
--- +++ @@ -3,11 +3,16 @@ from django.db import models +from django.utils.translation import ugettext_lazy as _ class ActivityStream(models.Model): ''' Model used to describe activity stream (audit) events ''' + + class Meta: + app_label = 'main' + OPERATION_CHOICES = [ ('create', _('Entity Created')), ('update', _("Entity Updated")), @@ -16,7 +21,7 @@ ('disaassociate', _("Entity was Disassociated with another Entity")) ] - user = models.ForeignKey('auth.User', null=True, on_delete=models.SET_NULL) + user = models.ForeignKey('auth.User', null=True, on_delete=models.SET_NULL, related_name='activity_stream') operation = models.CharField(max_length=9, choices=OPERATION_CHOICES) timestamp = models.DateTimeField(auto_now_add=True) changes = models.TextField(blank=True) @@ -24,7 +29,7 @@ object1_id = models.PositiveIntegerField(db_index=True) object1_type = models.TextField() - object2_id = models.PositiveIntegerField(db_index=True) - object2_type = models.TextField() + object2_id = models.PositiveIntegerField(db_index=True, null=True) + object2_type = models.TextField(null=True, blank=True) - object_relationship_type = models.TextField() + object_relationship_type = models.TextField(blank=True)
5fe73f508d87018db1d6f574cd22d76b46cbe862
events/api.py
events/api.py
''' Api for the events app ''' import datetime from models import Event from tastypie.utils import trailing_slash from apis.resources.base import BaseResource from django.conf.urls.defaults import url class EventResource(BaseResource): class Meta: queryset = Event.objects.all() allowed_methods = ['get'] def override_urls(self): return [ url(r'^(?P<resource_name>%s)/?' % self._meta.resource_name, self.wrap_view('get_future_events'), name = 'api-get-future-events') ] def get_future_events(self, request, **kwargs): events = Event.objects.filter(when__gte = datetime.datetime.now()) bundles = [self.build_bundle(obj = event, request = request) for event in events] return self.create_response(request, map(self.full_dehydrate, bundles))
''' Api for the events app ''' import datetime from models import Event from tastypie.utils import trailing_slash from apis.resources.base import BaseResource from django.conf.urls.defaults import url class EventResource(BaseResource): class Meta: queryset = Event.objects.all() allowed_methods = ['get'] def override_urls(self): return [ url(r'^(?P<resource_name>%s)/?$' % self._meta.resource_name, self.wrap_view('get_future_events'), name = 'api-get-future-events'), url(r'^(?P<resource_name>%s)/(?P<event_id>\d+)/?$' % self._meta.resource_name, self.wrap_view('get_specific_event'), name = 'api-get-specific-event') ] def get_future_events(self, request, **kwargs): events = Event.objects.filter(when__gte = datetime.datetime.now()) bundles = [self.build_bundle(obj = event, request = request) for event in events] return self.create_response(request, map(self.full_dehydrate, bundles)) def get_specific_event(self, request, **kwargs): events = Event.objects.filter(id = kwargs['event_id']) bundles = [self.build_bundle(obj = event, request = request) for event in events] return self.create_response(request, self.full_dehydrate(bundles[0]))
Fix the event APIv2 to account for retrieval of a single event.
Fix the event APIv2 to account for retrieval of a single event.
Python
bsd-3-clause
alonisser/Open-Knesset,OriHoch/Open-Knesset,habeanf/Open-Knesset,daonb/Open-Knesset,otadmor/Open-Knesset,navotsil/Open-Knesset,MeirKriheli/Open-Knesset,DanaOshri/Open-Knesset,Shrulik/Open-Knesset,daonb/Open-Knesset,otadmor/Open-Knesset,habeanf/Open-Knesset,DanaOshri/Open-Knesset,Shrulik/Open-Knesset,daonb/Open-Knesset,alonisser/Open-Knesset,navotsil/Open-Knesset,daonb/Open-Knesset,OriHoch/Open-Knesset,DanaOshri/Open-Knesset,jspan/Open-Knesset,DanaOshri/Open-Knesset,jspan/Open-Knesset,otadmor/Open-Knesset,MeirKriheli/Open-Knesset,habeanf/Open-Knesset,ofri/Open-Knesset,MeirKriheli/Open-Knesset,habeanf/Open-Knesset,noamelf/Open-Knesset,otadmor/Open-Knesset,ofri/Open-Knesset,alonisser/Open-Knesset,OriHoch/Open-Knesset,jspan/Open-Knesset,Shrulik/Open-Knesset,jspan/Open-Knesset,ofri/Open-Knesset,navotsil/Open-Knesset,ofri/Open-Knesset,MeirKriheli/Open-Knesset,noamelf/Open-Knesset,noamelf/Open-Knesset,Shrulik/Open-Knesset,navotsil/Open-Knesset,OriHoch/Open-Knesset,noamelf/Open-Knesset,alonisser/Open-Knesset
--- +++ @@ -14,12 +14,20 @@ def override_urls(self): return [ - url(r'^(?P<resource_name>%s)/?' % self._meta.resource_name, + url(r'^(?P<resource_name>%s)/?$' % self._meta.resource_name, self.wrap_view('get_future_events'), - name = 'api-get-future-events') + name = 'api-get-future-events'), + url(r'^(?P<resource_name>%s)/(?P<event_id>\d+)/?$' % self._meta.resource_name, + self.wrap_view('get_specific_event'), + name = 'api-get-specific-event') ] def get_future_events(self, request, **kwargs): events = Event.objects.filter(when__gte = datetime.datetime.now()) bundles = [self.build_bundle(obj = event, request = request) for event in events] return self.create_response(request, map(self.full_dehydrate, bundles)) + + def get_specific_event(self, request, **kwargs): + events = Event.objects.filter(id = kwargs['event_id']) + bundles = [self.build_bundle(obj = event, request = request) for event in events] + return self.create_response(request, self.full_dehydrate(bundles[0]))
2d7c7872bb030a280de6047a609d14560ff7e29f
fabfile/eg.py
fabfile/eg.py
from fabric.api import task, local, run, lcd, cd, env from os.path import exists as file_exists from fabtools.python import virtualenv from os import path PWD = path.join(path.dirname(__file__), '..') VENV_DIR = path.join(PWD, '.env') @task def mnist(): with virtualenv(VENV_DIR): with lcd(PWD): local('pip install -e .') local('pip install keras') print("Using Keras to get MNIST data") local('python examples/mnist.py')
from fabric.api import task, local, run, lcd, cd, env from os.path import exists as file_exists from fabtools.python import virtualenv from os import path PWD = path.join(path.dirname(__file__), '..') VENV_DIR = path.join(PWD, '.env') @task def mnist(): with virtualenv(VENV_DIR): with lcd(PWD): local('pip install -e .') local('pip install keras') print("Using Keras to get MNIST data") local('KERAS_BACKEND="theano" python examples/mnist.py') @task def basic_tagger(): with virtualenv(VENV_DIR): with lcd(PWD): local('pip install -e .') local('mkdir data') install_ancora() local('python examples/basic_tagger.py')
Patch mnist example to specify theano for keras.
Patch mnist example to specify theano for keras.
Python
mit
spacy-io/thinc,spacy-io/thinc,explosion/thinc,explosion/thinc,explosion/thinc,explosion/thinc,spacy-io/thinc
--- +++ @@ -15,4 +15,14 @@ local('pip install -e .') local('pip install keras') print("Using Keras to get MNIST data") - local('python examples/mnist.py') + local('KERAS_BACKEND="theano" python examples/mnist.py') + + +@task +def basic_tagger(): + with virtualenv(VENV_DIR): + with lcd(PWD): + local('pip install -e .') + local('mkdir data') + install_ancora() + local('python examples/basic_tagger.py')
81cd919f4f85f2d26d89006234d608aebe8a8047
plugins/clue/clue.py
plugins/clue/clue.py
from __future__ import unicode_literals # don't convert to ascii in py2.7 when creating string to return crontable = [] outputs = [] state = {} def process_message(data): channel = data['channel'] if channel not in state.keys(): state[channel] = {'count': 0, 'clue': ''} st = state[channel] # Count the number of messages we have seen in this channel since # stony last repeated a clue. st['count'] = st['count'] + 1 if data['text'].startswith('&gt;'): st['clue'] = data['text'] st['count'] = 1 else: if st['count'] % 10 == 0: outputs.append([channel, st['clue']])
from __future__ import unicode_literals # don't convert to ascii in py2.7 when creating string to return crontable = [] outputs = [] state = {} class ClueState: def __init__(self): self.count = 0 self.clue = '' def process_message(data): channel = data['channel'] if channel not in state.keys(): state[channel] = ClueState() st = state[channel] # Count the number of messages we have seen in this channel since # stony last repeated a clue. st.count = st.count + 1 if data['text'].startswith('&gt;'): st.clue = data['text'] st.count = 1 else: if st.count % 10 == 0: outputs.append([channel, st.clue])
Switch to a ClueState class instead of just a dictionary
Switch to a ClueState class instead of just a dictionary This makes the syntax a little easier to read, (such as "st.count" instead of "st['count']").
Python
mit
cworth-gh/stony
--- +++ @@ -7,21 +7,27 @@ state = {} +class ClueState: + + def __init__(self): + self.count = 0 + self.clue = '' + def process_message(data): channel = data['channel'] if channel not in state.keys(): - state[channel] = {'count': 0, 'clue': ''} + state[channel] = ClueState() st = state[channel] # Count the number of messages we have seen in this channel since # stony last repeated a clue. - st['count'] = st['count'] + 1 + st.count = st.count + 1 if data['text'].startswith('&gt;'): - st['clue'] = data['text'] - st['count'] = 1 + st.clue = data['text'] + st.count = 1 else: - if st['count'] % 10 == 0: - outputs.append([channel, st['clue']]) + if st.count % 10 == 0: + outputs.append([channel, st.clue])
1f3e42047a67001719220a5a42fe0c5f51d2b740
Demo/sockets/echosvr.py
Demo/sockets/echosvr.py
#! /usr/local/bin/python # Python implementation of an 'echo' tcp server: echo all data it receives. # # This is the simplest possible server, sevicing a single request only. import sys from socket import * # The standard echo port isn't very useful, it requires root permissions! # ECHO_PORT = 7 ECHO_PORT = 50000 + 7 BUFSIZE = 1024 def main(): if len(sys.argv) > 1: port = int(eval(sys.argv[1])) else: port = ECHO_PORT s = socket(AF_INET, SOCK_STREAM) s.bind('', port) s.listen(0) conn, (remotehost, remoteport) = s.accept() print 'connected by', remotehost, remoteport while 1: data = conn.recv(BUFSIZE) if not data: break conn.send(data) main()
#! /usr/local/bin/python # Python implementation of an 'echo' tcp server: echo all data it receives. # # This is the simplest possible server, sevicing a single request only. import sys from socket import * # The standard echo port isn't very useful, it requires root permissions! # ECHO_PORT = 7 ECHO_PORT = 50000 + 7 BUFSIZE = 1024 def main(): if len(sys.argv) > 1: port = int(eval(sys.argv[1])) else: port = ECHO_PORT s = socket(AF_INET, SOCK_STREAM) s.bind('', port) s.listen(1) conn, (remotehost, remoteport) = s.accept() print 'connected by', remotehost, remoteport while 1: data = conn.recv(BUFSIZE) if not data: break conn.send(data) main()
Change listen(0) to listen(1) for Solaris 2 sockets
Change listen(0) to listen(1) for Solaris 2 sockets
Python
mit
sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
--- +++ @@ -19,7 +19,7 @@ port = ECHO_PORT s = socket(AF_INET, SOCK_STREAM) s.bind('', port) - s.listen(0) + s.listen(1) conn, (remotehost, remoteport) = s.accept() print 'connected by', remotehost, remoteport while 1:
2f968509be6ae8e3bcd15d0f46ebc3ba290ca086
astroplan/exceptions.py
astroplan/exceptions.py
# Licensed under a 3-clause BSD style license - see LICENSE.rst from __future__ import (absolute_import, division, print_function, unicode_literals) from astropy.utils.exceptions import AstropyWarning __all__ = ["TargetAlwaysUpWarning", "TargetNeverUpWarning", "OldEarthOrientationDataWarning", "PlotWarning", "PlotBelowHorizonWarning"] class TargetAlwaysUpWarning(AstropyWarning): """Target is circumpolar""" pass class TargetNeverUpWarning(AstropyWarning): """Target never rises above horizon""" pass class OldEarthOrientationDataWarning(AstropyWarning): """Using old Earth rotation data from IERS""" pass class PlotWarning(AstropyWarning): """Warnings dealing with the plotting aspects of astroplan""" pass class PlotBelowHorizonWarning(PlotWarning): """Warning for when something is hidden on a plot because it's below the horizon""" pass
# Licensed under a 3-clause BSD style license - see LICENSE.rst from __future__ import (absolute_import, division, print_function, unicode_literals) from astropy.utils.exceptions import AstropyWarning __all__ = ["TargetAlwaysUpWarning", "TargetNeverUpWarning", "OldEarthOrientationDataWarning", "PlotWarning", "PlotBelowHorizonWarning"] class AstroplanWarning(AstropyWarning): """Superclass for warnings used by astroplan""" class TargetAlwaysUpWarning(AstroplanWarning): """Target is circumpolar""" pass class TargetNeverUpWarning(AstroplanWarning): """Target never rises above horizon""" pass class OldEarthOrientationDataWarning(AstroplanWarning): """Using old Earth rotation data from IERS""" pass class PlotWarning(AstroplanWarning): """Warnings dealing with the plotting aspects of astroplan""" pass class PlotBelowHorizonWarning(PlotWarning): """Warning for when something is hidden on a plot because it's below the horizon""" pass
Make all astroplan warnings descend from an AstroplanWarning
Make all astroplan warnings descend from an AstroplanWarning
Python
bsd-3-clause
StuartLittlefair/astroplan
--- +++ @@ -8,19 +8,22 @@ "OldEarthOrientationDataWarning", "PlotWarning", "PlotBelowHorizonWarning"] -class TargetAlwaysUpWarning(AstropyWarning): +class AstroplanWarning(AstropyWarning): + """Superclass for warnings used by astroplan""" + +class TargetAlwaysUpWarning(AstroplanWarning): """Target is circumpolar""" pass -class TargetNeverUpWarning(AstropyWarning): +class TargetNeverUpWarning(AstroplanWarning): """Target never rises above horizon""" pass -class OldEarthOrientationDataWarning(AstropyWarning): +class OldEarthOrientationDataWarning(AstroplanWarning): """Using old Earth rotation data from IERS""" pass -class PlotWarning(AstropyWarning): +class PlotWarning(AstroplanWarning): """Warnings dealing with the plotting aspects of astroplan""" pass
5b00b6cd392a1f5f317cc18893198bb5d62c565d
pystache/parsed.py
pystache/parsed.py
# coding: utf-8 """ Exposes a class that represents a parsed (or compiled) template. """ class ParsedTemplate(object): """ Represents a parsed or compiled template. An instance wraps a list of unicode strings and node objects. A node object must have a `render(engine, stack)` method that accepts a RenderEngine instance and a ContextStack instance and returns a unicode string. """ def __init__(self): self._parse_tree = [] def __repr__(self): return repr(self._parse_tree) def add(self, node): """ Arguments: node: a unicode string or node object instance. See the class docstring for information. """ self._parse_tree.append(node) def render(self, engine, context): """ Returns: a string of type unicode. """ # We avoid use of the ternary operator for Python 2.4 support. def get_unicode(val): if type(val) is unicode: return val return val.render(engine, context) parts = map(get_unicode, self._parse_tree) s = ''.join(parts) return unicode(s)
# coding: utf-8 """ Exposes a class that represents a parsed (or compiled) template. """ class ParsedTemplate(object): """ Represents a parsed or compiled template. An instance wraps a list of unicode strings and node objects. A node object must have a `render(engine, stack)` method that accepts a RenderEngine instance and a ContextStack instance and returns a unicode string. """ def __init__(self): self._parse_tree = [] def __repr__(self): return repr(self._parse_tree) def add(self, node): """ Arguments: node: a unicode string or node object instance. See the class docstring for information. """ self._parse_tree.append(node) def render(self, engine, context): """ Returns: a string of type unicode. """ # We avoid use of the ternary operator for Python 2.4 support. def get_unicode(node): if type(node) is unicode: return node return node.render(engine, context) parts = map(get_unicode, self._parse_tree) s = ''.join(parts) return unicode(s)
Rename variable name from "val" to "node".
Rename variable name from "val" to "node".
Python
mit
charbeljc/pystache,nitish116/pystache,defunkt/pystache,harsh00008/pystache,rismalrv/pystache,nitish116/pystache,arlenesr28/pystache,nitish116/pystache,rismalrv/pystache,charbeljc/pystache,arlenesr28/pystache,harsh00008/pystache,beni55/pystache,harsh00008/pystache,jrnold/pystache,rismalrv/pystache,jrnold/pystache,arlenesr28/pystache,beni55/pystache
--- +++ @@ -40,10 +40,10 @@ """ # We avoid use of the ternary operator for Python 2.4 support. - def get_unicode(val): - if type(val) is unicode: - return val - return val.render(engine, context) + def get_unicode(node): + if type(node) is unicode: + return node + return node.render(engine, context) parts = map(get_unicode, self._parse_tree) s = ''.join(parts)
336edeebe5ac04df719569efa40e2fef01a2dccc
testproject/__init__.py
testproject/__init__.py
#!/usr/bin/env python import os import sys os.environ.setdefault("DJANGO_SETTINGS_MODULE", "testproject.testproject.settings") from django.test.utils import get_runner from django.conf import settings def runtests(): # Stolen from django/core/management/commands/test.py TestRunner = get_runner(settings) test_runner = TestRunner(verbosity=1, interactive=True) failures = test_runner.run_tests(['argonauts']) sys.exit(bool(failures))
#!/usr/bin/env python import os import sys os.environ.setdefault("DJANGO_SETTINGS_MODULE", "testproject.testproject.settings") import django from django.test.utils import get_runner from django.conf import settings def runtests(): if hasattr(django, 'setup'): # django >= 1.7 django.setup() # Stolen from django/core/management/commands/test.py TestRunner = get_runner(settings) test_runner = TestRunner(verbosity=1, interactive=True) failures = test_runner.run_tests(['argonauts']) sys.exit(bool(failures))
Set up the app registry when running tests on django 1.7
Set up the app registry when running tests on django 1.7 (cherry picked from commit 7b3c994758254f16e8f47bf0f13b363d98082f20) Signed-off-by: Antoine Catton <4589f2e060cef08c47d868a06754605a848a1006@fusionbox.com>
Python
bsd-2-clause
fusionbox/django-argonauts
--- +++ @@ -3,10 +3,14 @@ import sys os.environ.setdefault("DJANGO_SETTINGS_MODULE", "testproject.testproject.settings") +import django from django.test.utils import get_runner from django.conf import settings def runtests(): + if hasattr(django, 'setup'): + # django >= 1.7 + django.setup() # Stolen from django/core/management/commands/test.py TestRunner = get_runner(settings) test_runner = TestRunner(verbosity=1, interactive=True)
80958236edd4390b3f2f9fe4848de68fdda60513
hurdles/tools.py
hurdles/tools.py
from functools import wraps def extra_setup(setup_code): """Allows to setup some extra context to it's decorated function. As a convention, the bench decorated function should always handle *args and **kwargs. Kwargs will be updated with the extra context set by the decorator. Example: @extra_setup("l = [x for x in xrange(100)]") def bench_len(self, *args, **kwargs): print len(kwargs['l']) """ def decorator(func): # Exec extra setup code and put it in a local # context passed to function through kwargs. context = {} exec setup_code in {}, context @wraps(func) def decorated_function(*args, **kwargs): kwargs.update(context) return func(*args, **kwargs) return decorated_function return decorator
from functools import wraps def extra_setup(setup_code): """Allows to setup some extra context to it's decorated function. As a convention, the bench decorated function should always handle *args and **kwargs. Kwargs will be updated with the extra context set by the decorator. Example: @extra_setup("l = [x for x in xrange(100)]") def bench_len(self, *args, **kwargs): print len(kwargs['l']) """ def decorator(func): # Exec extra setup code and put it in a local # context passed to function through kwargs. context = {} compiled_code = compile(setup_code, '<string>', 'exec') exec compiled_code in context @wraps(func) def decorated_function(*args, **kwargs): kwargs.update(context) return func(*args, **kwargs) return decorated_function return decorator
Update: extra_setup compiles code and runs it in a custom global context
Update: extra_setup compiles code and runs it in a custom global context
Python
mit
oleiade/Hurdles
--- +++ @@ -17,7 +17,8 @@ # Exec extra setup code and put it in a local # context passed to function through kwargs. context = {} - exec setup_code in {}, context + compiled_code = compile(setup_code, '<string>', 'exec') + exec compiled_code in context @wraps(func) def decorated_function(*args, **kwargs):
918c30a22bf6dc6e0c69037ba21759710c5e7602
glance_registry_local_check.py
glance_registry_local_check.py
#!/usr/bin/env python from maas_common import (status_ok, status_err, metric, metric_bool, get_auth_ref) from requests import Session from requests import exceptions as exc def check(auth_ref): registry_endpoint = 'http://127.0.0.1:9191' api_status = 1 milliseconds = 0 s = Session() s.headers.update( {'Content-type': 'application/json', 'x-auth-token': auth_ref['token']['id']}) try: # /images returns a list of public, non-deleted images r = s.get('%s/images' % registry_endpoint, verify=False, timeout=10) except (exc.ConnectionError, exc.HTTPError, exc.Timeout): api_status = 0 milliseconds = -1 except Exception as e: status_err(str(e)) else: milliseconds = r.elapsed.total_seconds() * 1000 if not r.ok: api_status = 0 status_ok() metric_bool('glance_registry_local_status', api_status) metric('glance_registry_local_response_time', 'int32', milliseconds) def main(): auth_ref = get_auth_ref() check(auth_ref) if __name__ == "__main__": main()
#!/usr/bin/env python from maas_common import (status_ok, status_err, metric, get_keystone_client, get_auth_ref) from requests import Session from requests import exceptions as exc def check(auth_ref): keystone = get_keystone_client(auth_ref) tenant_id = keystone.tenant_id auth_token = keystone.auth_token registry_endpoint = 'http://127.0.0.1:9191' api_status = 1 milliseconds = 0 s = Session() s.headers.update( {'Content-type': 'application/json', 'x-auth-token': auth_token}) try: # /images returns a list of public, non-deleted images r = s.get('%s/images' % registry_endpoint, verify=False, timeout=10) except (exc.ConnectionError, exc.HTTPError, exc.Timeout): api_status = 0 milliseconds = -1 except Exception as e: status_err(str(e)) else: milliseconds = r.elapsed.total_seconds() * 1000 if not r.ok: api_status = 0 status_ok() metric('glance_registry_local_status', 'uint32', api_status) metric('glance_registry_local_response_time', 'int32', milliseconds) def main(): auth_ref = get_auth_ref() check(auth_ref) if __name__ == "__main__": main()
Revert "Use new metric_bool function"
Revert "Use new metric_bool function" This reverts commit e186b59c83f187900ce4bfd8b02b735d068434bb.
Python
apache-2.0
sigmavirus24/rpc-openstack,stevelle/rpc-openstack,cfarquhar/rpc-maas,jacobwagner/rpc-openstack,andymcc/rpc-openstack,cfarquhar/rpc-maas,briancurtin/rpc-maas,darrenchan/rpc-openstack,cfarquhar/rpc-openstack,stevelle/rpc-openstack,claco/rpc-openstack,major/rpc-openstack,sigmavirus24/rpc-openstack,miguelgrinberg/rpc-openstack,npawelek/rpc-maas,prometheanfire/rpc-openstack,andymcc/rpc-openstack,briancurtin/rpc-maas,git-harry/rpc-openstack,miguelgrinberg/rpc-openstack,shannonmitchell/rpc-openstack,prometheanfire/rpc-openstack,hughsaunders/rpc-openstack,busterswt/rpc-openstack,briancurtin/rpc-maas,mattt416/rpc-openstack,mattt416/rpc-openstack,byronmccollum/rpc-openstack,npawelek/rpc-maas,miguelgrinberg/rpc-openstack,cfarquhar/rpc-openstack,darrenchan/rpc-openstack,busterswt/rpc-openstack,busterswt/rpc-openstack,rcbops/rpc-openstack,mancdaz/rpc-openstack,robb-romans/rpc-openstack,darrenchan/rpc-openstack,npawelek/rpc-maas,shannonmitchell/rpc-openstack,xeregin/rpc-openstack,sigmavirus24/rpc-openstack,jpmontez/rpc-openstack,xeregin/rpc-openstack,cloudnull/rpc-maas,xeregin/rpc-openstack,hughsaunders/rpc-openstack,mattt416/rpc-openstack,galstrom21/rpc-openstack,byronmccollum/rpc-openstack,rcbops/rpc-openstack,BjoernT/rpc-openstack,nrb/rpc-openstack,stevelle/rpc-openstack,jpmontez/rpc-openstack,jacobwagner/rpc-openstack,cloudnull/rpc-openstack,claco/rpc-openstack,darrenchan/rpc-openstack,mancdaz/rpc-openstack,cfarquhar/rpc-maas,nrb/rpc-openstack,major/rpc-openstack,xeregin/rpc-openstack,BjoernT/rpc-openstack,cloudnull/rpc-openstack,nrb/rpc-openstack,cloudnull/rpc-maas,robb-romans/rpc-openstack,sigmavirus24/rpc-openstack,byronmccollum/rpc-openstack,claco/rpc-openstack,git-harry/rpc-openstack,jpmontez/rpc-openstack,galstrom21/rpc-openstack,andymcc/rpc-openstack,cloudnull/rpc-maas
--- +++ @@ -1,20 +1,25 @@ #!/usr/bin/env python -from maas_common import (status_ok, status_err, metric, metric_bool, +from maas_common import (status_ok, status_err, metric, get_keystone_client, get_auth_ref) from requests import Session from requests import exceptions as exc def check(auth_ref): + keystone = get_keystone_client(auth_ref) + tenant_id = keystone.tenant_id + auth_token = keystone.auth_token registry_endpoint = 'http://127.0.0.1:9191' + api_status = 1 milliseconds = 0 + s = Session() s.headers.update( {'Content-type': 'application/json', - 'x-auth-token': auth_ref['token']['id']}) + 'x-auth-token': auth_token}) try: # /images returns a list of public, non-deleted images @@ -31,7 +36,7 @@ api_status = 0 status_ok() - metric_bool('glance_registry_local_status', api_status) + metric('glance_registry_local_status', 'uint32', api_status) metric('glance_registry_local_response_time', 'int32', milliseconds)
3ec825c5ad8f1b4bacbbe9921d7caa46ad5ccd55
comrade/core/context_processors.py
comrade/core/context_processors.py
from django.conf import settings from django.contrib.sites.models import Site from settings import DeploymentType def default(request): context = {} context['DEPLOYMENT'] = settings.DEPLOYMENT context['site'] = Site.objects.get_current() if settings.DEPLOYMENT != DeploymentType.PRODUCTION: context['GIT_COMMIT'] = settings.GIT_COMMIT return context def ssl_media(request): if request.is_secure(): ssl_media_url = settings.MEDIA_URL.replace('http://','https://') else: ssl_media_url = settings.MEDIA_URL return {'MEDIA_URL': ssl_media_url}
from django.conf import settings from django.contrib.sites.models import Site from settings import DeploymentType def default(request): context = {} context['DEPLOYMENT'] = settings.DEPLOYMENT context['site'] = Site.objects.get_current() if settings.DEPLOYMENT != DeploymentType.PRODUCTION: context['GIT_COMMIT'] = settings.GIT_COMMIT context['site_email'] = settings.CONTACT_EMAIL return context def ssl_media(request): if request.is_secure(): ssl_media_url = settings.MEDIA_URL.replace('http://','https://') else: ssl_media_url = settings.MEDIA_URL return {'MEDIA_URL': ssl_media_url}
Add contact email to context.
Add contact email to context.
Python
mit
bueda/django-comrade
--- +++ @@ -8,6 +8,7 @@ context['site'] = Site.objects.get_current() if settings.DEPLOYMENT != DeploymentType.PRODUCTION: context['GIT_COMMIT'] = settings.GIT_COMMIT + context['site_email'] = settings.CONTACT_EMAIL return context def ssl_media(request):
cfbe762a0e752dcb58edbbd0835371ecc300d3f4
nosewatch/plugin.py
nosewatch/plugin.py
import sys from nose.plugins import Plugin from subprocess import Popen class WatchPlugin(Plugin): """ Plugin that use watchdog for continuous tests run. """ name = 'watch' is_watching = False sys = sys def call(self, args): Popen(args).wait() def finalize(self, result): argv = list(self.sys.argv) argv.remove('--with-watch') watchcmd = 'clear && ' + ' '.join(argv) call_args = ['watchmedo', 'shell-command', '-c', watchcmd, '-R', '-p', '*.py', '.'] try: self.call(call_args) except KeyboardInterrupt: self.sys.stdout.write('\nStopped\n')
import sys from nose.plugins import Plugin from subprocess import Popen class WatchPlugin(Plugin): """ Plugin that use watchdog for continuous tests run. """ name = 'watch' is_watching = False sys = sys def call(self, args): Popen(args).wait() def finalize(self, result): argv = list(self.sys.argv) try: argv.remove('--with-watch') except ValueError: pass watchcmd = 'clear && ' + ' '.join(argv) call_args = ['watchmedo', 'shell-command', '-c', watchcmd, '-R', '-p', '*.py', '.'] try: self.call(call_args) except KeyboardInterrupt: self.sys.stdout.write('\nStopped\n')
Add try block to argv remove statement
Add try block to argv remove statement When running nose-watch within an environment like django-nose, argv doesn't contain any arguments to remove.
Python
bsd-2-clause
lukaszb/nose-watch,lukaszb/nose-watch
--- +++ @@ -16,7 +16,10 @@ def finalize(self, result): argv = list(self.sys.argv) - argv.remove('--with-watch') + try: + argv.remove('--with-watch') + except ValueError: + pass watchcmd = 'clear && ' + ' '.join(argv) call_args = ['watchmedo', 'shell-command', '-c', watchcmd, '-R', '-p', '*.py', '.'] @@ -24,4 +27,3 @@ self.call(call_args) except KeyboardInterrupt: self.sys.stdout.write('\nStopped\n') -
cf99ee9a106b862ee3401399224e4730d758eacc
rated/middleware.py
rated/middleware.py
from . import settings from django.http import HttpResponse import time import redis # Connection pool POOL = redis.ConnectionPool(**settings.REDIS) class RatedMiddleware(object): def process_view(self, request, view_func, view_args, view_kwargs): # Try to determine the realm for this view try: realm = view_func._rated_realm except AttributeError: try: realm = settings.REALM_MAP[request.resolver_match.url_name] except KeyError: return None # should we also try the view name? source = request.META['REMOTE_ADDR'] conf = settings.REALMS.get(realm, {}) # Check against Realm whitelist if source in conf.get('whitelist', settings.DEFAULT_WHITELIST): return None key = 'rated:%s:%s' % (realm, source,) now = time.time() client = redis.Redis(connection_pool=POOL) # Do commands at once for speed pipe = client.pipeline() # Add our timestamp to the range pipe.zadd(key, now, now) # Update to not expire for another hour pipe.expireat(key, int(now + conf.get('timeout', settings.DEFAULT_TIMEOUT))) # Remove old values pipe.zremrangebyscore(key, '-inf', now - settings.DEFAULT_TIMEOUT) # Test count pipe.zcard(key) size = pipe.execute()[-1] if size > conf.get('limit', settings.DEFAULT_LIMIT): return HttpResponse(status=501) return None
from . import settings from django.http import HttpResponse import time import redis # Connection pool POOL = redis.ConnectionPool(**settings.REDIS) class RatedMiddleware(object): def process_view(self, request, view_func, view_args, view_kwargs): # Try to determine the realm for this view try: realm = view_func._rated_realm except AttributeError: try: realm = settings.REALM_MAP[request.resolver_match.url_name] except KeyError: return None # should we also try the view name? source = request.META['REMOTE_ADDR'] conf = settings.REALMS.get(realm, {}) # Check against Realm whitelist if source in conf.get('whitelist', settings.DEFAULT_WHITELIST): return None key = 'rated:%s:%s' % (realm, source,) now = time.time() client = redis.Redis(connection_pool=POOL) # Do commands at once for speed pipe = client.pipeline() # Add our timestamp to the range pipe.zadd(key, now, now) # Update to not expire for another hour pipe.expireat(key, int(now + conf.get('timeout', settings.DEFAULT_TIMEOUT))) # Remove old values pipe.zremrangebyscore(key, '-inf', now - settings.DEFAULT_TIMEOUT) # Test count pipe.zcard(key) size = pipe.execute()[-1] if size > conf.get('limit', settings.DEFAULT_LIMIT): return HttpResponse(status=429) return None
Use more appropriate status code
Use more appropriate status code
Python
bsd-3-clause
funkybob/django-rated
--- +++ @@ -46,5 +46,5 @@ pipe.zcard(key) size = pipe.execute()[-1] if size > conf.get('limit', settings.DEFAULT_LIMIT): - return HttpResponse(status=501) + return HttpResponse(status=429) return None
c3762443859ada75687e5a62d576fe8140a42a7c
tests/test_csv2iati.py
tests/test_csv2iati.py
import pytest from web_test_base import * class TestCSV2IATI(WebTestBase): requests_to_load = { 'CSV2IATI Homepage': { 'url': 'http://csv2iati.iatistandard.org/' } } def test_contains_links(self, loaded_request): """ Test that each page contains links to the defined URLs. """ result = utility.get_links_from_page(loaded_request) assert "http://iatistandard.org" in result @pytest.mark.parametrize("target_request", ["CSV2IATI Homepage"]) def test_login_form_presence(self, target_request): """ Test that there is a valid login form on the CSV2IATI Homepage. """ req = self.loaded_request_from_test_name(target_request) form_xpath = '//*[@id="login_register_forms_container"]/fieldset[1]/form' form_action_xpath = '//*[@id="login_register_forms_container"]/fieldset[1]/form/@action' input_xpath = '//*[@id="login_register_forms_container"]/fieldset[1]/form/dl/dd/input' forms = utility.locate_xpath_result(req, form_xpath) form_action = utility.locate_xpath_result(req, form_action_xpath) form_inputs = utility.locate_xpath_result(req, input_xpath) assert len(forms) == 1 assert form_action == ['/login'] assert len(form_inputs) == 3
import pytest from web_test_base import * class TestCSV2IATI(WebTestBase): requests_to_load = { 'CSV2IATI Homepage': { 'url': 'http://csv2iati.iatistandard.org/' } }
Remove redundant csv2iati test now site has been decommissioned
Remove redundant csv2iati test now site has been decommissioned
Python
mit
IATI/IATI-Website-Tests
--- +++ @@ -7,29 +7,3 @@ 'url': 'http://csv2iati.iatistandard.org/' } } - - def test_contains_links(self, loaded_request): - """ - Test that each page contains links to the defined URLs. - """ - result = utility.get_links_from_page(loaded_request) - - assert "http://iatistandard.org" in result - - @pytest.mark.parametrize("target_request", ["CSV2IATI Homepage"]) - def test_login_form_presence(self, target_request): - """ - Test that there is a valid login form on the CSV2IATI Homepage. - """ - req = self.loaded_request_from_test_name(target_request) - form_xpath = '//*[@id="login_register_forms_container"]/fieldset[1]/form' - form_action_xpath = '//*[@id="login_register_forms_container"]/fieldset[1]/form/@action' - input_xpath = '//*[@id="login_register_forms_container"]/fieldset[1]/form/dl/dd/input' - - forms = utility.locate_xpath_result(req, form_xpath) - form_action = utility.locate_xpath_result(req, form_action_xpath) - form_inputs = utility.locate_xpath_result(req, input_xpath) - - assert len(forms) == 1 - assert form_action == ['/login'] - assert len(form_inputs) == 3
6c3b5a314c9ba25fba2b0a605215d2fdad97e8dc
tests/test_evaluate.py
tests/test_evaluate.py
import numpy as np from numpy.testing import assert_equal from gala import evaluate as ev def test_contingency_table(): seg = np.array([0, 1, 1, 1, 2, 2, 2, 3]) gt = np.array([1, 1, 1, 2, 2, 2, 2, 0]) ct = ev.contingency_table(seg, gt, ignore_seg=[], ignore_gt=[]) ct0 = ev.contingency_table(seg, gt, ignore_seg=[0], ignore_gt=[0]) ctd = ct.todense() assert_equal(ctd, np.array([[0. , 0.125, 0. ], [0. , 0.25 , 0.125], [0. , 0. , 0.375], [0.125, 0. , 0. ]])) assert ct.shape == ct0.shape def test_vi(): seg = np.array([1, 2, 3, 4]) gt = np.array([1, 1, 8, 8]) assert_equal(ev.vi(seg, gt), 1)
import numpy as np from numpy.testing import assert_equal from gala import evaluate as ev def test_contingency_table(): seg = np.array([0, 1, 1, 1, 2, 2, 2, 3]) gt = np.array([1, 1, 1, 2, 2, 2, 2, 0]) ct = ev.contingency_table(seg, gt, ignore_seg=[], ignore_gt=[]) ct0 = ev.contingency_table(seg, gt, ignore_seg=[0], ignore_gt=[0]) ctd = ct.todense() assert_equal(ctd, np.array([[0. , 0.125, 0. ], [0. , 0.25 , 0.125], [0. , 0. , 0.375], [0.125, 0. , 0. ]])) assert ct.shape == ct0.shape def test_vi(): seg = np.array([1, 2, 3, 4]) gt = np.array([1, 1, 8, 8]) assert_equal(ev.vi(seg, gt), 1) def test_are(): seg = np.eye(3) gt = np.eye(3) seg[1][1] = 0 assert seg.shape == gt.shape
Add basic test to assert same shape of seg and gt
Add basic test to assert same shape of seg and gt
Python
bsd-3-clause
jni/gala,janelia-flyem/gala
--- +++ @@ -19,3 +19,9 @@ seg = np.array([1, 2, 3, 4]) gt = np.array([1, 1, 8, 8]) assert_equal(ev.vi(seg, gt), 1) + +def test_are(): + seg = np.eye(3) + gt = np.eye(3) + seg[1][1] = 0 + assert seg.shape == gt.shape
1ac69ebb2b1ea3218f5c01338af3df86f3828090
jarvis-system/orchestrator/src/asyncworker.py
jarvis-system/orchestrator/src/asyncworker.py
# Gets a list of services from the service resolver for each query. The module then # requests each service with the query in async fashion and returns a json response import requests import string import responsefilter def do(service_dict): url = _get_service_url(service_dict) return responsefilter.do(service_dict['service_name'], requests.get(url).json()) def _get_service_url(service_dict): # TODO: Error handling url_builder = ['http://'] url_builder.append(service_dict['service_id']) url_builder.append('.') url_builder.append(service_dict['namespace']) url_builder.append('.svc.cluster.local:') url_builder.append(service_dict['service_port']) url_builder.append(service_dict['endpoint']) url_builder.append('?') url_builder.append(service_dict['query']) return string.join(url_builder, '')
# Gets a list of services from the service resolver for each query. The module then # requests each service with the query in async fashion and returns a json response import requests import string import responsefilter def do(service_dict): url = _get_service_url(service_dict) return responsefilter.do(service_dict['service_name'], requests.get(url).json()) def _get_service_url(service_dict): # TODO: Error handling url_builder = ['http://'] url_builder.append(service_dict['service_id']) url_builder.append('.') url_builder.append(service_dict['namespace']) url_builder.append('.svc.cluster.local:') url_builder.append(service_dict['service_port']) url_builder.append(service_dict['endpoint']) url_builder.append('?query=') url_builder.append(service_dict['query']) return string.join(url_builder, '')
Fix bug in url creation.
Fix bug in url creation.
Python
mit
lahsivjar/jarvis-kube
--- +++ @@ -17,7 +17,7 @@ url_builder.append('.svc.cluster.local:') url_builder.append(service_dict['service_port']) url_builder.append(service_dict['endpoint']) - url_builder.append('?') + url_builder.append('?query=') url_builder.append(service_dict['query']) return string.join(url_builder, '')
96755c5e3ccf0573e7190da2a4a9264fdf409710
linter.py
linter.py
# # linter.py # Markdown Linter for SublimeLinter, a code checking framework # for Sublime Text 3 # # Written by Jon LaBelle # Copyright (c) 2018 Jon LaBelle # # License: MIT # """This module exports the Markdownlint plugin class.""" from SublimeLinter.lint import NodeLinter, util class MarkdownLint(NodeLinter): """Provides an interface to markdownlint.""" syntax = ('markdown', 'markdown gfm', 'multimarkdown', 'markdown extended') cmd = ('markdownlint', '${args}', '${file}') npm_name = 'markdownlint' config_file = ('--config', '.markdownlintrc') regex = r'.+?[:]\s(?P<line>\d+)[:]\s(?P<error>MD\d+)?[/]?(?P<message>.+)' multiline = False line_col_base = (1, 1) tempfile_suffix = '-' error_stream = util.STREAM_STDERR word_re = None comment_re = r'\s*/[/*]'
# # linter.py # Markdown Linter for SublimeLinter, a code checking framework # for Sublime Text 3 # # Written by Jon LaBelle # Copyright (c) 2018 Jon LaBelle # # License: MIT # """This module exports the Markdownlint plugin class.""" from SublimeLinter.lint import NodeLinter, util class MarkdownLint(NodeLinter): """Provides an interface to markdownlint.""" defaults = { 'selector': 'text.html.markdown,' 'text.html.markdown.multimarkdown,' 'text.html.markdown.extended,' 'text.html.markdown.gfm' } cmd = ('markdownlint', '${args}', '${file}') npm_name = 'markdownlint' config_file = ('--config', '.markdownlintrc') regex = r'.+?[:]\s(?P<line>\d+)[:]\s(?P<error>MD\d+)?[/]?(?P<message>.+)' multiline = False line_col_base = (1, 1) tempfile_suffix = '-' error_stream = util.STREAM_STDERR word_re = None comment_re = r'\s*/[/*]'
Remove deprecated SL 'syntax' property override
Remove deprecated SL 'syntax' property override Replaced by 'defaults/selector': http://www.sublimelinter.com/en/stable/linter_settings.html#selector
Python
mit
jonlabelle/SublimeLinter-contrib-markdownlint,jonlabelle/SublimeLinter-contrib-markdownlint
--- +++ @@ -16,8 +16,12 @@ class MarkdownLint(NodeLinter): """Provides an interface to markdownlint.""" - - syntax = ('markdown', 'markdown gfm', 'multimarkdown', 'markdown extended') + defaults = { + 'selector': 'text.html.markdown,' + 'text.html.markdown.multimarkdown,' + 'text.html.markdown.extended,' + 'text.html.markdown.gfm' + } cmd = ('markdownlint', '${args}', '${file}') npm_name = 'markdownlint' config_file = ('--config', '.markdownlintrc')
9b2ee2e4a956f8409047b289ee5c35ad4fcb4310
insanity/layers.py
insanity/layers.py
import numpy as np import theano import theano.tensor as T from theano.tensor.nnet import conv from theano.tensor.nnet import softmax from theano.tensor import shared_randomstreams from theano.tensor.signal import downsample class NeuralNetworkLayer(object): def __init__(self, numInputs, input, inputDropout, numNeurons, activation, miniBatchSize, dropoutAmount=0.0): self.numInputs = numInputs self.input = input self.inputDropout = inputDropout self.numNeurons = numNeurons self.activation = activation self.miniBatchSize = miniBatchSize self.dropoutAmount = dropoutAmount #Initialize weights. self.weights = theano.shared( np.asarray( np.random.normal( loc=0.0, scale=np.sqrt(1.0/n_out), size=(self.numInputs, self.numNeurons)), dtype=theano.config.floatX), name='weights', borrow=True) #Initialize biases. self.biases = theano.shared( np.asarray( np.random.normal( loc=0.0, scale=1.0, size=(self.numNeurons,)), dtype=theano.config.floatX), name='biases', borrow=True) #Store parameters to be learned in an attribute so that they can be externally accessed. self.learningParams = [self.weights, self.biases] #Define layer outputs. self.output, self.outputDropout = self.configureProcessing( self.input, self.inputDropout, self.weights, self.biases, self.miniBatchSize, self.dropoutAmount) class FullyConnectedLayer(NeuralNetworkLayer): def configureProcessing(input, inputDropout, weights, biases, miniBatchSize, dropoutAmount): # TODO things go here return output, outputDropout
import numpy as np import theano import theano.tensor as T from theano.tensor.nnet import conv from theano.tensor.nnet import softmax from theano.tensor import shared_randomstreams from theano.tensor.signal import downsample class Layer(object): def __init__(self, numInputs, numNeurons, activation): self.numInputs = numInputs self.numNeurons = numNeurons self.activation = activation #Initialize weights. self.weights = theano.shared( np.asarray( np.random.normal( loc=0.0, scale=np.sqrt(1.0/self.numNeurons), size=(self.numInputs, self.numNeurons)), dtype=theano.config.floatX), name='weights', borrow=True) #Initialize biases. self.biases = theano.shared( np.asarray( np.random.normal( loc=0.0, scale=1.0, size=(self.numNeurons,)), dtype=theano.config.floatX), name='biases', borrow=True) @property def input(value): #Configure the layer output. self.output = something class FullyConnectedLayer(Layer):
Add @property for network layer inputs
Add @property for network layer inputs
Python
cc0-1.0
cn04/insanity
--- +++ @@ -8,22 +8,18 @@ -class NeuralNetworkLayer(object): +class Layer(object): - def __init__(self, numInputs, input, inputDropout, numNeurons, activation, miniBatchSize, dropoutAmount=0.0): + def __init__(self, numInputs, numNeurons, activation): self.numInputs = numInputs - self.input = input - self.inputDropout = inputDropout self.numNeurons = numNeurons self.activation = activation - self.miniBatchSize = miniBatchSize - self.dropoutAmount = dropoutAmount #Initialize weights. self.weights = theano.shared( np.asarray( np.random.normal( - loc=0.0, scale=np.sqrt(1.0/n_out), size=(self.numInputs, self.numNeurons)), + loc=0.0, scale=np.sqrt(1.0/self.numNeurons), size=(self.numInputs, self.numNeurons)), dtype=theano.config.floatX), name='weights', borrow=True) @@ -32,20 +28,17 @@ np.asarray( np.random.normal( loc=0.0, scale=1.0, size=(self.numNeurons,)), - dtype=theano.config.floatX), + dtype=theano.config.floatX), name='biases', borrow=True) - - #Store parameters to be learned in an attribute so that they can be externally accessed. - self.learningParams = [self.weights, self.biases] - #Define layer outputs. - self.output, self.outputDropout = self.configureProcessing( - self.input, self.inputDropout, self.weights, self.biases, self.miniBatchSize, self.dropoutAmount) + @property + def input(value): + #Configure the layer output. + self.output = something -class FullyConnectedLayer(NeuralNetworkLayer): +class FullyConnectedLayer(Layer): - def configureProcessing(input, inputDropout, weights, biases, miniBatchSize, dropoutAmount): - # TODO things go here - return output, outputDropout + +
f707f0daec148ae16604a8fb1326fe10155d4453
tunobase/corporate/company_info/contact/tasks.py
tunobase/corporate/company_info/contact/tasks.py
""" CONTACT APP This module provides a variety of tasks to be queued by Celery. Classes: n/a Functions: email_contact_message Created on 21 Oct 2013 @author: michael """ from celery.decorators import task from django.conf import settings from tunobase.mailer import utils as mailer_utils @task(default_retry_delay=10 * 60) def email_contact_message(contact_message_id): """ Sends a Contact Message email to the Site's owners/support team """ try: from tunobase.corporate.company_info.contact import models contact_message = models.ContactMessage.objects.get( pk=contact_message_id ) user = contact_message.user ctx_dict = { 'contact_message' : contact_message, } mailer_utils.send_mail( subject='email/subjects/contact_message_subject.txt', html_content='email/html/contact_message.html', text_content='email/txt/contact_message.txt', context=ctx_dict, to_addresses=[settings.CONTACT_MESSAGE_TO_EMAIL,], user=user ) except Exception, exc: raise email_contact_message.retry(exc=exc)
""" CONTACT APP This module provides a variety of tasks to be queued by Celery. Classes: n/a Functions: email_contact_message Created on 21 Oct 2013 @author: michael """ from celery.decorators import task from django.conf import settings from tunobase.mailer import utils as mailer_utils @task(default_retry_delay=10 * 60) def email_contact_message(contact_message_id): """ Sends a Contact Message email to the Site's owners/support team """ try: from tunobase.corporate.company_info.contact import models contact_message = models.ContactMessage.objects.get( pk=contact_message_id ) user = contact_message.user ctx_dict = { 'contact_message' : contact_message, } mailer_utils.send_mail( subject='email/subjects/contact_message_subject.txt', html_content='email/html/contact_message.html', text_content='email/txt/contact_message.txt', context=ctx_dict, from_address=contact_message.email or settings.CONTACT_MESSAGE_TO_EMAIL, to_addresses=[settings.CONTACT_MESSAGE_TO_EMAIL,], user=user ) except Exception, exc: raise email_contact_message.retry(exc=exc)
Send the contact-us message from the email the user entered on the site, or use the default setting
Send the contact-us message from the email the user entered on the site, or use the default setting
Python
bsd-3-clause
unomena/tunobase,unomena/tunobase
--- +++ @@ -43,6 +43,7 @@ html_content='email/html/contact_message.html', text_content='email/txt/contact_message.txt', context=ctx_dict, + from_address=contact_message.email or settings.CONTACT_MESSAGE_TO_EMAIL, to_addresses=[settings.CONTACT_MESSAGE_TO_EMAIL,], user=user )
d410c30ba779ac0b32fdac7bf15733b1edacafe7
src/weitersager/util.py
src/weitersager/util.py
""" weitersager.util ~~~~~~~~~~~~~~~~ Utilities :Copyright: 2007-2021 Jochen Kupperschmidt :License: MIT, see LICENSE for details. """ import logging from logging import Formatter, StreamHandler from threading import Thread from typing import Callable def configure_logging(level: str) -> None: """Configure application-specific loggers. Setting the log level does not affect dependencies' loggers. """ # Get the parent logger of all application-specific # loggers defined in the package's modules. pkg_logger = logging.getLogger(__package__) # Configure handler that writes to STDERR. handler = StreamHandler() handler.setFormatter(Formatter('%(asctime)s %(levelname)-8s %(message)s')) pkg_logger.addHandler(handler) pkg_logger.setLevel(level) def start_thread(target: Callable, name: str) -> None: """Create, configure, and start a new thread.""" t = Thread(target=target, name=name, daemon=True) t.start()
""" weitersager.util ~~~~~~~~~~~~~~~~ Utilities :Copyright: 2007-2021 Jochen Kupperschmidt :License: MIT, see LICENSE for details. """ import logging from logging import Formatter, StreamHandler from threading import Thread from typing import Callable, Optional def configure_logging(level: str) -> None: """Configure application-specific loggers. Setting the log level does not affect dependencies' loggers. """ # Get the parent logger of all application-specific # loggers defined in the package's modules. pkg_logger = logging.getLogger(__package__) # Configure handler that writes to STDERR. handler = StreamHandler() handler.setFormatter(Formatter('%(asctime)s %(levelname)-8s %(message)s')) pkg_logger.addHandler(handler) pkg_logger.setLevel(level) def start_thread(target: Callable, name: Optional[str] = None) -> None: """Create, configure, and start a new thread.""" t = Thread(target=target, name=name, daemon=True) t.start()
Make thread name argument optional
Make thread name argument optional
Python
mit
homeworkprod/weitersager
--- +++ @@ -11,7 +11,7 @@ import logging from logging import Formatter, StreamHandler from threading import Thread -from typing import Callable +from typing import Callable, Optional def configure_logging(level: str) -> None: @@ -31,7 +31,7 @@ pkg_logger.setLevel(level) -def start_thread(target: Callable, name: str) -> None: +def start_thread(target: Callable, name: Optional[str] = None) -> None: """Create, configure, and start a new thread.""" t = Thread(target=target, name=name, daemon=True) t.start()
4ac3c1d5feaadd46a13bc78f9834c29a789e0d9a
manage.py
manage.py
#!/usr/bin/env python import os import unittest from flask.ext.script import Manager, Server from app import app manager = Manager(app) server = Server(host='0.0.0.0', use_debugger=True) manager.add_command("runserver", server) @manager.command def test(): """Runs all the tests""" parent_dir = os.path.dirname(os.path.abspath(__file__)) test_dir = os.path.join(parent_dir, 'tests') tests = unittest.TestLoader().discover(test_dir) results = unittest.TextTestRunner(verbosity=2).run(tests) if __name__ == '__main__': manager.run()
#!/usr/bin/env python import os import sys import unittest from flask.ext.script import Manager, Server from app import app manager = Manager(app) server = Server(host='0.0.0.0', use_debugger=True) manager.add_command("runserver", server) @manager.command def test(): """Runs all the tests""" parent_dir = os.path.dirname(os.path.abspath(__file__)) test_dir = os.path.join(parent_dir, 'tests') tests = unittest.TestLoader().discover(test_dir) results = unittest.TextTestRunner(verbosity=2).run(tests) ret = not results.wasSuccessful() sys.exit(ret) if __name__ == '__main__': manager.run()
Set the exit code correctly when running tests
Set the exit code correctly when running tests
Python
mit
andytom/snippets,andytom/snippets
--- +++ @@ -1,5 +1,6 @@ #!/usr/bin/env python import os +import sys import unittest from flask.ext.script import Manager, Server from app import app @@ -19,6 +20,8 @@ test_dir = os.path.join(parent_dir, 'tests') tests = unittest.TestLoader().discover(test_dir) results = unittest.TextTestRunner(verbosity=2).run(tests) + ret = not results.wasSuccessful() + sys.exit(ret) if __name__ == '__main__':
530549f7fe2c6bbf45996e17b2b125150ad031ae
manage.py
manage.py
#!/usr/bin/env python """ Run the Varda REST server. To reset the database: from varda import db db.drop_all() db.create_all() """ from flaskext.script import Manager from flaskext.celery import install_commands as install_celery_commands from varda import app, db manager = Manager(app) install_celery_commands(manager) @manager.command def createdb(): """ Create the SQLAlchemy database. """ db.drop_all() db.create_all() if __name__ == '__main__': manager.run()
#!/usr/bin/env python """ Run the Varda REST server. To setup the database: create database varda; create database vardacelery; create database vardaresults; grant all privileges on varda.* to varda@localhost identified by 'varda'; grant all privileges on vardacelery.* to varda@localhost identified by 'varda'; grant all privileges on vardaresults.* to varda@localhost identified by 'varda'; To reset the database: from varda import db db.drop_all() db.create_all() """ from flaskext.script import Manager from flaskext.celery import install_commands as install_celery_commands from varda import app, db manager = Manager(app) install_celery_commands(manager) @manager.command def createdb(): """ Create the SQLAlchemy database. """ db.drop_all() db.create_all() if __name__ == '__main__': manager.run()
Add some notes on database setup
Add some notes on database setup
Python
mit
varda/varda,sndrtj/varda
--- +++ @@ -1,6 +1,15 @@ #!/usr/bin/env python """ Run the Varda REST server. + +To setup the database: + + create database varda; + create database vardacelery; + create database vardaresults; + grant all privileges on varda.* to varda@localhost identified by 'varda'; + grant all privileges on vardacelery.* to varda@localhost identified by 'varda'; + grant all privileges on vardaresults.* to varda@localhost identified by 'varda'; To reset the database:
830e1a23559b82af37f52657484edd20641318c5
teamsupport/__init__.py
teamsupport/__init__.py
# -*- coding: utf-8 -*- from __future__ import absolute_import from teamsupport.services import TeamSupportService __author__ = 'Yola Engineers' __email__ = 'engineers@yola.com' __version__ = '0.1.0' __all__ = (TeamSupportService,)
# -*- coding: utf-8 -*- from __future__ import absolute_import from teamsupport.models import Action, Ticket from teamsupport.services import TeamSupportService __author__ = 'Yola Engineers' __email__ = 'engineers@yola.com' __version__ = '0.1.0' __all__ = (Action, TeamSupportService, Ticket,)
Add convenience imports for models
Add convenience imports for models
Python
mit
zoidbergwill/teamsupport-python,yola/teamsupport-python
--- +++ @@ -1,10 +1,11 @@ # -*- coding: utf-8 -*- from __future__ import absolute_import +from teamsupport.models import Action, Ticket from teamsupport.services import TeamSupportService __author__ = 'Yola Engineers' __email__ = 'engineers@yola.com' __version__ = '0.1.0' -__all__ = (TeamSupportService,) +__all__ = (Action, TeamSupportService, Ticket,)
0b0a11d01378368fb795c117ffd931811eaf9b76
pyhunter/exceptions.py
pyhunter/exceptions.py
class PyhunterError(Exception): """ Generic exception class for the library """ pass class MissingCompanyError(PyhunterError): pass class MissingNameError(PyhunterError): pass class HunterApiError(PyhunterError): """ Represents something went wrong in the call to the Hunter API """ pass
class PyhunterError(Exception): """ Generic exception class for the library """ pass class MissingCompanyError(PyhunterError): pass class MissingNameError(PyhunterError): pass class HunterApiError(PyhunterError): """ Represents something went wrong in the call to the Hunter API """ pass
Add new line at end of file
Add new line at end of file
Python
mit
VonStruddle/PyHunter
--- +++ @@ -1,4 +1,3 @@ - class PyhunterError(Exception): """ Generic exception class for the library
404d499877c050f706ca0f713d1c8ef0e5a88889
ooni/tests/bases.py
ooni/tests/bases.py
from twisted.trial import unittest from ooni.settings import config class ConfigTestCase(unittest.TestCase): def setUp(self): config.initialize_ooni_home("ooni_home") def tearDown(self): config.read_config_file()
import os from twisted.trial import unittest from ooni.settings import config class ConfigTestCase(unittest.TestCase): def setUp(self): config.global_options['datadir'] = os.path.join(__file__, '..', '..', '..', 'data') config.global_options['datadir'] = os.path.abspath(config.global_options['datadir']) config.initialize_ooni_home("ooni_home") def tearDown(self): config.read_config_file()
Set the data directory to be that of the repo.
Set the data directory to be that of the repo.
Python
bsd-2-clause
kdmurray91/ooni-probe,lordappsec/ooni-probe,Karthikeyan-kkk/ooni-probe,lordappsec/ooni-probe,Karthikeyan-kkk/ooni-probe,juga0/ooni-probe,juga0/ooni-probe,kdmurray91/ooni-probe,Karthikeyan-kkk/ooni-probe,lordappsec/ooni-probe,0xPoly/ooni-probe,0xPoly/ooni-probe,juga0/ooni-probe,Karthikeyan-kkk/ooni-probe,0xPoly/ooni-probe,juga0/ooni-probe,lordappsec/ooni-probe,kdmurray91/ooni-probe,kdmurray91/ooni-probe,0xPoly/ooni-probe
--- +++ @@ -1,3 +1,5 @@ +import os + from twisted.trial import unittest from ooni.settings import config @@ -5,6 +7,8 @@ class ConfigTestCase(unittest.TestCase): def setUp(self): + config.global_options['datadir'] = os.path.join(__file__, '..', '..', '..', 'data') + config.global_options['datadir'] = os.path.abspath(config.global_options['datadir']) config.initialize_ooni_home("ooni_home") def tearDown(self):
eacc1f88f7e34e26c3a4d29ec009b4984c10a345
SimPEG/Mesh/__init__.py
SimPEG/Mesh/__init__.py
from TensorMesh import TensorMesh from CylMesh import CylMesh from Cyl1DMesh import Cyl1DMesh from LogicallyRectMesh import LogicallyRectMesh from TreeMesh import TreeMesh from BaseMesh import BaseMesh
from TensorMesh import TensorMesh from CylMesh import CylMesh from LogicallyRectMesh import LogicallyRectMesh from TreeMesh import TreeMesh from BaseMesh import BaseMesh
Remove Cyl1DMesh from init file...
Remove Cyl1DMesh from init file...
Python
mit
simpeg/discretize,simpeg/simpeg,simpeg/discretize,simpeg/discretize
--- +++ @@ -1,6 +1,5 @@ from TensorMesh import TensorMesh from CylMesh import CylMesh -from Cyl1DMesh import Cyl1DMesh from LogicallyRectMesh import LogicallyRectMesh from TreeMesh import TreeMesh from BaseMesh import BaseMesh
26330025ccdeab7febd69c7f9053a99ac46d421b
cactus/static/external/manager.py
cactus/static/external/manager.py
class ExternalManager(object): """ Manager the active externals """ def __init__(self, processors=None, optimizers=None): self.processors = processors if processors is not None else [] self.optimizers = optimizers if optimizers is not None else [] def _register(self, external, externals): externals.insert(0, external) def clear(self): """ Clear this manager """ self.processors = [] self.optimizers = [] def register_processor(self, processor): """ Add a new processor to the list of processors This processor will be added with maximum priority """ self._register(processor, self.processors) def register_optimizer(self, optimizer): """ Add a new optimizer to the list of optimizer This optimizer will be added with maximum priority """ self._register(optimizer, self.optimizers)
class ExternalManager(object): """ Manager the active externals """ def __init__(self, processors=None, optimizers=None): self.processors = processors if processors is not None else [] self.optimizers = optimizers if optimizers is not None else [] def _register(self, external, externals): externals.insert(0, external) def _deregister(self, external, externals): externals.remove(external) def clear(self): """ Clear this manager """ self.processors = [] self.optimizers = [] def register_processor(self, processor): """ Add a new processor to the list of processors This processor will be added with maximum priority """ self._register(processor, self.processors) def deregister_processor(self, processor): """ Remove an existing processor from the list Will raise a ValueError if the processor is not present """ self._deregister(processor, self.processors) def register_optimizer(self, optimizer): """ Add a new optimizer to the list of optimizer This optimizer will be added with maximum priority """ self._register(optimizer, self.optimizers) def deregister_optimizer(self, processor): """ Remove an existing optimizer from the list Will raise a ValueError if the optimizer is not present """ self._deregister(processor, self.optimizers)
Add option to unregister externals
Add option to unregister externals
Python
bsd-3-clause
page-io/Cactus,juvham/Cactus,ibarria0/Cactus,gone/Cactus,Knownly/Cactus,ibarria0/Cactus,dreadatour/Cactus,danielmorosan/Cactus,PegasusWang/Cactus,dreadatour/Cactus,andyzsf/Cactus-,Bluetide/Cactus,koenbok/Cactus,Knownly/Cactus,PegasusWang/Cactus,koobs/Cactus,ibarria0/Cactus,eudicots/Cactus,page-io/Cactus,chaudum/Cactus,koobs/Cactus,Bluetide/Cactus,dreadatour/Cactus,juvham/Cactus,juvham/Cactus,koenbok/Cactus,andyzsf/Cactus-,page-io/Cactus,koobs/Cactus,gone/Cactus,koenbok/Cactus,eudicots/Cactus,fjxhkj/Cactus,Bluetide/Cactus,chaudum/Cactus,fjxhkj/Cactus,chaudum/Cactus,Knownly/Cactus,gone/Cactus,PegasusWang/Cactus,danielmorosan/Cactus,fjxhkj/Cactus,danielmorosan/Cactus,eudicots/Cactus,andyzsf/Cactus-
--- +++ @@ -8,6 +8,9 @@ def _register(self, external, externals): externals.insert(0, external) + + def _deregister(self, external, externals): + externals.remove(external) def clear(self): """ @@ -23,9 +26,23 @@ """ self._register(processor, self.processors) + def deregister_processor(self, processor): + """ + Remove an existing processor from the list + Will raise a ValueError if the processor is not present + """ + self._deregister(processor, self.processors) + def register_optimizer(self, optimizer): """ Add a new optimizer to the list of optimizer This optimizer will be added with maximum priority """ self._register(optimizer, self.optimizers) + + def deregister_optimizer(self, processor): + """ + Remove an existing optimizer from the list + Will raise a ValueError if the optimizer is not present + """ + self._deregister(processor, self.optimizers)
d48e1388eb2a21f83094e561405e292f6ecc3fb9
labkey/__init__.py
labkey/__init__.py
# # Copyright (c) 2011-2014 LabKey Corporation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from labkey import query, experiment # wiki, messageboard from pkg_resources import get_distribution __title__ = get_distribution('labkey').project_name __version__ = get_distribution('labkey').version __author__ = 'LabKey Software' __license__ = 'Apache License 2.0'
# # Copyright (c) 2011-2014 LabKey Corporation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from labkey import query, experiment, utils # wiki, messageboard from pkg_resources import get_distribution __title__ = get_distribution('labkey').project_name __version__ = get_distribution('labkey').version __author__ = 'LabKey Software' __license__ = 'Apache License 2.0'
Include utils in base import
Include utils in base import
Python
apache-2.0
LabKey/labkey-api-python
--- +++ @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # -from labkey import query, experiment # wiki, messageboard +from labkey import query, experiment, utils # wiki, messageboard from pkg_resources import get_distribution __title__ = get_distribution('labkey').project_name
962ae8a53c95e8ade1e0fd5804062b807837781e
pycalc.py
pycalc.py
# vim: set fileencoding=utf-8 import sys try: import readline # No idea if this is a license violation. Hope it isn't. except ImportError: print("Could not find readline, you will likely get no line editing functionality") if sys.version_info.major < 3: print("This program is for python version 3 only.") sys.exit(3) import lexer import execute while True: instr = input("» ") toks = lexer.to_toks(instr) rpn = lexer.to_rpn(toks) result = execute.eval_rpn(rpn) if result is not None: print(result) if len(sys.argv) >= 2: break
# vim: set fileencoding=utf-8 import sys try: import readline # noqa: this is used simply by being imported. # No idea if this is a license violation. Hope it isn't. except ImportError: print("Could not find readline, you will likely get no line editing functionality") if sys.version_info.major < 3: print("This program is for python version 3 only.") sys.exit(3) import lexer # noqa: These have to go here, as they use Unicode, which py2 can't handle. import execute # noqa while True: instr = input("» ") toks = lexer.to_toks(instr) rpn = lexer.to_rpn(toks) result = execute.eval_rpn(rpn) if result is not None: print(result) if len(sys.argv) >= 2: break
Add lines to shut flake8 up.
Add lines to shut flake8 up.
Python
mit
5225225/pycalc,5225225/pycalc
--- +++ @@ -3,7 +3,7 @@ import sys try: - import readline + import readline # noqa: this is used simply by being imported. # No idea if this is a license violation. Hope it isn't. except ImportError: print("Could not find readline, you will likely get no line editing functionality") @@ -12,8 +12,8 @@ print("This program is for python version 3 only.") sys.exit(3) -import lexer -import execute +import lexer # noqa: These have to go here, as they use Unicode, which py2 can't handle. +import execute # noqa while True: instr = input("» ")
2f03aa69f55d4d899af968e57a59a58d27ef82c8
url_shortener/forms.py
url_shortener/forms.py
# -*- coding: utf-8 -*- from flask_wtf import Form from flask_wtf.recaptcha import RecaptchaField, Recaptcha from wtforms import StringField, validators from .validation import not_blacklisted_nor_spam class ShortenedURLForm(Form): url = StringField( validators=[ validators.DataRequired(), validators.URL(message='A valid URL is required'), not_blacklisted_nor_spam ] ) recaptcha = RecaptchaField( validators=[ Recaptcha( 'Please click on the reCAPTCHA field to prove you are a human' ) ] )
# -*- coding: utf-8 -*- from flask_wtf import Form from flask_wtf.recaptcha import RecaptchaField, Recaptcha from wtforms import StringField, validators from .validation import not_blacklisted_nor_spam class ShortenedURLForm(Form): url = StringField( validators=[ validators.DataRequired(), validators.URL(message='A valid URL is required'), not_blacklisted_nor_spam ], render_kw={'placeholder': 'Original URL'} ) recaptcha = RecaptchaField( validators=[ Recaptcha( 'Please click on the reCAPTCHA field to prove you are a human' ) ] )
Add placeholder text to URL form field
Add placeholder text to URL form field
Python
mit
piotr-rusin/url-shortener,piotr-rusin/url-shortener
--- +++ @@ -12,7 +12,8 @@ validators.DataRequired(), validators.URL(message='A valid URL is required'), not_blacklisted_nor_spam - ] + ], + render_kw={'placeholder': 'Original URL'} ) recaptcha = RecaptchaField( validators=[
43e4d71a193f78e83f26e9d1c5fe69cee1b289b5
raffle.py
raffle.py
# -*- coding: utf-8 -*- import random import webbrowser from pythonkc_meetups import PythonKCMeetups from optparse import OptionParser def raffle_time(api_key=None, event_id=None): client = PythonKCMeetups(api_key=api_key) attendees = client.get_event_attendees(event_id) random.shuffle(attendees) winner = random.choice(attendees) if winner.photo: webbrowser.open_new(winner.photo.url) print "\n\nAnd the winner is, %s \n\n" % winner.name if __name__ == '__main__': parser = OptionParser() parser.add_option("-k", "--key", help="api key for meetup.com", dest="api_key", type="string") parser.add_option("-e", "--event_id", help="event id from meetup.com", dest="event_id", type="int") options, args = parser.parse_args() raffle_time(api_key=options.api_key, event_id=options.event_id)
# -*- coding: utf-8 -*- import random import webbrowser from pythonkc_meetups import PythonKCMeetups from optparse import OptionParser def raffle_time(api_key=None, event_id=None): client = PythonKCMeetups(api_key=api_key) attendees = client.get_event_attendees(event_id) random.seed() random.shuffle(attendees) winner = random.choice(attendees) if winner.photo: webbrowser.open_new(winner.photo.url) print("\n\nAnd the winner is, %s \n\n" % winner.name) if __name__ == '__main__': parser = OptionParser() parser.add_option("-k", "--key", help="api key for meetup.com", dest="api_key", type="string") parser.add_option("-e", "--event_id", help="event id from meetup.com", dest="event_id", type="int") options, args = parser.parse_args() raffle_time(api_key=options.api_key, event_id=options.event_id)
Add call to random.seed() and change print statement to print function.
Add call to random.seed() and change print statement to print function.
Python
bsd-3-clause
pythonkc/pythonkc-raffler
--- +++ @@ -9,11 +9,12 @@ def raffle_time(api_key=None, event_id=None): client = PythonKCMeetups(api_key=api_key) attendees = client.get_event_attendees(event_id) + random.seed() random.shuffle(attendees) winner = random.choice(attendees) if winner.photo: webbrowser.open_new(winner.photo.url) - print "\n\nAnd the winner is, %s \n\n" % winner.name + print("\n\nAnd the winner is, %s \n\n" % winner.name) if __name__ == '__main__':
c7b40736b6e59a654652afaab7fb643e00c1f186
clients/cron/call_uw_course_alerter.py
clients/cron/call_uw_course_alerter.py
import subprocess import urllib def main(): api_url = 'https://uw-alert.herokuapp.com/check_availability' courses = [{'level': 'under', 'session': 1151, 'subject': 'CS', 'number': 341, 'email': 'youremail@example.com'}] for course in courses: encoded_query = urllib.urlencode(course) subprocess.call(['curl', '-X', 'POST', '-H', 'Cache-Control: no-cache', '-H', 'Content-Type: application/x-www-form-urlencoded', '-d', encoded_query, api_url]) if __name__ == '__main__': main()
#!/usr/bin/env python import subprocess import urllib def main(): api_url = 'https://uw-alert.herokuapp.com/check_availability' courses = [{'level': 'under', 'session': 1151, 'subject': 'CS', 'number': 341, 'email': 'youremail@example.com'}] for course in courses: encoded_query = urllib.urlencode(course) subprocess.call(['curl', '-X', 'POST', '-H', 'Cache-Control: no-cache', '-H', 'Content-Type: application/x-www-form-urlencoded', '-d', encoded_query, api_url]) if __name__ == '__main__': main()
Add shebang for Python to sample script
Add shebang for Python to sample script
Python
mit
tuzhucheng/uw-course-alerter,tuzhucheng/uw-course-alerter,tuzhucheng/uw-course-alerter,tuzhucheng/uw-course-alerter
--- +++ @@ -1,3 +1,5 @@ +#!/usr/bin/env python + import subprocess import urllib
9b524fa97d1d1c8c132d1a880103a5ab4d0dabfc
models.py
models.py
from google.appengine.ext import ndb class Photo(ndb.Model): name = ndb.StringProperty() blob_info_key = ndb.BlobKeyProperty() date_created = ndb.DateTimeProperty(auto_now_add=True) class Album(ndb.Model): name = ndb.StringProperty() description = ndb.StringProperty() date_created = ndb.DateTimeProperty(auto_now_add=True)
from google.appengine.ext import ndb class Photo(ndb.Model): name = ndb.StringProperty() blob_info_key = ndb.BlobKeyProperty() date_created = ndb.DateTimeProperty(auto_now_add=True) class Album(ndb.Model): name = ndb.StringProperty() description = ndb.StringProperty() date_created = ndb.DateTimeProperty(auto_now_add=True) class Comment(ndb.Model): author = ndb.StringProperty() text = ndb.StringProperty() date_created = ndb.DateTimeProperty(auto_now_add=True)
Add Comment Model for Photo and Album
Add Comment Model for Photo and Album
Python
mit
MichaelAquilina/Photo-Nebula,MichaelAquilina/Photo-Nebula
--- +++ @@ -11,3 +11,9 @@ name = ndb.StringProperty() description = ndb.StringProperty() date_created = ndb.DateTimeProperty(auto_now_add=True) + + +class Comment(ndb.Model): + author = ndb.StringProperty() + text = ndb.StringProperty() + date_created = ndb.DateTimeProperty(auto_now_add=True)
3c04509ced5c51c55294e88ded2614b9f289deea
launch_instance.py
launch_instance.py
import boto3 import logging logger = logging.getLogger() logger.setLevel(logging.INFO) def lambda_handler(event, context): ec2 = boto3.resource('ec2') with open('env.sh') as e, open('cloud-init.sh') as f: script = e.read() + f.read() instance = ec2.create_instances( ImageId='ami-060cde69', # Ubuntu 16.04 MinCount=1, MaxCount=1, KeyName='ictrp', UserData=script, SecurityGroups=['launch-wizard-1'], InstanceType='t2.micro', InstanceInitiatedShutdownBehavior='terminate') logging.info(instance[0]) return instance[0].id
import boto3 import logging logger = logging.getLogger() logger.setLevel(logging.INFO) def lambda_handler(event, context): ec2 = boto3.resource('ec2') with open('env.sh') as e, open('cloud-init.sh') as f: script = e.read() + f.read() instance = ec2.create_instances( ImageId='ami-060cde69', # Ubuntu 16.04 MinCount=1, MaxCount=1, KeyName='ictrp', UserData=script, SecurityGroups=['launch-wizard-1'], InstanceType='t3.small', InstanceInitiatedShutdownBehavior='terminate') logging.info(instance[0]) return instance[0].id
Upgrade instance type due to RAM issues
Upgrade instance type due to RAM issues
Python
mit
gertvv/ictrp-retrieval,gertvv/ictrp-retrieval
--- +++ @@ -17,7 +17,7 @@ KeyName='ictrp', UserData=script, SecurityGroups=['launch-wizard-1'], - InstanceType='t2.micro', + InstanceType='t3.small', InstanceInitiatedShutdownBehavior='terminate') logging.info(instance[0])
d5e5b9ca4c9243d950e17b141008ffb168df0b64
kbkdna/dna.py
kbkdna/dna.py
#!/usr/bin/env python2 from __future__ import division def reverse(seq): """Return the reverse of the given sequence (i.e. 3' to 5').""" return seq[::-1] def complement(seq): """Return the complement of the given sequence (i.e. G=>C, A=>T, etc.)""" from string import maketrans complements = maketrans('ACTGactg', 'TGACtgac') return seq.translate(complements) def reverse_complement(seq): """Return the reverse complement of the given sequence (e.g. the opposite strand).""" return reverse(complement(seq)) # This function contains a bug. Do you see it? def gc_content(seq): """Return the GC content of the given sequence (e.g. the fraction of nucleotides that are either G or C).""" return sum(x in 'GC' for x in seq) / len(seq)
#!/usr/bin/env python2 def reverse(seq): """Return the reverse of the given sequence (i.e. 3' to 5').""" return seq[::-1] def complement(seq): """Return the complement of the given sequence (i.e. G=>C, A=>T, etc.)""" from string import maketrans complements = maketrans('ACTGactg', 'TGACtgac') return seq.translate(complements) def reverse_complement(seq): """Return the reverse complement of the given sequence (e.g. the opposite strand).""" return reverse(complement(seq)) # This function contains a bug. Do you see it? def gc_content(seq): """Return the GC content of the given sequence (e.g. the fraction of nucleotides that are either G or C).""" return sum(x in 'GC' for x in seq) / len(seq)
Put the gc_content bug back.
Put the gc_content bug back.
Python
mit
kalekundert/kbkdna
--- +++ @@ -1,6 +1,4 @@ #!/usr/bin/env python2 - -from __future__ import division def reverse(seq): """Return the reverse of the given sequence (i.e. 3' to
34274289f0cbfafbb1d762cad38a7225873d6850
matches/admin.py
matches/admin.py
from django.contrib import admin from .models import Match from .models import Tip def delete_tips(modeladmin, request, queryset): for match in queryset: tips = Tip.object.filter(match = match) for tip in tips: tip.score = 0 tip.scoring_field = "" tip.is_score_calculated = False delete_tips.delete_tips = "Delete calculated scores for tips for these matches" class MatchAdmin(admin.ModelAdmin): actions = [make_published] admin.site.register(Match, MatchAdmin) admin.site.register(Tip)
from django.contrib import admin from .models import Match from .models import Tip def delete_tips(modeladmin, request, queryset): for match in queryset: tips = Tip.object.filter(match = match) for tip in tips: tip.score = 0 tip.scoring_field = "" tip.is_score_calculated = False delete_tips.delete_tips = "Delete calculated scores for tips for these matches" class MatchAdmin(admin.ModelAdmin): actions = [delete_tips] admin.site.register(Match, MatchAdmin) admin.site.register(Tip)
Add action to zero out tips for given match
Add action to zero out tips for given match
Python
mit
leventebakos/football-ech,leventebakos/football-ech
--- +++ @@ -12,7 +12,7 @@ delete_tips.delete_tips = "Delete calculated scores for tips for these matches" class MatchAdmin(admin.ModelAdmin): - actions = [make_published] + actions = [delete_tips] admin.site.register(Match, MatchAdmin) admin.site.register(Tip)
88921deb630bef702a08f5b3a07279ccd223902b
test/lifecycle_test.py
test/lifecycle_test.py
from threading import Thread from Queue import Queue from doubles import lifecycle class TestLifecycle(object): def test_stores_a_space_per_thread(self): queue = Queue() queue.put(lifecycle.current_space()) def push_thread_space_to_queue(queue): queue.put(lifecycle.current_space()) thread = Thread(target=push_thread_space_to_queue, args=(queue,)) thread.start() thread.join() main_space = queue.get() thread_space = queue.get() assert main_space is not thread_space
from threading import Thread from Queue import Queue from doubles import lifecycle class TestLifecycle(object): def test_stores_a_space_per_thread(self): queue = Queue() def push_thread_space_to_queue(queue): queue.put(lifecycle.current_space()) push_thread_space_to_queue(queue) thread = Thread(target=push_thread_space_to_queue, args=(queue,)) thread.start() thread.join() main_space = queue.get() thread_space = queue.get() assert main_space is not thread_space
Reduce minor duplication in lifecycle test.
Reduce minor duplication in lifecycle test.
Python
mit
uber/doubles
--- +++ @@ -8,10 +8,10 @@ def test_stores_a_space_per_thread(self): queue = Queue() - queue.put(lifecycle.current_space()) - def push_thread_space_to_queue(queue): queue.put(lifecycle.current_space()) + + push_thread_space_to_queue(queue) thread = Thread(target=push_thread_space_to_queue, args=(queue,)) thread.start()
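The property this test pins down, one distinct object per thread, is commonly backed by threading.local storage. A standalone sketch of that idea, independent of how the doubles library actually implements current_space():

import threading
from Queue import Queue  # the module is named "queue" on Python 3

_local = threading.local()

def current_space():
    # lazily create one space object per thread
    if not hasattr(_local, 'space'):
        _local.space = object()
    return _local.space

q = Queue()
q.put(current_space())                                  # the main thread's space
worker = threading.Thread(target=lambda: q.put(current_space()))
worker.start()
worker.join()
assert q.get() is not q.get()                           # two threads, two spaces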
0335f32fa7035d394a6434b924a8ebb17332fd45
mist/__init__.py
mist/__init__.py
import requests class Bit: def __init__(self, auth_token, device_id): self.token = auth_token self.id = device_id self.version = '0.1.0' self.headers = {"Authorization": "Bearer " + self.token, "Accept": "application/vnd.littlebits.v2+json"} def output(self, pct, dur): r = requests.post("https://api-http.littlebitscloud.cc/devices/" + self.id + "/output", data={"percent": pct, "duration_ms": dur}, headers=self.headers) return r.text
import requests print """ mist - a python wrapper for the cloudBit API. If you use this, please let me know on github! http://github.com/technoboy10/mist """ class Bit: def __init__(self, auth_token, device_id): self.token = auth_token self.id = device_id self.version = '0.1.0' self.headers = {"Authorization": "Bearer " + self.token, "Accept": "application/vnd.littlebits.v2+json"} def output(self, pct, dur): r = requests.post("https://api-http.littlebitscloud.cc/devices/" + self.id + "/output", data={"percent": pct, "duration_ms": dur}, headers=self.headers) return r.text
Print stuff when module is imported.
Print stuff when module is imported.
Python
mit
technoboy10/partly-cloudy
--- +++ @@ -1,4 +1,10 @@ import requests +print """ +mist - a python wrapper for the cloudBit API. + +If you use this, please let me know on github! +http://github.com/technoboy10/mist +""" class Bit: def __init__(self, auth_token, device_id): self.token = auth_token
1843e34bba0343cd3600f3c8934ae29b4b365554
chstrings/chstrings_test.py
chstrings/chstrings_test.py
import chstrings import config import unittest class CHStringsTest(unittest.TestCase): @classmethod def add_smoke_test(cls, cfg): def test(self): # We just want to see if this will blow up chstrings.get_localized_strings(cfg, cfg.lang_code) name = 'test_' + cfg.lang_code + '_smoke_test' setattr(cls, name, test) if __name__ == '__main__': for lc in config.LANG_CODES_TO_LANG_NAMES: cfg = config.get_localized_config(lc) CHStringsTest.add_smoke_test(cfg) unittest.main()
import chstrings import config import unittest class CHStringsTest(unittest.TestCase): @classmethod def add_smoke_test(cls, cfg): def test(self): # We just want to see if this will blow up. Use the fallback # lang_tag across all tests. lang_tag = cfg.lang_code if cfg.accept_language: lang_tag = cfg.accept_language[-1] self.assertNotEqual({}, chstrings.get_localized_strings(cfg, lang_tag)) name = 'test_' + cfg.lang_code + '_smoke_test' setattr(cls, name, test) if __name__ == '__main__': for lc in config.LANG_CODES_TO_LANG_NAMES: cfg = config.get_localized_config(lc) CHStringsTest.add_smoke_test(cfg) unittest.main()
Extend chstrings smoke test a little more.
Extend chstrings smoke test a little more.
Python
mit
eggpi/citationhunt,guilherme-pg/citationhunt,eggpi/citationhunt,eggpi/citationhunt,guilherme-pg/citationhunt,guilherme-pg/citationhunt,eggpi/citationhunt,guilherme-pg/citationhunt
--- +++ @@ -7,8 +7,13 @@ @classmethod def add_smoke_test(cls, cfg): def test(self): - # We just want to see if this will blow up - chstrings.get_localized_strings(cfg, cfg.lang_code) + # We just want to see if this will blow up. Use the fallback + # lang_tag across all tests. + lang_tag = cfg.lang_code + if cfg.accept_language: + lang_tag = cfg.accept_language[-1] + self.assertNotEqual({}, + chstrings.get_localized_strings(cfg, lang_tag)) name = 'test_' + cfg.lang_code + '_smoke_test' setattr(cls, name, test)
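Both versions of this file rely on the same trick: one test method per language is generated at import time by attaching a closure to the TestCase with setattr, so unittest discovers them like hand-written tests. A minimal standalone sketch of that parametrisation pattern, with an invented list of cases in place of citationhunt's config:

import unittest

class SmokeTest(unittest.TestCase):
    @classmethod
    def add_case(cls, lang_code):
        def test(self):
            self.assertTrue(lang_code.islower())
        setattr(cls, 'test_' + lang_code + '_smoke', test)

for lang_code in ['en', 'fr', 'pt']:   # stand-in for config.LANG_CODES_TO_LANG_NAMES
    SmokeTest.add_case(lang_code)

if __name__ == '__main__':
    unittest.main()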
a47e300d9e25dd3f631c9538f33b1d65cca847af
profile_collection/startup/80-areadetector.py
profile_collection/startup/80-areadetector.py
from ophyd.controls.area_detector import (AreaDetectorFileStoreHDF5, AreaDetectorFileStoreTIFF, AreaDetectorFileStoreTIFFSquashing) # from shutter import sh1 shctl1 = EpicsSignal('XF:28IDC-ES:1{Det:PE1}cam1:ShutterMode', name='shctl1') pe1 = AreaDetectorFileStoreTIFFSquashing( 'XF:28IDC-ES:1{Det:PE1}', name='pe1', stats=[], ioc_file_path = 'G:/pe1_data', file_path = '/home/xf28id1/pe1_data', shutter=shctl1, shutter_val=(1, 0) ) # Dan and Sanjit commented this out in June. #shctl2 = EpicsSignal('XF:28IDC-ES:1{Det:PE2}cam1:ShutterMode', name='shctl2') #pe2 = AreaDetectorFileStoreTIFFSquashing( # 'XF:28IDC-ES:1{Det:PE2}', # name='pe2', # stats=[], # ioc_file_path = 'G:/pe2_data', # file_path = '/home/xf28id1/pe2_data', # shutter=shctl2, # shutter_val=(1,0))
from ophyd.controls.area_detector import (AreaDetectorFileStoreHDF5, AreaDetectorFileStoreTIFF, AreaDetectorFileStoreTIFFSquashing) # from shutter import sh1 shctl1 = EpicsSignal('XF:28IDC-ES:1{Det:PE1}cam1:ShutterMode', name='shctl1') pe1 = AreaDetectorFileStoreTIFFSquashing( 'XF:28IDC-ES:1{Det:PE1}', name='pe1', stats=[], ioc_file_path = 'H:/pe1_data', file_path = '/home/xf28id1/pe1_data', shutter=shctl1, shutter_val=(1, 0) ) # Dan and Sanjit commented this out in June. #shctl2 = EpicsSignal('XF:28IDC-ES:1{Det:PE2}cam1:ShutterMode', name='shctl2') #pe2 = AreaDetectorFileStoreTIFFSquashing( # 'XF:28IDC-ES:1{Det:PE2}', # name='pe2', # stats=[], # ioc_file_path = 'G:/pe2_data', # file_path = '/home/xf28id1/pe2_data', # shutter=shctl2, # shutter_val=(1,0))
Update ioc storage path for the raw tiffs.
Update ioc storage path for the raw tiffs.
Python
bsd-2-clause
pavoljuhas/ipython_ophyd,pavoljuhas/ipython_ophyd,NSLS-II-XPD/ipython_ophyd,NSLS-II-XPD/ipython_ophyd
--- +++ @@ -10,7 +10,7 @@ 'XF:28IDC-ES:1{Det:PE1}', name='pe1', stats=[], - ioc_file_path = 'G:/pe1_data', + ioc_file_path = 'H:/pe1_data', file_path = '/home/xf28id1/pe1_data', shutter=shctl1, shutter_val=(1, 0)
46592a761df3716e5c3e218e4aa714bc7fc1b0ce
twilio.py
twilio.py
from twilio.rest import TwilioRestClient import config client = TwilioRestClient(account_sid, auth_token) message = client.messages.create(to="+12316851234", from_="+15555555555", body="Hello there!")
from twilio.rest import TwilioRestClient import config client = TwilioRestClient(account_sid, auth_token) message = client.messages.create(to="+12316851234", from_="+15555555555", body="Hello there!")
Add newline at end of file
Add newline at end of file
Python
mit
nhshd-slot/SLOT,bsharif/SLOT,bsharif/SLOT,nhshd-slot/SLOT,bsharif/SLOT,nhshd-slot/SLOT
8bd562b3ae6a2e5ae4fd1ad88da5d46408415365
cliche/services/__init__.py
cliche/services/__init__.py
""":mod:`cliche.services` --- Interfacing external services ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ How to add a new external service --------------------------------- In order to add a new service to cliche, you must create a subpackage under :data:`cliche.services` and expose some methods referring to interfaces, using :file:`__init__.py`. Interfaces needed to be exposed ------------------------------- - :func:`sync()`: Method to delay a main crawling task to the queue. It should be decorated with :code:`@app.task` to be defined as a celery app worker task. Example :file:`__init__.py` --------------------------- .. sourcecode:: python from .crawler import crawl as sync # noqa __all__ = 'sync', Note that you will need the import lines annotated with :code:`# noqa` because otherwise :program:`flake8` will consider it as unused import. """
""":mod:`cliche.services` --- Interfacing external services ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ How to add a new external service --------------------------------- In order to add a new service to cliche, you must create a subpackage under :data:`cliche.services` and expose some methods referring to interfaces, using :file:`__init__.py`. Interfaces needed to be exposed ------------------------------- - :func:`sync()`: Method to delay a main crawling task to the queue. It should be decorated with :code:`@app.task` to be defined as a celery app worker task. It should have no arguments and no return. Every output should be made as a log to celery logger. Example :file:`__init__.py` --------------------------- .. sourcecode:: python from .crawler import crawl as sync # noqa __all__ = 'sync', Note that you will need the import lines annotated with :code:`# noqa` because otherwise :program:`flake8` will consider it as unused import. """
Add some details to docs
Add some details to docs
Python
mit
clicheio/cliche,item4/cliche,clicheio/cliche,item4/cliche,clicheio/cliche
--- +++ @@ -13,7 +13,8 @@ - :func:`sync()`: Method to delay a main crawling task to the queue. It should be decorated with :code:`@app.task` to be defined as a celery app worker - task. + task. It should have no arguments and no return. Every output should be made + as a log to celery logger. Example :file:`__init__.py` ---------------------------
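The contract spelled out in the added sentence (a sync() task with no arguments and no return value, reporting only through the celery logger) could be satisfied by a crawler module along the following lines; the app name and log messages are invented for illustration and are not taken from cliche:

from celery import Celery
from celery.utils.log import get_task_logger

app = Celery('example_service')          # hypothetical celery app
logger = get_task_logger(__name__)

@app.task
def crawl():
    # main crawling task: no args, no return, every output is a log record
    logger.info('crawl started')
    logger.info('crawl finished')

The service package would then re-export it exactly as the docstring's example shows, via "from .crawler import crawl as sync".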
4add4eb1895498d547c8cf86ea8017c273932139
petition/forms.py
petition/forms.py
from django.forms import ModelForm from django import forms from django.forms.widgets import TextInput from django.core.urlresolvers import reverse from django.utils.translation import ugettext as _ from braces.forms import UserKwargModelFormMixin from crispy_forms.helper import FormHelper from crispy_forms.layout import Submit from constance import config from .models import Signature class TelephoneInput(TextInput): input_type = 'tel' class SignatureForm(UserKwargModelFormMixin, ModelForm): giodo = forms.BooleanField(widget=forms.CheckboxInput(), required=True) def __init__(self, *args, **kwargs): super(SignatureForm, self).__init__(*args, **kwargs) self.helper = FormHelper() self.helper.form_action = reverse('petition:create') self.helper.form_method = 'post' self.helper.add_input(Submit('submit', _('Sign'), css_class="btn-lg btn-block")) self.fields['telephone'].widget = TelephoneInput() self.fields['newsletter'].label = config.NEWSLETTER_TEXT self.fields['giodo'].label = config.AGGREMENT_TEXT class Meta: model = Signature fields = ['first_name', 'second_name', 'email', 'city', 'telephone', 'giodo', 'newsletter']
from django.forms import ModelForm from django import forms from django.forms.widgets import TextInput from django.core.urlresolvers import reverse from django.utils.translation import ugettext as _ from braces.forms import UserKwargModelFormMixin from crispy_forms.helper import FormHelper from crispy_forms.layout import Submit from constance import config from .models import Signature class TelephoneInput(TextInput): input_type = 'tel' class SignatureForm(UserKwargModelFormMixin, ModelForm): giodo = forms.BooleanField(widget=forms.CheckboxInput(), required=True) def __init__(self, *args, **kwargs): super(SignatureForm, self).__init__(*args, **kwargs) self.helper = FormHelper() self.helper.form_action = reverse('petition:create') self.helper.form_method = 'post' self.helper.add_input(Submit('submit', _('Sign'), css_class="btn-sign btn-lg btn-block")) self.fields['telephone'].widget = TelephoneInput() self.fields['newsletter'].label = config.NEWSLETTER_TEXT self.fields['giodo'].label = config.AGGREMENT_TEXT class Meta: model = Signature fields = ['first_name', 'second_name', 'email', 'city', 'telephone', 'giodo', 'newsletter']
Add a class for sign button
Add a class for sign button What will make it easier to manipulate the theme
Python
mit
watchdogpolska/django-one-petition,watchdogpolska/django-one-petition,watchdogpolska/django-one-petition
--- +++ @@ -22,7 +22,7 @@ self.helper = FormHelper() self.helper.form_action = reverse('petition:create') self.helper.form_method = 'post' - self.helper.add_input(Submit('submit', _('Sign'), css_class="btn-lg btn-block")) + self.helper.add_input(Submit('submit', _('Sign'), css_class="btn-sign btn-lg btn-block")) self.fields['telephone'].widget = TelephoneInput() self.fields['newsletter'].label = config.NEWSLETTER_TEXT self.fields['giodo'].label = config.AGGREMENT_TEXT
bcd9799b790aef9571491c0537e5aeb12bc9a564
make_a_plea/management/commands/check_urns_in_db.py
make_a_plea/management/commands/check_urns_in_db.py
import csv from django.core.management.base import BaseCommand from apps.plea.models import DataValidation, Case from apps.plea.standardisers import standardise_urn, format_for_region class Command(BaseCommand): help = "Build weekly aggregate stats" def add_arguments(self, parser): parser.add_argument('csv_file', nargs='+') def handle(self, *args, **options): with open(options['csv_file'][0]) as csvfile: total_matched, total_missed, matched, missed = 0, 0, 0, 0 for row in csvfile.readlines(): if not row.strip(): continue elif row.startswith("#"): if matched > 0 or missed > 0: print "----------------\nMatched {}\nMissed {}\n\n".format(matched, missed) total_matched += matched total_missed += missed matched = 0 missed = 0 print row else: urn = standardise_urn(row) if Case.objects.filter(urn__iexact=urn).exists(): matched += 1 else: missed += 1 print "{} - failed".format(urn) print "----------------\nTotal:\nMatched {}\nMissed {}".format(total_matched, total_missed)
from django.core.management.base import BaseCommand from apps.plea.models import Case from apps.plea.standardisers import standardise_urn class Command(BaseCommand): help = "Build weekly aggregate stats" def add_arguments(self, parser): parser.add_argument('csv_file', nargs='+') def handle(self, *args, **options): total_matched, total_missed, matched, missed = 0, 0, 0, 0 with open(options['csv_file'][0]) as csvfile: for row in csvfile.readlines(): if not row.strip(): print "----------------\nMatched {}\nMissed {}\n\n".format(matched, missed) total_matched += matched total_missed += missed elif row.startswith("#"): if matched > 0 or missed > 0: matched = 0 missed = 0 print row else: urn = standardise_urn(row) if Case.objects.filter(urn__iexact=urn).exists(): matched += 1 else: missed += 1 print "{} - failed".format(row.strip()) print "----------------\nTotal:\nMatched {}\nMissed {}".format(total_matched, total_missed)
Update management command to check if a list of URNs is present in the database
Update management command to check if a list of URNs is present in the database
Python
mit
ministryofjustice/manchester_traffic_offences_pleas,ministryofjustice/manchester_traffic_offences_pleas,ministryofjustice/manchester_traffic_offences_pleas,ministryofjustice/manchester_traffic_offences_pleas
--- +++ @@ -1,9 +1,7 @@ -import csv from django.core.management.base import BaseCommand - -from apps.plea.models import DataValidation, Case -from apps.plea.standardisers import standardise_urn, format_for_region +from apps.plea.models import Case +from apps.plea.standardisers import standardise_urn class Command(BaseCommand): @@ -13,17 +11,17 @@ parser.add_argument('csv_file', nargs='+') def handle(self, *args, **options): + total_matched, total_missed, matched, missed = 0, 0, 0, 0 + with open(options['csv_file'][0]) as csvfile: - total_matched, total_missed, matched, missed = 0, 0, 0, 0 - for row in csvfile.readlines(): if not row.strip(): - continue + print "----------------\nMatched {}\nMissed {}\n\n".format(matched, missed) + total_matched += matched + total_missed += missed + elif row.startswith("#"): if matched > 0 or missed > 0: - print "----------------\nMatched {}\nMissed {}\n\n".format(matched, missed) - total_matched += matched - total_missed += missed matched = 0 missed = 0 print row @@ -33,6 +31,6 @@ matched += 1 else: missed += 1 - print "{} - failed".format(urn) + print "{} - failed".format(row.strip()) print "----------------\nTotal:\nMatched {}\nMissed {}".format(total_matched, total_missed)
9eea896352e62850494dbb3f894eef0b82afab89
needy/projects/boostbuild.py
needy/projects/boostbuild.py
import os import subprocess from .. import project class BoostBuildProject(project.Project): @staticmethod def identifier(): return 'boostbuild' @staticmethod def is_valid_project(definition, needy): if not definition.target.platform.is_host(): return False if not os.path.isfile('Jamroot'): return False if os.path.isfile('b2'): return True try: needy.command_output(['b2', '-v']) return True except subprocess.CalledProcessError: return False except OSError: return False @staticmethod def configuration_keys(): return ['b2-args'] def get_build_concurrency_args(self): concurrency = self.build_concurrency() if concurrency > 1: return ['-j', str(concurrency)] elif concurrency == 0: return ['-j'] return [] def build(self, output_directory): b2 = './b2' if os.path.isfile('b2') else 'b2' b2_args = self.evaluate(self.configuration('b2-args')) b2_args += self.get_build_concurrency_args() self.command([b2] + b2_args) self.command([b2, 'install', '--prefix=%s' % output_directory] + b2_args)
import os import subprocess from .. import project class BoostBuildProject(project.Project): @staticmethod def identifier(): return 'boostbuild' @staticmethod def is_valid_project(definition, needy): if not definition.target.platform.is_host(): return False if not os.path.isfile('Jamroot'): return False if os.path.isfile('b2'): return True try: needy.command_output(['b2', '-v']) return True except subprocess.CalledProcessError: return False except OSError: return False @staticmethod def configuration_keys(): return ['b2-args'] def get_build_concurrency_args(self): concurrency = self.build_concurrency() if concurrency > 1: return ['-j', str(concurrency)] elif concurrency == 0: return ['-j'] return [] def build(self, output_directory): b2 = './b2' if os.path.isfile('b2') else 'b2' b2_args = self.evaluate(self.configuration('b2-args')) b2_args += self.get_build_concurrency_args() if self.configuration('linkage') in ['static']: b2_args += ['link=static'] elif self.configuration('linkage') in ['dynamic', 'shared']: b2_args += ['link=shared'] self.command([b2] + b2_args) self.command([b2, 'install', '--prefix=%s' % output_directory] + b2_args)
Add support for linkage in b2 projects
Add support for linkage in b2 projects
Python
mit
vmrob/needy,ccbrown/needy,vmrob/needy,bittorrent/needy,bittorrent/needy,ccbrown/needy
--- +++ @@ -46,5 +46,9 @@ b2 = './b2' if os.path.isfile('b2') else 'b2' b2_args = self.evaluate(self.configuration('b2-args')) b2_args += self.get_build_concurrency_args() + if self.configuration('linkage') in ['static']: + b2_args += ['link=static'] + elif self.configuration('linkage') in ['dynamic', 'shared']: + b2_args += ['link=shared'] self.command([b2] + b2_args) self.command([b2, 'install', '--prefix=%s' % output_directory] + b2_args)
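The new branch maps the project's linkage configuration onto b2's link= property. A reduced sketch of just that mapping, taking the configuration value directly instead of going through the project's configuration() helper:

def linkage_args(linkage):
    # translate a linkage setting into b2 command-line properties
    if linkage in ['static']:
        return ['link=static']
    elif linkage in ['dynamic', 'shared']:
        return ['link=shared']
    return []

assert linkage_args('static') == ['link=static']
assert linkage_args('dynamic') == ['link=shared']
assert linkage_args(None) == []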
cd88b3a695e87078aa0b50f9b3ea3af9ae3eb0a5
databroker/tests/test_core.py
databroker/tests/test_core.py
from __future__ import absolute_import, division, print_function from databroker.core import Header def test_header_dict_conformance(db): # TODO update this if / when we add conformance testing to # validate attrs in Header target = {'start': {'uid': 'start'}, 'stop': {'uid': 'stop', 'start_uid': 'start'}} h = Header(db, **target) # hack the descriptor lookup/cache mechanism target['descriptors'] = [{'uid': 'desc', 'start_uid': 'start'}] h._cache['desc'] = [{'uid': 'desc', 'start_uid': 'start'}] assert len(h) == len(target) assert set(h) == set(target) assert set(h.keys()) == set(target.keys()) for k, v in h.items(): assert v == target[k] assert v == h[k] # this is a dumb test assert len(list(h.values())) == len(h) n, d = h.to_name_dict_pair() assert n == 'header' assert d == target
from __future__ import absolute_import, division, print_function import copy from databroker.core import Header def test_header_dict_conformance(db): db.prepare_hook = lambda name, doc: copy.deepcopy(doc) # TODO update this if / when we add conformance testing to # validate attrs in Header target = {'start': {'uid': 'start'}, 'stop': {'uid': 'stop', 'start_uid': 'start'}} h = Header(db, **target) # hack the descriptor lookup/cache mechanism target['descriptors'] = [{'uid': 'desc', 'start_uid': 'start'}] h._cache['desc'] = [{'uid': 'desc', 'start_uid': 'start'}] assert len(h) == len(target) assert set(h) == set(target) assert set(h.keys()) == set(target.keys()) for k, v in h.items(): assert v == target[k] assert v == h[k] # this is a dumb test assert len(list(h.values())) == len(h) n, d = h.to_name_dict_pair() assert n == 'header' assert d == target
Remove doct from the equation in attrs tests.
TST: Remove doct from the equation in attrs tests.
Python
bsd-3-clause
ericdill/databroker,ericdill/databroker
--- +++ @@ -1,9 +1,12 @@ from __future__ import absolute_import, division, print_function +import copy from databroker.core import Header def test_header_dict_conformance(db): + db.prepare_hook = lambda name, doc: copy.deepcopy(doc) + # TODO update this if / when we add conformance testing to # validate attrs in Header target = {'start': {'uid': 'start'},
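The hook assigned at the top of the test returns a deep copy, so whatever a caller does to the document it receives cannot leak back into the stored original. A tiny sketch of that effect, using made-up dictionaries rather than real databroker documents:

import copy

prepare_hook = lambda name, doc: copy.deepcopy(doc)

stored = {'start': {'uid': 'start'}}
handed_out = prepare_hook('start', stored['start'])
handed_out['uid'] = 'mutated by caller'
assert stored['start']['uid'] == 'start'   # the stored document is untouched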
4fdf3af6414ae6fdc20882309bcaf36ffbe5c7a7
scripts/prereg/approve_draft_registrations.py
scripts/prereg/approve_draft_registrations.py
""" A script for testing DraftRegistrationApprovals. Automatically approves all pending DraftRegistrationApprovals. """ import sys import logging from website.app import init_app from website.models import DraftRegistration, Sanction, User logger = logging.getLogger(__name__) logging.basicConfig(level=logging.WARN) logging.disable(level=logging.INFO) def main(dry_run=True): if dry_run: logger.warn('DRY RUN mode') pending_approval_drafts = DraftRegistration.find() need_approval_drafts = [draft for draft in pending_approval_drafts if draft.requires_approval and draft.approval and draft.approval.state == Sanction.UNAPPROVED] for draft in need_approval_drafts: sanction = draft.approval try: if not dry_run: sanction.state = Sanction.APPROVED sanction._on_complete(None) sanction.save() logger.warn('Approved {0}'.format(draft._id)) except Exception as e: logger.error(e) if __name__ == '__main__': dry_run = 'dry' in sys.argv init_app(routes=False) main(dry_run=dry_run)
""" A script for testing DraftRegistrationApprovals. Automatically approves all pending DraftRegistrationApprovals. """ import sys import logging from framework.tasks.handlers import celery_teardown_request from website.app import init_app from website.project.model import DraftRegistration, Sanction logger = logging.getLogger(__name__) logging.basicConfig(level=logging.WARN) logging.disable(level=logging.INFO) def main(dry_run=True): if dry_run: logger.warn('DRY RUN mode') pending_approval_drafts = DraftRegistration.find() need_approval_drafts = [draft for draft in pending_approval_drafts if draft.approval and draft.requires_approval and draft.approval.state == Sanction.UNAPPROVED] for draft in need_approval_drafts: sanction = draft.approval try: if not dry_run: sanction.state = Sanction.APPROVED sanction._on_complete(None) sanction.save() logger.warn('Approved {0}'.format(draft._id)) except Exception as e: logger.error(e) if __name__ == '__main__': dry_run = 'dry' in sys.argv app = init_app(routes=False) main(dry_run=dry_run) celery_teardown_request()
Make sure celery_teardown_request gets called in DraftReg auto-approve script
Make sure celery_teardown_request gets called in DraftReg auto-approve script [skip ci]
Python
apache-2.0
crcresearch/osf.io,Johnetordoff/osf.io,TomHeatwole/osf.io,Nesiehr/osf.io,Nesiehr/osf.io,mattclark/osf.io,caneruguz/osf.io,zachjanicki/osf.io,caneruguz/osf.io,SSJohns/osf.io,adlius/osf.io,monikagrabowska/osf.io,abought/osf.io,crcresearch/osf.io,HalcyonChimera/osf.io,KAsante95/osf.io,aaxelb/osf.io,acshi/osf.io,KAsante95/osf.io,TomBaxter/osf.io,amyshi188/osf.io,mluo613/osf.io,crcresearch/osf.io,sloria/osf.io,amyshi188/osf.io,wearpants/osf.io,CenterForOpenScience/osf.io,KAsante95/osf.io,caneruguz/osf.io,aaxelb/osf.io,icereval/osf.io,abought/osf.io,billyhunt/osf.io,binoculars/osf.io,samchrisinger/osf.io,cslzchen/osf.io,caseyrollins/osf.io,asanfilippo7/osf.io,chennan47/osf.io,samchrisinger/osf.io,emetsger/osf.io,mluo613/osf.io,binoculars/osf.io,alexschiller/osf.io,Nesiehr/osf.io,laurenrevere/osf.io,mluo613/osf.io,alexschiller/osf.io,kwierman/osf.io,alexschiller/osf.io,DanielSBrown/osf.io,SSJohns/osf.io,cwisecarver/osf.io,baylee-d/osf.io,brandonPurvis/osf.io,felliott/osf.io,SSJohns/osf.io,pattisdr/osf.io,icereval/osf.io,acshi/osf.io,pattisdr/osf.io,Nesiehr/osf.io,adlius/osf.io,caneruguz/osf.io,RomanZWang/osf.io,mattclark/osf.io,cslzchen/osf.io,TomHeatwole/osf.io,felliott/osf.io,brianjgeiger/osf.io,leb2dg/osf.io,felliott/osf.io,doublebits/osf.io,chennan47/osf.io,GageGaskins/osf.io,monikagrabowska/osf.io,acshi/osf.io,brianjgeiger/osf.io,erinspace/osf.io,aaxelb/osf.io,caseyrollins/osf.io,KAsante95/osf.io,billyhunt/osf.io,adlius/osf.io,zachjanicki/osf.io,baylee-d/osf.io,kch8qx/osf.io,monikagrabowska/osf.io,brandonPurvis/osf.io,zachjanicki/osf.io,mluke93/osf.io,kch8qx/osf.io,caseyrollins/osf.io,Ghalko/osf.io,rdhyee/osf.io,kch8qx/osf.io,Ghalko/osf.io,brandonPurvis/osf.io,samchrisinger/osf.io,aaxelb/osf.io,felliott/osf.io,mfraezz/osf.io,DanielSBrown/osf.io,jnayak1/osf.io,Ghalko/osf.io,mfraezz/osf.io,SSJohns/osf.io,cwisecarver/osf.io,mluo613/osf.io,billyhunt/osf.io,leb2dg/osf.io,CenterForOpenScience/osf.io,jnayak1/osf.io,cslzchen/osf.io,baylee-d/osf.io,GageGaskins/osf.io,adlius/osf.io,mfraezz/osf.io,wearpants/osf.io,doublebits/osf.io,zamattiac/osf.io,GageGaskins/osf.io,TomBaxter/osf.io,kch8qx/osf.io,zamattiac/osf.io,doublebits/osf.io,chrisseto/osf.io,zachjanicki/osf.io,KAsante95/osf.io,Johnetordoff/osf.io,GageGaskins/osf.io,chrisseto/osf.io,brandonPurvis/osf.io,CenterForOpenScience/osf.io,amyshi188/osf.io,RomanZWang/osf.io,alexschiller/osf.io,brandonPurvis/osf.io,wearpants/osf.io,billyhunt/osf.io,kwierman/osf.io,jnayak1/osf.io,sloria/osf.io,rdhyee/osf.io,emetsger/osf.io,GageGaskins/osf.io,abought/osf.io,laurenrevere/osf.io,HalcyonChimera/osf.io,doublebits/osf.io,zamattiac/osf.io,emetsger/osf.io,amyshi188/osf.io,chrisseto/osf.io,emetsger/osf.io,asanfilippo7/osf.io,samchrisinger/osf.io,cslzchen/osf.io,binoculars/osf.io,rdhyee/osf.io,icereval/osf.io,Johnetordoff/osf.io,kch8qx/osf.io,asanfilippo7/osf.io,kwierman/osf.io,mluke93/osf.io,laurenrevere/osf.io,saradbowman/osf.io,RomanZWang/osf.io,hmoco/osf.io,TomBaxter/osf.io,cwisecarver/osf.io,mluke93/osf.io,CenterForOpenScience/osf.io,kwierman/osf.io,Ghalko/osf.io,pattisdr/osf.io,Johnetordoff/osf.io,chrisseto/osf.io,monikagrabowska/osf.io,hmoco/osf.io,erinspace/osf.io,HalcyonChimera/osf.io,wearpants/osf.io,acshi/osf.io,TomHeatwole/osf.io,mattclark/osf.io,brianjgeiger/osf.io,acshi/osf.io,chennan47/osf.io,DanielSBrown/osf.io,brianjgeiger/osf.io,mluo613/osf.io,abought/osf.io,erinspace/osf.io,cwisecarver/osf.io,saradbowman/osf.io,hmoco/osf.io,mluke93/osf.io,RomanZWang/osf.io,jnayak1/osf.io,RomanZWang/osf.io,TomHeatwole/osf.io,asanfilippo7/osf.io,mfraezz/osf.io,doubleb
its/osf.io,leb2dg/osf.io,leb2dg/osf.io,zamattiac/osf.io,monikagrabowska/osf.io,billyhunt/osf.io,hmoco/osf.io,rdhyee/osf.io,alexschiller/osf.io,HalcyonChimera/osf.io,sloria/osf.io,DanielSBrown/osf.io
--- +++ @@ -4,8 +4,10 @@ import sys import logging +from framework.tasks.handlers import celery_teardown_request + from website.app import init_app -from website.models import DraftRegistration, Sanction, User +from website.project.model import DraftRegistration, Sanction logger = logging.getLogger(__name__) logging.basicConfig(level=logging.WARN) @@ -17,7 +19,7 @@ logger.warn('DRY RUN mode') pending_approval_drafts = DraftRegistration.find() need_approval_drafts = [draft for draft in pending_approval_drafts - if draft.requires_approval and draft.approval and draft.approval.state == Sanction.UNAPPROVED] + if draft.approval and draft.requires_approval and draft.approval.state == Sanction.UNAPPROVED] for draft in need_approval_drafts: sanction = draft.approval @@ -32,5 +34,6 @@ if __name__ == '__main__': dry_run = 'dry' in sys.argv - init_app(routes=False) + app = init_app(routes=False) main(dry_run=dry_run) + celery_teardown_request()
579fbe76f8ab86d5f599f9888149fcaed297842c
src/sentry/celery.py
src/sentry/celery.py
from __future__ import absolute_import import celery import os import os.path import sys # Add the project to the python path sys.path.insert(0, os.path.join(os.path.dirname(__file__), os.pardir)) # Configure the application only if it seemingly isnt already configured from django.conf import settings if not settings.configured: from sentry.runner import configure configure() from sentry.utils import metrics class Celery(celery.Celery): def on_configure(self): from raven.contrib.django.models import client from raven.contrib.celery import register_signal, register_logger_signal # register a custom filter to filter out duplicate logs register_logger_signal(client) # hook into the Celery error handler register_signal(client) app = Celery('sentry') OriginalTask = app.Task class SentryTask(OriginalTask): def apply_async(self, args, kwargs): key = 'jobs.delay' instance = self.name with metrics.timer(key, instance=instance): return OriginalTask.apply_async(self, args, kwargs) app.Task = SentryTask # Using a string here means the worker will not have to # pickle the object when using Windows. app.config_from_object(settings) app.autodiscover_tasks(lambda: settings.INSTALLED_APPS) if __name__ == '__main__': app.start()
from __future__ import absolute_import import celery import os import os.path import sys # Add the project to the python path sys.path.insert(0, os.path.join(os.path.dirname(__file__), os.pardir)) # Configure the application only if it seemingly isnt already configured from django.conf import settings if not settings.configured: from sentry.runner import configure configure() from sentry.utils import metrics class Celery(celery.Celery): def on_configure(self): from raven.contrib.django.models import client from raven.contrib.celery import register_signal, register_logger_signal # register a custom filter to filter out duplicate logs register_logger_signal(client) # hook into the Celery error handler register_signal(client) app = Celery('sentry') OriginalTask = app.Task class SentryTask(OriginalTask): def apply_async(self, *args, **kwargs): key = 'jobs.delay' instance = self.name with metrics.timer(key, instance=instance): return OriginalTask.apply_async(self, *args, **kwargs) app.Task = SentryTask # Using a string here means the worker will not have to # pickle the object when using Windows. app.config_from_object(settings) app.autodiscover_tasks(lambda: settings.INSTALLED_APPS) if __name__ == '__main__': app.start()
Use star args to invoke apply_async
Use star args to invoke apply_async
Python
bsd-3-clause
zenefits/sentry,nicholasserra/sentry,fotinakis/sentry,JamesMura/sentry,alexm92/sentry,nicholasserra/sentry,BuildingLink/sentry,gencer/sentry,daevaorn/sentry,beeftornado/sentry,JamesMura/sentry,mvaled/sentry,jean/sentry,JamesMura/sentry,fotinakis/sentry,mvaled/sentry,JamesMura/sentry,beeftornado/sentry,looker/sentry,fotinakis/sentry,jean/sentry,ifduyue/sentry,zenefits/sentry,ifduyue/sentry,daevaorn/sentry,JackDanger/sentry,zenefits/sentry,jean/sentry,gencer/sentry,jean/sentry,BuildingLink/sentry,BuildingLink/sentry,mvaled/sentry,JackDanger/sentry,looker/sentry,daevaorn/sentry,JamesMura/sentry,mitsuhiko/sentry,fotinakis/sentry,daevaorn/sentry,gencer/sentry,mvaled/sentry,mvaled/sentry,gencer/sentry,ifduyue/sentry,nicholasserra/sentry,JackDanger/sentry,BuildingLink/sentry,mitsuhiko/sentry,alexm92/sentry,jean/sentry,BuildingLink/sentry,looker/sentry,mvaled/sentry,beeftornado/sentry,looker/sentry,zenefits/sentry,looker/sentry,alexm92/sentry,ifduyue/sentry,gencer/sentry,ifduyue/sentry,zenefits/sentry
--- +++ @@ -37,11 +37,11 @@ class SentryTask(OriginalTask): - def apply_async(self, args, kwargs): + def apply_async(self, *args, **kwargs): key = 'jobs.delay' instance = self.name with metrics.timer(key, instance=instance): - return OriginalTask.apply_async(self, args, kwargs) + return OriginalTask.apply_async(self, *args, **kwargs) app.Task = SentryTask
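The switch from a fixed (self, args, kwargs) signature to star-args matters because callers of apply_async routinely pass task options as keyword arguments; with two fixed positional parameters such calls break, while star-args forward everything unchanged. A toy sketch of the difference, with invented class names and no Celery involved:

class Base(object):
    def apply_async(self, *args, **kwargs):
        return args, kwargs

class Timed(Base):
    def apply_async(self, *args, **kwargs):
        # a metrics timer would wrap this call in the real SentryTask
        return Base.apply_async(self, *args, **kwargs)

args, kwargs = Timed().apply_async((1, 2), countdown=5)
assert args == ((1, 2),) and kwargs == {'countdown': 5}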
e9031ab6091e3b9d7866c300c8e21b9e81e7e935
api/collections/urls.py
api/collections/urls.py
from django.conf.urls import url from api.collections import views from website import settings urlpatterns = [] # Routes only active in local/staging environments if settings.DEV_MODE: urlpatterns.extend([ url(r'^$', views.CollectionList.as_view(), name='collection-list'), url(r'^(?P<collection_id>\w+)/$', views.CollectionDetail.as_view(), name='collection-detail'), url(r'^(?P<collection_id>\w+)/linked_nodes/$', views.LinkedNodesList.as_view(), name='linked-nodes'), url(r'^(?P<collection_id>\w+)/node_links/$', views.NodeLinksList.as_view(), name='node-pointers'), url(r'^(?P<collection_id>\w+)/node_links/(?P<node_link_id>\w+)/', views.NodeLinksDetail.as_view(), name='node-pointer-detail'), ])
from django.conf.urls import url from api.collections import views urlpatterns = [ url(r'^$', views.CollectionList.as_view(), name='collection-list'), url(r'^(?P<collection_id>\w+)/$', views.CollectionDetail.as_view(), name='collection-detail'), url(r'^(?P<collection_id>\w+)/linked_nodes/$', views.LinkedNodesList.as_view(), name='linked-nodes'), url(r'^(?P<collection_id>\w+)/node_links/$', views.NodeLinksList.as_view(), name='node-pointers'), url(r'^(?P<collection_id>\w+)/node_links/(?P<node_link_id>\w+)/', views.NodeLinksDetail.as_view(), name='node-pointer-detail'), ]
Remove DEV ONLY on the sub view since the super already has it
Remove DEV ONLY on the sub view since the super already has it
Python
apache-2.0
cslzchen/osf.io,leb2dg/osf.io,kch8qx/osf.io,TomBaxter/osf.io,wearpants/osf.io,laurenrevere/osf.io,chrisseto/osf.io,cslzchen/osf.io,mluo613/osf.io,rdhyee/osf.io,mluke93/osf.io,adlius/osf.io,CenterForOpenScience/osf.io,pattisdr/osf.io,billyhunt/osf.io,mluo613/osf.io,caseyrollins/osf.io,zachjanicki/osf.io,jnayak1/osf.io,amyshi188/osf.io,DanielSBrown/osf.io,ticklemepierce/osf.io,alexschiller/osf.io,aaxelb/osf.io,acshi/osf.io,kwierman/osf.io,chennan47/osf.io,ZobairAlijan/osf.io,mluke93/osf.io,HalcyonChimera/osf.io,billyhunt/osf.io,cslzchen/osf.io,jnayak1/osf.io,alexschiller/osf.io,GageGaskins/osf.io,hmoco/osf.io,HalcyonChimera/osf.io,samanehsan/osf.io,amyshi188/osf.io,DanielSBrown/osf.io,GageGaskins/osf.io,kwierman/osf.io,CenterForOpenScience/osf.io,danielneis/osf.io,brianjgeiger/osf.io,monikagrabowska/osf.io,crcresearch/osf.io,asanfilippo7/osf.io,TomBaxter/osf.io,caneruguz/osf.io,samanehsan/osf.io,mattclark/osf.io,binoculars/osf.io,alexschiller/osf.io,danielneis/osf.io,abought/osf.io,cwisecarver/osf.io,adlius/osf.io,pattisdr/osf.io,mfraezz/osf.io,saradbowman/osf.io,TomHeatwole/osf.io,kch8qx/osf.io,erinspace/osf.io,emetsger/osf.io,chrisseto/osf.io,acshi/osf.io,kwierman/osf.io,cwisecarver/osf.io,doublebits/osf.io,samchrisinger/osf.io,samchrisinger/osf.io,caseyrygt/osf.io,erinspace/osf.io,brianjgeiger/osf.io,emetsger/osf.io,abought/osf.io,monikagrabowska/osf.io,Johnetordoff/osf.io,felliott/osf.io,KAsante95/osf.io,mluke93/osf.io,crcresearch/osf.io,KAsante95/osf.io,GageGaskins/osf.io,RomanZWang/osf.io,Ghalko/osf.io,SSJohns/osf.io,asanfilippo7/osf.io,wearpants/osf.io,billyhunt/osf.io,Nesiehr/osf.io,aaxelb/osf.io,doublebits/osf.io,GageGaskins/osf.io,billyhunt/osf.io,caneruguz/osf.io,caseyrollins/osf.io,felliott/osf.io,ZobairAlijan/osf.io,acshi/osf.io,leb2dg/osf.io,ticklemepierce/osf.io,ticklemepierce/osf.io,TomHeatwole/osf.io,brianjgeiger/osf.io,zachjanicki/osf.io,samchrisinger/osf.io,doublebits/osf.io,abought/osf.io,Nesiehr/osf.io,mluo613/osf.io,RomanZWang/osf.io,mluo613/osf.io,brandonPurvis/osf.io,wearpants/osf.io,leb2dg/osf.io,chennan47/osf.io,sloria/osf.io,mfraezz/osf.io,sloria/osf.io,doublebits/osf.io,caneruguz/osf.io,kwierman/osf.io,samanehsan/osf.io,chrisseto/osf.io,zamattiac/osf.io,ticklemepierce/osf.io,Nesiehr/osf.io,binoculars/osf.io,TomHeatwole/osf.io,mluo613/osf.io,Ghalko/osf.io,laurenrevere/osf.io,baylee-d/osf.io,mluke93/osf.io,hmoco/osf.io,KAsante95/osf.io,mfraezz/osf.io,monikagrabowska/osf.io,felliott/osf.io,amyshi188/osf.io,zamattiac/osf.io,brandonPurvis/osf.io,samchrisinger/osf.io,zamattiac/osf.io,caseyrygt/osf.io,TomHeatwole/osf.io,icereval/osf.io,baylee-d/osf.io,erinspace/osf.io,ZobairAlijan/osf.io,acshi/osf.io,SSJohns/osf.io,HalcyonChimera/osf.io,chennan47/osf.io,Johnetordoff/osf.io,Nesiehr/osf.io,doublebits/osf.io,SSJohns/osf.io,cwisecarver/osf.io,caneruguz/osf.io,DanielSBrown/osf.io,ZobairAlijan/osf.io,samanehsan/osf.io,aaxelb/osf.io,rdhyee/osf.io,amyshi188/osf.io,chrisseto/osf.io,asanfilippo7/osf.io,caseyrollins/osf.io,kch8qx/osf.io,caseyrygt/osf.io,HalcyonChimera/osf.io,CenterForOpenScience/osf.io,zamattiac/osf.io,rdhyee/osf.io,icereval/osf.io,leb2dg/osf.io,CenterForOpenScience/osf.io,adlius/osf.io,saradbowman/osf.io,cslzchen/osf.io,RomanZWang/osf.io,GageGaskins/osf.io,emetsger/osf.io,KAsante95/osf.io,mfraezz/osf.io,Ghalko/osf.io,brandonPurvis/osf.io,sloria/osf.io,billyhunt/osf.io,laurenrevere/osf.io,pattisdr/osf.io,emetsger/osf.io,alexschiller/osf.io,abought/osf.io,zachjanicki/osf.io,icereval/osf.io,jnayak1/osf.io,danielneis/osf.io,RomanZWang/osf.io,mattclark/osf.io,danielneis
/osf.io,asanfilippo7/osf.io,monikagrabowska/osf.io,adlius/osf.io,mattclark/osf.io,rdhyee/osf.io,hmoco/osf.io,brandonPurvis/osf.io,brianjgeiger/osf.io,Johnetordoff/osf.io,brandonPurvis/osf.io,RomanZWang/osf.io,kch8qx/osf.io,baylee-d/osf.io,caseyrygt/osf.io,acshi/osf.io,cwisecarver/osf.io,Ghalko/osf.io,aaxelb/osf.io,KAsante95/osf.io,kch8qx/osf.io,alexschiller/osf.io,Johnetordoff/osf.io,zachjanicki/osf.io,wearpants/osf.io,jnayak1/osf.io,binoculars/osf.io,monikagrabowska/osf.io,felliott/osf.io,hmoco/osf.io,SSJohns/osf.io,TomBaxter/osf.io,DanielSBrown/osf.io,crcresearch/osf.io
--- +++ @@ -1,16 +1,11 @@ from django.conf.urls import url from api.collections import views -from website import settings -urlpatterns = [] - -# Routes only active in local/staging environments -if settings.DEV_MODE: - urlpatterns.extend([ - url(r'^$', views.CollectionList.as_view(), name='collection-list'), - url(r'^(?P<collection_id>\w+)/$', views.CollectionDetail.as_view(), name='collection-detail'), - url(r'^(?P<collection_id>\w+)/linked_nodes/$', views.LinkedNodesList.as_view(), name='linked-nodes'), - url(r'^(?P<collection_id>\w+)/node_links/$', views.NodeLinksList.as_view(), name='node-pointers'), - url(r'^(?P<collection_id>\w+)/node_links/(?P<node_link_id>\w+)/', views.NodeLinksDetail.as_view(), name='node-pointer-detail'), - ]) +urlpatterns = [ + url(r'^$', views.CollectionList.as_view(), name='collection-list'), + url(r'^(?P<collection_id>\w+)/$', views.CollectionDetail.as_view(), name='collection-detail'), + url(r'^(?P<collection_id>\w+)/linked_nodes/$', views.LinkedNodesList.as_view(), name='linked-nodes'), + url(r'^(?P<collection_id>\w+)/node_links/$', views.NodeLinksList.as_view(), name='node-pointers'), + url(r'^(?P<collection_id>\w+)/node_links/(?P<node_link_id>\w+)/', views.NodeLinksDetail.as_view(), name='node-pointer-detail'), +]
af9b7dd5e4923c15a8adbab41ee9933245714ec8
python/example.py
python/example.py
#! /usr/bin/env python from lsapi import lsapi l = lsapi('my-access-id', 'my-secret-key') # As you may have noticed, there are lots of columns available # I did what I could to make them easily-accessible, but there # are a lot, and the names are long. So, the API calls have # defaults # Let's get some URL metrics. Results are now an array of dictionaries # the i'th dictionary is the results for the i'th URL metrics = l.urlMetrics(['www.moz.com', 'www.moz.com/blog']) # Now let's say we only want specific columns in the results authorities = l.urlMetrics(['www.moz.com'], lsapi.UMCols.domainAuthority | lsapi.UMCols.pageAuthority) # Or if you just need results for one URL mozMetrics = l.urlMetrics('www.moz.com') # Now for some anchor text results anchorResults = l.anchorText('www.moz.com/blog') # Or for just specific columns anchorTermResults = l.anchorText('www.moz.com/blog', cols=lsapi.ATCols.term) # Now for some links results links = l.links('www.moz.com') # The links API has more columns to specify, as well as sort, scope, etc. links = l.links('www.moz.com', scope='domain_to_domain', sort='domain_authority', filters=['external', 'nofollow'], targetCols = lsapi.UMCols.url)
#! /usr/bin/env python from lsapi import lsapi l = lsapi('my-access-id', 'my-secret-key') # As you may have noticed, there are lots of columns available # I did what I could to make them easily-accessible, but there # are a lot, and the names are long. So, the API calls have # defaults # Let's get some URL metrics. Results are now an array of dictionaries # the i'th dictionary is the results for the i'th URL metrics = l.urlMetrics(['www.moz.com', 'www.moz.com/blog']) # Now let's say we only want specific columns in the results authorities = l.urlMetrics(['www.moz.com'], lsapi.UMCols.domainAuthority | lsapi.UMCols.pageAuthority) # Or if you just need results for one URL mozMetrics = l.urlMetrics('www.moz.com') # Now for some anchor text results anchorResults = l.anchorText('www.moz.com/blog') # Or for just specific columns anchorTermResults = l.anchorText('www.moz.com/blog', cols=lsapi.ATCols.term) # Now for some links results links = l.links('www.moz.com') # The links API has more columns to specify, as well as sort, scope, etc. links = l.links('www.moz.com', scope='domain_to_domain', sort='domain_authority', filters=['external', 'nofollow'], targetCols=lsapi.UMCols.url)
Format keyword arguments like PEP-8.
Format keyword arguments like PEP-8.
Python
mit
seomoz/SEOmozAPISamples,seomoz/SEOmozAPISamples,seomoz/SEOmozAPISamples,seomoz/SEOmozAPISamples,seomoz/SEOmozAPISamples,seomoz/SEOmozAPISamples
--- +++ @@ -26,4 +26,4 @@ links = l.links('www.moz.com') # The links API has more columns to specify, as well as sort, scope, etc. links = l.links('www.moz.com', scope='domain_to_domain', sort='domain_authority', - filters=['external', 'nofollow'], targetCols = lsapi.UMCols.url) + filters=['external', 'nofollow'], targetCols=lsapi.UMCols.url)
610c27c15c09777a84143f551bb2cd3c2a5e3584
nltk/align/util.py
nltk/align/util.py
# Natural Language Toolkit: Aligner Utilities # # Copyright (C) 2001-2013 NLTK Project # Author: # URL: <http://www.nltk.org/> # For license information, see LICENSE.TXT
# Natural Language Toolkit: Aligner Utilities # # Copyright (C) 2001-2013 NLTK Project # Author: # URL: <http://www.nltk.org/> # For license information, see LICENSE.TXT from nltk.align.api import Alignment def pharaohtext2tuples(pharaoh_text): """ Converts pharaoh text format into an Alignment object (a list of tuples). >>> pharaoh_text = '0-0 2-1 9-2 21-3 10-4 7-5' >>> pharaohtext2tuples(pharaoh_text) Alignment([(0, 0), (2, 1), (7, 5), (9, 2), (10, 4), (21, 3)]) :type pharaoh_text: str :param pharaoh_text: the word alignment outputs in the pharaoh output format :rtype: Alignment :return: An Alignment object that contains a list of integer tuples """ list_of_tuples = [] for a in pharaoh_text.split(): # Converts integers to strings for a word alignment point. alignment_point = map(int,a.split('-')) list_of_tuples.append(tuple(alignment_point)) return Alignment(list_of_tuples) def alignment2pharaohtext(alignment): """ Converts an Alignment object (a list of tuples) into pharaoh text format. >>> alignment = [(0, 0), (2, 1), (9, 2), (21, 3), (10, 4), (7, 5)] >>> alignment2pharaohtext(alignment) '0-0 2-1 9-2 21-3 10-4 7-5' :type alignment: Alignment :param alignment: An Alignment object that contains a list of integer tuples :rtype: str :return: the word alignment outputs in the pharaoh output format """ o = [] for i,j in alignment: o.append(str(i) + "-" + str(j)) pharaoh_text = ' '.join(o) return pharaoh_text if __name__ == "__main__": import doctest doctest.testmod()
Convert pharaoh output format to Alignment object and vice versa:wq
Convert pharaoh output format to Alignment object and vice versa:wq
Python
apache-2.0
nltk/nltk,nltk/nltk,nltk/nltk
--- +++ @@ -5,3 +5,48 @@ # URL: <http://www.nltk.org/> # For license information, see LICENSE.TXT +from nltk.align.api import Alignment + +def pharaohtext2tuples(pharaoh_text): + """ + Converts pharaoh text format into an Alignment object (a list of tuples). + + >>> pharaoh_text = '0-0 2-1 9-2 21-3 10-4 7-5' + >>> pharaohtext2tuples(pharaoh_text) + Alignment([(0, 0), (2, 1), (7, 5), (9, 2), (10, 4), (21, 3)]) + + :type pharaoh_text: str + :param pharaoh_text: the word alignment outputs in the pharaoh output format + :rtype: Alignment + :return: An Alignment object that contains a list of integer tuples + """ + list_of_tuples = [] + for a in pharaoh_text.split(): + # Converts integers to strings for a word alignment point. + alignment_point = map(int,a.split('-')) + list_of_tuples.append(tuple(alignment_point)) + return Alignment(list_of_tuples) + + +def alignment2pharaohtext(alignment): + """ + Converts an Alignment object (a list of tuples) into pharaoh text format. + + >>> alignment = [(0, 0), (2, 1), (9, 2), (21, 3), (10, 4), (7, 5)] + >>> alignment2pharaohtext(alignment) + '0-0 2-1 9-2 21-3 10-4 7-5' + + :type alignment: Alignment + :param alignment: An Alignment object that contains a list of integer tuples + :rtype: str + :return: the word alignment outputs in the pharaoh output format + """ + o = [] + for i,j in alignment: + o.append(str(i) + "-" + str(j)) + pharaoh_text = ' '.join(o) + return pharaoh_text + +if __name__ == "__main__": + import doctest + doctest.testmod()
59eb6eaf18c67c424dad35f82f6baac6c93de380
elasticmock/__init__.py
elasticmock/__init__.py
# -*- coding: utf-8 -*- from functools import wraps from mock import patch from elasticmock.fake_elasticsearch import FakeElasticsearch ELASTIC_INSTANCES = {} def _get_elasticmock(hosts=None, *args, **kwargs): elastic_key = 'localhost:9200' if hosts is None else '{0}:{1}'.format(hosts[0].get('host'), hosts[0].get('port')) if elastic_key in ELASTIC_INSTANCES: connection = ELASTIC_INSTANCES.get(elastic_key) else: connection = FakeElasticsearch() ELASTIC_INSTANCES[elastic_key] = connection return connection def elasticmock(f): @wraps(f) def decorated(*args, **kwargs): ELASTIC_INSTANCES.clear() with patch('elasticsearch.Elasticsearch', _get_elasticmock): result = f(*args, **kwargs) return result return decorated
# -*- coding: utf-8 -*- from functools import wraps from elasticsearch.client import _normalize_hosts from mock import patch from elasticmock.fake_elasticsearch import FakeElasticsearch ELASTIC_INSTANCES = {} def _get_elasticmock(hosts=None, *args, **kwargs): host = _normalize_hosts(hosts)[0] elastic_key = '{0}:{1}'.format( host.get('host', 'localhost'), host.get('port', 9200) ) if elastic_key in ELASTIC_INSTANCES: connection = ELASTIC_INSTANCES.get(elastic_key) else: connection = FakeElasticsearch() ELASTIC_INSTANCES[elastic_key] = connection return connection def elasticmock(f): @wraps(f) def decorated(*args, **kwargs): ELASTIC_INSTANCES.clear() with patch('elasticsearch.Elasticsearch', _get_elasticmock): result = f(*args, **kwargs) return result return decorated
Allow the same 'hosts' settings that Elasticsearch does
Allow the same 'hosts' settings that Elasticsearch does
Python
mit
vrcmarcos/elasticmock
--- +++ @@ -2,6 +2,7 @@ from functools import wraps +from elasticsearch.client import _normalize_hosts from mock import patch from elasticmock.fake_elasticsearch import FakeElasticsearch @@ -10,7 +11,11 @@ def _get_elasticmock(hosts=None, *args, **kwargs): - elastic_key = 'localhost:9200' if hosts is None else '{0}:{1}'.format(hosts[0].get('host'), hosts[0].get('port')) + host = _normalize_hosts(hosts)[0] + elastic_key = '{0}:{1}'.format( + host.get('host', 'localhost'), host.get('port', 9200) + ) + if elastic_key in ELASTIC_INSTANCES: connection = ELASTIC_INSTANCES.get(elastic_key) else:
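The patched factory keeps one fake connection per host:port pair, so repeated Elasticsearch(...) constructions against the same host share an instance. A reduced sketch of that memoisation with a hand-rolled normaliser standing in for elasticsearch's _normalize_hosts, whose real behaviour is richer than shown here:

_instances = {}

def fake_factory(hosts=None):
    host = (hosts or [{}])[0]
    key = '{0}:{1}'.format(host.get('host', 'localhost'), host.get('port', 9200))
    if key not in _instances:
        _instances[key] = object()     # stand-in for FakeElasticsearch()
    return _instances[key]

assert fake_factory([{'host': 'es1', 'port': 9300}]) is fake_factory([{'host': 'es1', 'port': 9300}])
assert fake_factory() is not fake_factory([{'host': 'es1', 'port': 9300}])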
7d10b9d803089d1cf8a0c06219608d31bf5fb84f
src/collectors/MongoDBCollector/MongoDBCollector.py
src/collectors/MongoDBCollector/MongoDBCollector.py
try: from numbers import Number import pymongo except ImportError: Number = None import diamond class MongoDBCollector(diamond.collector.Collector): """Collects data from MongoDB's db.serverStatus() command Collects all number values from the db.serverStatus() command, other values are ignored. """ def get_default_config(self): """ Returns the default collector settings """ return { 'path': 'mongo', 'host': 'localhost' } def collect(self): """Collect number values from db.serverStatus()""" if Number is None: self.log.error('Unable to import either Number or pymongo') return {} conn = pymongo.Connection(self.config['host'],slave_okay=True) data = conn.db.command('serverStatus') for key in data: self._publish_metrics([], key, data) def _publish_metrics(self, prev_keys, key, data): """Recursively publish keys""" value = data[key] keys = prev_keys + [key] if isinstance(value, dict): for new_key in value: self._publish_metrics(keys, new_key, value) elif isinstance(value, Number): self.publish('.'.join(keys), value)
try: from numbers import Number import pymongo from pymongo import ReadPreference except ImportError: Number = None import diamond class MongoDBCollector(diamond.collector.Collector): """Collects data from MongoDB's db.serverStatus() command Collects all number values from the db.serverStatus() command, other values are ignored. """ def get_default_config(self): """ Returns the default collector settings """ return { 'path': 'mongo', 'host': 'localhost' } def collect(self): """Collect number values from db.serverStatus()""" if Number is None: self.log.error('Unable to import either Number or pymongo') return {} conn = pymongo.Connection(self.config['host'],read_preference=ReadPreference.SECONDARY) data = conn.db.command('serverStatus') for key in data: self._publish_metrics([], key, data) def _publish_metrics(self, prev_keys, key, data): """Recursively publish keys""" value = data[key] keys = prev_keys + [key] if isinstance(value, dict): for new_key in value: self._publish_metrics(keys, new_key, value) elif isinstance(value, Number): self.publish('.'.join(keys), value)
Replace deprecated slave_ok for read_preference in pymongo.Connection
Replace deprecated slave_ok for read_preference in pymongo.Connection See: http://api.mongodb.org/python/current/api/pymongo/connection.html
Python
mit
jriguera/Diamond,MichaelDoyle/Diamond,Netuitive/netuitive-diamond,hamelg/Diamond,dcsquared13/Diamond,sebbrandt87/Diamond,python-diamond/Diamond,gg7/diamond,sebbrandt87/Diamond,actmd/Diamond,signalfx/Diamond,Basis/Diamond,bmhatfield/Diamond,python-diamond/Diamond,krbaker/Diamond,jumping/Diamond,signalfx/Diamond,rtoma/Diamond,datafiniti/Diamond,CYBERBUGJR/Diamond,metamx/Diamond,ramjothikumar/Diamond,Clever/Diamond,dcsquared13/Diamond,socialwareinc/Diamond,jumping/Diamond,szibis/Diamond,zoidbergwill/Diamond,Ssawa/Diamond,h00dy/Diamond,tellapart/Diamond,janisz/Diamond-1,MichaelDoyle/Diamond,EzyInsights/Diamond,works-mobile/Diamond,thardie/Diamond,Netuitive/netuitive-diamond,codepython/Diamond,Netuitive/netuitive-diamond,MediaMath/Diamond,cannium/Diamond,hvnsweeting/Diamond,MediaMath/Diamond,Ssawa/Diamond,jumping/Diamond,skbkontur/Diamond,eMerzh/Diamond-1,anandbhoraskar/Diamond,krbaker/Diamond,acquia/Diamond,Precis/Diamond,gg7/diamond,cannium/Diamond,tusharmakkar08/Diamond,stuartbfox/Diamond,skbkontur/Diamond,zoidbergwill/Diamond,Slach/Diamond,actmd/Diamond,mzupan/Diamond,socialwareinc/Diamond,janisz/Diamond-1,MichaelDoyle/Diamond,eMerzh/Diamond-1,thardie/Diamond,jaingaurav/Diamond,h00dy/Diamond,saucelabs/Diamond,EzyInsights/Diamond,skbkontur/Diamond,Nihn/Diamond-1,Ormod/Diamond,TinLe/Diamond,Netuitive/netuitive-diamond,jaingaurav/Diamond,Netuitive/Diamond,Slach/Diamond,Netuitive/Diamond,ceph/Diamond,mzupan/Diamond,mzupan/Diamond,tusharmakkar08/Diamond,tellapart/Diamond,hamelg/Diamond,CYBERBUGJR/Diamond,Netuitive/Diamond,rtoma/Diamond,Ormod/Diamond,Ensighten/Diamond,bmhatfield/Diamond,cannium/Diamond,skbkontur/Diamond,cannium/Diamond,acquia/Diamond,Slach/Diamond,Ensighten/Diamond,bmhatfield/Diamond,jumping/Diamond,joel-airspring/Diamond,dcsquared13/Diamond,Basis/Diamond,Ssawa/Diamond,Precis/Diamond,tuenti/Diamond,ramjothikumar/Diamond,MichaelDoyle/Diamond,Ormod/Diamond,russss/Diamond,szibis/Diamond,works-mobile/Diamond,hamelg/Diamond,CYBERBUGJR/Diamond,russss/Diamond,tellapart/Diamond,anandbhoraskar/Diamond,gg7/diamond,timchenxiaoyu/Diamond,Precis/Diamond,zoidbergwill/Diamond,TAKEALOT/Diamond,codepython/Diamond,Netuitive/Diamond,signalfx/Diamond,Precis/Diamond,works-mobile/Diamond,russss/Diamond,h00dy/Diamond,timchenxiaoyu/Diamond,TinLe/Diamond,szibis/Diamond,jaingaurav/Diamond,TAKEALOT/Diamond,stuartbfox/Diamond,datafiniti/Diamond,saucelabs/Diamond,tellapart/Diamond,jriguera/Diamond,saucelabs/Diamond,Ensighten/Diamond,python-diamond/Diamond,disqus/Diamond,hamelg/Diamond,datafiniti/Diamond,tuenti/Diamond,datafiniti/Diamond,mfriedenhagen/Diamond,timchenxiaoyu/Diamond,Basis/Diamond,acquia/Diamond,mfriedenhagen/Diamond,eMerzh/Diamond-1,EzyInsights/Diamond,TAKEALOT/Diamond,dcsquared13/Diamond,anandbhoraskar/Diamond,saucelabs/Diamond,ceph/Diamond,EzyInsights/Diamond,joel-airspring/Diamond,metamx/Diamond,actmd/Diamond,TinLe/Diamond,krbaker/Diamond,jaingaurav/Diamond,MediaMath/Diamond,ceph/Diamond,disqus/Diamond,anandbhoraskar/Diamond,socialwareinc/Diamond,hvnsweeting/Diamond,jriguera/Diamond,tusharmakkar08/Diamond,rtoma/Diamond,bmhatfield/Diamond,tuenti/Diamond,Basis/Diamond,joel-airspring/Diamond,acquia/Diamond,works-mobile/Diamond,janisz/Diamond-1,Nihn/Diamond-1,thardie/Diamond,tusharmakkar08/Diamond,ceph/Diamond,russss/Diamond,jriguera/Diamond,Ormod/Diamond,codepython/Diamond,socialwareinc/Diamond,Clever/Diamond,tuenti/Diamond,Clever/Diamond,ramjothikumar/Diamond,thardie/Diamond,CYBERBUGJR/Diamond,sebbrandt87/Diamond,Nihn/Diamond-1,Nihn/Diamond-1,codepython/Diamond,stuartbfox/Diamond,TAKEALOT/Diamo
nd,szibis/Diamond,actmd/Diamond,gg7/diamond,mfriedenhagen/Diamond,joel-airspring/Diamond,timchenxiaoyu/Diamond,h00dy/Diamond,stuartbfox/Diamond,disqus/Diamond,sebbrandt87/Diamond,MediaMath/Diamond,metamx/Diamond,signalfx/Diamond,Clever/Diamond,krbaker/Diamond,ramjothikumar/Diamond,Ensighten/Diamond,mfriedenhagen/Diamond,eMerzh/Diamond-1,hvnsweeting/Diamond,mzupan/Diamond,TinLe/Diamond,hvnsweeting/Diamond,Ssawa/Diamond,janisz/Diamond-1,Slach/Diamond,zoidbergwill/Diamond,rtoma/Diamond
--- +++ @@ -1,6 +1,7 @@ try: from numbers import Number import pymongo + from pymongo import ReadPreference except ImportError: Number = None @@ -31,7 +32,7 @@ self.log.error('Unable to import either Number or pymongo') return {} - conn = pymongo.Connection(self.config['host'],slave_okay=True) + conn = pymongo.Connection(self.config['host'],read_preference=ReadPreference.SECONDARY) data = conn.db.command('serverStatus') for key in data: self._publish_metrics([], key, data)
ab5ebb50019add34333edb04cc96f7f55fce8d1c
src/toil/utils/__init__.py
src/toil/utils/__init__.py
from __future__ import absolute_import from toil import version import logging logger = logging.getLogger(__name__) def addBasicProvisionerOptions(parser): parser.add_argument("--version", action='version', version=version) parser.add_argument('-p', "--provisioner", dest='provisioner', choices=['aws', 'azure', 'gce'], required=False, default="aws", help="The provisioner for cluster auto-scaling. Only aws is currently " "supported") try: from toil.provisioners.aws import getCurrentAWSZone currentZone = getCurrentAWSZone() except ImportError: currentZone = None zoneString = currentZone if currentZone else 'No zone could be determined' parser.add_argument('-z', '--zone', dest='zone', required=False, default=currentZone, help="The AWS availability zone of the master. This parameter can also be " "set via the TOIL_AWS_ZONE environment variable, or by the ec2_region_name " "parameter in your .boto file, or derived from the instance metadata if " "using this utility on an existing EC2 instance. " "Currently: %s" % zoneString) parser.add_argument("clusterName", help="The name that the cluster will be identifiable by. " "Must be lowercase and may not contain the '_' " "character.") return parser
from __future__ import absolute_import
from toil import version

import logging
import os
logger = logging.getLogger(__name__)


def addBasicProvisionerOptions(parser):
    parser.add_argument("--version", action='version', version=version)
    parser.add_argument('-p', "--provisioner", dest='provisioner', choices=['aws', 'azure', 'gce'], required=False, default="aws",
                        help="The provisioner for cluster auto-scaling. Only aws is currently "
                             "supported")
    parser.add_argument('-z', '--zone', dest='zone', required=False, default=None,
                        help="The availability zone of the master. This parameter can also be set via the 'TOIL_X_ZONE' "
                             "environment variable, where X is AWS, GCE, or AZURE, or by the ec2_region_name parameter "
                             "in your .boto file, or derived from the instance metadata if using this utility on an "
                             "existing EC2 instance.")
    parser.add_argument("clusterName", help="The name that the cluster will be identifiable by. "
                                            "Must be lowercase and may not contain the '_' "
                                            "character.")
    return parser


def getZoneFromEnv(provisioner):
    """
    Find the zone specified in an environment variable.

    The user can specify zones in environment variables in leiu of writing them at the commandline every time.
    Given a provisioner, this method will look for the stored value and return it.
    :param str provisioner: One of the supported provisioners ('azure', 'aws', 'gce')
    :rtype: str
    :return: None or the value stored in a 'TOIL_X_ZONE' environment variable.
    """

    return os.environ.get('TOIL_' + provisioner.upper() + '_ZONE')
Remove default for zone, add method for searching for specified zone in environ vars.
Remove default for zone, add method for searching for specified zone in environ vars.
Python
apache-2.0
BD2KGenomics/slugflow,BD2KGenomics/slugflow
--- +++ @@ -2,7 +2,7 @@ from toil import version import logging - +import os logger = logging.getLogger(__name__) @@ -11,19 +11,26 @@ parser.add_argument('-p', "--provisioner", dest='provisioner', choices=['aws', 'azure', 'gce'], required=False, default="aws", help="The provisioner for cluster auto-scaling. Only aws is currently " "supported") - try: - from toil.provisioners.aws import getCurrentAWSZone - currentZone = getCurrentAWSZone() - except ImportError: - currentZone = None - zoneString = currentZone if currentZone else 'No zone could be determined' - parser.add_argument('-z', '--zone', dest='zone', required=False, default=currentZone, - help="The AWS availability zone of the master. This parameter can also be " - "set via the TOIL_AWS_ZONE environment variable, or by the ec2_region_name " - "parameter in your .boto file, or derived from the instance metadata if " - "using this utility on an existing EC2 instance. " - "Currently: %s" % zoneString) + parser.add_argument('-z', '--zone', dest='zone', required=False, default=None, + help="The availability zone of the master. This parameter can also be set via the 'TOIL_X_ZONE' " + "environment variable, where X is AWS, GCE, or AZURE, or by the ec2_region_name parameter " + "in your .boto file, or derived from the instance metadata if using this utility on an " + "existing EC2 instance.") parser.add_argument("clusterName", help="The name that the cluster will be identifiable by. " "Must be lowercase and may not contain the '_' " "character.") return parser + + +def getZoneFromEnv(provisioner): + """ + Find the zone specified in an environment variable. + + The user can specify zones in environment variables in leiu of writing them at the commandline every time. + Given a provisioner, this method will look for the stored value and return it. + :param str provisioner: One of the supported provisioners ('azure', 'aws', 'gce') + :rtype: str + :return: None or the value stored in a 'TOIL_X_ZONE' environment variable. + """ + + return os.environ.get('TOIL_' + provisioner.upper() + '_ZONE')
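A minimal usage sketch of the getZoneFromEnv helper from the record above (not part of the commit itself; the exported zone value is an assumption for illustration):

import os

def getZoneFromEnv(provisioner):
    # Same lookup as the new Toil helper: TOIL_<PROVISIONER>_ZONE.
    return os.environ.get('TOIL_' + provisioner.upper() + '_ZONE')

os.environ['TOIL_AWS_ZONE'] = 'us-west-2a'   # hypothetical value an operator might export
print(getZoneFromEnv('aws'))                 # -> 'us-west-2a'
print(getZoneFromEnv('gce'))                 # -> None, since TOIL_GCE_ZONE is unset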
98d45ace5ce5b8918e470beb7dbcb1d59b0608af
server_app/__main__.py
server_app/__main__.py
import sys
import os
import logging

if not os.path.exists(os.path.expanduser("~/.chatserver")):
    os.makedirs(os.path.expanduser("~/.chatserver")
logging.basicConfig(filename=os.path.expanduser("~/.chatserver/chat.log"), level=logging.DEBUG)

from app import app, db, main, socketio

db.create_all()
app.register_blueprint(main)

port = app.config['PORT']
if len(sys.argv) == 2:
    port = int(sys.argv[1])

logging.info("Chat server is now running on 0.0.0.0:%r" % port)
socketio.run(app, host="0.0.0.0", port=port)
import sys
import os
import logging

if not os.path.exists(os.path.expanduser("~/.chatserver")):
    os.makedirs(os.path.expanduser("~/.chatserver"))
logging.basicConfig(filename=os.path.expanduser("~/.chatserver/chat.log"), level=logging.DEBUG)

from app import app, db, main, socketio

db.create_all()
app.register_blueprint(main)

port = app.config['PORT']
if len(sys.argv) == 2:
    port = int(sys.argv[1])

logging.info("Chat server is now running on 0.0.0.0:%r" % port)
socketio.run(app, host="0.0.0.0", port=port)
Fix logging directory not existing
Fix logging directory not existing
Python
bsd-3-clause
jos0003/Chat,jos0003/Chat,jos0003/Chat,jos0003/Chat,jos0003/Chat
--- +++ @@ -3,7 +3,7 @@ import logging if not os.path.exists(os.path.expanduser("~/.chatserver")): - os.makedirs(os.path.expanduser("~/.chatserver") + os.makedirs(os.path.expanduser("~/.chatserver")) logging.basicConfig(filename=os.path.expanduser("~/.chatserver/chat.log"), level=logging.DEBUG) from app import app, db, main, socketio
7755ab25249c39350004447daa614bc35e4517e7
src/malibu/__init__.py
src/malibu/__init__.py
# -*- coding: utf-8 -*-
from malibu import command  # noqa
from malibu import config  # noqa
from malibu import database  # noqa
from malibu import design  # noqa
from malibu import text  # noqa
from malibu import util  # noqa

import subprocess

__git_label__ = ''
try:
    __git_label__ = subprocess.check_output(
        [
            'git',
            'rev-parse',
            '--short',
            'HEAD'
        ])
except (subprocess.CalledProcessError, IOError):
    __git_label__ = 'RELEASE'

__version__ = '0.1.8-7'
__release__ = '{}-{}'.format(__version__, __git_label__).strip('\n')

__doc__ = """
malibu is a collection of classes and utilities that make writing code
a little bit easier and a little less tedious.

The whole point of this library is to have a small codebase that could
be easily reused across projects with nice, easily loadable chunks that
can be used disjointly.
"""
# -*- coding: utf-8 -*-
from malibu import command  # noqa
from malibu import config  # noqa
from malibu import database  # noqa
from malibu import design  # noqa
from malibu import text  # noqa
from malibu import util  # noqa

import subprocess

__git_label__ = ''
try:
    __git_label__ = subprocess.check_output(
        [
            'git',
            'rev-parse',
            '--short',
            'HEAD'
        ])
except (subprocess.CalledProcessError, IOError):
    __git_label__ = 'RELEASE'
finally:
    __git_label__ = __git_label__.decode('utf-8').strip()

__version__ = '0.1.8-7'
__release__ = '{}-{}'.format(__version__, __git_label__)

__doc__ = """
malibu is a collection of classes and utilities that make writing code
a little bit easier and a little less tedious.

The whole point of this library is to have a small codebase that could
be easily reused across projects with nice, easily loadable chunks that
can be used disjointly.
"""
Remove unnecessary strip, add finally for release tagger
0.1.8: Remove unnecessary strip, add finally for release tagger
Python
unlicense
maiome-development/malibu
--- +++ @@ -19,9 +19,11 @@ ]) except (subprocess.CalledProcessError, IOError): __git_label__ = 'RELEASE' +finally: + __git_label__ = __git_label__.decode('utf-8').strip() __version__ = '0.1.8-7' -__release__ = '{}-{}'.format(__version__, __git_label__).strip('\n') +__release__ = '{}-{}'.format(__version__, __git_label__) __doc__ = """ malibu is a collection of classes and utilities that make writing code a little bit easier and a little less tedious.
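A short sketch of why the added finally block matters (illustrative only; the hash value is made up and a git checkout is assumed): on Python 3, subprocess.check_output returns bytes with a trailing newline, so the label has to be decoded and stripped before it is formatted into the release string.

import subprocess

label = subprocess.check_output(['git', 'rev-parse', '--short', 'HEAD'])   # e.g. b'1a2b3c4\n'
label = label.decode('utf-8').strip()                                      # '1a2b3c4'
release = '{}-{}'.format('0.1.8-7', label)                                 # '0.1.8-7-1a2b3c4'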
44e5d35b6d43a22a480000b39a4e85335a27904b
corehq/apps/es/management/commands/wipe_es.py
corehq/apps/es/management/commands/wipe_es.py
from django.core.management import BaseCommand

from corehq.apps.cleanup.utils import confirm_destructive_operation
from corehq.elastic import get_es_new


class Command(BaseCommand):
    """
    Wipe all data from BlobDB.
    """

    def add_arguments(self, parser):
        parser.add_argument(
            '--commit',
            action='store_true',
            dest='commit',
            default=False,
        )

    def handle(self, *args, **options):
        confirm_destructive_operation()

        data = wipe_es(options['commit'])
        if data:
            print(data)

        if not options['commit']:
            print("You need to run with --commit for the deletion to happen.")


def wipe_es(commit=False):
    """
    The equivalent of calling ::

        $ curl -X DELETE "$PROTO://$HOSTNAME:$PORT/_all"

    """
    es = get_es_new()
    if commit:
        return es.transport.perform_request('DELETE', '_all')
from django.core.management import BaseCommand

from corehq.apps.cleanup.utils import confirm_destructive_operation
from corehq.elastic import get_es_new
from corehq.util.es.elasticsearch import IndicesClient


class Command(BaseCommand):
    """
    Wipe all data from BlobDB.
    """

    def add_arguments(self, parser):
        parser.add_argument(
            '--commit',
            action='store_true',
            dest='commit',
            default=False,
        )

    def handle(self, *args, **options):
        confirm_destructive_operation()

        data = wipe_es(options['commit'])
        if data:
            print(data)

        if not options['commit']:
            print("You need to run with --commit for the deletion to happen.")


def wipe_es(commit=False):
    """
    The equivalent of calling ::

        $ curl -X DELETE "$PROTO://$HOSTNAME:$PORT/_all"

    """
    es = get_es_new()
    client = IndicesClient(es)
    if commit:
        client.delete('_all')
Use IndicesClient to get full URL
Use IndicesClient to get full URL
Python
bsd-3-clause
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
--- +++ @@ -2,6 +2,7 @@ from corehq.apps.cleanup.utils import confirm_destructive_operation from corehq.elastic import get_es_new +from corehq.util.es.elasticsearch import IndicesClient class Command(BaseCommand): @@ -34,5 +35,6 @@ $ curl -X DELETE "$PROTO://$HOSTNAME:$PORT/_all" """ es = get_es_new() + client = IndicesClient(es) if commit: - return es.transport.perform_request('DELETE', '_all') + client.delete('_all')
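A hedged sketch of the upstream elasticsearch-py call that the vendored import in this record corresponds to (host and port are assumptions, not taken from the repository): IndicesClient builds the full index URL for the request, which is what the commit message refers to.

from elasticsearch import Elasticsearch
from elasticsearch.client import IndicesClient

es = Elasticsearch(['http://localhost:9200'])   # assumed cluster address
IndicesClient(es).delete('_all')                # sends DELETE http://localhost:9200/_all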
a4bc21a1e24b0ac86564b1259a2583ed680a29e2
aque/handlers/reduce.py
aque/handlers/reduce.py
import logging

from aque.utils import decode_callable


log = logging.getLogger(__name__)


def reduce_children(job):

    func = decode_callable(job.get('func'))
    sequence = [child.result() for child in job.children()]
    try:
        args = (job['initial'], )
    except:
        args = ()

    log.debug('reducing %r with %r and %r' % (sequence, func, args))

    job.complete(reduce(func, sequence, *args))
import logging

from aque.utils import decode_callable


log = logging.getLogger(__name__)


def reduce_children(job):

    func = decode_callable(job.get('func'))

    args = job.get('args', ())
    if len(args) >= 2:
        job.error('too many args; reduce expects 1, got %d' % len(args))
        return

    sequence = [child.result() for child in job.children()]

    log.debug('reducing %r with %r and %r' % (sequence, func, args))

    job.complete(reduce(func, sequence, *args))
Reduce pulls initial from args
Reduce pulls initial from args
Python
bsd-3-clause
mikeboers/aque,mikeboers/aque
--- +++ @@ -9,11 +9,13 @@ def reduce_children(job): func = decode_callable(job.get('func')) + + args = job.get('args', ()) + if len(args) >= 2: + job.error('too many args; reduce expects 1, got %d' % len(args)) + return + sequence = [child.result() for child in job.children()] - try: - args = (job['initial'], ) - except: - args = () log.debug('reducing %r with %r and %r' % (sequence, func, args))
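The single allowed extra argument mirrors the optional initializer of Python's built-in reduce; a small self-contained illustration of that semantics (values chosen arbitrarily):

from functools import reduce   # built-in in Python 2, available from functools in Python 3
import operator

sequence = [1, 2, 3, 4]
args = (10,)                                    # at most one extra value: the initial accumulator
print(reduce(operator.add, sequence, *args))    # 20
print(reduce(operator.add, sequence))           # 10, no initial value supplied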
a6be362f03410b53cd21d6ff7b63bdaef6963ad4
findingaids/__init__.py
findingaids/__init__.py
# file findingaids/__init__.py
#
# Copyright 2012 Emory University Library
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

__version_info__ = (1, 9, 0, None)

# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join(str(i) for i in __version_info__[:-1])
if __version_info__[-1] is not None:
    __version__ += ('-%s' % (__version_info__[-1],))
# file findingaids/__init__.py
#
# Copyright 2012 Emory University Library
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

__version_info__ = (1, 10, 0, 'dev')

# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join(str(i) for i in __version_info__[:-1])
if __version_info__[-1] is not None:
    __version__ += ('-%s' % (__version_info__[-1],))
Update dev version to 1.10
Update dev version to 1.10
Python
apache-2.0
emory-libraries/findingaids,emory-libraries/findingaids
--- +++ @@ -14,7 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version_info__ = (1, 9, 0, None) +__version_info__ = (1, 10, 0, 'dev') # Dot-connect all but the last. Last is dash-connected if not None. __version__ = '.'.join(str(i) for i in __version_info__[:-1])
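A worked example of the dot/dash rule above with the new tuple, purely for illustration:

__version_info__ = (1, 10, 0, 'dev')
__version__ = '.'.join(str(i) for i in __version_info__[:-1])   # '1.10.0'
if __version_info__[-1] is not None:
    __version__ += ('-%s' % (__version_info__[-1],))            # '1.10.0-dev'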
4a69aa350f0b3cf95cbf2f518d58310be5731a74
salt/pwm-server/config.py
salt/pwm-server/config.py
SQLALCHEMY_DATABASE_URI = 'sqlite:///srv/pwm-server/db.sqlite'
SQLALCHEMY_DATABASE_URI = 'sqlite:////srv/pwm-server/db.sqlite'
Correct the path to the sqlite db on vagrant
Correct the path to the sqlite db on vagrant
Python
mit
thusoy/pwm-server,thusoy/pwm-server
--- +++ @@ -1 +1 @@ -SQLALCHEMY_DATABASE_URI = 'sqlite:///srv/pwm-server/db.sqlite' +SQLALCHEMY_DATABASE_URI = 'sqlite:////srv/pwm-server/db.sqlite'
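For context (a hedged note, not from the repository): in SQLAlchemy's SQLite URL syntax, three slashes introduce a path relative to the working directory while four denote an absolute path, which is why the extra slash points the Vagrant box at /srv/pwm-server/db.sqlite.

# sqlite:///srv/pwm-server/db.sqlite   -> relative file  srv/pwm-server/db.sqlite
# sqlite:////srv/pwm-server/db.sqlite  -> absolute file  /srv/pwm-server/db.sqlite
SQLALCHEMY_DATABASE_URI = 'sqlite:////srv/pwm-server/db.sqlite'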
a8b1b4ca3fd4964b2349ed085e8d2350072e67b9
d1_libclient_python/src/d1_client/__init__.py
d1_libclient_python/src/d1_client/__init__.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-

# This work was created by participants in the DataONE project, and is
# jointly copyrighted by participating institutions in DataONE. For
# more information on DataONE, see our web site at http://dataone.org.
#
# Copyright 2009-2012 DataONE
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

__version__ = "2.0.dev8"

__all__ = [
  'cnclient',
  'cnclient_1_1',
  'd1baseclient',
  'd1baseclient_1_1',
  'd1baseclient_2_0',
  'd1client',
  'data_package',
  'logrecorditerator',
  'mnclient',
  'mnclient_1_1',
  'object_format_info',
  'objectlistiterator',
  'solr_client',
  'svnrevision',
  'systemmetadata',
]
#!/usr/bin/env python
# -*- coding: utf-8 -*-

# This work was created by participants in the DataONE project, and is
# jointly copyrighted by participating institutions in DataONE. For
# more information on DataONE, see our web site at http://dataone.org.
#
# Copyright 2009-2012 DataONE
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

__version__ = "2.0.dev8"

# __all__ = [
#   'cnclient',
#   'cnclient_1_1',
#   'd1baseclient',
#   'd1baseclient_1_1',
#   'd1baseclient_2_0',
#   'd1client',
#   'data_package',
#   'logrecorditerator',
#   'mnclient',
#   'mnclient_1_1',
#   'object_format_info',
#   'objectlistiterator',
#   'solr_client',
#   'svnrevision',
#   'systemmetadata',
# ]
Remove implicit import of symbols
Remove implicit import of symbols

Currently, using libclient for Python requires selecting the target node type (MN or CN) and the target DataONE API version by specifying the appropriate client, so it is better to use the library by explicitly importing only the needed clients instead of all of them.
Python
apache-2.0
DataONEorg/d1_python,DataONEorg/d1_python,DataONEorg/d1_python,DataONEorg/d1_python
--- +++ @@ -21,20 +21,20 @@ __version__ = "2.0.dev8" -__all__ = [ - 'cnclient', - 'cnclient_1_1', - 'd1baseclient', - 'd1baseclient_1_1', - 'd1baseclient_2_0', - 'd1client', - 'data_package', - 'logrecorditerator', - 'mnclient', - 'mnclient_1_1', - 'object_format_info', - 'objectlistiterator', - 'solr_client', - 'svnrevision', - 'systemmetadata', -] +# __all__ = [ +# 'cnclient', +# 'cnclient_1_1', +# 'd1baseclient', +# 'd1baseclient_1_1', +# 'd1baseclient_2_0', +# 'd1client', +# 'data_package', +# 'logrecorditerator', +# 'mnclient', +# 'mnclient_1_1', +# 'object_format_info', +# 'objectlistiterator', +# 'solr_client', +# 'svnrevision', +# 'systemmetadata', +# ]
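A sketch of what the explicit-import style looks like for a caller after this change (module names are taken from the list above; which client fits depends on the node type and API version being targeted):

# Pick only the clients actually needed instead of pulling in every submodule:
from d1_client import mnclient_1_1   # Member Node client, API v1.1
from d1_client import cnclient       # Coordinating Node client, API v1.0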
1ebf1cfb0ca67d7bb10fa776b5944d8755292999
shellstreaming/test/inputstream/test_textfile.py
shellstreaming/test/inputstream/test_textfile.py
# -*- coding: utf-8 -*-
from nose.tools import *
import os
import time
from shellstreaming.inputstream.textfile import TextFile


TEST_FILE = os.path.abspath(os.path.dirname(__file__)) + '/test_textfile_input01.txt'


def test_textfile_usage():
    n_batches = n_records = 0
    stream = TextFile(TEST_FILE, batch_span_ms=20)
    for batch in stream:
        if batch is None:
            time.sleep(0.1)
            continue

        assert_greater_equal(len(batch), 1)
        n_batches += 1

        for record in batch:
            eq_(len(record), 1)
            line = record[0]
            eq_('line ', line[0:5])
            ok_(0 <= int(line[5:]) < 100)   # record order in a batch is not always 'oldest-first'
            n_records += 1

    print('number of batches (%d) >= 1 ?' % (n_batches))
    assert_greater_equal(n_batches, 1)
    eq_(n_records, 100)
# -*- coding: utf-8 -*-
from nose.tools import *
import time
from os.path import abspath, dirname, join
from shellstreaming.inputstream.textfile import TextFile


TEST_FILE = join(abspath(dirname(__file__)), 'test_textfile_input01.txt')


def test_textfile_usage():
    n_batches = n_records = 0
    stream = TextFile(TEST_FILE, batch_span_ms=20)
    for batch in stream:
        if batch is None:
            time.sleep(0.1)
            continue

        assert_greater_equal(len(batch), 1)
        n_batches += 1

        for record in batch:
            eq_(len(record), 1)
            line = record[0]
            eq_('line ', line[0:5])
            ok_(0 <= int(line[5:]) < 100)   # record order in a batch is not always 'oldest-first'
            n_records += 1

    print('number of batches (%d) >= 1 ?' % (n_batches))
    assert_greater_equal(n_batches, 1)
    eq_(n_records, 100)
Use os.path.join instead of string concatenation.
Use os.path.join instead of string concatenation.

Use `join` in order to create paths intelligently on all operating systems.
See: http://docs.python.org/2/library/os.path.html#os.path.join
Python
apache-2.0
laysakura/shellstreaming,laysakura/shellstreaming
--- +++ @@ -1,11 +1,11 @@ # -*- coding: utf-8 -*- from nose.tools import * -import os import time +from os.path import abspath, dirname, join from shellstreaming.inputstream.textfile import TextFile -TEST_FILE = os.path.abspath(os.path.dirname(__file__)) + '/test_textfile_input01.txt' +TEST_FILE = join(abspath(dirname(__file__)), 'test_textfile_input01.txt') def test_textfile_usage():
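A small contrast of the two path-building styles from the record above (illustrative; the resulting paths depend on where the test file lives):

from os.path import abspath, dirname, join

# Concatenation hard-codes the '/' separator:
p1 = abspath(dirname(__file__)) + '/test_textfile_input01.txt'
# join() inserts the right separator for the current platform:
p2 = join(abspath(dirname(__file__)), 'test_textfile_input01.txt')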