Dataset columns: hash (string, 40 chars), diff (string, 131 to 114k chars), message (string, 7 to 980 chars), project (string, 5 to 67 chars), split (string, 1 class)
58506df70429b4a15b4f63cdc0950eddadd557ba
diff --git a/workbench/clients/workbench_shell.py b/workbench/clients/workbench_shell.py index <HASH>..<HASH> 100644 --- a/workbench/clients/workbench_shell.py +++ b/workbench/clients/workbench_shell.py @@ -50,20 +50,50 @@ class AutoQuoteTransformer(IPython.core.prefilter.PrefilterTransformer): def transform(self, line, _continue_prompt): """IPython Transformer for commands to use 'auto-quotes'""" - # Very conservative logic here - # - Need to have more than one token - # - First token in line must be in the workbench command set - # - No other otkens can be in any of the shell namespaces + # Capture the original line + orig_line = line + + # Very conservative logic (but possibly flawed) + # 1) Lines with any of these symbols ; , ' " ( ) aren't touched + # 2) Need to have more than one token + # 3) First token in line must be in the workbench command set + # 4) If first token is 'help' than all other tokens are quoted + # 5) Otherwise only tokens that are not in any of the namespace are quoted + + + # 1) Lines with any of these symbols ; , ' " ( ) aren't touched + skip_symbols = [';',',','\'','"','(',')'] + if any([sym in line for sym in skip_symbols]): + return line + + # Build up token set and info token_list = re.split(' |;|,|(|)|\'|"', line) num_tokens = len(token_list) first_token = token_list[0] token_set = set(token_list) + if None in token_set: # In some cases None creeps in + token_set.remove(None) + + # 2) Need to have more than one token + # 3) First token in line must be in the workbench command set if num_tokens > 1 and first_token in self.command_set: - ns_token_set = set([token for nspace in self.shell.all_ns_refs for token in nspace]) - if len(token_set.intersection(ns_token_set))==1: - return ','+line - # Doesn't match criteria so don't try to auto-quote it + # 4) If first token is 'help' than all other tokens are quoted + if first_token == 'help': + token_set.remove('help') + for token in token_set: + line = line.replace(token, '"'+token+'"') + + # 5) Otherwise only tokens that are not in any of the namespace are quoted + else: # Not help + ns_token_set = set([token for nspace in self.shell.all_ns_refs for token in nspace]) + for token in token_set: + if token not in ns_token_set: + line = line.replace(token, '"'+token+'"') + + # Push out the processed line and return it + if orig_line != line: + self.shell.auto_rewrite_input(line) return line class WorkbenchShell(object):
working on the shortcut logic; this will be an ongoing process, but this logic seems better than the old one; Issue #<I>
SuperCowPowers_workbench
train
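A minimal Python sketch of the quoting rule this commit introduces: skip lines containing shell-significant symbols, then quote only tokens unknown to the surrounding namespaces. The function and argument names here are illustrative assumptions, not the workbench API.

    import re

    def auto_quote(line, command_set, namespaces):
        """Quote bare tokens in a shell line, mirroring the commit's conservative rules."""
        # 1) Lines containing any of these symbols are left untouched.
        if any(sym in line for sym in [';', ',', "'", '"', '(', ')']):
            return line
        tokens = [t for t in re.split(r'[ ;,()\'"]', line) if t]
        # 2) and 3) Need more than one token, and the first must be a known command.
        if len(tokens) < 2 or tokens[0] not in command_set:
            return line
        known = {name for ns in namespaces for name in ns}
        for token in set(tokens[1:]):
            # 5) Only quote tokens that no namespace already defines.
            if token not in known:
                line = line.replace(token, '"' + token + '"')
        return line

    print(auto_quote('search bad.exe', {'search'}, [{'search'}]))  # search "bad.exe"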
b8bcee3f73b598db48fc534b5c57a956219a25ec
diff --git a/generators/generator-base.js b/generators/generator-base.js index <HASH>..<HASH> 100644 --- a/generators/generator-base.js +++ b/generators/generator-base.js @@ -1215,7 +1215,9 @@ Generator.prototype.getAngularAppName = function () { * get the java main class name. */ Generator.prototype.getMainClassName = function () { - return _.upperFirst(this.getAngularAppName()); + // Don't name by baseName because numbers can cause compilation issues. + // https://github.com/jhipster/generator-jhipster/issues/3889 + return 'Application'; }; /**
Revert mainClassName to be 'Application' This fixes the problem where baseName can create class names that have invalid Java syntax.
jhipster_generator-jhipster
train
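The underlying problem generalizes: most languages, Java included, reject identifiers that begin with a digit, so deriving the main class name from a user-supplied baseName such as '24points' yields invalid source. A quick Python illustration (the sample names are made up):

    for base_name in ('blog', '24points'):
        class_name = base_name[0].upper() + base_name[1:] + 'App'
        # Java, like Python, rejects identifiers that begin with a digit,
        # which is why the generator falls back to the fixed name 'Application'.
        print(class_name, 'valid identifier?', class_name.isidentifier())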
81ab2bb02026d4b46b22aac4c983453fc64d275f
diff --git a/src/ai/backend/client/scaling_group.py b/src/ai/backend/client/scaling_group.py index <HASH>..<HASH> 100644 --- a/src/ai/backend/client/scaling_group.py +++ b/src/ai/backend/client/scaling_group.py @@ -200,7 +200,7 @@ class ScalingGroup: @api_function @classmethod - async def associate_domain(cls, scaling_group: str, domain: str): + async def associate_domain(cls, scaling_group: str, domain: str, total_resource_slots: str): ''' Associate scaling_group with domain. @@ -208,9 +208,11 @@ class ScalingGroup: :param domain: The name of a domain. ''' query = textwrap.dedent('''\ - mutation($scaling_group: String!, $domain: String!) { + mutation($scaling_group: String!, $domain: String!, $total_resource_slots: String) { associate_scaling_group_with_domain( - scaling_group: $scaling_group, domain: $domain) { + scaling_group: $scaling_group, + domain: $domain, + total_resource_slots: $total_resource_slots) { ok msg } } @@ -276,3 +278,59 @@ class ScalingGroup: async with rqst.fetch() as resp: data = await resp.json() return data['disassociate_all_scaling_groups_with_domain'] + + @api_function + @classmethod + async def associate_group(cls, scaling_group: str, group_id: str, total_resource_slots: str): + ''' + Associate scaling_group with group. + + :param scaling_group: The name of a scaling group. + :param group_id: The ID of a group. + ''' + query = textwrap.dedent('''\ + mutation($scaling_group: String!, $user_group: String!, $total_resource_slots: String) { + associate_scaling_group_with_user_group( + scaling_group: $scaling_group, + user_group: $user_group, + total_resource_slots: $total_resource_slots) { + ok msg + } + } + ''') + variables = {'scaling_group': scaling_group, 'user_group': group_id} + rqst = Request(cls.session, 'POST', '/admin/graphql') + rqst.set_json({ + 'query': query, + 'variables': variables, + }) + async with rqst.fetch() as resp: + data = await resp.json() + return data['associate_scaling_group_with_user_group'] + + @api_function + @classmethod + async def dissociate_group(cls, scaling_group: str, group_id: str): + ''' + Dissociate scaling_group from group. + + :param scaling_group: The name of a scaling group. + :param group_id: The ID of a group. + ''' + query = textwrap.dedent('''\ + mutation($scaling_group: String!, $user_group: String!) { + disassociate_scaling_group_with_user_group( + scaling_group: $scaling_group, user_group: $user_group) { + ok msg + } + } + ''') + variables = {'scaling_group': scaling_group, 'user_group': group_id} + rqst = Request(cls.session, 'POST', '/admin/graphql') + rqst.set_json({ + 'query': query, + 'variables': variables, + }) + async with rqst.fetch() as resp: + data = await resp.json() + return data['disassociate_scaling_group_with_user_group']
Add total_resource_slots fields when associating scaling groups
lablup_backend.ai-client-py
train
a00bf3e1e17bab4338889628809a49c7b930866c
diff --git a/browser/default_app/main.js b/browser/default_app/main.js index <HASH>..<HASH> 100644 --- a/browser/default_app/main.js +++ b/browser/default_app/main.js @@ -9,6 +9,7 @@ if (argv._.length > 0) { require(path.resolve(argv._[0])); } catch(e) { if (e.code == 'MODULE_NOT_FOUND') { + console.error(e.stack); console.error('Specified app is invalid'); process.exit(1); } else {
Print stack trace when an error occurs on startup.
electron_electron
train
689f01c533ce09e5094e90df0c9cac82089cf7a1
diff --git a/master/contrib/github_buildbot.py b/master/contrib/github_buildbot.py index <HASH>..<HASH> 100755 --- a/master/contrib/github_buildbot.py +++ b/master/contrib/github_buildbot.py @@ -19,7 +19,7 @@ import os import sys import traceback from hashlib import sha1 -from httplib import BAD_REQUEST +from httplib import BAD_REQUEST, INTERNAL_SERVER_ERROR from optparse import OptionParser from twisted.cred import credentials @@ -111,10 +111,11 @@ class GitHubBuildBot(resource.Resource): project = project[0] logging.debug("Payload: " + str(payload)) self.process_change(payload, user, repo, repo_url, project) - except Exception: - logging.error("Encountered an exception:") - for msg in traceback.format_exception(*sys.exc_info()): - logging.error(msg.strip()) + + except Exception, e: + logging.exception(e) + request.setResponseCode(INTERNAL_SERVER_ERROR) + return json.dumps({"error": e.message}) def process_change(self, payload, user, repo, repo_url, project): """
fix exception logging and respond to the request with the error
buildbot_buildbot
train
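The same shape in modern Python 3, since the snippet above is Python 2 / Twisted. This standalone sketch assumes only the stdlib and a hypothetical process_change handler:

    import json
    import logging

    def process_change(payload):
        # Hypothetical handler standing in for GitHubBuildBot.process_change.
        raise ValueError("bad payload")

    def handle_request(payload):
        try:
            return 200, process_change(payload)
        except Exception as e:
            # logging.exception records the message plus the full traceback.
            logging.exception(e)
            # Surface the failure to the caller instead of swallowing it.
            return 500, json.dumps({"error": str(e)})

    print(handle_request({}))   # (500, '{"error": "bad payload"}')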
2c4cdb2ab18ebc8a4e07649106ec7abb8c105e71
diff --git a/utility/metadata-schema-to-js-doc/inject-schema.js b/utility/metadata-schema-to-js-doc/inject-schema.js index <HASH>..<HASH> 100644 --- a/utility/metadata-schema-to-js-doc/inject-schema.js +++ b/utility/metadata-schema-to-js-doc/inject-schema.js @@ -161,11 +161,12 @@ function addGeneratedComment(options, callback) { } options.lines.splice.apply(options.lines, [index, 0].concat(generatedLines)); }); - options.comment.value = options.lines.join(options.eol); + options.comment.generatedComment = options.lines.join(options.eol); return callback(null, options); } function generateOutput(options, callback) { + replaceComments(options.parsedFile); var generateOptions = { format: { newline: options.eol, //doesn't seem like the escodegen lib honours this, so have a workaround @@ -215,4 +216,25 @@ function removeDuplicateComments(ast) { }); } }); +} + +function replaceComments(rootDocument) { + estreeWalker.walk(rootDocument, { + enter: visitNode + }); + + function visitNode(node) { + if (!node.value || node.type !== 'Block' || node.generatedComment) { + return; + } + rootDocument.comments.forEach(function (rootComment) { + if (rootComment.value !== node.value) { + return; + } + if (!rootComment.generatedComment) { + return; + } + node.value = rootComment.generatedComment; + }); + } } \ No newline at end of file
code to walk the nodes and set the comments based on the root comments.
eXigentCoder_swagger-spec-express
train
622f410d94b1fd80c27fb7c751cea164318e2ec9
diff --git a/slurmpy/slurmpy.py b/slurmpy/slurmpy.py index <HASH>..<HASH> 100755 --- a/slurmpy/slurmpy.py +++ b/slurmpy/slurmpy.py @@ -10,11 +10,29 @@ r""" <BLANKLINE> #SBATCH --account=ucgd-kp #SBATCH --partition=ucgd-kp +#SBATCH --time=84:00:00 <BLANKLINE> set -eo pipefail -o nounset <BLANKLINE> __script__ +>>> s = Slurm("job-name", {"account": "ucgd-kp", "partition": "ucgd-kp"}, bash_strict=False) +>>> print(str(s)) +#!/bin/bash +<BLANKLINE> +#SBATCH -e logs/job-name.%J.err +#SBATCH -o logs/job-name.%J.out +#SBATCH -J job-name +<BLANKLINE> +#SBATCH --account=ucgd-kp +#SBATCH --partition=ucgd-kp +#SBATCH --time=84:00:00 +<BLANKLINE> +<BLANKLINE> +<BLANKLINE> +__script__ + + >>> job_id = s.run("rm -f aaa; sleep 10; echo 213 > aaa", name_addition="", tries=1) >>> job = s.run("cat aaa; rm aaa", name_addition="", tries=1, depends_on=[job_id]) @@ -52,14 +70,14 @@ def tmp(suffix=".sh"): class Slurm(object): def __init__(self, name, slurm_kwargs=None, tmpl=None, - date_in_name=True, scripts_dir="slurm-scripts/", - log_dir='logs', check_bash_var=True): + date_in_name=True, scripts_dir="slurm-scripts", + log_dir='logs', bash_strict=True): if slurm_kwargs is None: slurm_kwargs = {} if tmpl is None: tmpl = TMPL self.log_dir = log_dir - self.check_bash_var = check_bash_var + self.bash_strict = bash_strict header = [] if 'time' not in slurm_kwargs.keys(): @@ -73,7 +91,7 @@ class Slurm(object): # add bash setup list to collect bash script config bash_setup = [] - if check_bash_var: + if bash_strict: bash_setup.append("set -eo pipefail -o nounset") self.header = "\n".join(header) @@ -138,9 +156,6 @@ class Slurm(object): if depends_on is None or (len(depends_on) == 1 and depends_on[0] is None): depends_on = [] - if "logs/" in tmpl and not os.path.exists("logs/"): - os.makedirs("logs") - with open(self._tmpfile(), "w") as sh: sh.write(tmpl)
name modification: 1. bash_strict is a better name. 2. delete the lines that make the "logs" directory. 3. add "#SBATCH --time=<I>:<I>:<I>" to the doctests so the tests pass.
brentp_slurmpy
train
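A reduced sketch of what the renamed bash_strict flag controls: emitting bash "strict mode" into the generated script header. This is a simplification; slurmpy's real template carries more fields:

    def build_header(name, bash_strict=True):
        lines = ['#!/bin/bash', '#SBATCH -J ' + name]
        if bash_strict:
            # Abort on errors (-e), failed pipeline stages (pipefail) and
            # unset variables (nounset) instead of continuing silently.
            lines.append('set -eo pipefail -o nounset')
        return '\n'.join(lines)

    print(build_header('job-name'))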
cbc1cd9ae0f8328dde8b5fb6c2168b78e7ece9e1
diff --git a/examples/tornado_server.py b/examples/tornado_server.py index <HASH>..<HASH> 100755 --- a/examples/tornado_server.py +++ b/examples/tornado_server.py @@ -7,6 +7,8 @@ import tornado.tcpserver from options import get_args from tchannel.messages import CallResponseMessage +from tchannel.messages import ErrorMessage +from tchannel.messages.types import Types from tchannel.tornado.connection import TornadoConnection @@ -56,6 +58,15 @@ class MyServer(tornado.tcpserver.TCPServer): connection.frame_and_write(response) + elif context.message.message_type == Types.PING_REQ: + connection.pong() + + else: + response = ErrorMessage() + response.code = 0x06 + response.original_message_id = context.message_id + response.message = response.error_name() + connection.frame_and_write(response) connection.handle_calls(self.handle_call)
make tornado server pong and respond with error on unknown endpoints
uber_tchannel-python
train
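Stripped to essentials, the change is a dispatch on message type with an explicit error fallback for unknown types. A hypothetical sketch; the type codes and callbacks are stand-ins, not tchannel's real interface:

    CALL_REQ, PING_REQ = 0x03, 0xd0     # illustrative type codes, not tchannel's

    class Message:
        def __init__(self, message_type, message_id):
            self.message_type, self.message_id = message_type, message_id

    def dispatch(message, write, pong):
        if message.message_type == CALL_REQ:
            write('call response for message %d' % message.message_id)
        elif message.message_type == PING_REQ:
            pong()
        else:
            # Unknown message type: answer with an explicit error frame
            # rather than leaving the client waiting forever.
            write({'code': 0x06, 'original_message_id': message.message_id})

    dispatch(Message(0xff, 7), print, lambda: print('pong'))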
d4d63af2b006967a6b5845c2a88c014005c46e0a
diff --git a/lib/app.js b/lib/app.js index <HASH>..<HASH> 100644 --- a/lib/app.js +++ b/lib/app.js @@ -41,10 +41,13 @@ module.exports = function (config) { } app.use(bodyParser.json({ strict: false })); - app.use(log4js.connectLogger(logger, { - format: ':status :method :url :response-timems', - level: 'auto', // 3XX=WARN, 4xx/5xx=ERROR - })); + + if(config.enableRequestLogger) { + app.use(log4js.connectLogger(logger, { + format: ':status :method :url :response-timems', + level: 'auto', // 3XX=WARN, 4xx/5xx=ERROR + })); + } // Setup API routes const apiRouter = express.Router(); // eslint-disable-line new-cap diff --git a/lib/options.js b/lib/options.js index <HASH>..<HASH> 100644 --- a/lib/options.js +++ b/lib/options.js @@ -1,12 +1,15 @@ 'use strict'; const { publicFolder } = require('unleash-frontend'); +const isDev = () => process.env.NODE_ENV === 'development'; + const DEFAULT_OPTIONS = { databaseUrl: process.env.DATABASE_URL, port: process.env.HTTP_PORT || process.env.PORT || 4242, baseUriPath: process.env.BASE_URI_PATH || '', serverMetrics: true, publicFolder, + enableRequestLogger: isDev() ? true : false }; module.exports = { @@ -14,7 +17,7 @@ module.exports = { const options = Object.assign({}, DEFAULT_OPTIONS, opts); // If we are running in development we should assume local db - if(process.env.NODE_ENV === 'development' && !options.databaseUrl) { + if(isDev() && !options.databaseUrl) { options.databaseUrl = 'postgres://unleash_user:passord@localhost:5432/unleash'; } diff --git a/lib/server-impl.js b/lib/server-impl.js index <HASH>..<HASH> 100644 --- a/lib/server-impl.js +++ b/lib/server-impl.js @@ -19,6 +19,7 @@ function createApp (options) { const config = { baseUriPath: options.baseUriPath, serverMetrics: options.serverMetrics, + enableRequestLogger: options.enableRequestLogger, port: options.port, publicFolder: options.publicFolder, stores,
Option to disable request logging. Because all request details are available via Prometheus, there is no need to also log all requests. This solves #<I>, but we should still replace log4js with something else.
Unleash_unleash
train
a7a18ccbed61f5f620109d7d3828c45cfbce65e9
diff --git a/src/main/java/com/thinkaurelius/titan/diskstorage/berkeleydb/je/BerkeleyJEStorageManager.java b/src/main/java/com/thinkaurelius/titan/diskstorage/berkeleydb/je/BerkeleyJEStorageManager.java index <HASH>..<HASH> 100644 --- a/src/main/java/com/thinkaurelius/titan/diskstorage/berkeleydb/je/BerkeleyJEStorageManager.java +++ b/src/main/java/com/thinkaurelius/titan/diskstorage/berkeleydb/je/BerkeleyJEStorageManager.java @@ -152,18 +152,24 @@ public class BerkeleyJEStorageManager implements KeyValueStorageManager { try { long blockSize = blockSizer.getBlockSize(partition); Preconditions.checkArgument(blockSize<Integer.MAX_VALUE); - BDBTxHandle tx = beginTransaction(); - ByteBuffer value = idDB.get(key,tx); int counter = 1; - if (value!=null) { - assert value.remaining()==4; - counter = value.getInt(); + BDBTxHandle tx = null; + try { + tx = beginTransaction(); + ByteBuffer value = idDB.get(key,tx); + if (value!=null) { + assert value.remaining()==4; + counter = value.getInt(); + } + Preconditions.checkArgument(Integer.MAX_VALUE-blockSize>counter); + int next = counter + (int)blockSize; + idDB.insert(new KeyValueEntry(key,ByteBufferUtil.getIntByteBuffer(next)),tx.getTransaction(),true); + tx.commit(); + return new long[]{counter,next}; + } catch (RuntimeException e) { + if (tx!=null) tx.abort(); + throw e; } - Preconditions.checkArgument(Integer.MAX_VALUE-blockSize>counter); - int next = counter + (int)blockSize; - idDB.insert(new KeyValueEntry(key,ByteBufferUtil.getIntByteBuffer(next)),tx.getTransaction(),true); - tx.commit(); - return new long[]{counter,next}; } finally { idAcquisitionLock.unlock(); }
Aborting tx when id block renewal fails
thinkaurelius_titan
train
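The fix follows the classic abort-on-failure shape for transactions. A Python sketch of the same control flow, with a fake transaction object standing in for BerkeleyDB:

    def renew_id_block(begin_transaction, block_size=100):
        tx = None
        try:
            tx = begin_transaction()
            counter = tx.get('counter') or 1
            tx.put('counter', counter + block_size)
            tx.commit()
            return counter, counter + block_size
        except RuntimeError:
            # Roll back partial writes before propagating, so the store is
            # never left holding a half-renewed id block.
            if tx is not None:
                tx.abort()
            raise

    class FakeTx:                       # stand-in for a BerkeleyDB tx handle
        def get(self, key): return None
        def put(self, key, value): raise RuntimeError('write failed')
        def commit(self): pass
        def abort(self): print('aborted')

    try:
        renew_id_block(FakeTx)
    except RuntimeError as e:
        print('propagated:', e)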
a6bc3bf786629df1cb3c578a85569d272a39e4ba
diff --git a/src/i18n/ca.js b/src/i18n/ca.js index <HASH>..<HASH> 100644 --- a/src/i18n/ca.js +++ b/src/i18n/ca.js @@ -63,7 +63,7 @@ export default { "loginAtLabel": "Iniciar a %s", "loginLabel": "Iniciar sessió", "loginSubmitLabel": "Iniciar sessió", - "loginWithLabel": "Iniciar ambb %s", + "loginWithLabel": "Iniciar amb %s", "notYourAccountAction": "¿No es el teu compte?", "passwordInputPlaceholder": "la teva contrassenya", "passwordStrength": {
Update ca.js (#<I>) "with" translation error --> "amb"
auth0_lock
train
0d7fa83bec36f803e72d611b363fe7d793a2641c
diff --git a/lib/client_side_validations/action_view/form_builder.rb b/lib/client_side_validations/action_view/form_builder.rb index <HASH>..<HASH> 100644 --- a/lib/client_side_validations/action_view/form_builder.rb +++ b/lib/client_side_validations/action_view/form_builder.rb @@ -34,6 +34,13 @@ module ClientSideValidations::ActionView::Helpers end end + def validate(*attrs) + attrs.each do |attr| + build_validation_options(attr) + end + nil + end + def initialize_with_client_side_validations(object_name, object, template, options, proc) initialize_without_client_side_validations(object_name, object, template, options, proc) @options[:validators] = { object => {} } diff --git a/test/action_view/cases/test_helpers.rb b/test/action_view/cases/test_helpers.rb index <HASH>..<HASH> 100644 --- a/test/action_view/cases/test_helpers.rb +++ b/test/action_view/cases/test_helpers.rb @@ -516,4 +516,13 @@ class ClientSideValidations::ActionViewHelpersTest < ActionView::TestCase assert_equal expected, output_buffer end + def test_text_field_with_added_validators + form_for(@post, :validate => true) do |f| + concat f.validate(:cost, :body, :title) + end + + validators = {'post[cost]' => {:presence => [{:message => "can't be blank"}]}, 'post[body]' => {:presence => [{:message => "can't be blank"}]}} + expected = whole_form('/posts', 'new_post', 'new_post', :validators => validators) + assert_equal expected, output_buffer + end end diff --git a/test/action_view/models/post.rb b/test/action_view/models/post.rb index <HASH>..<HASH> 100644 --- a/test/action_view/models/post.rb +++ b/test/action_view/models/post.rb @@ -5,7 +5,7 @@ class Post include ActiveModel::Conversion attr_accessor :title, :author_name, :body, :secret, :written_on, :cost - validates :cost, :presence => true + validates :cost, :body, :presence => true def initialize(params={}) params.each do |attr, value|
Added FormBuilder#validate. Inject validators for attributes that are not rendered initially on the form object.
DavyJonesLocker_client_side_validations
train
7a5dd25c6e6eef4f1556a57f457b1baf89e4ed5b
diff --git a/test/build_realtest.rb b/test/build_realtest.rb index <HASH>..<HASH> 100644 --- a/test/build_realtest.rb +++ b/test/build_realtest.rb @@ -19,8 +19,8 @@ class TestVeeweeBuild < Test::Unit::TestCase # - postinstall execution def test_box_1_build assert_nothing_raised { - #@box.build({"auto" => true,:force => true, #:nogui => true }) - @box.build({"auto" => true,"force" => true }) + @box.build({'auto' => true,'force' => true, 'nogui' => true }) + #@box.build({"auto" => true,"force" => true }) } end @@ -51,8 +51,8 @@ class TestVeeweeBuild < Test::Unit::TestCase # Now try build again (with no force flag) def test_box_5_build assert_raise(Veewee::Error) { - @box.build({"auto" => true}) - #@box.build({"auto" => true,:force => true, :nogui => true }) + #@box.build({"auto" => true}) + @box.build({"auto" => true,'force' => true, 'nogui' => true }) } end
fix mixed options from symbols to strings for tests
jedi4ever_veewee
train
a9913527ebd74549157512fcd5417f50c4308f1e
diff --git a/mastodon/Mastodon.py b/mastodon/Mastodon.py index <HASH>..<HASH> 100644 --- a/mastodon/Mastodon.py +++ b/mastodon/Mastodon.py @@ -3669,8 +3669,8 @@ class MastodonMalformedEventError(MastodonError): def guess_type(media_file): mime_type = None - if magic: + try: mime_type = magic.from_file(media_file, mime=True) - else: + except AttributeError: mime_type = mimetypes.guess_type(media_file)[0] return mime_type
Try/catch magic for #<I>.
halcy_Mastodon.py
train
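The pattern here, trying an optional dependency and falling back to the stdlib, is broadly useful. A self-contained variant that guards the import itself rather than catching AttributeError (magic.from_file is the real python-magic call; the demo file name is invented):

    import mimetypes

    try:
        import magic                    # optional dependency: python-magic
    except ImportError:
        magic = None

    def guess_type(media_file):
        if magic is not None:
            # libmagic sniffs file contents, so it works without an extension.
            return magic.from_file(media_file, mime=True)
        # Fallback: guess from the filename alone.
        return mimetypes.guess_type(media_file)[0]

    if magic is None:
        print(guess_type('clip.mp4'))   # 'video/mp4' from the filename fallback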
b12a6b86aa3b5d537ff5c0d4b675d8df1a9ce70a
diff --git a/CHANGELOG.rst b/CHANGELOG.rst index <HASH>..<HASH> 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -14,6 +14,14 @@ Change Log Unreleased ~~~~~~~~~~ +[0.19.1] - 2017-01-30 +~~~~~~~~~~~~~~~~~~~~~ + +Added +----- + +* Resolved conflicting urls for User API endpoint. + [0.19.0] - 2017-01-30 ~~~~~~~~~~~~~~~~~~~~~ diff --git a/enterprise/__init__.py b/enterprise/__init__.py index <HASH>..<HASH> 100644 --- a/enterprise/__init__.py +++ b/enterprise/__init__.py @@ -4,6 +4,6 @@ Your project description goes here. from __future__ import absolute_import, unicode_literals -__version__ = "0.19.0" +__version__ = "0.19.1" default_app_config = "enterprise.apps.EnterpriseConfig" # pylint: disable=invalid-name diff --git a/enterprise/api/v1/urls.py b/enterprise/api/v1/urls.py index <HASH>..<HASH> 100644 --- a/enterprise/api/v1/urls.py +++ b/enterprise/api/v1/urls.py @@ -11,7 +11,7 @@ from enterprise.api.v1 import views # that is why we have disabled 'invalid-name' check for variable definition below. router = DefaultRouter() # pylint: disable=invalid-name router.register("site", views.SiteViewSet, 'site') -router.register("user", views.UserViewSet, 'user') +router.register("auth-user", views.UserViewSet, 'auth-user') router.register("enterprise-customer", views.EnterpriseCustomerViewSet, 'enterprise-customer') router.register("enterprise-learner", views.EnterpriseCustomerUserViewSet, 'enterprise-learner') router.register("user-data-sharing-consent", views.UserDataSharingConsentAuditViewSet, 'user-data-sharing-consent') diff --git a/tests/api/test_views.py b/tests/api/test_views.py index <HASH>..<HASH> 100644 --- a/tests/api/test_views.py +++ b/tests/api/test_views.py @@ -33,7 +33,7 @@ class TestEnterpriseAPIViews(APITest): @ddt.data( ( factories.UserFactory, - reverse('user-list'), + reverse('auth-user-list'), itemgetter('username'), [ {
Resolve conflicting urls for User API endpoint.
edx_edx-enterprise
train
d3a3f18fb99df8a343bbac55d966686f97f0fe1c
diff --git a/test/client-test.js b/test/client-test.js index <HASH>..<HASH> 100644 --- a/test/client-test.js +++ b/test/client-test.js @@ -54,20 +54,18 @@ var testData = { } }; -suite.addBatch(modelBatch('client', 'Client', testSchema, testData)); +var mb = modelBatch('client', 'Client', testSchema, testData); -suite.addBatch({ - 'When we create a new client': { - topic: function() { - var Client = require('../lib/model/client').Client; - Client.create({title: "Another App", - description: "another app"}, - this.callback); - }, - 'secret is automatically created': function(err, client) { - assert.isString(client.secret); - } - } -}); +mb['When we require the client module'] + ['and we get its Client class export'] + ['and we create an client instance'] + ['auto-generated fields are there'] = function(err, created) { + assert.isString(created.consumer_key); + assert.isString(created.secret); + assert.isString(created.created); + assert.isString(created.updated); +}; + +suite.addBatch(mb); suite.export(module);
Hack around hard-coded stuff in activityobject
pump-io_pump.io
train
c22d7d2dc64aa4b2f05ed04f5ab1179442e7a8a9
diff --git a/Octo/Pages/Block/Text.php b/Octo/Pages/Block/Text.php index <HASH>..<HASH> 100644 --- a/Octo/Pages/Block/Text.php +++ b/Octo/Pages/Block/Text.php @@ -52,7 +52,7 @@ class Text extends Block if (!empty($content)) { // Replace file blocks - $content = preg_replace_callback('/\<img id\=\"([a-zA-Z0-9]{32})".*>/', [$this, 'replaceFile'], $content); + $content = preg_replace_callback('/<img id="([a-zA-Z0-9]{32})"(?:.*?)>/i', [$this, 'replaceFile'], $content); } $this->view->content = $content; @@ -65,6 +65,7 @@ class Text extends Block if ($file) { $template = Template::getPublicTemplate('Block/Text/File'); $template->file = $file; + return $template->render(); } }
fix regex for parsing img with file id
Block8_Octo
train
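The bug and the fix are easiest to see side by side: a greedy .* swallows everything up to the last '>' on the line, merging adjacent tags, while the lazy .*? stops at the first. The same two patterns demonstrated with Python's re module:

    import re

    a, b = 'a' * 32, 'b' * 32
    html = '<img id="%s" src="x.png">and<img id="%s" src="y.png">' % (a, b)

    greedy = re.findall(r'<img id="([a-zA-Z0-9]{32})".*>', html)
    lazy = re.findall(r'<img id="([a-zA-Z0-9]{32})"(?:.*?)>', html)

    print(len(greedy))  # 1: .* ran on to the line's final '>', eating both tags
    print(len(lazy))    # 2: .*? stops at each tag's own closing '>'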
5d9804d8f5d63cf58268170fb51654326ca8710d
diff --git a/host/scan/scan.py b/host/scan/scan.py index <HASH>..<HASH> 100644 --- a/host/scan/scan.py +++ b/host/scan/scan.py @@ -11,7 +11,7 @@ from analysis.RawDataConverter.data_struct import NameValue from basil.dut import Dut from fei4.register import FEI4Register from fei4.register_utils import FEI4RegisterUtils -from daq.readout import DataReadout, RxSyncError, EightbTenbError, FifoError, open_raw_data_file +from daq.readout import DataReadout, RxSyncError, EightbTenbError, FifoError, NoDataTimeout, StopTimeout, open_raw_data_file from collections import namedtuple, Mapping from contextlib import contextmanager from run_manager import RunBase, RunAborted @@ -210,7 +210,7 @@ class ScanBase(RunBase): self.handle_err(sys.exc_info()) if not self.err_queue.empty(): exc = self.err_queue.get() - if isinstance(exc[1], (RxSyncError, EightbTenbError, FifoError)): + if isinstance(exc[1], (RxSyncError, EightbTenbError, FifoError, NoDataTimeout, StopTimeout)): raise RunAborted(exc[1]) else: raise exc[0], exc[1], exc[2]
ENH: abort scan on no data timeout and stop timeout
SiLab-Bonn_pyBAR
train
a61a86111193bb1ade99b99a3eec26936efcde3e
diff --git a/test/common/tc_tree_node.rb b/test/common/tc_tree_node.rb index <HASH>..<HASH> 100644 --- a/test/common/tc_tree_node.rb +++ b/test/common/tc_tree_node.rb @@ -80,7 +80,18 @@ class TCTreeNode < Test::Unit::TestCase assert_equal( 1, tb.nr_nodes ) assert_equal( 1, tb.nr_leaves ) assert_equal( 0, tb.nr_childs ) + end + + def test_prefix_path + ta = TreeNode.new( "a" ) + ln1 = LeafNode.new("1", ta) + ln2 = LeafNode.new("2", ta) + tb = TreeNode.new( "b", ta ) + ln3 = LeafNode.new( "3", tb ) + ta.prefix_path= "<root>/" + assert_equal( ta.path, "<root>/a" ) + assert_equal( tb.path, "<root>/a/b") end end
moving from svn repository (dreamhost) to git
26fe_tree.rb
train
e47b0562896f48b6346b63a4d507e8891312370a
diff --git a/tensorbase/base.py b/tensorbase/base.py index <HASH>..<HASH> 100644 --- a/tensorbase/base.py +++ b/tensorbase/base.py @@ -47,7 +47,8 @@ class Layers: """ self.count['conv'] += 1 scope = 'conv_' + str(self.count['conv']) - with tf.variable_scope(scope): + with tf.variable_scope(scope) as sc: + print(sc) # Conv function input_channels = self.input.get_shape()[3]
convnet and deconvnet
dancsalo_TensorBase
train
257240864df1bfd554e3b2ab69e37180cf8cb473
diff --git a/library/ZF2DoctrineCrudHandler/src/ZF2DoctrineCrudHandler/Handler/AbstractDataHandler.php b/library/ZF2DoctrineCrudHandler/src/ZF2DoctrineCrudHandler/Handler/AbstractDataHandler.php index <HASH>..<HASH> 100644 --- a/library/ZF2DoctrineCrudHandler/src/ZF2DoctrineCrudHandler/Handler/AbstractDataHandler.php +++ b/library/ZF2DoctrineCrudHandler/src/ZF2DoctrineCrudHandler/Handler/AbstractDataHandler.php @@ -42,31 +42,7 @@ abstract class AbstractDataHandler extends AbstractCrudHandler } else { $value = $entity->$getter(); } - switch ($property->getType()) { - case Property::PROPERTY_TYPE_COLUMN: - if ($value instanceof \DateTime) { - $value = $value->format('d.M.Y H:m:s'); - } - break; - case Property::PROPERTY_TYPE_REF_ONE: - if ($value) { - $targetEntity = $property->getTargetEntity(); - $targetPropertsGetter = 'get' . ucfirst($targetEntity::DISPLAY_NAME_PROPERTY); - $value = $value->$targetPropertsGetter(); - } - break; - case Property::PROPERTY_TYPE_REF_MANY: - $targetEntity = $property->getTargetEntity(); - $targetPropertsGetter = 'get' . ucfirst($targetEntity::DISPLAY_NAME_PROPERTY); - $listString = ''; - foreach ($value as $targetEntity) { - $listString .= $targetEntity->$targetPropertsGetter() . ', '; - } - $value = substr($listString, 0, -2); - break; - default: - break; - } + $value = $property->ensurePrintableValue($value); $dataToDisplay[$name] = $value; } return $dataToDisplay; diff --git a/library/ZF2DoctrineCrudHandler/src/ZF2DoctrineCrudHandler/Request/RequestHandler.php b/library/ZF2DoctrineCrudHandler/src/ZF2DoctrineCrudHandler/Request/RequestHandler.php index <HASH>..<HASH> 100644 --- a/library/ZF2DoctrineCrudHandler/src/ZF2DoctrineCrudHandler/Request/RequestHandler.php +++ b/library/ZF2DoctrineCrudHandler/src/ZF2DoctrineCrudHandler/Request/RequestHandler.php @@ -131,26 +131,7 @@ class RequestHandler $name = $element->getAttribute('name'); $getter = 'get' . ucfirst($name); $value = $entity->$getter(); - if ($properties[$name]->getType() == Property::PROPERTY_TYPE_COLUMN) { - if ($value instanceof \DateTime) { - $value = $value->format('d.M.Y H:m:s'); - } - } else if ($properties[$name]->getType() == Property::PROPERTY_TYPE_REF_MANY) { - if ($value === null) { - continue; - } - $multiValues = []; - foreach ($value as $refEntity) { - $multiValues[] = $refEntity->getId(); - } - $value = $multiValues; - } else if ($properties[$name]->getType() == Property::PROPERTY_TYPE_REF_ONE) { - if ($value) { - $value = $value->getId(); - } else { - $value = 0; - } - } + $value = $properties[$name]->ensurePrintableValue($value, true); $element->setValue($value); } } diff --git a/library/ZF2DoctrineCrudHandler/view/zf2doctrinecrudhandler/show.phtml b/library/ZF2DoctrineCrudHandler/view/zf2doctrinecrudhandler/show.phtml index <HASH>..<HASH> 100644 --- a/library/ZF2DoctrineCrudHandler/view/zf2doctrinecrudhandler/show.phtml +++ b/library/ZF2DoctrineCrudHandler/view/zf2doctrinecrudhandler/show.phtml @@ -3,9 +3,6 @@ <div> <label> <span><?php echo $this->translate($label); ?>:</span> - <?php if (gettype($data) == 'boolean' || gettype($data) == 'NULL') { - $data = ($data == true) ? 'yes' : 'no'; - } ?> <div><?php echo $this->translate($data); ?></div> </label> </div>
switched to letting the "Property" class of the entity reader ensure printable values
Cyberrebell_ZF2DoctrineCrudHandler
train
8b594dfa090dedd642bc0b584f620eb427100a2d
diff --git a/auth0/v3/test/authentication/test_logout.py b/auth0/v3/test/authentication/test_logout.py index <HASH>..<HASH> 100644 --- a/auth0/v3/test/authentication/test_logout.py +++ b/auth0/v3/test/authentication/test_logout.py @@ -31,7 +31,7 @@ class TestLogout(unittest.TestCase): args, kwargs = mock_get.call_args - self.assertEqual(args[0], 'https://my.domain.com/v2/logout?federated&cid%rto') + self.assertEqual(args[0], 'https://my.domain.com/v2/logout?federated&cid&rto') self.assertEqual(kwargs['headers'], { 'Content-Type': 'application/json' })
Corrected URL structure in test_federated_logout
auth0_auth0-python
train
8833c529f358fd6edfddebc7e3139812b52b1b9d
diff --git a/lib/components/map/enhanced-stop-marker.js b/lib/components/map/enhanced-stop-marker.js index <HASH>..<HASH> 100644 --- a/lib/components/map/enhanced-stop-marker.js +++ b/lib/components/map/enhanced-stop-marker.js @@ -23,10 +23,10 @@ const getComplementaryColor = color => color.isLight() ? color.darken(30) : colo const caretPixels = 24 const iconPixels = 32 const iconPadding = 8 -const borderPixels = 1 +const borderPixels = props => props?.thick ? 3 : 1 const caretVisibleHeight = caretPixels / 1.4142 // Math.sqrt(2) -const caretMarginPixels = (iconPixels - caretPixels) / 2 -const bubblePixels = iconPixels + iconPadding + 2 * borderPixels +const caretMarginPixels = props => (iconPixels - caretPixels - borderPixels(props)) / 2 +const bubblePixels = props => iconPixels + iconPadding + 2 * borderPixels(props) const BaseStopIcon = styled.div` background: ${props => props.mainColor}; @@ -47,8 +47,8 @@ const BaseStopIcon = styled.div` &::after { background: linear-gradient(to bottom right, transparent 0%, transparent 50%, ${props => props.mainColor} 50%, ${props => props.mainColor} 100%); - border-bottom: 1px solid ${props => props.secondaryColor}; - border-right: 1px solid ${props => props.secondaryColor}; + border-bottom: ${borderPixels}px solid ${props => props.secondaryColor}; + border-right: ${borderPixels}px solid ${props => props.secondaryColor}; content: ''; display: block; height: ${caretPixels}px; @@ -110,6 +110,7 @@ class EnhancedStopMarker extends Component { mainColor={color} secondaryColor={getComplementaryColor(tinycolor(color))} // Show actual stop name on hover for easy identification. + thick={activeStopId === id} title={getStopName(stop)} > {stopIcon} @@ -119,7 +120,7 @@ class EnhancedStopMarker extends Component { // Instead, we want the tip of the caret at the bottom center of the bubble // to be at the stop location. // Add some margins so the stop marker (which may be unintentionally offset) remains visible. - iconAnchor: [bubblePixels / 2 + 4, bubblePixels + caretVisibleHeight + 8] + iconAnchor: [bubblePixels(this.props) / 2 + 4, bubblePixels(this.props) + caretVisibleHeight + 8] }) return (
improvement(enhanced-stop-marker): thicken active stop border
opentripplanner_otp-react-redux
train
9183eaf41f558da51dc80eb3fee430dd4038fdbd
diff --git a/Kwf/Component/View/Helper/Component.php b/Kwf/Component/View/Helper/Component.php index <HASH>..<HASH> 100644 --- a/Kwf/Component/View/Helper/Component.php +++ b/Kwf/Component/View/Helper/Component.php @@ -6,7 +6,7 @@ class Kwf_Component_View_Helper_Component extends Kwf_Component_View_Renderer if (!$component) return ''; $plugins = self::_getGroupedViewPlugins($component->componentClass); return $this->_getRenderPlaceholder( - $component->componentId, array(), null, null, $plugins + $component->componentId, array(), null, $plugins ); } diff --git a/Kwf/Component/View/Renderer.php b/Kwf/Component/View/Renderer.php index <HASH>..<HASH> 100644 --- a/Kwf/Component/View/Renderer.php +++ b/Kwf/Component/View/Renderer.php @@ -20,18 +20,12 @@ abstract class Kwf_Component_View_Renderer extends Kwf_Component_View_Helper_Abs return $plugins; } - protected function _getRenderPlaceholder($componentId, $config = array(), $value = null, $type = null, $plugins = array()) + protected function _getRenderPlaceholder($componentId, $config = array(), $value = null, $plugins = array()) { //is caching possible for this type? - if (!$type) { - $canBeIncludedInFullPageCache = $this->enableCache(); - } else { - $class = 'Kwf_Component_View_Helper_' . ucfirst($type); - $helper = new $class(); - $canBeIncludedInFullPageCache = $helper->enableCache(); - } + $canBeIncludedInFullPageCache = $this->enableCache(); - if (!$type) $type = $this->_getType(); + $type = $this->_getType(); if ($canBeIncludedInFullPageCache) { //is the view cache enabled for this component?
remove type parameter, now unused; the current helper type will always be used as the type
koala-framework_koala-framework
train
76beb98546791e2ff2cbb03ba6aae239b3c5610d
diff --git a/lib/app.js b/lib/app.js index <HASH>..<HASH> 100644 --- a/lib/app.js +++ b/lib/app.js @@ -85,11 +85,6 @@ module.exports = function(tilelive, options) { uri = clone(uri); } - if (uri.protocol === "mapnik:") { - // disable mapnik's internal cache - uri.query.internal_cache = false; - } - tilePattern = tilePath .replace(/\.(?!.*\.)/, ":scale(" + SCALE_PATTERN + ")?.") .replace(/\./g, "\.")
Move Mapnik cache bypass to tilelive-cache
mojodna_tessera
train
e1d3158d162c35d4b8e7b65840f9ac347199be63
diff --git a/src/Illuminate/Collections/Collection.php b/src/Illuminate/Collections/Collection.php index <HASH>..<HASH> 100644 --- a/src/Illuminate/Collections/Collection.php +++ b/src/Illuminate/Collections/Collection.php @@ -873,36 +873,6 @@ class Collection implements ArrayAccess, Enumerable } /** - * Reduce the collection to a single value. - * - * @param callable $callback - * @param mixed $initial - * @return mixed - */ - public function reduce(callable $callback, $initial = null) - { - return array_reduce($this->items, $callback, $initial); - } - - /** - * Reduce an associative collection to a single value. - * - * @param callable $callback - * @param mixed $initial - * @return mixed - */ - public function reduceWithKeys(callable $callback, $initial = null) - { - $result = $initial; - - foreach ($this->items as $key => $value) { - $result = $callback($result, $value, $key); - } - - return $result; - } - - /** * Replace the collection items with the given items. * * @param mixed $items diff --git a/src/Illuminate/Collections/LazyCollection.php b/src/Illuminate/Collections/LazyCollection.php index <HASH>..<HASH> 100644 --- a/src/Illuminate/Collections/LazyCollection.php +++ b/src/Illuminate/Collections/LazyCollection.php @@ -828,42 +828,6 @@ class LazyCollection implements Enumerable } /** - * Reduce the collection to a single value. - * - * @param callable $callback - * @param mixed $initial - * @return mixed - */ - public function reduce(callable $callback, $initial = null) - { - $result = $initial; - - foreach ($this as $value) { - $result = $callback($result, $value); - } - - return $result; - } - - /** - * Reduce an associative collection to a single value. - * - * @param callable $callback - * @param mixed $initial - * @return mixed - */ - public function reduceWithKeys(callable $callback, $initial = null) - { - $result = $initial; - - foreach ($this as $key => $value) { - $result = $callback($result, $value, $key); - } - - return $result; - } - - /** * Replace the collection items with the given items. * * @param mixed $items diff --git a/src/Illuminate/Collections/Traits/EnumeratesValues.php b/src/Illuminate/Collections/Traits/EnumeratesValues.php index <HASH>..<HASH> 100644 --- a/src/Illuminate/Collections/Traits/EnumeratesValues.php +++ b/src/Illuminate/Collections/Traits/EnumeratesValues.php @@ -717,6 +717,42 @@ trait EnumeratesValues } /** + * Reduce the collection to a single value. + * + * @param callable $callback + * @param mixed $initial + * @return mixed + */ + public function reduce(callable $callback, $initial = null) + { + $result = $initial; + + foreach ($this as $key => $value) { + $result = $callback($result, $value, $key); + } + + return $result; + } + + /** + * Reduce an associative collection to a single value. + * + * @param callable $callback + * @param mixed $initial + * @return mixed + */ + public function reduceWithKeys(callable $callback, $initial = null) + { + $result = $initial; + + foreach ($this as $key => $value) { + $result = $callback($result, $value, $key); + } + + return $result; + } + + /** * Create a collection of all elements that do not pass a given truth test. 
* * @param callable|mixed $callback diff --git a/tests/Support/SupportCollectionTest.php b/tests/Support/SupportCollectionTest.php index <HASH>..<HASH> 100755 --- a/tests/Support/SupportCollectionTest.php +++ b/tests/Support/SupportCollectionTest.php @@ -3590,6 +3590,14 @@ class SupportCollectionTest extends TestCase $this->assertEquals(6, $data->reduce(function ($carry, $element) { return $carry += $element; })); + + $data = new $collection([ + 'foo' => 'bar', + 'baz' => 'qux', + ]); + $this->assertEquals('foobarbazqux', $data->reduce(function ($carry, $element, $key) { + return $carry .= $key.$element; + })); } /**
[8.x] Pass $key to closure in Collection and LazyCollection's reduce method as well (#<I>) * add reduce with keys to collections * add reduce with keys to lazy collections * add test for reduce with keys * fix style * merge reduceWithKeys into the existing reduce method * remove reduce and reduceWithKeys from Collection * remove reduce and reduceWithKeys from LazyCollection * add reduce and reduceWithKeys to EnumeratesValues * add test for reduce with keys and reduceWithKeys
laravel_framework
train
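For readers outside PHP, the behaviour being added, a reducer that also receives each element's key, maps directly onto iterating dict items. A small Python analogue of the new test case:

    from functools import reduce

    data = {'foo': 'bar', 'baz': 'qux'}

    # The reducer sees (carry, (key, value)), the moral equivalent of Laravel
    # now passing $key as a third argument to the reduce callback.
    result = reduce(lambda carry, kv: carry + kv[0] + kv[1], data.items(), '')
    print(result)       # foobarbazqux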
ea2355b972fbf766e85505fc329a6c3a8126f719
diff --git a/lib/json_schemer/schema/base.rb b/lib/json_schemer/schema/base.rb index <HASH>..<HASH> 100644 --- a/lib/json_schemer/schema/base.rb +++ b/lib/json_schemer/schema/base.rb @@ -36,7 +36,7 @@ module JSONSchemer keywords: nil, ref_resolver: DEFAULT_REF_RESOLVER ) - raise InvalidSymbolKey, 'schemas must use string keys' if schema.is_a?(Hash) && !schema.first.first.is_a?(String) + raise InvalidSymbolKey, 'schemas must use string keys' if schema.is_a?(Hash) && !schema.empty? && !schema.first.first.is_a?(String) @root = schema @format = format @insert_property_defaults = insert_property_defaults diff --git a/test/json_schemer_test.rb b/test/json_schemer_test.rb index <HASH>..<HASH> 100644 --- a/test/json_schemer_test.rb +++ b/test/json_schemer_test.rb @@ -142,6 +142,17 @@ class JSONSchemerTest < Minitest::Test } end + def test_it_does_not_fail_when_the_schema_is_completely_empty + schema = {} + data = { + 'a' => 1 + } + assert JSONSchemer.schema(schema).valid?(data) + assert data == { + 'a' => 1 + } + end + def test_it_allows_disabling_format schema = JSONSchemer.schema( { 'format' => 'email' },
Fix an issue where an empty schema causes a crash
davishmcclurg_json_schemer
train
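The crash comes from peeking at the first key of a hash that has none; the guard generalizes to any "inspect the first element" check. A Python sketch of the same defensive shape:

    def assert_string_keys(schema):
        # Check emptiness first: an empty dict has no first key to inspect,
        # and next(iter({})) would raise StopIteration.
        if isinstance(schema, dict) and schema and not isinstance(next(iter(schema)), str):
            raise TypeError('schemas must use string keys')

    assert_string_keys({})                      # empty schema now accepted
    assert_string_keys({'type': 'integer'})     # string keys: fine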
d2a7bf0f1383e042cc6f6ba75392f6f9b898961c
diff --git a/api/src/main/java/com/github/commonsrdf/api/BlankNode.java b/api/src/main/java/com/github/commonsrdf/api/BlankNode.java index <HASH>..<HASH> 100644 --- a/api/src/main/java/com/github/commonsrdf/api/BlankNode.java +++ b/api/src/main/java/com/github/commonsrdf/api/BlankNode.java @@ -18,11 +18,11 @@ package com.github.commonsrdf.api; * Blank Node</a>, as defined by <a href= * "http://www.w3.org/TR/rdf11-concepts/#section-blank-nodes" >RDF-1.1 Concepts * and Abstract Syntax</a>, a W3C Recommendation published on 25 February 2014.<br> - * + * <p> * Note that: Blank nodes are disjoint from IRIs and literals. Otherwise, the * set of possible blank nodes is arbitrary. RDF makes no reference to any * internal structure of blank nodes. - * + * <p> * Also note that: Blank node identifiers are local identifiers that are used in * some concrete RDF syntaxes or RDF store implementations. They are always * locally scoped to the file or RDF store, and are not persistent or portable @@ -46,17 +46,17 @@ public interface BlankNode extends BlankNodeOrIRI { * >label</a> for the blank node. This is not a serialization/syntax label. * It should be uniquely identifying within the local scope it is created in * but has no uniqueness guarantees other than that. - * + * <p> * In particular, the existence of two objects of type {@link BlankNode} * with the same value returned from {@link #internalIdentifier()} are not * equivalent unless they are known to have been created in the same local - * scope. - * + * scope (see {@link #equals(Object)}) + * <p> * An example of a local scope may be an instance of a Java Virtual Machine * (JVM). In the context of a JVM instance, an implementor may support * insertion and removal of {@link Triple} objects containing Blank Nodes * without modifying the blank node labels. - * + * <p> * Another example of a local scope may be a <a * href="http://www.w3.org/TR/rdf11-concepts/#section-rdf-graph">Graph</a> * or <a @@ -67,11 +67,16 @@ public interface BlankNode extends BlankNodeOrIRI { * guarantee that it is unique for the JVM instance. In this case, the * implementor may support a mechanism to provide a mapping for blank nodes * between Graph or Dataset instances to guarantee their uniqueness. - * + * <p> * If implementors support <a * href="http://www.w3.org/TR/rdf11-concepts/#section-skolemization" * >Skolemisation</a>, they may map instances of {@link BlankNode} objects * to {@link IRI} objects to reduce scoping issues. + * <p> + * It is not a requirement for the internal identifier to be a part of the + * {@link #ntriplesString()}, except that two BlankNode instances with the + * same internalIdentifier() and same local scope should have the same + * {@link #ntriplesString()}. * * @return An internal, system identifier for the {@link BlankNode}. */
Clarify that internalIdentifier need not match ntriplesString
commons-rdf_commons-rdf
train
8fbb98c8ff6206d5ab59ca27cb7f83807b82a6d9
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -12,7 +12,7 @@ setup( packages=['jsonrpcclient'], package_data={'jsonrpcclient': ['response-schema.json']}, install_requires=['requests', 'jsonschema'], - version='1.0.3', + version='1.0.4', description='JSON-RPC 2.0 client library for Python 3', long_description=long_description, author='Beau Barker',
Updated setup.py to new version <I>
bcb_jsonrpcclient
train
5669c599c222713757e6107348412da9f74ebd25
diff --git a/test/unit/exception_handling/log_error_stub_test.rb b/test/unit/exception_handling/log_error_stub_test.rb index <HASH>..<HASH> 100644 --- a/test/unit/exception_handling/log_error_stub_test.rb +++ b/test/unit/exception_handling/log_error_stub_test.rb @@ -3,25 +3,25 @@ require File.expand_path('../../test_helper', __dir__) module ExceptionHandling - class LogErrorStubTest < ActiveSupport::TestCase + describe LogErrorStub do include LogErrorStub context "while running tests" do - setup do + before do setup_log_error_stub end - teardown do + after do teardown_log_error_stub end - should "raise an error when log_error and log_warning are called" do + it "raise an error when log_error and log_warning are called" do begin ExceptionHandling.log_error("Something happened") flunk rescue Exception => ex # LogErrorStub::UnexpectedExceptionLogged => ex - assert ex.to_s.starts_with?("StandardError: Something happened"), ex.to_s + expect(ex.to_s.starts_with?("StandardError: Something happened")).to be_truthy end begin @@ -31,16 +31,16 @@ module ExceptionHandling begin ExceptionHandling.log_error(ex) rescue LogErrorStub::UnexpectedExceptionLogged => ex - assert ex.to_s.starts_with?("RaisedError: This should raise"), ex.to_s + expect(ex.to_s.starts_with?("RaisedError: This should raise")).to be_truthy end end end - should "allow for the regex specification of an expected exception to be ignored" do + it "allow for the regex specification of an expected exception to be ignored" do exception_pattern = /StandardError: This is a test error/ - assert_nil exception_whitelist # test that exception expectations are cleared + expect(exception_whitelist).to be_nil # test that exception expectations are cleared expects_exception(exception_pattern) - assert_equal exception_pattern, exception_whitelist[0][0] + expect(exception_whitelist[0][0]).to eq(exception_pattern) begin ExceptionHandling.log_error("This is a test error") rescue StandardError @@ -48,11 +48,11 @@ module ExceptionHandling end end - should "allow for the string specification of an expected exception to be ignored" do + it "allow for the string specification of an expected exception to be ignored" do exception_pattern = "StandardError: This is a test error" - assert_nil exception_whitelist # test that exception expectations are cleared + expect(exception_whitelist).to be_nil # test that exception expectations are cleared expects_exception(exception_pattern) - assert_equal exception_pattern, exception_whitelist[0][0] + expect(exception_whitelist[0][0]).to eq(exception_pattern) begin ExceptionHandling.log_error("This is a test error") rescue StandardError @@ -60,9 +60,9 @@ module ExceptionHandling end end - should "allow multiple errors to be ignored" do + it "allow multiple errors to be ignored" do class IgnoredError < StandardError; end - assert_nil exception_whitelist # test that exception expectations are cleared + expect(exception_whitelist).to be_nil # test that exception expectations are cleared expects_exception(/StandardError: This is a test error/) expects_exception(/IgnoredError: This should be ignored/) ExceptionHandling.log_error("This is a test error") @@ -73,7 +73,7 @@ module ExceptionHandling end end - should "expect exception twice if declared twice" do + it "expect exception twice if declared twice" do expects_exception(/StandardError: ERROR: I love lamp/) expects_exception(/StandardError: ERROR: I love lamp/) ExceptionHandling.log_error("ERROR: I love lamp") @@ -82,7 +82,7 @@ module ExceptionHandling end context 
"teardown_log_error_stub" do - should "support MiniTest framework for adding a failure" do + it "support MiniTest framework for adding a failure" do expects_exception(/foo/) mock(self).is_mini_test?.returns { true } @@ -93,7 +93,7 @@ module ExceptionHandling self.exception_whitelist = nil end - should "support Test::Unit framework for adding a failure" do + it "support Test::Unit framework for adding a failure" do expects_exception(/foo/) mock(self).is_mini_test?.returns { false }
TECH-<I> change log_error_stub_test.rb to use RSpec
Invoca_exception_handling
train
7638c2b8b0da137a36755acdcf1d0dcc9c281084
diff --git a/asv/commands/compare.py b/asv/commands/compare.py index <HASH>..<HASH> 100644 --- a/asv/commands/compare.py +++ b/asv/commands/compare.py @@ -163,14 +163,18 @@ class Compare(Command): raise util.UserError( "Results for machine '{0} not found".format(machine)) + commit_names = {hash_1: repo.get_name_from_hash(hash_1), + hash_2: repo.get_name_from_hash(hash_2)} + cls.print_table(conf, hash_1, hash_2, factor=factor, split=split, only_changed=only_changed, sort=sort, - machine=machine, env_names=env_names) + machine=machine, env_names=env_names, commit_names=commit_names) @classmethod def print_table(cls, conf, hash_1, hash_2, factor, split, resultset_1=None, resultset_2=None, machine=None, - only_changed=False, sort='name', use_stats=True, env_names=None): + only_changed=False, sort='name', use_stats=True, env_names=None, + commit_names=None): results_1 = {} results_2 = {} stats_1 = {} @@ -178,6 +182,9 @@ class Compare(Command): versions_1 = {} versions_2 = {} + if commit_names is None: + commit_names = {} + def results_default_iter(commit_hash): for result in iter_results_for_machine_and_hash( conf.results_dir, machine, commit_hash): @@ -360,6 +367,21 @@ class Compare(Command): color_print(" before after ratio") color_print(" [{0:8s}] [{1:8s}]".format(hash_1[:8], hash_2[:8])) + name_1 = commit_names.get(hash_1) + if name_1: + name_1 = '<{0}>'.format(name_1) + else: + name_1 = '' + + name_2 = commit_names.get(hash_2) + if name_2: + name_2 = '<{0}>'.format(name_2) + else: + name_2 = '' + + if name_1 or name_2: + color_print(" {0:10s} {1:10s}".format(name_1, name_2)) + if sort == 'ratio': bench[key].sort(key=lambda v: v[3], reverse=True) elif sort == 'name': diff --git a/asv/commands/continuous.py b/asv/commands/continuous.py index <HASH>..<HASH> 100644 --- a/asv/commands/continuous.py +++ b/asv/commands/continuous.py @@ -126,11 +126,15 @@ class Continuous(Command): stats = result.get_result_stats(name, params) yield name, params, value, stats, version, machine_name, env.name + commit_names = {parent: repo.get_name_from_hash(parent), + head: repo.get_name_from_hash(head)} + status = Compare.print_table(conf, parent, head, resultset_1=results_iter(parent), resultset_2=results_iter(head), factor=factor, split=split, - only_changed=only_changed, sort=sort) + only_changed=only_changed, sort=sort, + commit_names=commit_names) worsened, improved = status color_print("") diff --git a/test/test_compare.py b/test/test_compare.py index <HASH>..<HASH> 100644 --- a/test/test_compare.py +++ b/test/test_compare.py @@ -105,6 +105,7 @@ x 1.00s 3.00s 3.00 time_with_version_mismatch_other REFERENCE_ONLY_CHANGED = """ before after ratio [22b920c6] [fcf8c079] + <name1> <name2> ! n/a failed n/a params_examples.ParamSuite.track_value ! 454μs failed n/a time_coordinates.time_latitude ! 3.00s failed n/a time_other.time_parameterized(3) @@ -161,7 +162,8 @@ def test_compare(capsys, tmpdir): # Check print_table output as called from Continuous status = Compare.print_table(conf, '22b920c6', 'fcf8c079', factor=2, machine='cheetah', split=False, only_changed=True, sort='ratio', - env_names=["py2.7-numpy1.8"]) + env_names=["py2.7-numpy1.8"], + commit_names={'22b920c6': 'name1', 'fcf8c079': 'name2'}) worsened, improved = status assert worsened assert improved
compare/continuous: display commit names in table printout, if available
airspeed-velocity_asv
train
ee22eb506f03fc9dba8893be64b828ce5d7d4e26
diff --git a/UITable/index.js b/UITable/index.js index <HASH>..<HASH> 100644 --- a/UITable/index.js +++ b/UITable/index.js @@ -214,9 +214,9 @@ class UITable extends UIView { Math.abs(this.yNext - this.yLowerBound) / this.cellHeight ); - if (this.cache_nRowsToShift + this.rowEndIndex > this.props.totalRows) { + if (this.cache_nRowsToShift + this.rowEndIndex + 1 > this.props.totalRows) { /* more rows than there is data available, truncate */ - this.cache_nRowsToShift = this.props.totalRows - this.rowEndIndex; + this.cache_nRowsToShift = this.props.totalRows - this.rowEndIndex + 1; } if (this.cache_nRowsToShift > 0) { @@ -346,7 +346,8 @@ class UITable extends UIView { this.yNext = this.yLowerBound; } - this.xScrollNubPosition = (Math.abs(this.xNext) / (this.rowWidth - this.containerWidth)) * (this.xScrollerWidth - this.state.xScrollerNubSize); + this.xScrollNubPosition = (Math.abs(this.xNext) / (this.rowWidth - this.containerWidth)) + * (this.xScrollerWidth - this.state.xScrollerNubSize); if (this.xScrollNubPosition + this.state.xScrollerNubSize > this.xScrollerWidth) { this.xScrollNubPosition = this.xScrollerWidth - this.state.xScrollerNubSize;
UITable: fix last row sometimes not being displayed The perils of zero-based indexing systems.
enigma-io_boundless
train
fecbbbb6dd1c39c252f7437ae59c4915b9fee653
diff --git a/yotta/lib/cmakegen.py b/yotta/lib/cmakegen.py index <HASH>..<HASH> 100644 --- a/yotta/lib/cmakegen.py +++ b/yotta/lib/cmakegen.py @@ -519,7 +519,7 @@ class CMakeGen(object): for root, dires, files in os.walk(os.path.join(component.path, 'source')): for f in files: name, ext = os.path.splitext(f) - if ext.lower() == '.cmake': + if ext.lower() == '.cmake' and not component.ignores(os.path.relpath(os.path.join(root, f), component.path)): cmake_files.append(os.path.join(root, f)) dummy_template = jinja_environment.get_template('dummy_CMakeLists.txt') @@ -587,7 +587,7 @@ class CMakeGen(object): for root, dires, files in os.walk(os.path.join(component.path, dirname)): for f in files: name, ext = os.path.splitext(f) - if ext.lower() == '.cmake': + if ext.lower() == '.cmake' and not component.ignores(os.path.relpath(os.path.join(root, f), component.path)): cmake_files.append(os.path.join(root, f)) test_template = jinja_environment.get_template('test_CMakeLists.txt') @@ -637,7 +637,7 @@ class CMakeGen(object): for root, dires, files in os.walk(os.path.join(component.path, dirname)): for f in files: name, ext = os.path.splitext(f) - if ext.lower() == '.cmake': + if ext.lower() == '.cmake' and not component.ignores(os.path.relpath(os.path.join(root, f), component.path)): cmake_files.append(os.path.join(root, f)) subdir_template = jinja_environment.get_template('subdir_CMakeLists.txt') diff --git a/yotta/test/cli/test_build.py b/yotta/test/cli/test_build.py index <HASH>..<HASH> 100644 --- a/yotta/test/cli/test_build.py +++ b/yotta/test/cli/test_build.py @@ -147,6 +147,49 @@ int foo(){ 'test/g/a/a/b/bar.c':'#include "stdio.h"\nint bar(){ printf("bar!\\n"); return 7; }' } +Test_Ignore_Custom_Cmake = { +'module.json':'''{ + "name": "cmake-ignore-test", + "version": "0.0.0", + "keywords": [], + "author": "", + "license": "Apache-2.0", + "dependencies": {} +}''', +'.yotta_ignore':''' +./source/CMakeLists.txt +CMakeLists.txt +./source/empty.c +./source/ignoreme.cmake +./test/CMakeLists.txt +./test/ignoreme.cmake +''', +'source/CMakeLists.txt':''' +message("source CMakeLists.txt should be ignored!") +add_library(cmake-ignore-test "empty.c") +''', +'CMakeLists.txt': ''' +message("root cmakelist should be ignored!") +add_subdirectory(source) +add_subdirectory(test) +''', +'source/empty.c': ''' +int foo(){ } +''', +'source/ignoreme.cmake': ''' +message(".cmake file should be ignored!") +''', +'test/CMakeLists.txt': ''' +message("test CMakeLists.txt file should be ignored!") +''', +'test/ignoreme.cmake': ''' +message("test .cmake file should be ignored!") +''', +'test/empty.c': ''' +int main(){ return 0; } +''' +} + class TestCLIBuild(unittest.TestCase): @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") @@ -244,6 +287,13 @@ class TestCLIBuild(unittest.TestCase): stdout = self.runCheckCommand(['--target', util.nativeTarget(), 'build'], test_dir) util.rmRf(test_dir) + @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") + def test_ignoreCustomCMake(self): + test_dir = util.writeTestFiles(Test_Ignore_Custom_Cmake, True) + stdout = self.runCheckCommand(['--target', util.nativeTarget(), 'build'], test_dir) + self.assertNotIn('should be ignored', stdout) + util.rmRf(test_dir) + def runCheckCommand(self, args, test_dir): stdout, stderr, statuscode = cli.run(args, cwd=test_dir) if statuscode != 0:
Ensure custom .cmake files are ignored * ignore .cmake files in dummy libs, normal libs/exes, and test directories * add test for ignoring of .cmake and CMakeLists.txt files
ARMmbed_yotta
train
13021409737b977771d6c79a3a084aac1b0d24a4
diff --git a/blockstack/lib/rpc.py b/blockstack/lib/rpc.py index <HASH>..<HASH> 100644 --- a/blockstack/lib/rpc.py +++ b/blockstack/lib/rpc.py @@ -1224,6 +1224,20 @@ class BlockstackAPIEndpointHandler(SimpleHTTPRequestHandler): self._reply_json({'status': 'alive', 'version': VERSION}) return + + def GET_getinfo(self, path_info): + """ + getinfo + """ + blockstackd_url = get_blockstackd_url() + info = blockstackd_client.getinfo(hostport=blockstackd_url) + if json_is_error(info): + # error + status_code = info.get('http_status', 502) + return self._reply_json({'error': info['error']}, status_code=status_code) + + return self._reply_json(info) + def _dispatch(self, method_name): """ @@ -1237,6 +1251,11 @@ class BlockstackAPIEndpointHandler(SimpleHTTPRequestHandler): 'GET': self.GET_ping, }, }, + r'^/v1/info$': { + 'routes': { + 'GET': self.GET_getinfo, + }, + }, r'^/v1/addresses/({}{{1,256}})/({}{{1,40}})$'.format(URLENCODING_CLASS, URLENCODING_CLASS): { 'routes': { 'GET': self.GET_names_owned_by_address,
expose getinfo as a RESTful endpoint
blockstack_blockstack-core
train
dfc54fa74725d7c74a3a654fdaf81acc962ead9b
diff --git a/lib/optionscrapper.rb b/lib/optionscrapper.rb index <HASH>..<HASH> 100644 --- a/lib/optionscrapper.rb +++ b/lib/optionscrapper.rb @@ -22,7 +22,7 @@ module OptionScrapper def self.version OptionScrapper::VERSION - end + end def self.new &block OptionScrapper::OptParser::new do |o| diff --git a/lib/optionscrapper/optparser.rb b/lib/optionscrapper/optparser.rb index <HASH>..<HASH> 100644 --- a/lib/optionscrapper/optparser.rb +++ b/lib/optionscrapper/optparser.rb @@ -12,7 +12,7 @@ module OptionScrapper class OptParser include OptionScrapper::Parsing include OptionScrapper::Usage - + alias_method :newline, :puts def initialize &block @@ -50,7 +50,7 @@ module OptionScrapper end def on *args, &block - # step: we are creating an array of all the + # step: we are creating an array of all the parse_option_switches *args do |x| @cursor[:switches][x] = true end @@ -78,6 +78,7 @@ module OptionScrapper def initialize_parsers # step: we create the global and inject the global parser into the parser hash parsers[:global] = parser( 'global' ) + parsers[:global][:parser].program_name = program_name # step: set the cursor to global - i.e. all options are initially global @cursor = parsers[:global] # step: inject a default help options for global diff --git a/lib/optionscrapper/parsing.rb b/lib/optionscrapper/parsing.rb index <HASH>..<HASH> 100644 --- a/lib/optionscrapper/parsing.rb +++ b/lib/optionscrapper/parsing.rb @@ -99,5 +99,9 @@ module OptionScrapper end end end + + def program_name + File.basename( $0 ) + end end end diff --git a/lib/optionscrapper/version.rb b/lib/optionscrapper/version.rb index <HASH>..<HASH> 100644 --- a/lib/optionscrapper/version.rb +++ b/lib/optionscrapper/version.rb @@ -5,5 +5,5 @@ # vim:ts=2:sw=2:et # module OptionScrapper - VERSION = "0.0.7" + VERSION = "0.0.8" end
[changes]

- adding the program_name to the base of the command
- removing the trailing spaces in the code
gambol99_optionscrapper
train
c6777039d980026c0b921e1ebf9e2725d8846fa6
diff --git a/examples/mlgtv/main.rb b/examples/mlgtv/main.rb index <HASH>..<HASH> 100644 --- a/examples/mlgtv/main.rb +++ b/examples/mlgtv/main.rb @@ -42,9 +42,13 @@ loop do "Call of Duty: Modern Warfare Remastered", "Call of Duty 4: Modern Warfare", ].each do |game| - (JSON.parse( - NetHTTPUtils.request_data "https://api.twitch.tv/kraken/streams?game=#{CGI::escape game}&access_token=#{File.read("twitch.token").strip}&client_id=#{File.read("client.id").strip}&channel=Ricky,ACHES,Lacefield,Clayster,Enable,Zoomaa,Attach,TheFEARS,PHiZZURP,MiRx1,SaintsRF,StuDyy,SpaceLyTV,NAMELESS,Scumperjumper,FORMAL,Crimsix,Karma,Loony,Slacked,Octane,MJChino,Diabolic_TV,ImTheIvy,Senderxz,Jkap,John,SlasheRAL,Apathy,ColtHavok,MikeSwarley,ParasiteTV,TyreeLegal,Silly,Blfire,methodz,TwiZzyTV,Mochila,Remy,Xotic16,AquA,Faccento,Nagafen,Tylerfelo,TheoryCoD,ColeChanTV,happyy97,goonjar,Burns,Dedo,Neslo,TeeCM,K1lla93,NeLsoNNaTeR,ProoFy,Whea7s,MBoZe,Merk,Nadeshot,ReeP,Sharp,TeePee,Braaain2015,Nolsonn,QwiKeRTHaNu,Zedenyer1,Jurd,Tommey,Swanny,MadCatEU,Rated_EU1,BsportJoshh,Sy_Vortex,TheMarkyB,Peatie95,urbandm,TreiZer0,iDqvee,Tojor,MethodZ_TV,Gotaga,WailersWL,TCM_Moose,RampageSkrapz,Reedy,fighta71,Swiftazor,BacabecNZ,Zeuss_Gaming,Hopeyy,GuydraCOD,mattmrx,Maven,CouRageJD,Revan,BriceyHD,Benson,PHILWHI7" - )["streams"] || []).each do |channel| + (begin + JSON.parse NetHTTPUtils.request_data "https://api.twitch.tv/kraken/streams?game=#{CGI::escape game}&access_token=#{File.read("twitch.token").strip}&client_id=#{File.read("client.id").strip}&channel=Ricky,ACHES,Lacefield,Clayster,Enable,Zoomaa,Attach,TheFEARS,PHiZZURP,MiRx1,SaintsRF,StuDyy,SpaceLyTV,NAMELESS,Scumperjumper,FORMAL,Crimsix,Karma,Loony,Slacked,Octane,MJChino,Diabolic_TV,ImTheIvy,Senderxz,Jkap,John,SlasheRAL,Apathy,ColtHavok,MikeSwarley,ParasiteTV,TyreeLegal,Silly,Blfire,methodz,TwiZzyTV,Mochila,Remy,Xotic16,AquA,Faccento,Nagafen,Tylerfelo,TheoryCoD,ColeChanTV,happyy97,goonjar,Burns,Dedo,Neslo,TeeCM,K1lla93,NeLsoNNaTeR,ProoFy,Whea7s,MBoZe,Merk,Nadeshot,ReeP,Sharp,TeePee,Braaain2015,Nolsonn,QwiKeRTHaNu,Zedenyer1,Jurd,Tommey,Swanny,MadCatEU,Rated_EU1,BsportJoshh,Sy_Vortex,TheMarkyB,Peatie95,urbandm,TreiZer0,iDqvee,Tojor,MethodZ_TV,Gotaga,WailersWL,TCM_Moose,RampageSkrapz,Reedy,fighta71,Swiftazor,BacabecNZ,Zeuss_Gaming,Hopeyy,GuydraCOD,mattmrx,Maven,CouRageJD,Revan,BriceyHD,Benson,PHILWHI7" + rescue JSON::ParserError + puts "JSON::ParserError" + sleep 60 + retry + end["streams"] || []).each do |channel| list << "* [](#twitch) [](#live) [**#{ channel["channel"]["display_name"] }**](#{
caught JSON::ParserError
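A hedged Python rendering of the same retry-on-parse-failure pattern (the URL, the pause length, and the "streams" key are placeholders standing in for the Ruby original):

```python
import json
import time
import urllib.request

def fetch_streams(url, pause=60):
    """Fetch JSON, retrying when the body fails to parse (e.g. an HTML error page)."""
    while True:
        body = urllib.request.urlopen(url).read()
        try:
            return json.loads(body).get("streams") or []
        except json.JSONDecodeError:
            print("JSONDecodeError")  # transient API hiccup: wait and retry
            time.sleep(pause)
```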
Nakilon_reddit_bot
train
6db0f57fb222ea3a81c3f38ac925ba0943592433
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -18,7 +18,7 @@ setup( description='Show all LogEntries in the Django admin site.', long_description=open('README.rst').read(), install_requires=[ - 'Django>=1.4', + 'Django>=1.7', ], classifiers=[ 'Development Status :: 3 - Alpha', @@ -34,5 +34,6 @@ setup( 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', + 'Programming Language :: Python :: 3.5', ], )
Upgrade Django version in setup.py, add Python <I> classifiers
yprez_django-logentry-admin
train
0f570fcd76a6622491632477330ad690a0a6ea9d
diff --git a/src/Laracasts/TestDummy/Builder.php b/src/Laracasts/TestDummy/Builder.php index <HASH>..<HASH> 100644 --- a/src/Laracasts/TestDummy/Builder.php +++ b/src/Laracasts/TestDummy/Builder.php @@ -229,7 +229,7 @@ class Builder { return $this->relationshipIds[$relationshipType]; } - return $this->relationshipIds[$relationshipType] = $this->persist($relationshipType)->id; + return $this->relationshipIds[$relationshipType] = $this->persist($relationshipType)->getKey(); } /**
Use `Model::getKey()` to get the ID for relationships.
laracasts_TestDummy
train
f0c2586a1e177ed5a3b478db1e4e5a454b450c69
diff --git a/store/search/definition.js b/store/search/definition.js index <HASH>..<HASH> 100644 --- a/store/search/definition.js +++ b/store/search/definition.js @@ -7,7 +7,7 @@ module.exports = { facet:{ domain: "DO_TEXT" }, - list:{ + map:{ domain: "DO_TEXT" }, pageInfos: { diff --git a/store/search/index.js b/store/search/index.js index <HASH>..<HASH> 100644 --- a/store/search/index.js +++ b/store/search/index.js @@ -6,16 +6,13 @@ var keys = require('lodash/object/keys'); var intersection = require('lodash/array/intersection'); var Immutable = require('immutable'); var isArray = require('lodash/lang/isArray'); +var isEqual = require('lodash/lang/isEqual'); +var merge = require('lodash/object/merge'); /** * Default configuration of the search. * @type {Object} */ -/*var defaultSearchConfig = { - facet:"facet", - list:"list", - pageInfos: "pageInfos" - };*/ class SearchStore extends CoreStore { constructor(conf) { @@ -33,9 +30,10 @@ class SearchStore extends CoreStore { var previousData = this.data.toJS(); var processedData = assign({}, previousData, newData); - if(isArray(newData.list)) { + if (keys(newData.map).length === 1) { // Only one type of result, compute the total pages to trigger the infinite scroll behaviour if (this._isSameSearchContext(previousData, newData)) { - processedData.list = previousData.list.concat(newData.list); + var key = keys(previousData.map)[0]; + processedData.map[key] = previousData.map[key].concat(newData.map[key]); } //add calculated fields on data @@ -47,6 +45,7 @@ class SearchStore extends CoreStore { } } } + var data = {}; for(var key in processedData){ data[key] = Immutable[isArray(processedData[key]) ? 'List' : 'Map'](processedData[key]); @@ -60,7 +59,7 @@ class SearchStore extends CoreStore { */ _isSameSearchContext(previousData, newData) { if(newData.pageInfos) { - return newData.pageInfos.currentPage != 1; + return newData.pageInfos.currentPage != 1 && isEqual(keys(previousData.map), keys(newData.map)); } return false; }
[search-store] new contract: only maps
KleeGroup_focus-core
train
72ba73d9c8d7b31efb6c1814372bb7617b6d54e9
diff --git a/distutilazy/test.py b/distutilazy/test.py index <HASH>..<HASH> 100644 --- a/distutilazy/test.py +++ b/distutilazy/test.py @@ -97,7 +97,7 @@ class run_tests(Command): try: self.announce("importing {0} as package ...".format(package_name)) importlib.import_module(package_name) - except (ImportError, ValueError) as err: + except (ImportError, ValueError, SystemError) as err: self.announce("failed to import {0}. not a package. {1}".format(package_name, err)) sys.path.insert(0, root) package_name = None @@ -109,8 +109,11 @@ class run_tests(Command): if package_name: modulename = '.' + modulename self.announce("importing module {0} from file {1} ...".format(modulename, filename)) - module = importlib.import_module(modulename, package=package_name) - modules.append(module) + try: + module = importlib.import_module(modulename, package=package_name) + modules.append(module) + except (ImportError, ValueError, SystemError) as err: + self.announce("failed to import {0} from {1}. {2}. skipping this file!".format(modulename, filename, err)) return modules def test_suite_for_modules(self, modules):
Fix errors for Python 3 while importing possible test files in run_tests

Python 3's __pycache__ directory is also a target of the discovery; catching import errors silences the import failures for those files.
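The same defensive import loop, sketched standalone in Python (module names are whatever discovery produced); SystemError joins the catch list because Python 3 raises it for some failed relative imports:

```python
import importlib

def import_test_modules(module_names, package=None):
    """Import candidate test modules, skipping any that fail to import."""
    modules = []
    for name in module_names:
        try:
            modules.append(importlib.import_module(name, package=package))
        except (ImportError, ValueError, SystemError) as err:
            # e.g. a __pycache__ entry or a non-package directory: skip it
            print("failed to import {0}: {1}. skipping this file!".format(name, err))
    return modules
```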
farzadghanei_distutilazy
train
df0d317a64e4a74433359e826bc1d606e050a5ed
diff --git a/container/container.go b/container/container.go index <HASH>..<HASH> 100644 --- a/container/container.go +++ b/container/container.go @@ -400,21 +400,20 @@ func (container *Container) AddMountPointWithVolume(destination string, vol volu func (container *Container) UnmountVolumes(volumeEventLog func(name, action string, attributes map[string]string)) error { var errors []string for _, volumeMount := range container.MountPoints { - // Check if the mountpoint has an ID, this is currently the best way to tell if it's actually mounted - // TODO(cpuguyh83): there should be a better way to handle this - if volumeMount.Volume != nil && volumeMount.ID != "" { - if err := volumeMount.Volume.Unmount(volumeMount.ID); err != nil { - errors = append(errors, err.Error()) - continue - } - volumeMount.ID = "" + if volumeMount.Volume == nil { + continue + } - attributes := map[string]string{ - "driver": volumeMount.Volume.DriverName(), - "container": container.ID, - } - volumeEventLog(volumeMount.Volume.Name(), "unmount", attributes) + if err := volumeMount.Cleanup(); err != nil { + errors = append(errors, err.Error()) + continue + } + + attributes := map[string]string{ + "driver": volumeMount.Volume.DriverName(), + "container": container.ID, } + volumeEventLog(volumeMount.Volume.Name(), "unmount", attributes) } if len(errors) > 0 { return fmt.Errorf("error while unmounting volumes for container %s: %s", container.ID, strings.Join(errors, "; ")) diff --git a/integration-cli/docker_cli_external_volume_driver_unix_test.go b/integration-cli/docker_cli_external_volume_driver_unix_test.go index <HASH>..<HASH> 100644 --- a/integration-cli/docker_cli_external_volume_driver_unix_test.go +++ b/integration-cli/docker_cli_external_volume_driver_unix_test.go @@ -616,3 +616,18 @@ func (s *DockerExternalVolumeSuite) TestExternalVolumeDriverUnmountOnMountFail(c out, _ = s.d.Cmd("run", "-w", "/foo", "-v", "testumount:/foo", "busybox", "true") c.Assert(s.ec.unmounts, checker.Equals, 0, check.Commentf(out)) } + +func (s *DockerExternalVolumeSuite) TestExternalVolumeDriverUnmountOnCp(c *check.C) { + s.d.StartWithBusybox(c) + s.d.Cmd("volume", "create", "-d", "test-external-volume-driver", "--name=test") + + out, _ := s.d.Cmd("run", "-d", "--name=test", "-v", "test:/foo", "busybox", "/bin/sh", "-c", "touch /test && top") + c.Assert(s.ec.mounts, checker.Equals, 1, check.Commentf(out)) + + out, _ = s.d.Cmd("cp", "test:/test", "/tmp/test") + c.Assert(s.ec.mounts, checker.Equals, 2, check.Commentf(out)) + c.Assert(s.ec.unmounts, checker.Equals, 1, check.Commentf(out)) + + out, _ = s.d.Cmd("kill", "test") + c.Assert(s.ec.unmounts, checker.Equals, 2, check.Commentf(out)) +} diff --git a/volume/volume.go b/volume/volume.go index <HASH>..<HASH> 100644 --- a/volume/volume.go +++ b/volume/volume.go @@ -120,6 +120,28 @@ type MountPoint struct { // Sepc is a copy of the API request that created this mount. Spec mounttypes.Mount + + // Track usage of this mountpoint + // Specicially needed for containers which are running and calls to `docker cp` + // because both these actions require mounting the volumes. 
+ active int +} + +// Cleanup frees resources used by the mountpoint +func (m *MountPoint) Cleanup() error { + if m.Volume == nil || m.ID == "" { + return nil + } + + if err := m.Volume.Unmount(m.ID); err != nil { + return errors.Wrapf(err, "error unmounting volume %s", m.Volume.Name()) + } + + m.active-- + if m.active == 0 { + m.ID = "" + } + return nil } // Setup sets up a mount point by either mounting the volume if it is @@ -147,12 +169,16 @@ func (m *MountPoint) Setup(mountLabel string, rootUID, rootGID int) (path string if err != nil { return "", errors.Wrapf(err, "error while mounting volume '%s'", m.Source) } + m.ID = id + m.active++ return path, nil } + if len(m.Source) == 0 { return "", fmt.Errorf("Unable to setup mount point, neither source nor volume defined") } + // system.MkdirAll() produces an error if m.Source exists and is a file (not a directory), if m.Type == mounttypes.TypeBind { // idtools.MkdirAllNewAs() produces an error if m.Source exists and is a file (not a directory)
Add refcount for MountPoint

This makes sure that multiple users of a MountPoint pointer can mount/unmount without affecting each other. Before this PR, if you ran a container (left running) and then did `docker cp`, the MountPoint was mutated when the `docker cp` finished, so the volume driver would not get an Unmount request when the container stopped. Effectively there would be two mounts with only one unmount.
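A toy Python sketch of the refcounting idea (the original is Go, and the volume mount/unmount calls are assumed): setup bumps an active counter, and cleanup only forgets the mount ID once the counter reaches zero, so a second user cannot strand the first.

```python
class MountPoint:
    def __init__(self, volume):
        self.volume = volume  # assumed to expose mount() / unmount(id)
        self.id = ""
        self.active = 0

    def setup(self):
        self.id = self.volume.mount()
        self.active += 1
        return self.id

    def cleanup(self):
        if self.volume is None or not self.id:
            return
        self.volume.unmount(self.id)
        self.active -= 1
        if self.active == 0:
            self.id = ""  # fully released only at refcount zero
```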
moby_moby
train
6e8f66a140e44c42eb1ce53951daa998184e55f0
diff --git a/pgpy/pgp.py b/pgpy/pgp.py index <HASH>..<HASH> 100644 --- a/pgpy/pgp.py +++ b/pgpy/pgp.py @@ -28,7 +28,7 @@ def PGPLoad(pgpbytes): b = [] # now, are there any ASCII PGP blocks at all? - if f.is_ascii(): + if f.is_ascii: # decode/parse ASCII PGP blocks nascii = list(re.finditer(ASCII_BLOCK, f.bytes.decode(), flags=re.MULTILINE | re.DOTALL)) @@ -200,7 +200,7 @@ class PGPBlock(FileLoader): data = self.bytes # if type is bytes, try to decode so re doesn't choke - if self.is_ascii(): + if self.is_ascii: data = data.decode() # this is binary data; skip extracting the block and move on
is_ascii is a property method now
SecurityInnovation_PGPy
train
2456ca0668ddf28b20a5ed0ba33d422e14bc2669
diff --git a/modules/bootstrap-launcher/src/main/java/coursier/bootstrap/launcher/Download.java b/modules/bootstrap-launcher/src/main/java/coursier/bootstrap/launcher/Download.java index <HASH>..<HASH> 100644 --- a/modules/bootstrap-launcher/src/main/java/coursier/bootstrap/launcher/Download.java +++ b/modules/bootstrap-launcher/src/main/java/coursier/bootstrap/launcher/Download.java @@ -13,6 +13,7 @@ import java.nio.file.StandardCopyOption; import java.util.ArrayList; import java.util.List; import java.util.concurrent.*; +import java.util.concurrent.atomic.AtomicInteger; import coursier.paths.CachePath; @@ -33,10 +34,12 @@ class Download { static List<URL> getLocalURLs(List<URL> urls) throws MalformedURLException { ThreadFactory threadFactory = new ThreadFactory() { - // from scalaz Strategy.DefaultDaemonThreadFactory + AtomicInteger counter = new AtomicInteger(1); ThreadFactory defaultThreadFactory = Executors.defaultThreadFactory(); public Thread newThread(Runnable r) { + String name = "coursier-bootstrap-downloader-" + counter.getAndIncrement(); Thread t = defaultThreadFactory.newThread(r); + t.setName(name); t.setDaemon(true); return t; } @@ -44,6 +47,15 @@ class Download { ExecutorService pool = Executors.newFixedThreadPool(concurrentDownloadCount, threadFactory); + try { + return getLocalURLs(urls, pool); + } finally { + pool.shutdown(); + } + } + + private static List<URL> getLocalURLs(List<URL> urls, ExecutorService pool) throws MalformedURLException { + CompletionService<URL> completionService = new ExecutorCompletionService<>(pool);
Shutdown thread pool created by bootstraps
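The same shape is easy to get wrong in any language; here is a Python sketch with a named worker pool that is always shut down, mirroring the try/finally added above (pool size and `fetch` are assumptions):

```python
from concurrent.futures import ThreadPoolExecutor

def get_local_urls(urls, fetch, workers=6):
    """Resolve URLs on a named pool; shut the pool down no matter what."""
    pool = ThreadPoolExecutor(max_workers=workers,
                              thread_name_prefix="bootstrap-downloader")
    try:
        return list(pool.map(fetch, urls))
    finally:
        pool.shutdown()
```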
coursier_coursier
train
1a6a2f3b42ef3e00a0e1cbaece9b8f4fecaf3899
diff --git a/driver-core/src/main/com/mongodb/connection/ScramSha1Authenticator.java b/driver-core/src/main/com/mongodb/connection/ScramSha1Authenticator.java index <HASH>..<HASH> 100644 --- a/driver-core/src/main/com/mongodb/connection/ScramSha1Authenticator.java +++ b/driver-core/src/main/com/mongodb/connection/ScramSha1Authenticator.java @@ -281,7 +281,7 @@ class ScramSha1Authenticator extends SaslAuthenticator { } private String prepUserName(final String userName) { - return userName.replace("=", "=3D").replace(",", "=2D"); + return userName.replace("=", "=3D").replace(",", "=2C"); } private byte[] xor(final byte[] a, final byte[] b) {
JAVA-<I>: Replace ',' (comma) with =2C instead of =2D in username for SCRAM-SHA-1 authentication
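RFC 5802 requires '=' and ',' in a SCRAM username to be escaped as =3D and =2C; the bug emitted =2D for commas. Escaping '=' first matters, since the escape sequences themselves contain '='. A Python one-liner plus a check:

```python
def prep_user_name(user_name):
    """Escape '=' and ',' as required by RFC 5802 (SCRAM-SHA-1)."""
    return user_name.replace("=", "=3D").replace(",", "=2C")

assert prep_user_name("a=b,c") == "a=3Db=2Cc"
```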
mongodb_mongo-java-driver
train
98075352f1887dba909457bc0930d90a69873cbb
diff --git a/lib/conceptql/operators/vocabulary.rb b/lib/conceptql/operators/vocabulary.rb index <HASH>..<HASH> 100644 --- a/lib/conceptql/operators/vocabulary.rb +++ b/lib/conceptql/operators/vocabulary.rb @@ -193,7 +193,7 @@ module ConceptQL end def vocab_format_regexp - (vocab_entry || {format_regexp: ""})[:format_regexp] + (vocab_entry || {format_regexp: nil})[:format_regexp] end # Defined so that bad_arguments can check for bad codes
Vocabulary: fix potential issue with format checker
outcomesinsights_conceptql
train
1717ca64217a8757f45fc1f6ac78ed9e0ab37cfc
diff --git a/flink-staging/flink-gelly/src/main/java/org/apache/flink/graph/Graph.java b/flink-staging/flink-gelly/src/main/java/org/apache/flink/graph/Graph.java index <HASH>..<HASH> 100644 --- a/flink-staging/flink-gelly/src/main/java/org/apache/flink/graph/Graph.java +++ b/flink-staging/flink-gelly/src/main/java/org/apache/flink/graph/Graph.java @@ -88,11 +88,10 @@ public class Graph<K extends Comparable<K> & Serializable, VV extends Serializab /** * Function that checks whether a graph's ids are valid * @return - */ - public <K extends Comparable<K> & Serializable, VV extends Serializable, EV extends Serializable> DataSet<Boolean> - validate(GraphValidator<K, VV, EV> validator) throws Exception { + */ + public DataSet<Boolean> validate(GraphValidator<K, VV, EV> validator) { - return validator.validate((Graph<K, VV, EV>) this); + return validator.validate(this); } public DataSet<Vertex<K, VV>> getVertices() { diff --git a/flink-staging/flink-gelly/src/main/java/org/apache/flink/graph/GraphValidator.java b/flink-staging/flink-gelly/src/main/java/org/apache/flink/graph/GraphValidator.java index <HASH>..<HASH> 100644 --- a/flink-staging/flink-gelly/src/main/java/org/apache/flink/graph/GraphValidator.java +++ b/flink-staging/flink-gelly/src/main/java/org/apache/flink/graph/GraphValidator.java @@ -9,9 +9,10 @@ import org.apache.flink.api.java.DataSet; * @param <VV> * @param <EV> */ +@SuppressWarnings("serial") public abstract class GraphValidator<K extends Comparable<K> & Serializable, VV extends Serializable, EV extends Serializable> implements Serializable{ - public abstract DataSet<Boolean> validate(Graph<K, VV, EV> graph) throws Exception; + public abstract DataSet<Boolean> validate(Graph<K, VV, EV> graph); } \ No newline at end of file diff --git a/flink-staging/flink-gelly/src/main/java/org/apache/flink/graph/InvalidVertexIdsValidator.java b/flink-staging/flink-gelly/src/main/java/org/apache/flink/graph/InvalidVertexIdsValidator.java index <HASH>..<HASH> 100644 --- a/flink-staging/flink-gelly/src/main/java/org/apache/flink/graph/InvalidVertexIdsValidator.java +++ b/flink-staging/flink-gelly/src/main/java/org/apache/flink/graph/InvalidVertexIdsValidator.java @@ -10,6 +10,7 @@ import org.apache.flink.util.Collector; import java.io.Serializable; +@SuppressWarnings("serial") public class InvalidVertexIdsValidator<K extends Comparable<K> & Serializable, VV extends Serializable, EV extends Serializable> extends GraphValidator<K, VV, EV> { @@ -20,12 +21,12 @@ public class InvalidVertexIdsValidator<K extends Comparable<K> & Serializable, V * with respect to its vertex ids. */ @Override - public DataSet<Boolean> validate(Graph<K, VV, EV> graph) throws Exception { + public DataSet<Boolean> validate(Graph<K, VV, EV> graph) { DataSet<Tuple1<K>> edgeIds = graph.getEdges().flatMap(new MapEdgeIds<K, EV>()).distinct(); DataSet<K> invalidIds = graph.getVertices().coGroup(edgeIds).where(0).equalTo(0) .with(new GroupInvalidIds<K, VV>()).first(1); - return GraphUtils.count(invalidIds.map(new KToTupleMap()), ExecutionEnvironment.getExecutionEnvironment()) + return GraphUtils.count(invalidIds.map(new KToTupleMap<K>()), ExecutionEnvironment.getExecutionEnvironment()) .map(new InvalidIdsMap()); }
[FLINK-<I>] [gelly] integrated inline suggestions
apache_flink
train
ddf412070c2de5edd7fb39a7afb6778a4cf43e5c
diff --git a/kconfiglib.py b/kconfiglib.py index <HASH>..<HASH> 100644 --- a/kconfiglib.py +++ b/kconfiglib.py @@ -1114,12 +1114,18 @@ class Kconfig(object): Fetches and tokenizes the next line from the current Kconfig file. Returns False at EOF and True otherwise. """ - # This provides a single line of "unget" after help texts + # _saved_line provides a single line of "unget", currently only used + # for help texts. + # + # This also works as expected if _saved_line is "", indicating EOF: + # "" is falsy, and readline() returns "" over and over at EOF. if self._saved_line: self._line = self._saved_line self._saved_line = None else: self._line = self._file.readline() + if not self._line: + return False self._linenr += 1 # Handle line joining @@ -1127,9 +1133,6 @@ class Kconfig(object): self._line = self._line[:-2] + self._file.readline() self._linenr += 1 - if not self._line: - return False - self._tokenize() return True @@ -1730,10 +1733,6 @@ class Kconfig(object): add_help_line(_dedent_rstrip(line, indent)) node.help = "\n".join(help_lines).rstrip() + "\n" - - if not line: - break - self._saved_line = line # "Unget" the line elif t0 == _T_SELECT:
Simplify help text parsing and _next_line()

- _saved_line already handles EOF automagically, so no need to special-case it at the end of help text parsing.
- Check for EOF earlier in _next_line(). Bit silly to do it after line joining.
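The cleanup leans on readline()'s contract: at EOF it returns '' forever, and '' is falsy, so one truthiness check covers both the saved-line "unget" and end of file. A minimal sketch:

```python
def next_lines(f, saved_line=None):
    """Yield lines with one line of 'unget' support; stop cleanly at EOF."""
    while True:
        if saved_line:
            line, saved_line = saved_line, None
        else:
            line = f.readline()  # returns '' at EOF, over and over
        if not line:             # '' is falsy, so one check covers EOF
            return
        yield line
```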
ulfalizer_Kconfiglib
train
73c13cde704ef396a152ed9dcb8596779473107d
diff --git a/src/videoCache.js b/src/videoCache.js index <HASH>..<HASH> 100644 --- a/src/videoCache.js +++ b/src/videoCache.js @@ -14,6 +14,12 @@ import { config } from './config.js'; import {auctionManager} from './auctionManager.js'; /** + * Might be useful to be configurable in the future + * Depending on publisher needs + */ +const ttlBufferInSeconds = 15; + +/** * @typedef {object} CacheableUrlBid * @property {string} vastUrl A URL which loads some valid VAST XML. */ @@ -63,11 +69,11 @@ function wrapURI(uri, impUrl) { function toStorageRequest(bid, {index = auctionManager.index} = {}) { const vastValue = bid.vastXml ? bid.vastXml : wrapURI(bid.vastUrl, bid.vastImpUrl); const auction = index.getAuction(bid); - + const ttlWithBuffer = Number(bid.ttl) + ttlBufferInSeconds; let payload = { type: 'xml', value: vastValue, - ttlseconds: Number(bid.ttl) + ttlseconds: ttlWithBuffer }; if (config.getConfig('cache.vasttrack')) { diff --git a/test/spec/videoCache_spec.js b/test/spec/videoCache_spec.js index <HASH>..<HASH> 100644 --- a/test/spec/videoCache_spec.js +++ b/test/spec/videoCache_spec.js @@ -155,12 +155,12 @@ describe('The video cache', function () { puts: [{ type: 'xml', value: vastXml1, - ttlseconds: 25, + ttlseconds: 40, key: customKey1 }, { type: 'xml', value: vastXml2, - ttlseconds: 25, + ttlseconds: 40, key: customKey2 }] }; @@ -205,7 +205,7 @@ describe('The video cache', function () { puts: [{ type: 'xml', value: vastXml1, - ttlseconds: 25, + ttlseconds: 40, key: customKey1, bidid: '12345abc', aid: '1234-56789-abcde', @@ -213,7 +213,7 @@ describe('The video cache', function () { }, { type: 'xml', value: vastXml2, - ttlseconds: 25, + ttlseconds: 40, key: customKey2, bidid: 'cba54321', aid: '1234-56789-abcde', @@ -276,7 +276,7 @@ describe('The video cache', function () { puts: [{ type: 'xml', value: vastXml1, - ttlseconds: 25, + ttlseconds: 40, key: customKey1, bidid: '12345abc', bidder: 'appnexus', @@ -285,7 +285,7 @@ describe('The video cache', function () { }, { type: 'xml', value: vastXml2, - ttlseconds: 25, + ttlseconds: 40, key: customKey2, bidid: 'cba54321', bidder: 'rubicon', @@ -309,7 +309,7 @@ describe('The video cache', function () { puts: [{ type: 'xml', value: expectedValue, - ttlseconds: 25 + ttlseconds: 40 }], }); }
Prebid Core: Add ttl buffer to videoCache.js (#<I>)

* Update videoCache.js
* Update videoCache.js
* Update videoCache_spec.js
* Update videoCache_spec.js
* master into ttl-buffer (#<I>)
* Update Sonobi adapter with GVLID (#<I>)
* dgkeyword RTD provider: fix tests causing ID5 test failures (#<I>)
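The core change is a single addition: pad the bid TTL before writing the cache payload, so the cached VAST outlives the auction's own expiry. Sketched in Python with the commit's constant (the bid fields mirror the JS object, simplified):

```python
TTL_BUFFER_SECONDS = 15  # fixed for now; the commit's comment suggests making it configurable

def to_storage_request(bid):
    """Build the cache payload, padding the bid TTL by a fixed buffer."""
    return {
        "type": "xml",
        "value": bid["vastXml"],
        "ttlseconds": int(bid["ttl"]) + TTL_BUFFER_SECONDS,  # e.g. 25 -> 40
    }
```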
prebid_Prebid.js
train
3ac94fe967bde3046bfd8c69b88ff5d39c0a446a
diff --git a/contact_form/__init__.py b/contact_form/__init__.py index <HASH>..<HASH> 100644 --- a/contact_form/__init__.py +++ b/contact_form/__init__.py @@ -1,4 +1,4 @@ -VERSION = (0, 3, 2, 'stable') +VERSION = (0, 3, 3, 'dev') def get_release():
running on <I>-dev
dlancer_django-crispy-contact-form
train
6b626630c8eb4892ade5c1b484c359d3afa4a224
diff --git a/lib/zendesk_apps_support/validations/requirements.rb b/lib/zendesk_apps_support/validations/requirements.rb index <HASH>..<HASH> 100644 --- a/lib/zendesk_apps_support/validations/requirements.rb +++ b/lib/zendesk_apps_support/validations/requirements.rb @@ -1,29 +1,55 @@ require 'multi_json' +require 'json/stream' +require 'pp' module ZendeskAppsSupport module Validations module Requirements + REQUIREMENTS_TYPES = %w(automations macros targets ticket_fields triggers user_fields).freeze + class <<self def call(package) - requirements = package.files.find { |f| f.relative_path == 'requirements.json' } + requirements_file = package.files.find { |f| f.relative_path == 'requirements.json' } - errors = [] + return [ValidationError.new(:missing_requirements)] unless requirements_file - if requirements && !valid_json?(requirements) - errors << ValidationError.new(:requirements_not_json) + requirements_stream = requirements_file.read + duplicates = non_unique_type_keys(requirements_stream) + unless duplicates.empty? + return [ValidationError.new(:duplicate_requirements, :duplicate_keys => duplicates.join(', '), :count => duplicates.length)] end - errors + requirements = MultiJson.load(requirements_stream) + [].tap do |errors| + errors << invalid_requirements_types(requirements) + errors.compact! + end + rescue MultiJson::DecodeError => e + return [ValidationError.new(:requirements_not_json, :errors => e)] end private - def valid_json? json - MultiJson.load(json) - true - rescue MultiJson::DecodeError - false + def invalid_requirements_types(requirements) + invalid_types = requirements.keys - REQUIREMENTS_TYPES + + unless invalid_types.empty? + ValidationError.new(:invalid_requirements_types, :invalid_types => invalid_types.join(', '), :count => invalid_types.length) + end + end + + def non_unique_type_keys(requirements) + keys = [] + duplicates = [] + parser = JSON::Stream::Parser.new do + start_object { keys.push({}) } + end_object { keys.pop } + key { |k| duplicates.push(k) if keys.last.include? k; keys.last[k] = nil } + end + parser << requirements + + duplicates end end
Add validations for unique and valid types
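A plain JSON load keeps only the last value for a repeated key, so duplicate detection needs a hook into the raw pairs (the commit uses json-stream's parser events). In Python the same check falls out of object_pairs_hook:

```python
import json

def duplicate_keys(text):
    """Return keys that appear more than once within any single JSON object."""
    dupes = []

    def record(pairs):
        seen = set()
        for key, _value in pairs:
            if key in seen:
                dupes.append(key)
            seen.add(key)
        return dict(pairs)

    json.loads(text, object_pairs_hook=record)
    return dupes

assert duplicate_keys('{"macros": 1, "macros": 2, "targets": 3}') == ["macros"]
```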
zendesk_zendesk_apps_support
train
4d086907f0dc8718a9008af8218cc997633d1e9f
diff --git a/tests/unit/components/sl-chart-test.js b/tests/unit/components/sl-chart-test.js index <HASH>..<HASH> 100755 --- a/tests/unit/components/sl-chart-test.js +++ b/tests/unit/components/sl-chart-test.js @@ -31,22 +31,22 @@ test( 'Default classNames are present', function( assert ) { this.subject({ object, series }); assert.ok( - this.$().hasClass('panel'), + this.$().hasClass( 'panel' ), 'Default rendered component has class "panel"' ); assert.ok( - this.$().hasClass('panel-default'), + this.$().hasClass( 'panel-default' ), 'Default rendered component has class "panel-default"' ); assert.ok( - this.$().hasClass('sl-chart'), + this.$().hasClass( 'sl-chart' ), 'Default rendered component has class "sl-chart"' ); assert.ok( - this.$().hasClass('sl-panel'), + this.$().hasClass( 'sl-panel' ), 'Default rendered component has class "sl-panel"' ); });
issue #<I> revision, added spaces around parentheses based on code review
softlayer_sl-ember-components
train
7c96fe43600803e9d48364e1018e2a7ef626f5ab
diff --git a/Core/Listener/Page/EditSeoListener.php b/Core/Listener/Page/EditSeoListener.php index <HASH>..<HASH> 100644 --- a/Core/Listener/Page/EditSeoListener.php +++ b/Core/Listener/Page/EditSeoListener.php @@ -21,9 +21,9 @@ use AlphaLemon\AlphaLemonCmsBundle\Core\Event\Content\Page\BeforeEditPageCommitE use AlphaLemon\AlphaLemonCmsBundle\Core\Content\Seo\AlSeoManager; /** - * Listen to the onBeforeAddPageCommit event to add the page attributes when a new page is added + * Listen to the onBeforeEditPageCommit event to edit the seo attributes when a new page is edited * - * @author AlphaLemon <info@alphalemon.com> + * @author AlphaLemon <webmaster@alphalemon.com> */ class EditSeoListener { @@ -35,10 +35,11 @@ class EditSeoListener } /** - * Adds the page attributes when a new page is added, for each language of the site + * Edits the seo attributes when a new page is edited * - * @param BeforeAddPageCommitEvent $event - * @throws \Exception + * @param BeforeEditPageCommitEvent $event + * @throws \InvalidArgumentException + * @throws Exception */ public function onBeforeEditPageCommit(BeforeEditPageCommitEvent $event) {
fixed docblocks [ci skip]
redkite-labs_RedKiteCmsBundle
train
9ba2f73f2d2b501eb4c517e71fb4dd35f889f5e0
diff --git a/.gitignore b/.gitignore index <HASH>..<HASH> 100644 --- a/.gitignore +++ b/.gitignore @@ -1 +1,2 @@ data/test2.png +vendor/ diff --git a/src/PNG/Image.php b/src/PNG/Image.php index <HASH>..<HASH> 100644 --- a/src/PNG/Image.php +++ b/src/PNG/Image.php @@ -1,6 +1,6 @@ <?php -namespace UoMCS\PNG; +namespace PNG; /** * Top level Image class. diff --git a/src/PNG/ImageFile.php b/src/PNG/ImageFile.php index <HASH>..<HASH> 100644 --- a/src/PNG/ImageFile.php +++ b/src/PNG/ImageFile.php @@ -1,6 +1,6 @@ <?php -namespace UoMCS\PNG; +namespace PNG; class ImageFile extends Image { diff --git a/tests/ImageFileTest.php b/tests/ImageFileTest.php index <HASH>..<HASH> 100644 --- a/tests/ImageFileTest.php +++ b/tests/ImageFileTest.php @@ -1,6 +1,6 @@ <?php -namespace UoMCS\PNG; +namespace PNG; class ImageFileTest extends \PHPUnit_Framework_TestCase { @@ -24,7 +24,7 @@ class ImageFileTest extends \PHPUnit_Framework_TestCase public function testConstructor() { $png = $this->getTestPng(); - $this->assertInstanceOf('UoMCS\\PNG\\ImageFile', $png); + $this->assertInstanceOf('PNG\\ImageFile', $png); } public function testGetWidth() diff --git a/tests/bootstrap.php b/tests/bootstrap.php index <HASH>..<HASH> 100644 --- a/tests/bootstrap.php +++ b/tests/bootstrap.php @@ -1,3 +1,5 @@ <?php +require_once __DIR__ . '/../vendor/autoload.php'; + date_default_timezone_set('Europe/London');
Move files around to meet composer conventions

By laying out the directory structure in this manner, we can take advantage of default rules such as the composer autoloading (specified in composer.json) instead of having to write our own autoloader. Should also make the library easier to work with in other projects.
UoMCS_php-png
train
817473cda6ce038d0e483fd1bf01a22723066417
diff --git a/tests/dummy/app/controllers/modules/nag.js b/tests/dummy/app/controllers/modules/nag.js index <HASH>..<HASH> 100644 --- a/tests/dummy/app/controllers/modules/nag.js +++ b/tests/dummy/app/controllers/modules/nag.js @@ -3,7 +3,7 @@ import Ember from 'ember'; export default Ember.Controller.extend(Ember.Evented, { actions: { - clear: function(name) { + clear: function() { $('.cookie.nag').nag('clear'); $('.cookie.nag').nag('show'); }
Removed the unnecessary variable for nag
Semantic-Org_Semantic-UI-Ember
train
0d455de96546f756e8830efeec8dab0f489af135
diff --git a/generator.go b/generator.go index <HASH>..<HASH> 100644 --- a/generator.go +++ b/generator.go @@ -170,7 +170,7 @@ func writeCompressedFileInfo(w io.Writer, file *fileInfo, r io.Reader) error { return err } sw := &stringWriter{Writer: w} - gw := gzip.NewWriter(sw) + gw, _ := gzip.NewWriterLevel(sw, gzip.BestCompression) _, err = io.Copy(gw, r) if err != nil { return err diff --git a/test/test_test.go b/test/test_test.go index <HASH>..<HASH> 100644 --- a/test/test_test.go +++ b/test/test_test.go @@ -114,7 +114,7 @@ func Example_compressed() { // <not compressed> // /sample-file.txt // "This file compresses well. Blaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaah!" <nil> - // "\x1f\x8b\b\x00\x00\x00\x00\x00\x00\xff\n\xc9\xc8,VH\xcb\xccIUH\xce\xcf-(J-.N-V(O\xcd\xc9\xd1Sp\xcaI\x1c\xd4 C\x11\x10\x00\x00\xff\xff\xe7G\x81:\xbd\x00\x00\x00" + // "\x1f\x8b\b\x00\x00\x00\x00\x00\x02\xff\n\xc9\xc8,VH\xcb\xccIUH\xce\xcf-(J-.N-V(O\xcd\xc9\xd1Sp\xcaI\x1c\xd4 C\x11\x10\x00\x00\xff\xff\xe7G\x81:\xbd\x00\x00\x00" } func Example_readTwoOpenedCompressedFiles() { diff --git a/test/test_vfsdata_test.go b/test/test_vfsdata_test.go index <HASH>..<HASH> 100644 --- a/test/test_vfsdata_test.go +++ b/test/test_vfsdata_test.go @@ -58,7 +58,7 @@ var assets = func() http.FileSystem { modTime: time.Time{}, uncompressedSize: 189, - compressedContent: []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x0a\xc9\xc8\x2c\x56\x48\xcb\xcc\x49\x55\x48\xce\xcf\x2d\x28\x4a\x2d\x2e\x4e\x2d\x56\x28\x4f\xcd\xc9\xd1\x53\x70\xca\x49\x1c\xd4\x20\x43\x11\x10\x00\x00\xff\xff\xe7\x47\x81\x3a\xbd\x00\x00\x00"), + compressedContent: []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x02\xff\x0a\xc9\xc8\x2c\x56\x48\xcb\xcc\x49\x55\x48\xce\xcf\x2d\x28\x4a\x2d\x2e\x4e\x2d\x56\x28\x4f\xcd\xc9\xd1\x53\x70\xca\x49\x1c\xd4\x20\x43\x11\x10\x00\x00\xff\xff\xe7\x47\x81\x3a\xbd\x00\x00\x00"), }, } fs["/"].(*vfsgen۰DirInfo).entries = []os.FileInfo{
use BestCompression level for gzip writer

In the context of vfsgen, it should be a favorable trade-off to spend more time and effort when performing file compression, if it can lead to file sizes being reduced further. The previous choice of compression level (DefaultCompression) was inherited from this project's go-bindata lineage.

Fixes <URL>
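The generate-once/ship-many trade-off in miniature, sketched in Python (the asset path is a placeholder). Go's DefaultCompression maps to zlib's default level 6, while BestCompression is level 9:

```python
import gzip

data = open("asset.txt", "rb").read()         # hypothetical embedded asset
fast = gzip.compress(data, compresslevel=6)   # roughly Go's DefaultCompression
best = gzip.compress(data, compresslevel=9)   # Go's gzip.BestCompression
print(len(fast), len(best))                   # best is typically smaller, just slower to produce
```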
shurcooL_vfsgen
train
85c423f3e58f53bbd4c90886357a2e8f2aab9476
diff --git a/core/lib/refinery/cli.rb b/core/lib/refinery/cli.rb index <HASH>..<HASH> 100644 --- a/core/lib/refinery/cli.rb +++ b/core/lib/refinery/cli.rb @@ -28,6 +28,11 @@ module Refinery :dir => 'models', :desc => 'model', }, + :helper => { + :glob => '*.rb', + :dir => 'helpers', + :desc => 'helper', + }, :presenter => { :glob => '*.rb', :dir => 'presenters', @@ -60,6 +65,7 @@ module Refinery :stylesheet => %w(home refinery/site_bar), :controller => %w(pages), :model => %w(page refinery/page), + :helper => %w(site_bar refinery/site_bar_helper), :presenter => %w(refinery/page_presenter) }.each do |type, examples| examples.each do |example| diff --git a/core/spec/lib/refinery/cli_spec.rb b/core/spec/lib/refinery/cli_spec.rb index <HASH>..<HASH> 100644 --- a/core/spec/lib/refinery/cli_spec.rb +++ b/core/spec/lib/refinery/cli_spec.rb @@ -26,6 +26,8 @@ describe "CLI" do msg.should match("rake refinery:override controller=pages") msg.should match("rake refinery:override model=page") msg.should match("rake refinery:override model=refinery/page") + msg.should match("rake refinery:override helper=site_bar") + msg.should match("rake refinery:override helper=refinery/site_bar_helper") end end @@ -96,6 +98,17 @@ describe "CLI" do end end + describe "overriding helpers" do + it_behaves_like "refinery:override" do + let(:env) { "helper" } + let(:not_found_message) { "Couldn't match any helper files in any extensions like non-existent\n" } + let(:spec_success_message) { %W(create app/helpers/refinery/#{file_name}) } + let(:file_location) { File.expand_path("../../../../app/helpers/refinery", __FILE__) } + let(:env_file_location) { "refinery/#{file_name.sub(%r{\..+}, "")}" } + let(:copied_file_location) { "app/helpers/refinery/#{file_name}" } + end + end + describe "overriding javascripts" do it_behaves_like "refinery:override" do let(:env) { "javascript" }
Added ability to override helper.
refinery_refinerycms
train
29da39d2185296f9c728fc22d3bc95acbdf52497
diff --git a/enforcer.go b/enforcer.go index <HASH>..<HASH> 100644 --- a/enforcer.go +++ b/enforcer.go @@ -243,9 +243,8 @@ func (e *Enforcer) SetWatcher(watcher persist.Watcher) error { } // SetDispatcher sets the current dispatcher. -func (e *Enforcer) SetDispatcher(dispatcher persist.Dispatcher) error { +func (e *Enforcer) SetDispatcher(dispatcher persist.Dispatcher) { e.dispatcher = dispatcher - return dispatcher.SetEnforcer(e) } // GetRoleManager gets the current role manager. diff --git a/internal_api.go b/internal_api.go index <HASH>..<HASH> 100644 --- a/internal_api.go +++ b/internal_api.go @@ -30,14 +30,14 @@ func (e *Enforcer) shouldPersist() bool { // addPolicy adds a rule to the current policy. func (e *Enforcer) addPolicy(sec string, ptype string, rule []string) (bool, error) { - if e.model.HasPolicy(sec, ptype, rule) { - return false, nil - } - if e.dispatcher != nil && e.autoNotifyDispatcher { return true, e.dispatcher.AddPolicies(sec, ptype, [][]string{rule}) } + if e.model.HasPolicy(sec, ptype, rule) { + return false, nil + } + if e.shouldPersist() { if err := e.adapter.AddPolicy(sec, ptype, rule); err != nil { if err.Error() != notImplemented { @@ -70,14 +70,14 @@ func (e *Enforcer) addPolicy(sec string, ptype string, rule []string) (bool, err // addPolicies adds rules to the current policy. func (e *Enforcer) addPolicies(sec string, ptype string, rules [][]string) (bool, error) { - if e.model.HasPolicies(sec, ptype, rules) { - return false, nil - } - if e.dispatcher != nil && e.autoNotifyDispatcher { return true, e.dispatcher.AddPolicies(sec, ptype, rules) } + if e.model.HasPolicies(sec, ptype, rules) { + return false, nil + } + if e.shouldPersist() { if err := e.adapter.(persist.BatchAdapter).AddPolicies(sec, ptype, rules); err != nil { if err.Error() != notImplemented { @@ -146,6 +146,10 @@ func (e *Enforcer) removePolicy(sec string, ptype string, rule []string) (bool, } func (e *Enforcer) updatePolicy(sec string, ptype string, oldRule []string, newRule []string) (bool, error) { + if e.dispatcher != nil && e.autoNotifyDispatcher { + return true, e.dispatcher.UpdatePolicy(sec, ptype, oldRule, newRule) + } + if e.shouldPersist() { if err := e.adapter.(persist.UpdatableAdapter).UpdatePolicy(sec, ptype, oldRule, newRule); err != nil { if err.Error() != notImplemented { diff --git a/persist/dispatcher.go b/persist/dispatcher.go index <HASH>..<HASH> 100644 --- a/persist/dispatcher.go +++ b/persist/dispatcher.go @@ -24,8 +24,6 @@ type Dispatcher interface { RemoveFilteredPolicy(sec string, ptype string, fieldIndex int, fieldValues ...string) error // ClearPolicy clears all current policy in all instances ClearPolicy() error - - // SetEnforcer set up the instance that need to be maintained. - // The parameter should be SyncedEnforced - SetEnforcer(interface{}) error + // UpdatePolicy updates policy rule from all instance. + UpdatePolicy(sec string, ptype string, oldRule, newPolicy []string) error }
fix: add update policy to dispatcher
casbin_casbin
train
48fcc4646a648c3af8b405d9ec3481274a34a425
diff --git a/solr/search.go b/solr/search.go index <HASH>..<HASH> 100644 --- a/solr/search.go +++ b/solr/search.go @@ -55,7 +55,7 @@ func (q *Query) FieldList(fl string) { } // geofilt - The distance filter http://wiki.apache.org/solr/SpatialSearch -// output example: fq={!geofilt+pt=45.15,-93.850000021001+sfield=store+d=5} +// output example: fq={!geofilt pt=45.15,-93.85 sfield=store d=5} func (q *Query) Geofilt(latitude float64, longitude float64, sfield string, distance float64) { q.params.Add("fq", fmt.Sprintf("{!geofilt pt=%#v,%#v sfield=%s d=%#v}", latitude, longitude, sfield, distance)) }
Make output example easier to read by dropping the URL encoding
vanng822_go-solr
train
d70ee31ad38a5dc50af7212811b4bb492c7e0b06
diff --git a/biotools/bbtools.py b/biotools/bbtools.py index <HASH>..<HASH> 100644 --- a/biotools/bbtools.py +++ b/biotools/bbtools.py @@ -170,7 +170,11 @@ def bbnorm(forward_in, forward_out, returncmd=False, reverse_in='NA', reverse_ou cmd = 'bbnorm.sh in1={} in2={} out1={} out2={} {}'.format(forward_in, reverse_in, forward_out, reverse_out, options) - out, err = accessoryfunctions.run_subprocess(cmd) + if not os.path.isfile(forward_out): + out, err = accessoryfunctions.run_subprocess(cmd) + else: + out = str() + err = str() if returncmd: return out, err, cmd else: @@ -195,7 +199,11 @@ def bbmerge(forward_in, merged_reads, returncmd=False, reverse_in='NA', **kwargs cmd = 'bbmerge.sh in={} out={} {}'.format(forward_in, merged_reads, options) else: cmd = 'bbmerge.sh in={} in2={} out={} {}'.format(forward_in, reverse_in, merged_reads, options) - out, err = accessoryfunctions.run_subprocess(cmd) + if not os.path.isfile(merged_reads): + out, err = accessoryfunctions.run_subprocess(cmd) + else: + out = str() + err = str() if returncmd: return out, err, cmd else: diff --git a/metagenomefilter/automateCLARK.py b/metagenomefilter/automateCLARK.py index <HASH>..<HASH> 100755 --- a/metagenomefilter/automateCLARK.py +++ b/metagenomefilter/automateCLARK.py @@ -50,9 +50,9 @@ class CLARK(object): def clean_sequences(self): """Removes reads/contigs that contain plasmids, and masks phage sequences.""" - printtime('Removing plasmid and masking phage', self.start) - plasmid_db = '/mnt/nas/Adam/assemblypipeline/plasmidfinder/plasmid_database.fa' - phage_db = '/mnt/nas/Adam/assemblypipeline/prophages/combinedtargets.tfa' + printtime('Removing plasmids and masking phages', self.start) + plasmid_db = os.path.join(self.reffilepath, 'plasmidfinder', 'plasmid_database.fa') + phage_db = os.path.join(self.reffilepath, 'prophages', 'combinedtargets.tfa') for sample in self.runmetadata.samples: plasmid_removal = 'bbduk.sh ref={} in={} out={} overwrite'\ .format(plasmid_db, sample.general.combined, sample.general.combined.replace('.f', '_noplasmid.f')) @@ -285,6 +285,11 @@ class CLARK(object): self.datapath = '' self.reportpath = os.path.join(self.path, 'reports') self.clean_seqs = args.clean_seqs + if self.clean_seqs: + try: + self.reffilepath = args.reffilepath + except AttributeError: + self.clean_seqs = False # If run as part of the assembly pipeline, a few modifications are necessary to ensure that the metadata objects # and variables play nice try: @@ -323,11 +328,12 @@ class CLARK(object): sample[clarkextension].abundance = sample.general.abundance sample[clarkextension].classification = sample.general.classification sample[clarkextension].combined = sample.general.combined - # Remove the combined .fastq files - try: - os.remove(sample.general.combined) - except OSError: - pass + if self.extension == 'fastq': + # Remove the combined .fastq files + try: + os.remove(sample.general.combined) + except OSError: + pass # Remove all the attributes from .general map(lambda x: delattr(sample.general, x), ['abundance', 'classification', 'combined']) # Remove the text files lists of files and reports created by CLARK @@ -422,13 +428,16 @@ class PipelineInit(object): # args.databasepath = os.path.join(inputobject.reffilepath, 'clark') args.databasepath = '{}clark'.format(inputobject.reffilepath) make_path(args.databasepath) - args.clarkpath = os.path.dirname(which('estimate_abundance.sh')) + args.clarkpath = os.path.dirname(which('CLARK')) + args.clarkpath += '/../opt/clark/' args.cutoff = 0.005 args.database = 'bacteria' 
args.rank = 'species' args.filter = False args.threads = inputobject.cpus args.runmetadata = inputobject.runmetadata + args.clean_seqs = False + args.reffilepath = inputobject.reffilepath if analysis == 'pipeline': # Run CLARK on both .fastq and .fasta files for extension in ['fastq', 'fasta']: diff --git a/spadespipeline/mash.py b/spadespipeline/mash.py index <HASH>..<HASH> 100755 --- a/spadespipeline/mash.py +++ b/spadespipeline/mash.py @@ -1,8 +1,9 @@ #!/usr/bin/env python -from subprocess import call +from accessoryFunctions.accessoryFunctions import GenObject, make_path, printtime, run_subprocess, write_to_logfile from threading import Thread import threading -from accessoryFunctions.accessoryFunctions import * +import os +import re __author__ = 'adamkoziol' @@ -96,7 +97,6 @@ class Mash(object): self.mashqueue.task_done() def parse(self): - import re printtime('Determining closest refseq genome', self.starttime) for sample in self.metadata: if sample.general.bestassemblyfile != 'NA':
Getting methods to work with conda-sourced dependencies
lowandrew_OLCTools
train
8f1246a5a8a8ed65a694dc25cbd2cd54cfd04956
diff --git a/metpy/calc/tools.py b/metpy/calc/tools.py index <HASH>..<HASH> 100644 --- a/metpy/calc/tools.py +++ b/metpy/calc/tools.py @@ -323,10 +323,12 @@ def _get_bound_pressure_height(pressure, bound, heights=None, interpolate=True): The bound pressure and height. """ + # Make sure pressure is monotonically decreasing sort_inds = np.argsort(pressure)[::-1] pressure = pressure[sort_inds] if heights is not None: heights = heights[sort_inds] + # Bound is given in pressure if bound.dimensionality == {'[length]': -1.0, '[mass]': 1.0, '[time]': -2.0}: # If the bound is in the pressure data, we know the pressure bound exactly
Sort pressure and heights before getting bounds.
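One argsort, reused: sort pressure into descending order and apply the identical permutation to heights so the pairs stay aligned. A quick numpy check with made-up values:

```python
import numpy as np

pressure = np.array([850., 1000., 700., 925.])
heights = np.array([1457., 111., 3012., 766.])

sort_inds = np.argsort(pressure)[::-1]  # indices for monotonically decreasing pressure
pressure = pressure[sort_inds]          # -> [1000., 925., 850., 700.]
heights = heights[sort_inds]            # -> [111., 766., 1457., 3012.]
```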
Unidata_MetPy
train
36b012b88c9f81cfc84a80eb3f5b2af6d49a72f1
diff --git a/controllers/DefaultController.php b/controllers/DefaultController.php index <HASH>..<HASH> 100644 --- a/controllers/DefaultController.php +++ b/controllers/DefaultController.php @@ -131,6 +131,7 @@ class DefaultController extends Controller public function actionDelete($id) { $commentModel = $this->findModel($id); + $commentModel->setScenario(CommentModel::SCENARIO_MODERATION); $event = Yii::createObject(['class' => CommentEvent::class, 'commentModel' => $commentModel]); $this->trigger(self::EVENT_BEFORE_DELETE, $event); diff --git a/models/CommentModel.php b/models/CommentModel.php index <HASH>..<HASH> 100644 --- a/models/CommentModel.php +++ b/models/CommentModel.php @@ -42,6 +42,8 @@ class CommentModel extends ActiveRecord { use ModuleTrait; + const SCENARIO_MODERATION = 'moderation'; + /** * @var null|array|ActiveRecord[] comment children */ @@ -67,7 +69,7 @@ class CommentModel extends ActiveRecord ['status', 'default', 'value' => Status::APPROVED], ['status', 'in', 'range' => Status::getConstantsByName()], ['level', 'default', 'value' => 1], - ['parentId', 'validateParentID'], + ['parentId', 'validateParentID', 'except'=>static::SCENARIO_MODERATION], [['entityId', 'parentId', 'status', 'level'], 'integer'], ]; }
skip the validation of parentId on deletion
yii2mod_yii2-comments
train
d0f4717bd22afce3fabe3dd2c523550a3928efea
diff --git a/dump/__init__.py b/dump/__init__.py index <HASH>..<HASH> 100644 --- a/dump/__init__.py +++ b/dump/__init__.py @@ -6,5 +6,5 @@ import logging logging.getLogger(__name__).addHandler(logging.NullHandler()) -__version__ = '0.0.4' +__version__ = '0.0.5' diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -9,8 +9,8 @@ from codecs import open name = 'dump' -with open(os.path.join(name, "__init__.py")) as f: - version = re.search("^__version__\s*=\s*[\'\"]([^\'\"]+)", f.read(), flags=re.I | re.M).group(1) +with open(os.path.join(name, "__init__.py"), encoding='utf-8') as f: + version = re.search(r"^__version__\s*=\s*[\'\"]([^\'\"]+)", f.read(), flags=re.I | re.M).group(1) long_description = "" if os.path.isfile('README.rst'):
modifies setup.py a bit to try and figure out issue #<I>
Jaymon_dump
train
062326eadc058ad9b5faa62434eb7bbdb043fa42
diff --git a/lib/stale_fish.rb b/lib/stale_fish.rb index <HASH>..<HASH> 100644 --- a/lib/stale_fish.rb +++ b/lib/stale_fish.rb @@ -12,24 +12,15 @@ end module StaleFish # no one likes stale fish. - def self.use_fakeweb=(enabled) - @use_fakeweb = enabled - end - def self.use_fakeweb - @use_fakeweb + class << self + attr_accessor :use_fakeweb + attr_accessor :config_path + attr_accessor :yaml end self.use_fakeweb = false - def self.config_path=(path) - @config_path = path - end - - def self.config_path - @config_path - end - def self.valid_path? return false if @config_path.nil? File.exist?(@config_path) @@ -44,14 +35,6 @@ module StaleFish return stale.size end - def self.yaml=(data) - @yaml = data - end - - def self.yaml - !@yaml.nil? ? @yaml['stale'] : @yaml - end - def self.register_uri(source_uri, response) if self.use_fakeweb && !FakeWeb.registered_uri?(source_uri) FakeWeb.register_uri(:any, source_uri, :body => response) @@ -60,7 +43,7 @@ module StaleFish def self.load_yaml if valid_path? - @yaml = YAML.load_file(@config_path) + self.yaml = YAML.load_file(@config_path) check_syntax else raise Errno::ENOENT, 'invalid path, please set StaleFish.config_path than ensure StaleFish.valid_path? is true' @@ -73,20 +56,20 @@ protected raise YAML::Error, 'missing stale root element' unless @yaml['stale'] # Grab Configuration from YAML - @configuration = @yaml['stale'].delete('configuration') + @configuration = self.yaml['stale'].delete('configuration') self.use_fakeweb = (@configuration['use_fakeweb'] || false) unless @configuration.nil? # Process remaining nodes as items - @yaml['stale'].each do |key, value| + self.yaml['stale'].each do |key, value| %w{ filepath frequency source }.each do |field| - raise YAML::Error, "missing #{field} node for #{key}" unless @yaml['stale'][key][field] + raise YAML::Error, "missing #{field} node for #{key}" unless self.yaml['stale'][key][field] end end end def self.flag_stale(args) force = args.pop[:force] if args.last.is_a?(Hash) - stale, scope = {}, self.yaml + stale, scope = {}, self.yaml['stale'] scope.each do |key, value| if args.empty? if scope[key]['updated'].blank? @@ -130,7 +113,7 @@ protected end def self.update_fixture(key) - @yaml['stale'][key]['updated'] = DateTime.now + self.yaml['stale'][key]['updated'] = DateTime.now end def self.write_yaml
refactor to use attr_accessor instead of declaring by hand
jsmestad_stale_fish
train
0c9b2d47b0ebeb83ce099fe77567fe720e8672ea
diff --git a/src/Symfony/Component/Security/Http/Firewall/ContextListener.php b/src/Symfony/Component/Security/Http/Firewall/ContextListener.php index <HASH>..<HASH> 100644 --- a/src/Symfony/Component/Security/Http/Firewall/ContextListener.php +++ b/src/Symfony/Component/Security/Http/Firewall/ContextListener.php @@ -20,7 +20,7 @@ use Symfony\Component\Security\Core\Authentication\Token\AnonymousToken; use Symfony\Component\Security\Core\Authentication\Token\TokenInterface; use Symfony\Component\Security\Core\Exception\UsernameNotFoundException; use Symfony\Component\Security\Core\Exception\UnsupportedUserException; -use Symfony\Component\Security\Core\SecurityContext; +use Symfony\Component\Security\Core\SecurityContextInterface; use Symfony\Component\Security\Core\User\UserInterface; use Symfony\Component\EventDispatcher\EventDispatcherInterface; @@ -37,7 +37,7 @@ class ContextListener implements ListenerInterface private $logger; private $userProviders; - public function __construct(SecurityContext $context, array $userProviders, $contextKey, LoggerInterface $logger = null, EventDispatcherInterface $dispatcher = null) + public function __construct(SecurityContextInterface $context, array $userProviders, $contextKey, LoggerInterface $logger = null, EventDispatcherInterface $dispatcher = null) { if (empty($contextKey)) { throw new \InvalidArgumentException('$contextKey must not be empty.'); diff --git a/src/Symfony/Component/Security/Http/Firewall/RememberMeListener.php b/src/Symfony/Component/Security/Http/Firewall/RememberMeListener.php index <HASH>..<HASH> 100644 --- a/src/Symfony/Component/Security/Http/Firewall/RememberMeListener.php +++ b/src/Symfony/Component/Security/Http/Firewall/RememberMeListener.php @@ -6,7 +6,7 @@ use Symfony\Component\HttpKernel\Log\LoggerInterface; use Symfony\Component\HttpKernel\Event\GetResponseEvent; use Symfony\Component\Security\Core\Authentication\AuthenticationManagerInterface; use Symfony\Component\Security\Core\Exception\AuthenticationException; -use Symfony\Component\Security\Core\SecurityContext; +use Symfony\Component\Security\Core\SecurityContextInterface; use Symfony\Component\Security\Http\RememberMe\RememberMeServicesInterface; use Symfony\Component\Security\Http\Event\InteractiveLoginEvent; use Symfony\Component\Security\Http\SecurityEvents; @@ -37,13 +37,13 @@ class RememberMeListener implements ListenerInterface /** * Constructor * - * @param SecurityContext $securityContext + * @param SecurityContextInterface $securityContext * @param RememberMeServicesInterface $rememberMeServices * @param AuthenticationManagerInterface $authenticationManager * @param LoggerInterface $logger * @param EventDispatcherInterface $dispatcher */ - public function __construct(SecurityContext $securityContext, RememberMeServicesInterface $rememberMeServices, AuthenticationManagerInterface $authenticationManager, LoggerInterface $logger = null, EventDispatcherInterface $dispatcher = null) + public function __construct(SecurityContextInterface $securityContext, RememberMeServicesInterface $rememberMeServices, AuthenticationManagerInterface $authenticationManager, LoggerInterface $logger = null, EventDispatcherInterface $dispatcher = null) { $this->securityContext = $securityContext; $this->rememberMeServices = $rememberMeServices;
use SecurityContextInterface instead of SecurityContext
symfony_symfony
train
726973ef8b6cd3e6c0d42345d216790f27b34a5f
diff --git a/bench.py b/bench.py index <HASH>..<HASH> 100644 --- a/bench.py +++ b/bench.py @@ -107,8 +107,8 @@ def benchmark(chardet_mod=chardet, verbose=False, num_iters=10): print('\nCalls per second for each encoding:') for encoding in sorted(encoding_times.keys()): print('{}: {}'.format(encoding, - num_iters * encoding_times[encoding] / - encoding_num_files[encoding])) + num_iters * encoding_num_files[encoding] / + encoding_times[encoding])) print('\nTotal time: {}s ({} calls per second)'.format(total_time, num_iters * num_files /
Fix miscalculation in bench.py
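The bug was an inverted ratio: calls per second is calls divided by time, not time divided by calls. With hypothetical numbers:

```python
num_iters, num_files, seconds = 10, 4, 2.0

wrong = num_iters * seconds / num_files  # 5.0  -- dimensionally backwards
right = num_iters * num_files / seconds  # 20.0 calls per second
```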
chardet_chardet
train
69cceed4aed504994e766768897d5215fa97da7f
diff --git a/types/container/config.go b/types/container/config.go index <HASH>..<HASH> 100644 --- a/types/container/config.go +++ b/types/container/config.go @@ -36,7 +36,7 @@ type HealthConfig struct { type Config struct { Hostname string // Hostname Domainname string // Domainname - User string // User that will run the command(s) inside the container + User string // User that will run the command(s) inside the container, also support user:group AttachStdin bool // Attach the standard input, makes possible user interaction AttachStdout bool // Attach the standard output AttachStderr bool // Attach the standard error
container: Config.User field supports "user:group" syntax
docker_engine-api
train
2524146eae222f643e087d8f3b30372830330696
diff --git a/lib/janky/repository.rb b/lib/janky/repository.rb index <HASH>..<HASH> 100644 --- a/lib/janky/repository.rb +++ b/lib/janky/repository.rb @@ -190,6 +190,7 @@ module Janky md5 << job_config_path.read md5 << builder.callback_url.to_s md5.hexdigest + "#{github_owner}-#{github_name}-#{md5.hexdigest}" end end end
use a readable job_name that is also unique. "#{github_owner}/#{github_name}-#{md5.hexdigest}" (note the forward slash) would be better, but not sure how much Jenkins would appreciate a / in the name...
github_janky
train
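The patch keeps the md5 digest for uniqueness but prefixes the owner and repo so the Jenkins job name stays human-readable. A sketch of that naming scheme in Python; the field names mirror the Ruby attributes and are assumptions for illustration:

```python
import hashlib


def job_name(github_owner: str, github_name: str, *parts: str) -> str:
    """Readable-but-unique job name: owner-repo-<digest of config inputs>."""
    digest = hashlib.md5()
    for part in parts:  # e.g. job config contents, callback URL
        digest.update(part.encode("utf-8"))
    # A hyphen joins owner and repo instead of "/" because, as the
    # commit message notes, a slash in the job name may not be safe.
    return f"{github_owner}-{github_name}-{digest.hexdigest()}"


print(job_name("github", "janky", "config-v1", "http://callback"))
```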
53590d5f72426773073b3ac3d565224422f9f145
diff --git a/packages/components/bolt-typeahead/typeahead.autosuggest.js b/packages/components/bolt-typeahead/typeahead.autosuggest.js index <HASH>..<HASH> 100755 --- a/packages/components/bolt-typeahead/typeahead.autosuggest.js +++ b/packages/components/bolt-typeahead/typeahead.autosuggest.js @@ -267,7 +267,9 @@ class BoltAutosuggest extends withPreact() { distance: 100, maxPatternLength: 32, minMatchCharLength: 1, - keys: ['label', 'description'], + // @todo: re-enable description meta data after further testing + refinement + // keys: ['label', 'description'], + keys: ['label'], }; const fuse = new Fuse(items, fuseOptions); let results = fuse.search(value);
refactor: temporarily disable the additional `description` suggestion field until further fine-tuned
bolt-design-system_bolt
train
4ba8107abebdfee698a2bed234a0599a5a8b84f2
diff --git a/languagetool-office-extension/src/main/java/org/languagetool/openoffice/ResultCache.java b/languagetool-office-extension/src/main/java/org/languagetool/openoffice/ResultCache.java index <HASH>..<HASH> 100644 --- a/languagetool-office-extension/src/main/java/org/languagetool/openoffice/ResultCache.java +++ b/languagetool-office-extension/src/main/java/org/languagetool/openoffice/ResultCache.java @@ -37,7 +37,7 @@ class ResultCache { } /** - * Remove a cache entries for a sentence + * Remove a cache entry for a sentence */ void remove(int numberOfParagraph, int startOfSentencePosition) { for(int i = 0; i < entries.size(); i++) { @@ -50,7 +50,7 @@ class ResultCache { } /** - * Remove all cache entries for a paragraph + * Remove all cache entry for a paragraph */ void remove(int numberOfParagraph) { for(int i = 0; i < entries.size(); i++) { @@ -80,21 +80,21 @@ class ResultCache { } /** - * Add an cache entries + * Add an cache entry */ public void add(int numberOfParagraph, int startOfSentencePosition, int nextSentencePosition, SingleProofreadingError[] errorArray) { entries.add(new CacheEntry(numberOfParagraph, startOfSentencePosition, nextSentencePosition, errorArray)); } /** - * Add an cache entries for paragraph + * Add an cache entry for paragraph */ public void add(int numberOfParagraph, SingleProofreadingError[] errorArray) { this.add(numberOfParagraph, 0, 0, errorArray); } /** - * replace an cache entries + * replace an cache entry */ void put(int numberOfParagraph, int startOfSentencePosition, int nextSentencePosition, SingleProofreadingError[] errorArray) { remove(numberOfParagraph, startOfSentencePosition); @@ -102,7 +102,7 @@ class ResultCache { } /** - * replace an cache entries for paragraph + * replace an cache entry for paragraph */ void put(int numberOfParagraph, SingleProofreadingError[] errorArray) { this.put(numberOfParagraph, 0, 0, errorArray);
[LO extension] ResultCache description of functions changed (entries -> entry)
languagetool-org_languagetool
train
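The ResultCache above stores proofreading errors per (paragraph, sentence-start) pair and supports removing one sentence entry or a whole paragraph. A dictionary-keyed Python sketch of the same structure, hypothetical rather than a port of the Java implementation:

```python
class ResultCache:
    """Cache of check results keyed by (paragraph, sentence start)."""

    def __init__(self):
        self._entries = {}

    def put(self, paragraph: int, sentence_start: int, errors: list) -> None:
        # Replacing an entry is just overwriting the key.
        self._entries[(paragraph, sentence_start)] = errors

    def remove_sentence(self, paragraph: int, sentence_start: int) -> None:
        self._entries.pop((paragraph, sentence_start), None)

    def remove_paragraph(self, paragraph: int) -> None:
        # Remove all cache entries for one paragraph.
        for key in [k for k in self._entries if k[0] == paragraph]:
            del self._entries[key]


cache = ResultCache()
cache.put(0, 0, ["err1"])
cache.put(0, 42, ["err2"])
cache.remove_paragraph(0)
print(cache._entries)  # {}
```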
f8a30f45677697bf92a6515be6e6a7c452ab22e3
diff --git a/lib/is-xml.js b/lib/is-xml.js index <HASH>..<HASH> 100644 --- a/lib/is-xml.js +++ b/lib/is-xml.js @@ -1,3 +1,11 @@ +/* + * Copyright (c) 2015 by Greg Reimer <gregreimer@gmail.com> + * MIT License. See mit-license.txt for more info. + */ + +// TODO: This simple regex will cover many cases but +// will probably need some improvements. + var patt = /xml/; module.exports = function(mimeType){
Added comments in is-xml module
greim_hoxy
train
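The TODO added above warns that a bare `/xml/` test is a blunt instrument. Exercising the same pattern in Python shows both the convenience and the edge cases the comment anticipates:

```python
import re

patt = re.compile(r"xml")

for mime in ("text/xml", "application/xml",
             "application/atom+xml",   # matched, usually desirable
             "image/svg+xml",          # matched, may or may not be wanted
             "text/html"):             # not matched
    print(mime, bool(patt.search(mime)))
```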
3dc852c57cacb3177cd8714031ea6724afe95d25
diff --git a/pymc/diagnostics.py b/pymc/diagnostics.py index <HASH>..<HASH> 100644 --- a/pymc/diagnostics.py +++ b/pymc/diagnostics.py @@ -134,6 +134,8 @@ def gelman_rubin(mtrace): def calc_rhat(x): try: + # When the variable is multidimensional, this assignment will fail, triggering + # a ValueError that will handle the multidimensional case m, n = x.shape # Calculate between-chain variance @@ -149,7 +151,9 @@ def gelman_rubin(mtrace): except ValueError: + # Tricky transpose here, shifting the last dimension to the first rotated_indices = np.roll(np.arange(x.ndim), 1) + # Now iterate over the dimension of the variable return np.squeeze([calc_rhat(xi) for xi in x.transpose(rotated_indices)]) Rhat = {}
Added comments to describe error handling in calc_rhat
pymc-devs_pymc
train
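The try/except documented in the patch handles multidimensional traces by rotating the last axis to the front and recursing once per component. The transpose trick in isolation, on toy data rather than PyMC internals:

```python
import numpy as np

x = np.zeros((4, 100, 3))  # e.g. (chains, samples, variable dimension)

# Shift the last dimension to the first: (3, 4, 100). Iterating over
# x.transpose(rotated) then yields one (chains, samples) block per
# component of the multidimensional variable.
rotated = np.roll(np.arange(x.ndim), 1)
print(rotated)                     # [2 0 1]
print(x.transpose(rotated).shape)  # (3, 4, 100)
```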
7a8d96e5f0f18cd82b1fda7ef736817855cbf7bc
diff --git a/src/AppWrapper.php b/src/AppWrapper.php
index <HASH>..<HASH> 100644
--- a/src/AppWrapper.php
+++ b/src/AppWrapper.php
@@ -8,6 +8,8 @@ use Psr\Http\Message\ServerRequestInterface;
 use Slim\App;
 use Slim\Container;
 use Slim\Exception\Exception as SlimException;
+use Slim\Http\Headers;
+use Slim\Http\Response;
 
 /**
  * Wraps a Slim v3 application object as an implementation of the kernel
@@ -43,7 +45,8 @@ class AppWrapper implements KernelInterface
      */
     public function handleRequest(ServerRequestInterface $request)
     {
-        $response = $this->container->get('response');
+        $headers = new Headers(['Content-Type' => 'text/html']);
+        $response = (new Response(200, $headers))->withProtocolVersion('1.1');
 
         try {
             $response = $this->app->callMiddlewareStack($request, $response);
Bugfix: Response body persistence. The response body was being appended to (the stream isn't immutable).
PHPFastCGI_SlimAdapter
train
c006e9e82fda5dd698a915488a78f448a9dcfcef
diff --git a/src/Encryptor/OpenSsl.php b/src/Encryptor/OpenSsl.php index <HASH>..<HASH> 100644 --- a/src/Encryptor/OpenSsl.php +++ b/src/Encryptor/OpenSsl.php @@ -85,6 +85,7 @@ class OpenSsl implements EncryptorInterface protected $saltLength = 8; protected $ivLength = null; // dependant on cipher method protected $macLength = 32; // strlen(hash_hmac('sha256', '', '', true)) + protected $keyLength = 16; // 128 bits /** * OpenSsl constructor @@ -105,10 +106,11 @@ class OpenSsl implements EncryptorInterface { $salt = random_bytes($this->saltLength); $iv = random_bytes($this->ivLength); - $key = hash_pbkdf2('sha256', $this->password, $salt, $this->pbkdf2Iterations, 0, true); - $encryptedData = openssl_encrypt($data, $this->cipherMethod, $key, OPENSSL_RAW_DATA, $iv); - $mac = hash_hmac('sha256', $encryptedData . $iv, $key, true); + list($encKey, $authKey) = $this->deriveKeys($salt); + + $encryptedData = openssl_encrypt($data, $this->cipherMethod, $encKey, OPENSSL_RAW_DATA, $iv); + $mac = hash_hmac('sha256', $encryptedData . $iv, $authKey, true); return $salt . $iv . $mac . $encryptedData; } @@ -127,14 +129,15 @@ class OpenSsl implements EncryptorInterface $mac = substr($data, $this->saltLength + $this->ivLength, $this->macLength); $encryptedData = substr($data, $this->saltLength + $this->ivLength + $this->macLength); - $key = hash_pbkdf2('sha256', $this->password, $salt, $this->pbkdf2Iterations, 0, true); - $calculatedMac = hash_hmac('sha256', $encryptedData . $iv, $key, true); + list($encKey, $authKey) = $this->deriveKeys($salt); + + $calculatedMac = hash_hmac('sha256', $encryptedData . $iv, $authKey, true); if (!hash_equals($calculatedMac, $mac)) { throw new RuntimeException('HMAC failed to match'); } - $decryptedData = openssl_decrypt($encryptedData, $this->cipherMethod, $key, OPENSSL_RAW_DATA, $iv); + $decryptedData = openssl_decrypt($encryptedData, $this->cipherMethod, $encKey, OPENSSL_RAW_DATA, $iv); if ($decryptedData === false) { throw new RuntimeException('Failed to decrypt data'); @@ -143,4 +146,17 @@ class OpenSsl implements EncryptorInterface return $decryptedData; } + /** + * Derive the keys for encryption and authentication using the given salt, and the password + * + * @param string $salt + * @return array + */ + protected function deriveKeys($salt) + { + $key = hash_pbkdf2('sha256', $this->password, $salt, $this->pbkdf2Iterations, $this->keyLength * 2, true); + + return str_split($key, $this->keyLength); + } + }
Use separate keys for encryption and authentication The derived key is still generated the same way as before, but we're now splitting the string to give two separate keys. This means that each key will now be <I> bits instead of <I> bits, but this should still be sufficient for use in the hmac and encryption. Fixes #2
phlib_encrypt
train
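The commit derives a single PBKDF2 output and splits it into separate encryption and authentication keys, so the HMAC key is never the cipher key. The same derivation in Python with hashlib; the hash and 16-byte halves mirror the patch, while the iteration count here is illustrative:

```python
import hashlib
import os


def derive_keys(password: bytes, salt: bytes,
                iterations: int = 100_000, key_len: int = 16):
    """One PBKDF2 call at twice the key length, split into two keys."""
    material = hashlib.pbkdf2_hmac("sha256", password, salt,
                                   iterations, dklen=key_len * 2)
    return material[:key_len], material[key_len:]  # (enc_key, auth_key)


salt = os.urandom(8)
enc_key, auth_key = derive_keys(b"correct horse", salt)
assert enc_key != auth_key and len(enc_key) == len(auth_key) == 16
```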
5b92d8e83d019bb9d9a28b9b1a1b5b174d7dd0f0
diff --git a/src/org/mozilla/classfile/ClassFileWriter.java b/src/org/mozilla/classfile/ClassFileWriter.java index <HASH>..<HASH> 100644 --- a/src/org/mozilla/classfile/ClassFileWriter.java +++ b/src/org/mozilla/classfile/ClassFileWriter.java @@ -4191,7 +4191,14 @@ public class ClassFileWriter { "org/mozilla/classfile/ClassFileWriter.class"); } byte[] header = new byte[8]; - is.read(header); + // read loop is required since JDK7 will only provide 2 bytes + // on the first read() - see bug #630111 + int read = 0; + while (read < 8) { + int c = is.read(header, read, 8 - read); + if (c < 0) throw new IOException(); + read += c; + } minor = (header[4] << 8) | header[5]; major = (header[6] << 8) | header[7]; } catch (Exception e) {
Fix bug <I> - Codegen optimizer fails with Unsupported major.minor version <I> on JDK7
mozilla_rhino
train
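The JDK7 regression fixed above is the classic partial-read pitfall: a stream's `read(buf)` may legally return fewer bytes than requested. The same defensive loop in Python terms, applied to the class-file header the patch parses:

```python
import io


def read_exact(stream, n: int) -> bytes:
    """Keep reading until exactly n bytes arrive or the stream ends."""
    data = b""
    while len(data) < n:
        chunk = stream.read(n - len(data))
        if not chunk:  # EOF before we got everything
            raise IOError("unexpected end of stream")
        data += chunk
    return data


header = read_exact(io.BytesIO(b"\xca\xfe\xba\xbe\x00\x00\x00\x32"), 8)
minor = (header[4] << 8) | header[5]
major = (header[6] << 8) | header[7]
print(major, minor)  # 50 0, i.e. a Java 6 class file
```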
471c1e4194135e639ec4fd398f129a48b03102a3
diff --git a/js/developer.js b/js/developer.js index <HASH>..<HASH> 100644 --- a/js/developer.js +++ b/js/developer.js @@ -77,7 +77,7 @@ function setupDeveloperMenu(param) { views: views }; - if (config.autoJoin) { + if (config.autoJoin && !param.isReplay) { // NOTE: この時点でgame._loadedにgame._start()がハンドルされている必要がある // スナップショットから復元時はloadedが発火しないのでJOINは行われない props.game._loaded.handle(function() {
do not send joinEvent when replaying
akashic-games_akashic-sandbox
train
6c39c22266f314f7a8008c6c4cbb966eeed9d41f
diff --git a/ghost/admin/app/components/gh-members-chart.js b/ghost/admin/app/components/gh-members-chart.js index <HASH>..<HASH> 100644 --- a/ghost/admin/app/components/gh-members-chart.js +++ b/ghost/admin/app/components/gh-members-chart.js @@ -131,7 +131,12 @@ export default Component.extend({ bodyFontSize: 13, titleFontStyle: 'normal', titleFontColor: 'rgba(255, 255, 255, 0.7)', - titleMarginBottom: 4 + titleMarginBottom: 4, + callbacks: { + label: function (tooltipItems, data) { + return data.datasets[0].label + `: ` + data.datasets[0].data[tooltipItems.index].toString().replace(/\B(?=(\d{3})+(?!\d))/g, ','); + } + } }, hover: { mode: 'index', @@ -186,7 +191,10 @@ export default Component.extend({ maxTicksLimit: 5, fontColor: '#9baeb8', padding: 8, - precision: 0 + precision: 0, + callback: function (value) { + return value.toString().replace(/\B(?=(\d{3})+(?!\d))/g, ','); + } } }] }
Added 'thousands' separator to members chart no refs. - added 'thousands' separator to Y axis values and tooltip in members chart
TryGhost_Ghost
train
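The chart patch injects a regex-based thousands separator into the Chart.js callbacks. The same lookahead pattern, exercised in Python so the mechanics are visible (Python itself would normally just use a format specifier):

```python
import re


def thousands(value) -> str:
    # \B(?=(\d{3})+(?!\d)) matches every non-boundary position that has
    # a multiple of three digits to its right, i.e. where a comma belongs.
    return re.sub(r"\B(?=(\d{3})+(?!\d))", ",", str(value))


print(thousands(1234567))   # 1,234,567
print(thousands(999))       # 999
print(f"{1234567:,}")       # built-in equivalent: 1,234,567
```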
3722c49e25ec895758e5791ac56593e141a1f9a3
diff --git a/dispatch/modules/content/mixins.py b/dispatch/modules/content/mixins.py index <HASH>..<HASH> 100644 --- a/dispatch/modules/content/mixins.py +++ b/dispatch/modules/content/mixins.py @@ -43,19 +43,50 @@ class AuthorMixin(object): def get_author_type_string(self, links=False): def author_type(author): + ArticleAuthors = [] + ArticlePhotographers = [] + ArticleIllustrators = [] + ArticleVideographers = [] if links and author.person.slug: return '<a href="/authors/%s/">%s</a>' % (author.person.slug, author.person.full_name) - return " %s, %s" % (author.person.full_name, author.type) + for author in self.authors.all(): + if author.type == 'author': + ArticleAuthors.append(author.person.full_name) + if self.authors.all().filter(type='author').count() == 1: + ArticleAuthorsStr = "Written by " + ArticleAuthors[0] + elif self.authors.all().filter(type='author').count() > 1: + ArticleAuthorsStr = "Written by " + ArticleAuthors[0] + ", ".join(ArticleAuthors[1:-1]) + " and " + ArticleAuthors[-1] + if author.type == 'photographer': + ArticlePhotographers.append(author.person.full_name) + if self.authors.all().filter(type='photographer').count() == 1: + ArticlePhotographersStr = "Photos by " + ArticlePhotographers[0] + elif self.authors.all().filter(type='photographer').count() > 1: + ArticlePhotographersStr = "Photos by " + ArticlePhotographers[0] + ", ".join(ArticlePhotographers[0:-1]) + " and " + ArticlePhotographers[-1] + if author.type == 'illustrator': + ArticleIllustrators.append(author.person.full_name) + if self.authors.all().filter(type='illustrator').count() == 1: + ArticleIllustratorsStr = "Illustrated by " + ArticleIllustrators[0] + elif self.authors.all().filter(type='illustrator').count() > 1: + ArticleIllustratorsStr = "Illustrated by "+ ArticleIllustrators[0] + ", ".join(ArticleIllustrators[1:-1]) + " and " + ArticleIllustrators[-1] + if author.type == 'videographer': + ArticleVideographers.append(author.person.full_name) + if self.authors.all().filter(type='videographer').count() == 1: + ArticleVideographersStr = "Videos by " + ArticleVideographers[0] + elif self.authors.all().filter(type='videographer').count() > 1: + ArticleVideographersStr = "Videos by "+ ArticleVideographers[0] + ", ".join(ArticleVideographers[1:-1]) + " and " + ArticleVideographers[-1] - authors_type = map(author_type, self.authors.all()) + AuthorTypeString = "" + if ArticleAuthors: + AuthorTypeString = AuthorTypeString + ArticleAuthorsStr + if ArticlePhotographers: + AuthorTypeString = AuthorTypeString + ", " + ArticlePhotographersStr + if ArticleIllustrators: + AuthorTypeString = AuthorTypeString + ", " + ArticleIllustratorsStr + if ArticleVideographers: + AuthorTypeString = AuthorTypeString + ", " + ArticleVideographersStr + return AuthorTypeString - if not authors_type: - return "" - elif len(authors_type) == 1: - # If this is the only author, just return author name - return authors_type[0] - - return ", ".join(authors_type[:-1]) + " and " + authors_type[-1] + return author_type(self.authors.all()) def get_author_url(self): """Returns list of authors (including hyperlinks) as a
Wrote a function to make type statements on articles look great
ubyssey_dispatch
train
5fce7030e193ba7a14263bfee9879cbdc7d9ba32
diff --git a/tests/calculators/hazard/classical/core_test.py b/tests/calculators/hazard/classical/core_test.py index <HASH>..<HASH> 100644 --- a/tests/calculators/hazard/classical/core_test.py +++ b/tests/calculators/hazard/classical/core_test.py @@ -157,7 +157,7 @@ class ClassicalHazardCalculatorTestCase(unittest.TestCase): # We expect 2 logic tree realizations ltr1, ltr2 = models.LtRealization.objects.filter( - hazard_calculation=self.job.hazard_calculation.id) + hazard_calculation=self.job.hazard_calculation.id).order_by("id") # Check each ltr contents, just to be thorough. self.assertEqual(0, ltr1.ordinal) @@ -294,7 +294,7 @@ class ClassicalHazardCalculatorTestCase(unittest.TestCase): task_signal_queue(conn.channel()).declare() with conn.Consumer(task_signal_queue, callbacks=[test_callback]): # call the task as a normal function - core.hazard_curves(self.job.id, lt_rlz.id, [src_id]) + core.hazard_curves(self.job.id, [src_id], lt_rlz.id) # wait for the completion signal conn.drain_events() diff --git a/tests/utils_stats_test.py b/tests/utils_stats_test.py index <HASH>..<HASH> 100644 --- a/tests/utils_stats_test.py +++ b/tests/utils_stats_test.py @@ -255,15 +255,6 @@ class PkIncTestCase(helpers.RedisTestCase, unittest.TestCase): self.assertRaises(KeyError, stats.pk_inc, job_id, pkey) - def test_pk_inc_with_non_existent_debug_key(self): - """`KeyError` is raised for debug keys that are not in `STATS_KEYS`.""" - job_id = 86 - pkey = "How hard can it be!?" - stats.delete_job_counters(job_id) - with helpers.patch("openquake.utils.stats.debug_stats_enabled") as dse: - dse.return_value = False - self.assertRaises(KeyError, stats.pk_inc, job_id, pkey) - class PkGetTestCase(helpers.RedisTestCase, unittest.TestCase): """Tests the behaviour of utils.stats.pk_get().""" @@ -299,14 +290,6 @@ class PkGetTestCase(helpers.RedisTestCase, unittest.TestCase): stats.delete_job_counters(job_id) self.assertRaises(KeyError, stats.pk_get, job_id, pkey) - def test_pk_get_with_non_existent_debug_key(self): - """`KeyError` is raised for debug keys that are not in `STATS_KEYS`.""" - job_id = 96 - pkey = "Not a key!?" - with helpers.patch("openquake.utils.stats.debug_stats_enabled") as dse: - dse.return_value = False - self.assertRaises(KeyError, stats.pk_get, job_id, pkey) - class KvsOpTestCase(helpers.RedisTestCase, unittest.TestCase): """Tests the behaviour of utils.stats.pk_kvs_op()."""
rm obsolete tests and fix the failing ones
gem_oq-engine
train
c6dd07e7f29bb1ef098197c9bf80e5e41866091b
diff --git a/viewer.js b/viewer.js index <HASH>..<HASH> 100644 --- a/viewer.js +++ b/viewer.js @@ -41,13 +41,10 @@ , states = { DEFAULT: 0, ANSWER_DRAW: 1, - SOLUTION_DRAW: 2, - SOLUTION_MOVE: 3, - SOLUTION_POINT_DELETE: 4, - ANNOTATION_SELECT: 5, - ANNOTATION_DRAW: 6, - ANNOTATION_MOVE: 7, - ANNOTATION_DISPLAY: 8 + POLYGON_DRAW: 2, + POLYGON_MOVE: 3, + POLYGON_POINT_DELETE: 4, + ANNOTATION_DISPLAY: 5 } , state = states.DEFAULT @@ -91,6 +88,8 @@ , mouseLastPos = null // UI element which is currently in focus, i.e. the mouse is hovering over it , focusUIElement = null + // active polygon in edit mode + , activePolygon = null // answer feature , answerEditable = (typeof options.mode === 'string' && options.mode === 'editAnswer') @@ -160,7 +159,7 @@ }; this.importSolution = function(importedSolution){ - this.solution = (importedSolution.length >= 1) ? importPolygon(importedSolution) : null; + this.solution = activePolygon = (importedSolution.length >= 1) ? importPolygon(importedSolution) : null; dirty = true; }; @@ -411,26 +410,32 @@ var newVertex = new Vertex(x, y, polygon); newVertex.onClick = function(evt){ - if(state === states.SOLUTION_POINT_DELETE){ - self.solution.deleteVertex(newVertex); - self.onSolutionChange(self.exportSolution()); - dirty = true; + if(state === states.POLYGON_POINT_DELETE){ + if(newVertex.polygon !== null){ + newVertex.polygon.deleteVertex(newVertex); + if(newVertex.polygon === self.solution){ + self.onSolutionChange(self.exportSolution()); + } + dirty = true; + } return; } - if(state === states.SOLUTION_DRAW - && self.solution !== null - && newVertex.equals(self.solution.initialVertex) - && self.solution.getLength() > 2 - ){ - self.solution.close(); - state = states.DEFAULT; - self.onSolutionChange(self.exportSolution()); - return; + if(state === states.POLYGON_DRAW){ + var isInitialVertex = newVertex.polygon !== null + && newVertex.equals(newVertex.polygon.initialVertex); + if(isInitialVertex && newVertex.polygon.getLength() > 2){ + newVertex.polygon.close(); + state = states.DEFAULT; + if(newVertex.polygon === self.solution){ + self.onSolutionChange(self.exportSolution()); + } + return; + } } }; newVertex.onMouseDown = function(){ - if(state === states.SOLUTION_MOVE){ + if(state === states.POLYGON_MOVE){ activeMoveElement = newVertex.position; leftMouseButtonDown = true; return true; @@ -481,7 +486,7 @@ ctx.fillStyle = (vertex === focusUIElement && vertex === self.solution.initialVertex - && state === states.SOLUTION_DRAW) + && state === states.POLYGON_DRAW) ? 
'#FF6600' // if mouse is hovering over this and a click would close the polygon : '#FFFFFF'; // default ctx.strokeStyle = '#000000'; @@ -753,7 +758,7 @@ self.answer = convertToImagePosition(clickPos); dirty = true; } - if(state === states.SOLUTION_DRAW){ + if(state === states.POLYGON_DRAW){ if(evt.shiftKey){ // close polygon if(self.solution !== null){ @@ -764,7 +769,7 @@ } else { var newVertexPosition = convertToImagePosition(clickPos) , newVertex = createVertex(newVertexPosition.x, newVertexPosition.y); - if(self.solution === null) self.solution = new Polygon(); + if(self.solution === null) self.solution = activePolygon = new Polygon(); self.solution.addVertex(newVertex); } self.onSolutionChange(self.exportSolution()); @@ -979,34 +984,34 @@ // if solution feature enable, show their buttons if(solutionEditable){ drawSolutionPointButton.enabled = function(){ - return (state === states.SOLUTION_DRAW); + return (state === states.POLYGON_DRAW); }; drawSolutionPointButton.onClick = function(){ - state = (state === states.SOLUTION_DRAW) + state = (state === states.POLYGON_DRAW) ? states.DEFAULT - : states.SOLUTION_DRAW; + : states.POLYGON_DRAW; dirty = true; }; moveSolutionButton.enabled = function(){ - return (state === states.SOLUTION_MOVE); + return (state === states.POLYGON_MOVE); }; moveSolutionButton.onClick = function(){ - state = (state === states.SOLUTION_MOVE) + state = (state === states.POLYGON_MOVE) ? states.DEFAULT - : states.SOLUTION_MOVE; + : states.POLYGON_MOVE; dirty = true; }; deleteSolutionPointButton.enabled = function(){ - return (state === states.SOLUTION_POINT_DELETE); + return (state === states.POLYGON_POINT_DELETE); }; deleteSolutionPointButton.onClick = function(){ - state = (state === states.SOLUTION_POINT_DELETE) + state = (state === states.POLYGON_POINT_DELETE) ? states.DEFAULT - : states.SOLUTION_POINT_DELETE; + : states.POLYGON_POINT_DELETE; dirty = true; }; deleteSolutionButton.onClick = function(){ - self.solution = null; + self.solution = activePolygon = null; self.onSolutionChange(self.exportSolution()); dirty = true; };
refactoring towards multiple polygons
pfirpfel_image-viewer
train
ad4d1c7608dcb9ba0eea781db0f3923200a208af
diff --git a/java/client/src/org/openqa/selenium/firefox/FirefoxBinary.java b/java/client/src/org/openqa/selenium/firefox/FirefoxBinary.java index <HASH>..<HASH> 100644 --- a/java/client/src/org/openqa/selenium/firefox/FirefoxBinary.java +++ b/java/client/src/org/openqa/selenium/firefox/FirefoxBinary.java @@ -139,6 +139,10 @@ public class FirefoxBinary { return executable.getPath(); } + protected List<String> getExtraOptions() { + return extraOptions; + } + public long getTimeout() { return timeout; } diff --git a/java/client/src/org/openqa/selenium/firefox/XpiDriverService.java b/java/client/src/org/openqa/selenium/firefox/XpiDriverService.java index <HASH>..<HASH> 100644 --- a/java/client/src/org/openqa/selenium/firefox/XpiDriverService.java +++ b/java/client/src/org/openqa/selenium/firefox/XpiDriverService.java @@ -139,6 +139,7 @@ public class XpiDriverService extends DriverService { Map<String, String> env = envBuilder.build(); List<String> cmdArray = new ArrayList<>(getArgs()); + cmdArray.addAll(binary.getExtraOptions()); cmdArray.add("-foreground"); process = new CommandLine(binary.getPath(), Iterables.toArray(cmdArray, String.class)); process.setEnvironmentVariables(env);
[java] Passing extra options to legacy Firefox process
SeleniumHQ_selenium
train
824ee8678eb4e4f59b1b1a6e62e7fba70f0ac7ba
diff --git a/clients/web/src/server.js b/clients/web/src/server.js index <HASH>..<HASH> 100644 --- a/clients/web/src/server.js +++ b/clients/web/src/server.js @@ -83,6 +83,18 @@ restartServer._checkServer = function (lastStartDate) { }); }; +/* Having these as object properties facilitates testing */ +restartServer._callSystemRestart = function () { + return restRequest({ + type: 'PUT', + path: 'system/restart' + }); +}; + +restartServer._reloadWindow = function () { + window.location.reload(); +}; + function restartServerPrompt() { confirm({ text: 'Are you sure you want to restart the server? This ' + @@ -93,19 +105,10 @@ function restartServerPrompt() { } function rebuildWebClient() { - return restartServer._rebuildWebClient(); + return rebuildWebClient._rebuildWebClient(); } -/* Having these as object properties facilitates testing */ -restartServer._callSystemRestart = function () { - return restRequest({type: 'PUT', path: 'system/restart'}); -}; - -restartServer._reloadWindow = function () { - window.location.reload(); -}; - -restartServer._rebuildWebClient = function () { +rebuildWebClient._rebuildWebClient = function () { return restRequest({ path: 'system/web_build', type: 'POST', diff --git a/clients/web/test/spec/adminSpec.js b/clients/web/test/spec/adminSpec.js index <HASH>..<HASH> 100644 --- a/clients/web/test/spec/adminSpec.js +++ b/clients/web/test/spec/adminSpec.js @@ -583,11 +583,11 @@ describe('Test the plugins page', function () { }, 100); return restartResolution.promise(); }); + spyOn(girder.server.restartServer, '_reloadWindow'); // We don't want to really rebuild the web code, so replace the original one with a resolved Promise - spyOn(girder.server.restartServer, '_rebuildWebClient').andCallFake(function () { + spyOn(girder.server.rebuildWebClient, '_rebuildWebClient').andCallFake(function () { return $.Deferred().resolve().promise(); }); - spyOn(girder.server.restartServer, '_reloadWindow'); }); it('Test that anonymous loading plugins page prompts login', function () {
Reorder and move some private functions to a more logical grouping
girder_girder
train
36c816e8e0b341aa8857cc45693560a5935a5b8c
diff --git a/lib/snmp/manager.rb b/lib/snmp/manager.rb index <HASH>..<HASH> 100644 --- a/lib/snmp/manager.rb +++ b/lib/snmp/manager.rb @@ -270,10 +270,10 @@ class Manager # Sends an SNMPv2c style trap. # # sys_up_time: an integer respresenting the number of hundredths of - # a second that this system has been up + # a second that this system has been up # - # trap_oid: an ObjectId or String with the OID identifier for this - # trap + # trap_oid: an ObjectId or String with the OID identifier for this + # trap # # object_list: a list of additional varbinds to send with the trap # @@ -284,13 +284,13 @@ class Manager end ## - # Sends an inform request using the supplied list + # Sends an inform request using the supplied varbind list. # # sys_up_time: an integer respresenting the number of hundredths of - # a second that this system has been up + # a second that this system has been up # - # trap_oid: an ObjectId or String with the OID identifier for this - # inform request + # trap_oid: an ObjectId or String with the OID identifier for this + # inform request # # object_list: a list of additional varbinds to send with the inform #
Changing rdoc comments so the formatting isn't messed up
hallidave_ruby-snmp
train
2bcba718d3dcd79fdec0e108b24fd3c8a841c89e
diff --git a/drools-compiler/src/test/java/org/drools/integrationtests/MiscTest.java b/drools-compiler/src/test/java/org/drools/integrationtests/MiscTest.java index <HASH>..<HASH> 100644 --- a/drools-compiler/src/test/java/org/drools/integrationtests/MiscTest.java +++ b/drools-compiler/src/test/java/org/drools/integrationtests/MiscTest.java @@ -7426,6 +7426,48 @@ public class MiscTest { } @Test + @Ignore("Added this test 05-APR-2011. Used to work in 5.2.0.M1 -Toni-") + public void testMemberOfNotWorkingWithOr() throws Exception { + + String rule = ""; + rule += "package org.drools;\n"; + rule += "import java.util.ArrayList;\n"; + rule += "import org.drools.Person;\n"; + rule += "rule \"Test Rule\"\n"; + rule += "when\n"; + rule += " $list: ArrayList() \n"; + rule += " ArrayList() \n"; + rule += " from collect( \n"; + rule += " Person( \n"; + rule += " ( \n"; + rule += " pet memberOf $list \n"; + rule += " ) || ( \n"; + rule += " pet == null \n"; + rule += " ) \n"; + rule += " ) \n"; + rule += " )\n"; + rule += "then\n"; + rule += " System.out.println(\"hello person\");\n"; + rule += "end"; + + final PackageBuilder builder = new PackageBuilder(); + builder.addPackageFromDrl(new StringReader(rule)); + final org.drools.rule.Package pkg = builder.getPackage(); + + final RuleBase ruleBase = getRuleBase(); + ruleBase.addPackage(pkg); + StatefulSession session = ruleBase.newStatefulSession(); + + Person toni = new Person("Toni", 12); + toni.setPet(new Pet("Mittens")); + + session.insert(new ArrayList()); + session.insert(toni); + + session.fireAllRules(); + } + + @Test @Ignore("Added this test 31-MAR-2011. Used to work in 5.2.0.M1 -Toni-") public void testAccessFieldsFromSubClass() throws Exception {
add yet another test to MiscTest
kiegroup_drools
train
3458de86389ee3e912e65c9d15eaf87185724efa
diff --git a/web/concrete/config/version.php b/web/concrete/config/version.php index <HASH>..<HASH> 100644 --- a/web/concrete/config/version.php +++ b/web/concrete/config/version.php @@ -1,3 +1,3 @@ <? defined('C5_EXECUTE') or die("Access Denied."); -$APP_VERSION = '5.5.1b1'; \ No newline at end of file +$APP_VERSION = '5.5.1b2'; \ No newline at end of file diff --git a/web/concrete/models/attribute/key.php b/web/concrete/models/attribute/key.php index <HASH>..<HASH> 100644 --- a/web/concrete/models/attribute/key.php +++ b/web/concrete/models/attribute/key.php @@ -94,6 +94,7 @@ class AttributeKey extends Object { */ public static function getList($akCategoryHandle, $filters = array()) { $db = Loader::db(); + $pkgHandle = $db->GetOne('select pkgHandle from AttributeKeyCategories inner join Packages on Packages.pkgID = AttributeKeyCategories.pkgID where akCategoryHandle = ?', array($akCategoryHandle)); $q = 'select akID from AttributeKeys inner join AttributeKeyCategories on AttributeKeys.akCategoryID = AttributeKeyCategories.akCategoryID where akCategoryHandle = ?'; foreach($filters as $key => $value) { $q .= ' and ' . $key . ' = ' . $value . ' '; @@ -101,7 +102,11 @@ class AttributeKey extends Object { $r = $db->Execute($q, array($akCategoryHandle)); $list = array(); $txt = Loader::helper('text'); - Loader::model('attribute/categories/' . $akCategoryHandle); + if ($pkgHandle) { + Loader::model('attribute/categories/' . $akCategoryHandle, $pkgHandle); + } else { + Loader::model('attribute/categories/' . $akCategoryHandle); + } $className = $txt->camelcase($akCategoryHandle); while ($row = $r->FetchRow()) { $c1 = $className . 'AttributeKey';
fixing bug in attribute set attributes and packages Former-commit-id: c<I>ca<I>cdc<I>c<I>b<I>
concrete5_concrete5
train
fe0a41f5a70b99d3f3ee0b5393259b94ecd404f5
diff --git a/autopython/cpython.py b/autopython/cpython.py index <HASH>..<HASH> 100644 --- a/autopython/cpython.py +++ b/autopython/cpython.py @@ -130,6 +130,18 @@ class PresenterShell(object): color_scheme=self._color_scheme, locals=ns) else: self._interpreter = PresenterInterpreter(locals=ns) + have_readline = True + try: + import readline + except ImportError: + try: + import pyreadline as readline + except ImportError: + have_readline = False + if have_readline: + import rlcompleter + readline.set_completer(rlcompleter.Completer(ns).complete) + readline.parse_and_bind("tab: complete") def begin(self): self.reset_interpreter() @@ -203,8 +215,7 @@ class PresenterShell(object): while self._interacting: try: try: - print(end=ps2 if need_more else ps1, flush=True) - line = input() + line = input(ps2 if need_more else ps1) if PY2: line = line.decode(sys.stdin.encoding) lines.append(line)
Add readline support (with autocomplete) for CPython interactive prompt
gosella_autopython
train
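The patch wires rlcompleter into the presenter's own namespace so tab completion works during a live demo, falling back to pyreadline on platforms without readline. A standalone version of the same setup; the namespace contents here are illustrative:

```python
import rlcompleter
import readline  # on Windows, `import pyreadline as readline` instead

ns = {"answer": 42, "greet": lambda: "hi"}

# Complete against our own namespace rather than __main__'s globals.
readline.set_completer(rlcompleter.Completer(ns).complete)
readline.parse_and_bind("tab: complete")

# At an input() prompt, typing "gr" + Tab now expands to "greet(".
```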
3c5ea7f329f1b346b39a30fbf79b23fe42478065
diff --git a/voluptuous/validators.py b/voluptuous/validators.py index <HASH>..<HASH> 100644 --- a/voluptuous/validators.py +++ b/voluptuous/validators.py @@ -416,8 +416,13 @@ def IsFile(v): True >>> with raises(FileInvalid, 'not a file'): ... IsFile()("random_filename_goes_here.py") + >>> with raises(FileInvalid, 'Not a file'): + ... IsFile()(None) """ - return os.path.isfile(v) + if v: + return os.path.isfile(v) + else: + raise FileInvalid('Not a file') @message('not a directory', cls=DirInvalid) @@ -427,8 +432,13 @@ def IsDir(v): >>> IsDir()('/') '/' + >>> with raises(DirInvalid, 'Not a directory'): + ... IsDir()(None) """ - return os.path.isdir(v) + if v: + return os.path.isdir(v) + else: + raise DirInvalid("Not a directory") @message('path does not exist', cls=PathInvalid) @@ -440,8 +450,13 @@ def PathExists(v): True >>> with raises(Invalid, 'path does not exist'): ... PathExists()("random_filename_goes_here.py") + >>> with raises(PathInvalid, 'Not a Path'): + ... PathExists()(None) """ - return os.path.exists(v) + if v: + return os.path.exists(v) + else: + raise PathInvalid("Not a Path") class Range(object):
Fixes #<I>: Making Path validators resilient to None.
alecthomas_voluptuous
train
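The fix guards the os.path calls because `os.path.isfile(None)` raises a TypeError rather than producing a clean validation error. The guard pattern on its own, with the error types simplified to ValueError instead of voluptuous's Invalid hierarchy:

```python
import os


def is_file(v):
    """Validate that v names an existing file, rejecting falsy input."""
    if not v:
        # Without this check, os.path.isfile(None) raises TypeError,
        # which escapes the validator instead of reporting cleanly.
        raise ValueError("Not a file")
    if not os.path.isfile(v):
        raise ValueError("not a file")
    return v


for candidate in (None, "", "definitely_missing.py"):
    try:
        is_file(candidate)
    except ValueError as exc:
        print(candidate, "->", exc)
```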
2036017f9696ad82b2166bfde0a1b6046de34a15
diff --git a/index.js b/index.js index <HASH>..<HASH> 100644 --- a/index.js +++ b/index.js @@ -14,7 +14,6 @@ module.exports = function(opts) { opts || (opts = {}); opts.wait || (opts.wait = 1000); - function debounce(fn, wait, immediate) { var req = this, map, id; @@ -40,6 +39,11 @@ module.exports = function(opts) { if (err) return fn(err); map = req.session._debounce; + + if (!map || !map[id]) { + return; + } + if (map[id] + wait < Date.now()) { debounce.call(req, fn, wait, true); } diff --git a/test.js b/test.js index <HASH>..<HASH> 100644 --- a/test.js +++ b/test.js @@ -1,10 +1,14 @@ QUnit.module('debounce'); -function getReq() { +function getReq(reloadTimeout, saveTimeout) { var req = {session: {}}; - req.session.reload = req.session.save = function(cb) { - cb(); + req.session.reload = function(cb) { + setTimeout(cb, reloadTimeout); + }; + + req.session.save = function(cb) { + setTimeout(cb, saveTimeout); }; return req; @@ -182,3 +186,30 @@ test('debounce multiple times with immediate=true', function() { start(); }, 5000); }); + +test('double call with immediate=true, second call faster than session store roundtrip', function() { + var middleware = debounce(), + reloadTimeout = 500, + req = getReq(reloadTimeout, 0), + counter = 0; + + expect(1); + stop(); + + middleware(req, null, noop); + + function fn() { + counter++; + } + + req.debounce(fn, 500, true); + + setTimeout(function () { + req.debounce(fn, 500, true); + }, reloadTimeout / 2); + + setTimeout(function() { + equal(counter, 1, 'debounced function called only once'); + start(); + }, 5000); +});
Closes #3, with test to reproduce
kof_connect-debounce
train
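The bug report covers a second `immediate` call landing while the session-store round trip is still in flight, before the bookkeeping map exists; the fix adds an `if (!map || !map[id]) return;` guard. The core debounce-with-immediate idea, reduced to a synchronous Python sketch with timestamps standing in for the session store:

```python
import time


class Debouncer:
    """Run fn at most once per `wait` seconds (leading-edge debounce)."""

    def __init__(self):
        self._last = {}  # fn id -> last run timestamp

    def debounce(self, fn, wait: float) -> None:
        now = time.monotonic()
        last = self._last.get(id(fn))
        # Guard against missing state, the analogue of the patch's
        # `if (!map || !map[id]) return;` check.
        if last is not None and now - last < wait:
            return
        self._last[id(fn)] = now
        fn()


d = Debouncer()
calls = []
fn = lambda: calls.append(1)
for _ in range(3):
    d.debounce(fn, wait=0.5)
print(len(calls))  # 1
```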
970fa9f041fd5738091af3eea90fda3bc18c25b2
diff --git a/objects/interface.go b/objects/interface.go index <HASH>..<HASH> 100644 --- a/objects/interface.go +++ b/objects/interface.go @@ -219,6 +219,8 @@ type UserProfile struct { LastName string `json:"last_name"` RealName string `json:"real_name"` RealNameNormalized string `json:"real_name_normalized"` + StatusText string `json:"status_text,omitempty"` + StatusEmoji string `json:"status_emoji,omitempty"` } type User struct {
Add these status fields while we're at it
lestrrat_go-slack
train
af8637c7d20144d0a0d3f2d0bbf7c0c4a4a041d1
diff --git a/bika/lims/content/analysisrequest.py b/bika/lims/content/analysisrequest.py index <HASH>..<HASH> 100644 --- a/bika/lims/content/analysisrequest.py +++ b/bika/lims/content/analysisrequest.py @@ -313,13 +313,6 @@ schema = BikaSchema.copy() + Schema(( showOn=True, ), ), - ComputedField( - 'BatchUID', - expression='here.getBatch().UID() if here.getBatch() else ""', - widget=ComputedWidget( - visible=False, - ), - ), ReferenceField( 'SamplingRound', allowed_types=('SamplingRound',), @@ -1861,6 +1854,7 @@ class AnalysisRequest(BaseFolder): client = self.aq_parent return client.getProvince() + @security.public def getBatch(self): # The parent type may be "Batch" during ar_add. # This function fills the hidden field in ar_add.pt @@ -1869,6 +1863,19 @@ class AnalysisRequest(BaseFolder): else: return self.Schema()['Batch'].get(self) + @security.public + def getBatchUID(self): + batch = self.getBatch() + if batch: + return batch.UID() + + @security.public + def setBatch(self, value=None): + original_value = self.Schema().getField('Batch').get(self) + if original_value != value: + self.Schema().getField('Batch').set(self, value) + self._reindexAnalyses(['getBatchUID'], False) + def getDefaultMemberDiscount(self): """ compute default member discount if it applies """ if hasattr(self, 'getMemberDiscountApplies'): @@ -2985,8 +2992,10 @@ class AnalysisRequest(BaseFolder): def setPriority(self, value): if not value: value = self.Schema().getField('Priority').getDefault(self) - self.Schema().getField('Priority').set(self, value) - self._reindexAnalyses(['getPrioritySortkey'], True) + original_value = self.Schema().getField('Priority').get(self) + if original_value != value: + self.Schema().getField('Priority').set(self, value) + self._reindexAnalyses(['getPrioritySortkey'], True) @security.private def _reindexAnalyses(self, idxs=None, update_metadata=False): diff --git a/bika/lims/subscribers/objectmodified.py b/bika/lims/subscribers/objectmodified.py index <HASH>..<HASH> 100644 --- a/bika/lims/subscribers/objectmodified.py +++ b/bika/lims/subscribers/objectmodified.py @@ -66,15 +66,3 @@ def ObjectModifiedEventHandler(obj, event): brains = cat(portal_type=i[0], getCategoryUID=obj.UID()) for brain in brains: brain.getObject().reindexObject(idxs=['getCategoryTitle']) - # TODO: This is a workaround in order to reindex the getBatchUID index - # of analyses when the analysis request has been modified. When the - # ReferenceField 'batch' is modified (and because it is reference field) - # only the archetype 'Reference' object is flagged as modified, not - # the whole AnalysisRequest. We need to migrate that reference field - # to the new ones. - # For now, we will take advantage of that reference object and we will - # reindex only the getbatchUID. - elif IAnalysisRequest.providedBy(obj.aq_parent.aq_inner): - analyses = obj.getAnalyses() - for analysis in analyses: - analysis.getObject().reindexObject(idxs=['getBatchUID'])
Performance. Removal of reindex of BatchUID on ObjectModifiedEvent
senaite_senaite.core
train
c8fa5bd43f4d94f27fb16a20de84fbf6f3b0a70f
diff --git a/underfs/hdfs/src/main/java/alluxio/underfs/hdfs/HdfsUnderFileSystem.java b/underfs/hdfs/src/main/java/alluxio/underfs/hdfs/HdfsUnderFileSystem.java index <HASH>..<HASH> 100644 --- a/underfs/hdfs/src/main/java/alluxio/underfs/hdfs/HdfsUnderFileSystem.java +++ b/underfs/hdfs/src/main/java/alluxio/underfs/hdfs/HdfsUnderFileSystem.java @@ -438,6 +438,19 @@ public class HdfsUnderFileSystem extends UnderFileSystem { } @Override + public void setOwner(String path, String user, String group) throws IOException { + try { + FileStatus fileStatus = mFileSystem.getFileStatus(new Path(path)); + LOG.info("Changing file '{}' user from: {} to {}, group from: {} to {}", fileStatus.getPath(), + fileStatus.getOwner(), user, fileStatus.getGroup(), group); + mFileSystem.setOwner(fileStatus.getPath(), user, group); + } catch (IOException e) { + LOG.error("Fail to set owner for {} with user: {}, group: {}", path, user, group, e); + throw e; + } + } + + @Override public void setPermission(String path, String posixPerm) throws IOException { try { FileStatus fileStatus = mFileSystem.getFileStatus(new Path(path));
Implement setOwner for HDFS.
Alluxio_alluxio
train
9a1cf7ab9ee1662c4d1a69c5579ce1935da24a99
diff --git a/src/python/pants/backend/codegen/tasks/scrooge_gen.py b/src/python/pants/backend/codegen/tasks/scrooge_gen.py index <HASH>..<HASH> 100644 --- a/src/python/pants/backend/codegen/tasks/scrooge_gen.py +++ b/src/python/pants/backend/codegen/tasks/scrooge_gen.py @@ -259,8 +259,9 @@ class ScroogeGen(NailgunTask, JvmToolTaskMixin): except AddressLookupError as e: raise self.DepLookupError("{message}\n referenced from [{section}] key: " \ "gen->deps->{category} in pants.ini".format( - message=e, section='thrift-gen', - key="{category}" + message=e, + section=_CONFIG_SECTION, + category=category )) return self.GenInfo(gen, deps)
Fix error message in scrooge_gen. I hit this error within an error when I tried to run the gen goal. Testing Done: Built a pex, triggered the error. Reviewed at <URL>
pantsbuild_pants
train
50dcc09f17d1e641360f4db519a4b2656f3f4ae2
diff --git a/openxc/formats/binary.py b/openxc/formats/binary.py index <HASH>..<HASH> 100644 --- a/openxc/formats/binary.py +++ b/openxc/formats/binary.py @@ -3,6 +3,7 @@ from __future__ import absolute_import import binascii import numbers +import logging import google.protobuf.message from google.protobuf.internal.decoder import _DecodeVarint @@ -11,6 +12,8 @@ from google.protobuf.internal import encoder from openxc.formats.base import VehicleMessageStreamer from openxc import openxc_pb2 +LOG = logging.getLogger(__name__) + class UnrecognizedBinaryCommandError(Exception): pass class ProtobufStreamer(VehicleMessageStreamer):
Adding missing definition of logging object.
openxc_openxc-python
train
21259185e604a10a09982e22808d3c0cc1240e33
diff --git a/modules/org.opencms.workplace.explorer/resources/system/workplace/resources/commons/explorer.js b/modules/org.opencms.workplace.explorer/resources/system/workplace/resources/commons/explorer.js index <HASH>..<HASH> 100644 --- a/modules/org.opencms.workplace.explorer/resources/system/workplace/resources/commons/explorer.js +++ b/modules/org.opencms.workplace.explorer/resources/system/workplace/resources/commons/explorer.js @@ -1,7 +1,7 @@ /* * File : $Source: /alkacon/cvs/opencms/modules/org.opencms.workplace.explorer/resources/system/workplace/resources/commons/explorer.js,v $ - * Date : $Date: 2007/08/29 12:50:29 $ - * Version: $Revision: 1.20 $ + * Date : $Date: 2007/09/06 09:40:20 $ + * Version: $Revision: 1.21 $ * * This library is part of OpenCms - * the Open Source Content Management System @@ -328,6 +328,8 @@ function getContextMenu() { var resourceList = ""; if (selectedResources.length == 1) { resourceList = getResourceAbsolutePath(active_mouse_id); + // set resource in hidden form field value + win.files.forms["formmulti"].elements["resource"].value = resourceList; } else if (selectedResources.length > 1) { // concatenate all selected resources var isFirst = true; @@ -339,7 +341,7 @@ function getContextMenu() { isFirst = false; } // set resource list in hidden form field value - win.files.forms["formmulti"].elements["resourcelist"].value = resourceList; + win.files.forms["formmulti"].elements["resourcelist"].value = resourceList; } // ajax call makeRequest(vr.servpath + '/system/workplace/views/explorer/contextmenu.jsp', 'resourcelist=' + resourceList + "&acttarget=" + top.active_target, 'showContextMenu'); @@ -388,6 +390,17 @@ function submitMultiAction(dialog) { var doc = win.files; doc.forms["formmulti"].action = dialog; + win.files.forms["formmulti"].elements["resource"].value = ""; + doc.forms["formmulti"].submit(); +} + +// submits a selected single action +function submitSingleAction(dialog, targetFrame) { + + var doc = win.files; + doc.forms["formmulti"].action = dialog; + doc.forms["formmulti"].target = targetFrame; + win.files.forms["formmulti"].elements["resourcelist"].value = ""; doc.forms["formmulti"].submit(); } @@ -836,6 +849,7 @@ function printList(wo) { // create multi context menu form wo.writeln("<form name=\"formmulti\" action=\"\" method=\"post\">"); wo.writeln("<input type=\"hidden\" name=\"resourcelist\" value=\"\">"); + wo.writeln("<input type=\"hidden\" name=\"resource\" value=\"\">"); wo.writeln("</form>"); // create div for context menus diff --git a/src/org/opencms/workplace/explorer/CmsExplorerContextMenuBuilder.java b/src/org/opencms/workplace/explorer/CmsExplorerContextMenuBuilder.java index <HASH>..<HASH> 100644 --- a/src/org/opencms/workplace/explorer/CmsExplorerContextMenuBuilder.java +++ b/src/org/opencms/workplace/explorer/CmsExplorerContextMenuBuilder.java @@ -1,7 +1,7 @@ /* * File : $Source: /alkacon/cvs/opencms/src/org/opencms/workplace/explorer/CmsExplorerContextMenuBuilder.java,v $ - * Date : $Date: 2007/09/06 08:19:55 $ - * Version: $Revision: 1.4 $ + * Date : $Date: 2007/09/06 09:40:20 $ + * Version: $Revision: 1.5 $ * * This library is part of OpenCms - * the Open Source Content Management System @@ -60,7 +60,7 @@ import javax.servlet.jsp.PageContext; * @author Michael Moossen * @author Andreas Zahner * - * @version $Revision: 1.4 $ + * @version $Revision: 1.5 $ * * @since 6.5.6 */ @@ -417,11 +417,21 @@ public class CmsExplorerContextMenuBuilder extends CmsWorkplace { link.append(itemTarget); link.append("\""); } + // mouse 
up event is necessary for Firefox browser + link.append(" onmouseup=\"top.submitSingleAction('"); + link.append(itemLink); + link.append("', '"); + link.append(itemTarget); + link.append("');\""); } else { // create link for multi resource context menu link.append("href=\"javascript:top.submitMultiAction('"); link.append(itemLink); link.append("');\""); + // mouse up event is necessary for Firefox browser + link.append(" onmouseup=\"top.submitMultiAction('"); + link.append(itemLink); + link.append("');\""); } menu.append("\n<li><a "); menu.append(link);
fixed issue in context menu where mouse up did not work when selecting an option
alkacon_opencms-core
train
31afd2f44a9830e5de459e09b9bdcabb96e4e182
diff --git a/src/actions/__tests__/entity-action-builder-test.js b/src/actions/__tests__/entity-action-builder-test.js index <HASH>..<HASH> 100644 --- a/src/actions/__tests__/entity-action-builder-test.js +++ b/src/actions/__tests__/entity-action-builder-test.js @@ -10,6 +10,7 @@ describe('The actionBuilder', () => { expect(()=>{ actionBuilder({name: 1})}).to.throw(NAME_MESSAGE); expect(()=>{ actionBuilder({name: {}})}).to.throw(NAME_MESSAGE); expect(()=>{ actionBuilder({name: ()=>{}})}).to.throw(NAME_MESSAGE); + expect(()=>{ actionBuilder({name: ''})}).to.throw(NAME_MESSAGE); expect(()=>{ actionBuilder({name: 'test'})}).to.not.throw(NAME_MESSAGE); }); it('should throw an error when called without a string type parameter : load,save,delete', () => { diff --git a/src/actions/entity-actions-builder.js b/src/actions/entity-actions-builder.js index <HASH>..<HASH> 100644 --- a/src/actions/entity-actions-builder.js +++ b/src/actions/entity-actions-builder.js @@ -2,6 +2,7 @@ import {capitalize, toUpper} from 'lodash/string'; import {isString, isUndefined} from 'lodash/lang'; const ACTION_BUILDER = 'ACTION_BUILDER'; const ALLOW_ACTION_TYPES = ['load', 'save', 'delete']; +const STRING_EMPTY = ''; // A simple function to create action creators // Return a function which returns a type and a payload // example: _actionCreatorBuilder('REQUEST_LOAD_USER') will return `payload => {type: 'REQUEST_LOAD_USER', payload}` @@ -32,7 +33,7 @@ const _asyncActionCreator = ({service: promiseSvc, creators:{receive: {value: re // Validate the action builder parameters const _validateActionBuilderParams = ({name, type, service})=>{ - if(!isString(name)){ + if(!isString(name) || STRING_EMPTY === name){ throw new Error(`${ACTION_BUILDER}: the name parameter should be a string.`); } if(!isString(type) || ALLOW_ACTION_TYPES.indexOf(type) === -1){
[action builder] Add the empty-string test for the name.
get-focus_deprecated-focus-graph
train
b42dd7880a5be56b761c1f6886244420754fb1af
diff --git a/parsl/monitoring/monitoring.py b/parsl/monitoring/monitoring.py index <HASH>..<HASH> 100644 --- a/parsl/monitoring/monitoring.py +++ b/parsl/monitoring/monitoring.py @@ -3,6 +3,7 @@ import socket import pickle import logging import time +import typeguard import datetime import zmq @@ -12,7 +13,7 @@ from parsl.utils import RepresentationMixin from parsl.monitoring.message_type import MessageType -from typing import Optional +from typing import Optional, Tuple try: from parsl.monitoring.db_manager import dbm_starter @@ -119,22 +120,23 @@ class UDPRadio(object): return +@typeguard.typechecked class MonitoringHub(RepresentationMixin): def __init__(self, - hub_address, - hub_port=None, - hub_port_range=(55050, 56000), - - client_address="127.0.0.1", - client_port_range=(55000, 56000), - - workflow_name=None, - workflow_version=None, - logging_endpoint='sqlite:///monitoring.db', - logdir=None, - monitoring_debug=False, - resource_monitoring_enabled=True, - resource_monitoring_interval=30): # in seconds + hub_address: str, + hub_port: Optional[int] = None, + hub_port_range: Tuple[int, int] = (55050, 56000), + + client_address: str = "127.0.0.1", + client_port_range: Tuple[int, int] = (55000, 56000), + + workflow_name: Optional[str] = None, + workflow_version: Optional[str] = None, + logging_endpoint: str = 'sqlite:///monitoring.db', + logdir: Optional[str] = None, + monitoring_debug: bool = False, + resource_monitoring_enabled: bool = True, + resource_monitoring_interval: float = 30): # in seconds """ Parameters ---------- @@ -164,7 +166,7 @@ class MonitoringHub(RepresentationMixin): Enable monitoring debug logging. Default: False resource_monitoring_enabled : boolean Set this field to True to enable logging the info of resource usage of each task. Default: True - resource_monitoring_interval : int + resource_monitoring_interval : float The time interval, in seconds, at which the monitoring records the resource usage of each task. Default: 30 seconds """ self.logger = None
Add typeguard to MonitoringHub initializer (#<I>) * Add typeguard to MonitoringHub initializer This is part of issue #<I> * Change resource monitoring interval to float after feedback from zhuozhao * change docstring to match new type
Parsl_parsl
train
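The `@typeguard.typechecked` decorator applied above turns the new annotations into runtime checks, so passing e.g. a string port fails fast instead of surfacing later. A minimal demonstration on a similar signature, assuming the third-party typeguard package (2.x semantics, where the decorator also works on classes):

```python
from typing import Optional, Tuple

import typeguard


@typeguard.typechecked
class MonitoringHubSketch:
    def __init__(self,
                 hub_address: str,
                 hub_port: Optional[int] = None,
                 hub_port_range: Tuple[int, int] = (55050, 56000),
                 resource_monitoring_interval: float = 30):
        self.hub_address = hub_address


MonitoringHubSketch("127.0.0.1", hub_port=55055)    # fine
MonitoringHubSketch("127.0.0.1", hub_port="55055")  # raises TypeError
```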
643f78a8f906161860652632df7261616259debf
diff --git a/cid.go b/cid.go index <HASH>..<HASH> 100644 --- a/cid.go +++ b/cid.go @@ -161,11 +161,11 @@ type Cid struct{ str string } // also acceptable. var Nil = Cid{} -// Nil returns true if a Cid is uninitialized or the Nil value. -// Calling any other methods on an uninitialized Cid will result in +// Defined returns true if a Cid is defined +// Calling any other methods on an undefined Cid will result in // undefined behavior. -func (c Cid) IsNil() bool { - return c.str == "" +func (c Cid) Defined() bool { + return c.str != "" } // Parse is a short-hand function to perform Decode, Cast etc... on
Change 'IsNil' method to 'Defined'.
ipfs_go-cid
train
27f10c17c26b4d55e05db7b47c634ac32d3e7f72
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -38,7 +38,7 @@ setup( '': 'src', }, install_requires=[ - 'requests==2.19.1', + 'requests==2.20.1', 'defusedxml==0.5.0', 'lxml==4.2.3', 'python-dateutil==2.7.3'
Forgot to update requests version in setup.py
onelogin_onelogin-python-sdk
train
158b1863f5b544aaea5946273b97980f5ee5493f
diff --git a/track.go b/track.go index <HASH>..<HASH> 100644 --- a/track.go +++ b/track.go @@ -62,6 +62,8 @@ type PlaylistTrack struct { // The Spotify user who added the track to the playlist. // Warning: vary old playlists may not populate this value. AddedBy User `json:"added_by"` + // Whether this track is a local file or not. + IsLocal bool `json:"is_local"` // Information about the track. Track FullTrack `json:"track"` }
Add IsLocal boolean field on PlaylistTrack object (#<I>)
zmb3_spotify
train
7923015646596b3ab6eda557985cb2829010fc05
diff --git a/app/models/renalware/hd/sessions/auditable_session.rb b/app/models/renalware/hd/sessions/auditable_session.rb index <HASH>..<HASH> 100644 --- a/app/models/renalware/hd/sessions/auditable_session.rb +++ b/app/models/renalware/hd/sessions/auditable_session.rb @@ -13,10 +13,12 @@ module Renalware module HD module Sessions class AuditableSession < SimpleDelegator + delegate :observations_before, :observations_after, to: :document + def blood_pressure_measurements [ - document.observations_before.blood_pressure, - document.observations_after.blood_pressure + observations_before.blood_pressure, + observations_after.blood_pressure ] end @@ -52,11 +54,16 @@ module Renalware end def weight_loss - document.observations_before.weight.to_f - document.observations_after.weight.to_f + pre_weight = observations_before.weight.to_f + post_weight = observations_after.weight.to_f + + return if pre_weight.zero? || post_weight.zero? + + observations_before.weight.to_f - observations_after.weight.to_f end def weight_loss_as_percentage_of_body_weight - return unless measured_dry_weight > 0 + return if measured_dry_weight.zero? || weight_loss.to_f.zero? (weight_loss / measured_dry_weight) * 100.0 end diff --git a/app/models/renalware/hd/sessions/auditable_session_collection.rb b/app/models/renalware/hd/sessions/auditable_session_collection.rb index <HASH>..<HASH> 100644 --- a/app/models/renalware/hd/sessions/auditable_session_collection.rb +++ b/app/models/renalware/hd/sessions/auditable_session_collection.rb @@ -107,7 +107,10 @@ module Renalware def mean_weight_loss selector = ->(session) { session.weight_loss } - MeanValueStrategy.new(sessions: closed_sessions, selector: selector).call + MeanValueStrategy.new( + sessions: closed_sessions.reject{ |sess| sess.weight_loss.nil? }, + selector: selector + ).call end def mean_weight_loss_as_percentage_of_body_weight diff --git a/spec/models/renalware/hd/sessions/auditable_session_collection_spec.rb b/spec/models/renalware/hd/sessions/auditable_session_collection_spec.rb index <HASH>..<HASH> 100644 --- a/spec/models/renalware/hd/sessions/auditable_session_collection_spec.rb +++ b/spec/models/renalware/hd/sessions/auditable_session_collection_spec.rb @@ -164,6 +164,27 @@ module Renalware # effective weight loss = [3, 2, 1] so the mean = 1.99999 rounded to 2 places expect(audit.mean_weight_loss).to eq(2.0) end + + it "excludes sessions where either pre or post is missing a value" do + # Here the last 2 session weights shold be ignored. + # In session 2 the pre weight has been declared as unmeasureable. + # In session 3 the post weight has been declared as unmeasureable. + pre_obs_weights = [100, 1000, nil] + post_obs_weights = [99, nil, 10] + + @sessions = (0..2).map do |idx| + Session::Closed.new.tap do |session| + session.document.observations_before.weight = pre_obs_weights[idx] + session.document.observations_after.weight = post_obs_weights[idx] + end + end + + # Only 1 session has a weight_loss - the others will have returned + # nil for weight_loss as either pre or post was nil, and so they + # are not included in the mean calculation. 
+ # So mean calc = just one valie (100-99 = 1) + expect(audit.mean_weight_loss).to eq(1.0) + end end describe "#mean_machine_ktv" do @@ -331,16 +352,22 @@ module Renalware session_with_dry_weight2.document.observations_before.weight = 200.0 session_with_dry_weight2.document.observations_after.weight = 190.0 + session_with_dry_weight3 = Session::Closed.new(dry_weight: dry_weight1) + session_with_dry_weight3.document.observations_before.weight = nil + session_with_dry_weight3.document.observations_after.weight = 99.0 + @sessions = [ session_with_no_dry_weight, session_with_dry_weight1, - session_with_dry_weight2 + session_with_dry_weight2, + session_with_dry_weight3 ] # dryweight | weight loss | weight loss as % of body weight # nil | 1 | 0 # 100 | 1 | 1 # 200 | 10 | 5 + # 100 | nil (unmeasured) | nil so excluded from mean # =========================== # % Mean = (0 + 1 + 5) / 2 = 3%
Correct mean weight loss calculation. Exclude weight measurements where the user has selected Taken: No
airslie_renalware-core
train
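The fix makes weight_loss return nil whenever either weighing is missing (declared unmeasured), and drops those sessions before averaging, so they no longer drag the mean toward zero. The same exclude-then-average logic as a Python sketch, using the toy values from the test:

```python
pre_weights = [100.0, 1000.0, None]
post_weights = [99.0, None, 10.0]


def weight_loss(pre, post):
    # Treat a missing (or zero, i.e. "not measured") value as no data.
    if not pre or not post:
        return None
    return pre - post


losses = [weight_loss(a, b) for a, b in zip(pre_weights, post_weights)]
valid = [x for x in losses if x is not None]
mean = sum(valid) / len(valid) if valid else None
print(losses, mean)  # [1.0, None, None] 1.0
```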
0f5e5df71382068d9464e2f8586e1f6b59b2e270
diff --git a/src/frontend/org/voltdb/exportclient/ExportToFileClient.java b/src/frontend/org/voltdb/exportclient/ExportToFileClient.java index <HASH>..<HASH> 100644 --- a/src/frontend/org/voltdb/exportclient/ExportToFileClient.java +++ b/src/frontend/org/voltdb/exportclient/ExportToFileClient.java @@ -75,29 +75,38 @@ public class ExportToFileClient extends ExportClientBase super(source); m_escaper = escaper; // Create the output file for this table - String filename = - nonce + "-" + source.tableName() + "." + escaper.getExtension(); - m_logger.info("Opening filename " + filename); - m_outFile = new File(outdir.getPath() + File.separator + filename); - boolean fail = false; - try { - if (!m_outFile.createNewFile()) { + String filename = null; + if (outdir != null) + { + filename = + nonce + "-" + source.tableName() + "." + escaper.getExtension(); + m_logger.info("Opening filename " + filename); + m_outFile = new File(outdir.getPath() + File.separator + filename); + boolean fail = false; + try { + if (!m_outFile.createNewFile()) { + m_logger.error("Error: Failed to create output file " + + m_outFile.getPath() + " for table " + + source.tableName() + + ": File already exists"); + fail = true; + } + } catch (IOException e) { + m_logger.error(e.getMessage()); m_logger.error("Error: Failed to create output file " + m_outFile.getPath() + " for table " + - source.tableName() + - ": File already exists"); + source.tableName()); fail = true; } - } catch (IOException e) { - m_logger.error(e.getMessage()); - m_logger.error("Error: Failed to create output file " + - m_outFile.getPath() + " for table " + - source.tableName()); - fail = true; + if (fail) + { + throw new RuntimeException(); + } } - if (fail) + else { - m_logger.error("Ha, writing to /dev/null"); + m_logger.error("--discard provided, data will be dumped to /dev/null"); + filename = "/dev/null"; m_outFile = new File("/dev/null"); } try @@ -224,8 +233,12 @@ public class ExportToFileClient extends ExportClientBase System.out.println("java -cp <classpath> -Djava.library.path=<library path> org.voltdb.exportclient.ExportToFileClient --help"); System.out.println("java -cp <classpath> -Djava.library.path=<library path> org.voltdb.exportclient.ExportToFileClient " + "--servers server1,server2,... --type CSV|TSV " + - "--outdir dir --nonce any_string --user export_username " + - "--password export_password"); + "--outdir dir --nonce any_string " + + "--user export_username --password export_password"); + System.out.println("java -cp <classpath> -Djava.library.path=<library path> org.voltdb.exportclient.ExportToFileClient " + + "--servers server1,server2,... 
--type CSV|TSV " + + "--discard" + + "--user export_username --password export_password"); System.exit(code); } @@ -237,6 +250,7 @@ public class ExportToFileClient extends ExportClientBase String nonce = null; Escaper escaper = null; File outdir = null; + boolean discard = false; for (int ii = 0; ii < args.length; ii++) { @@ -245,6 +259,10 @@ public class ExportToFileClient extends ExportClientBase { printHelpAndQuit(0); } + else if (arg.equals("--discard")) + { + discard = true; + } else if (arg.equals("--servers")) { if (args.length < ii + 1) { @@ -347,16 +365,23 @@ public class ExportToFileClient extends ExportClientBase System.err.println("ExportToFile: must provide an output type"); printHelpAndQuit(-1); } - if (nonce == null) + if (!discard) { - System.err.println("ExportToFile: must provide a filename nonce"); - printHelpAndQuit(-1); + if (nonce == null) + { + System.err.println("ExportToFile: must provide a filename nonce"); + printHelpAndQuit(-1); + } + if (outdir == null) + { + outdir = new File("."); + } } - if (outdir == null) + else { - outdir = new File("."); + outdir = null; + nonce = null; } - ExportToFileClient client = new ExportToFileClient(escaper, nonce, outdir); client.setVoltServers(volt_servers);
Replace the 'write to /dev/null when file exists' hack with: - Exit with an error if any output file already exists - Add a --discard switch to write intentionally to /dev/null
VoltDB_voltdb
train
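The patch adds a `--discard` mode that bypasses the nonce/outdir requirements, and separately refuses to clobber an existing output file. A sketch of the same argument contract using argparse; the flag names are copied from the patch, everything else is scaffolding:

```python
import argparse
import os
import sys

parser = argparse.ArgumentParser()
parser.add_argument("--discard", action="store_true",
                    help="dump export data to /dev/null")
parser.add_argument("--outdir")
parser.add_argument("--nonce")
args = parser.parse_args()

if args.discard:
    out_path = os.devnull  # portable stand-in for /dev/null
else:
    if args.nonce is None:
        sys.exit("must provide a filename nonce")
    out_path = os.path.join(args.outdir or ".", args.nonce + "-table.csv")
    if os.path.exists(out_path):
        # Exit with an error instead of silently redirecting to /dev/null.
        sys.exit(f"output file {out_path} already exists")

print("writing to", out_path)
```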