hash
stringlengths 40
40
| diff
stringlengths 131
114k
| message
stringlengths 7
980
| project
stringlengths 5
67
| split
stringclasses 1
value |
|---|---|---|---|---|
8c7492c4beb77e368c5c26a8b8033372a40e6382
|
diff --git a/modules/citrus-http/src/main/java/com/consol/citrus/http/message/HttpMessageContentBuilder.java b/modules/citrus-http/src/main/java/com/consol/citrus/http/message/HttpMessageContentBuilder.java
index <HASH>..<HASH> 100644
--- a/modules/citrus-http/src/main/java/com/consol/citrus/http/message/HttpMessageContentBuilder.java
+++ b/modules/citrus-http/src/main/java/com/consol/citrus/http/message/HttpMessageContentBuilder.java
@@ -56,19 +56,46 @@ public class HttpMessageContentBuilder extends AbstractMessageContentBuilder {
HttpMessage messageToBuild = new HttpMessage(message);
delegate.setMessageHeaders(messageToBuild.getHeaders());
+ Message delegateMessage = delegate.buildMessageContent(context, messageType, direction);
messageToBuild.setName(delegate.getMessageName());
+ messageToBuild.setPayload(delegateMessage.getPayload());
+ copyHeaders(delegateMessage, messageToBuild);
+ replaceDynamicValues(messageToBuild.getCookies(), context);
- Message delegateMessage = delegate.buildMessageContent(context, messageType, direction);
+ return messageToBuild;
+ }
- for (Map.Entry<String, Object> headerEntry : delegateMessage.getHeaders().entrySet()) {
- if (!headerEntry.getKey().equals(MessageHeaders.ID) &&
- !headerEntry.getKey().equals(MessageHeaders.TIMESTAMP)) {
- messageToBuild.setHeader(headerEntry.getKey(), headerEntry.getValue());
+ /**
+ * Copies all headers except id and timestamp
+ * @param from The message to copy the headers from
+ * @param to The message to set the headers to
+ */
+ private void copyHeaders(Message from, Message to) {
+ for (Map.Entry<String, Object> headerEntry : from.getHeaders().entrySet()) {
+ if (notIdOrTimestamp(headerEntry.getKey())) {
+ to.setHeader(headerEntry.getKey(), headerEntry.getValue());
}
}
- messageToBuild.setPayload(delegateMessage.getPayload());
- for (Cookie cookie: messageToBuild.getCookies()) {
+ }
+
+ /**
+ * Checks whether the given message header is not an ID or a TIMESTAMP
+ * @param messageHeader The message header to be checked
+ * @return whether the given message header is not an ID or a TIMESTAMP
+ */
+ private boolean notIdOrTimestamp(String messageHeader) {
+ return !(MessageHeaders.ID.equals(messageHeader) ||
+ MessageHeaders.TIMESTAMP.equals(messageHeader));
+ }
+
+ /**
+ * Replaces the dynamic content in the given list of cookies
+ * @param cookies The cookies in which the variables will be replaced
+ * @param context The context to replace the variables with
+ */
+ private void replaceDynamicValues(List<Cookie> cookies, TestContext context) {
+ for (Cookie cookie: cookies) {
if (cookie.getValue() != null) {
cookie.setValue(context.replaceDynamicContentInString(cookie.getValue()));
}
@@ -85,8 +112,6 @@ public class HttpMessageContentBuilder extends AbstractMessageContentBuilder {
cookie.setDomain(context.replaceDynamicContentInString(cookie.getDomain()));
}
}
-
- return messageToBuild;
}
@Override
|
(#<I>) Extracted methods
|
citrusframework_citrus
|
train
|
0e0f75992853b2d2f19b4ebbc4ad612fcfef6fc5
|
diff --git a/jishaku/features/python.py b/jishaku/features/python.py
index <HASH>..<HASH> 100644
--- a/jishaku/features/python.py
+++ b/jishaku/features/python.py
@@ -156,17 +156,18 @@ class PythonFeature(Feature):
arg_dict["_"] = self.last_result
convertables: typing.Dict[str, str] = {}
- for index, user in enumerate(ctx.message.mentions):
- arg_dict[f"__user_mention_{index}"] = user
- convertables[user.mention] = f"__user_mention_{index}"
-
- for index, channel in enumerate(ctx.message.channel_mentions):
- arg_dict[f"__channel_mention_{index}"] = channel
- convertables[channel.mention] = f"__channel_mention_{index}"
-
- for index, role in enumerate(ctx.message.role_mentions):
- arg_dict[f"__role_mention_{index}"] = role
- convertables[role.mention] = f"__role_mention_{index}"
+ if getattr(ctx, 'interaction', None) is None:
+ for index, user in enumerate(ctx.message.mentions):
+ arg_dict[f"__user_mention_{index}"] = user
+ convertables[user.mention] = f"__user_mention_{index}"
+
+ for index, channel in enumerate(ctx.message.channel_mentions):
+ arg_dict[f"__channel_mention_{index}"] = channel
+ convertables[channel.mention] = f"__channel_mention_{index}"
+
+ for index, role in enumerate(ctx.message.role_mentions):
+ arg_dict[f"__role_mention_{index}"] = role
+ convertables[role.mention] = f"__role_mention_{index}"
return arg_dict, convertables
|
Don't try to handle mentions for hybrid contexts
|
Gorialis_jishaku
|
train
|
26f020d380cd3ed7476bf7d43d2309f4d12f932c
|
diff --git a/irc/client.py b/irc/client.py
index <HASH>..<HASH> 100644
--- a/irc/client.py
+++ b/irc/client.py
@@ -684,19 +684,17 @@ class ServerConnection(Connection):
for fn in self.handlers[event.type]:
fn(self, event)
- def _parse_tag(self, tag_item):
- if '=' in tag_item:
- tag_key, tag_value = tag_item.split('=', 1)
- tag_value = tag_value.replace('\\:', ';')
- tag_value = tag_value.replace('\\s', ' ')
- tag_value = tag_value.replace('\\n', '\n')
- tag_value = tag_value.replace('\\r', '\r')
- tag_value = tag_value.replace('\\\\', '\\')
- else:
- tag_key, tag_value = tag_item, None
+ def _parse_tag(self, item):
+ key, sep, value = item.partition('=')
+ value = value.replace('\\:', ';')
+ value = value.replace('\\s', ' ')
+ value = value.replace('\\n', '\n')
+ value = value.replace('\\r', '\r')
+ value = value.replace('\\\\', '\\')
+ value = value or None
return {
- 'key': tag_key,
- 'value': tag_value,
+ 'key': key,
+ 'value': value,
}
def is_connected(self):
|
Use partition to unify the implementation.
|
jaraco_irc
|
train
|
7a89666f8bcdec161cce5f0b7834e719528f9290
|
diff --git a/lib/incarnatorHandler.js b/lib/incarnatorHandler.js
index <HASH>..<HASH> 100644
--- a/lib/incarnatorHandler.js
+++ b/lib/incarnatorHandler.js
@@ -30,7 +30,7 @@ var IncarnatorHandler = function (opts) {
cb(new HandlerError(errorCodes.ALREADY_INITIALIZED));
return;
}
- newIncarnator = new Incarnator({
+ var newIncarnator = new Incarnator({
id: incarnatorId,
couchUrl: couchUrl,
persister: persister,
|
fix: new incarnator not defined in correct scope
|
afters_couch-incarnate
|
train
|
fdf3a880f564cfab15e6331577617b53ab44c695
|
diff --git a/commerce-payment-method-mercanet/src/main/java/com/liferay/commerce/payment/method/mercanet/internal/servlet/MercanetServlet.java b/commerce-payment-method-mercanet/src/main/java/com/liferay/commerce/payment/method/mercanet/internal/servlet/MercanetServlet.java
index <HASH>..<HASH> 100644
--- a/commerce-payment-method-mercanet/src/main/java/com/liferay/commerce/payment/method/mercanet/internal/servlet/MercanetServlet.java
+++ b/commerce-payment-method-mercanet/src/main/java/com/liferay/commerce/payment/method/mercanet/internal/servlet/MercanetServlet.java
@@ -101,8 +101,7 @@ public class MercanetServlet extends HttpServlet {
requestDispatcher.forward(httpServletRequest, httpServletResponse);
}
catch (Exception e) {
- _log.error(e.getMessage());
- e.printStackTrace();
+ _log.error(e, e);
}
}
@@ -208,8 +207,7 @@ public class MercanetServlet extends HttpServlet {
}
}
catch (Exception e) {
- _log.error(e.getMessage());
- e.printStackTrace();
+ _log.error(e, e);
}
}
diff --git a/commerce-payment-service/src/main/java/com/liferay/commerce/payment/servlet/CommercePaymentServlet.java b/commerce-payment-service/src/main/java/com/liferay/commerce/payment/servlet/CommercePaymentServlet.java
index <HASH>..<HASH> 100644
--- a/commerce-payment-service/src/main/java/com/liferay/commerce/payment/servlet/CommercePaymentServlet.java
+++ b/commerce-payment-service/src/main/java/com/liferay/commerce/payment/servlet/CommercePaymentServlet.java
@@ -164,8 +164,7 @@ public class CommercePaymentServlet extends HttpServlet {
}
}
catch (Exception e) {
- _log.error(e.getMessage());
- e.printStackTrace();
+ _log.error(e, e);
}
}
|
COMMERCE-<I> COMMERCE-<I> Use logger rather than printStackTrace
|
liferay_com-liferay-commerce
|
train
|
acdcb95a7423b3369d39c5c4cf2535951cdafa5f
|
diff --git a/lib/librarian.rb b/lib/librarian.rb
index <HASH>..<HASH> 100644
--- a/lib/librarian.rb
+++ b/lib/librarian.rb
@@ -121,10 +121,6 @@ module Librarian
end
previous_resolution = lockfile.load(lockfile_path.read)
partial_manifests = ManifestSet.deep_strip(previous_resolution.manifests, dependency_names)
- debug { "Precaching Sources:" }
- previous_resolution.sources.each do |source|
- debug { " #{source}" }
- end
spec = specfile.read(previous_resolution.sources)
spec_changes = spec_change_set(spec, previous_resolution)
raise Error, "Cannot update when the specfile has been changed." unless spec_changes.same?
@@ -150,10 +146,6 @@ module Librarian
manifests = []
else
lock = lockfile.read
- debug { "Precaching Sources:" }
- lock.sources.each do |source|
- debug { " #{source}" }
- end
spec = specfile.read(lock.sources)
changes = spec_change_set(spec, lock)
if changes.same?
diff --git a/lib/librarian/chef/dsl.rb b/lib/librarian/chef/dsl.rb
index <HASH>..<HASH> 100644
--- a/lib/librarian/chef/dsl.rb
+++ b/lib/librarian/chef/dsl.rb
@@ -1,9 +1,13 @@
require 'librarian/dsl'
+require 'librarian/chef/particularity'
require 'librarian/chef/source'
module Librarian
module Chef
class Dsl < Librarian::Dsl
+
+ include Particularity
+
dependency :cookbook
source :site => Source::Site
diff --git a/lib/librarian/dsl.rb b/lib/librarian/dsl.rb
index <HASH>..<HASH> 100644
--- a/lib/librarian/dsl.rb
+++ b/lib/librarian/dsl.rb
@@ -1,10 +1,15 @@
require 'librarian/dependency'
require 'librarian/dsl/receiver'
require 'librarian/dsl/target'
+require 'librarian/helpers/debug'
+require 'librarian/particularity'
module Librarian
class Dsl
+ include Particularity
+ include Helpers::Debug
+
class Error < Exception
end
@@ -63,14 +68,28 @@ module Librarian
def run(specfile = nil, sources = [])
Target.new(self).tap do |target|
target.precache_sources(sources)
+ debug_named_source_cache("Pre-Cached Sources", target)
+
receiver = Receiver.new(target)
if block_given?
receiver.run(&Proc.new)
else
receiver.run(specfile)
end
+
+ debug_named_source_cache("Post-Cached Sources", target)
end.to_spec
end
+ def debug_named_source_cache(name, target)
+ source_cache = target.source_cache
+ debug { "#{name}:" }
+ source_cache.each do |key, value|
+ type = key[0]
+ attributes = key[1...key.size]
+ debug { " #{key.inspect}" }
+ end
+ end
+
end
end
diff --git a/lib/librarian/mock/dsl.rb b/lib/librarian/mock/dsl.rb
index <HASH>..<HASH> 100644
--- a/lib/librarian/mock/dsl.rb
+++ b/lib/librarian/mock/dsl.rb
@@ -1,9 +1,13 @@
require 'librarian/dsl'
+require 'librarian/mock/particularity'
require 'librarian/mock/source'
module Librarian
module Mock
class Dsl < Librarian::Dsl
+
+ include Particularity
+
dependency :dep
source :src => Source::Mock
|
In verbose, print out the pre- and post-cached sources as the dsl target sees them.
|
applicationsonline_librarian
|
train
|
87acfc4e9b361fde6fc6fd86536d4c1176daf8ba
|
diff --git a/safe/impact_functions/impact_function_manager.py b/safe/impact_functions/impact_function_manager.py
index <HASH>..<HASH> 100644
--- a/safe/impact_functions/impact_function_manager.py
+++ b/safe/impact_functions/impact_function_manager.py
@@ -41,17 +41,29 @@ class ImpactFunctionManager(object):
def get(self, class_name):
"""Return an instance of an impact function given its class name.
- :param class_name: the name of IF class
+ .. example::
+
+ if_manager = ImpactFunctionManager()
+ if_class_name = 'FloodBuildingImpactFunction'
+ if = if_manager.get(if_class_name)
+
+ :param class_name: The name of IF class.
:type class_name: str
:return: Impact function instance that matches the argument.
- :rtype: safe.impact_functions.base.ImpactFunction.instance()
+ :rtype: safe.impact_functions.base.ImpactFunction
"""
return self.registry.get(class_name)
def get_class(self, class_name):
"""Return the class of an impact function given its class name.
+ .. example::
+
+ if_manager = ImpactFunctionManager()
+ if_class_name = 'FloodBuildingImpactFunction'
+ if_class = if_manager.get_class(if_class_name)
+
:param class_name: the name of IF class
:type class_name: str
@@ -66,6 +78,12 @@ class ImpactFunctionManager(object):
This is a preferred way to get an instance of IF. IF should have a
unique human readable ID in their metadata.
+ .. example::
+
+ if_manager = ImpactFunctionManager()
+ if_id = 'FloodBuildingImpactFunction'
+ if = if_manager.get_by_id(if_id)
+
:param impact_function_id: The ID of impact function in the metadata.
:type impact_function_id: str
@@ -83,12 +101,58 @@ class ImpactFunctionManager(object):
impact_function_id)
return impact_functions[0].instance()
+ def filter(self, hazard_metadata=None, exposure_metadata=None):
+ """Get available impact functions from hazard and exposure metadata.
+
+ Disabled impact function will not be loaded.
+
+ .. example::
+
+ if_manager = ImpactFunctionManager()
+ hazard_metadata = {
+ 'subcategory': hazard_flood,
+ 'units': unit_wetdry,
+ 'layer_constraints': layer_vector_polygon
+ }
+ exposure_metadata = {
+ 'subcategory': exposure_structure,
+ 'units': unit_building_type_type,
+ 'layer_constraints': layer_vector_polygon
+ }
+ ifs = if_manager.filter(hazard_metadata, exposure_metadata)
+
+ :param hazard_metadata: The metadata of the hazard.
+ :type hazard_metadata: dict
+
+ :param exposure_metadata: The metadata of the exposure.
+ :type exposure_metadata: dict
+ """
+ return self.registry.filter(hazard_metadata, exposure_metadata)
+
def filter_by_keywords(
self, hazard_keywords=None, exposure_keywords=None):
"""Get available impact functions from hazard and exposure keywords.
Disabled impact function will not be loaded.
+ .. example::
+
+ if_manager = ImpactFunctionManager()
+ hazard_keywords = {
+ 'subcategory': 'flood',
+ 'units': 'wetdry',
+ 'layer_type': 'vector',
+ 'data_type': 'polygon'
+ }
+ exposure_keywords = {
+ 'subcategory': 'structure',
+ 'units': 'building_type',
+ 'layer_type': 'vector',
+ 'data_type': 'polygon'
+ }
+ ifs = if_manager.filter_by_keywords(hazard_keywords,
+ exposure_keywords)
+
:param hazard_keywords: The keywords of the hazard.
:type hazard_keywords: dict
@@ -101,6 +165,14 @@ class ImpactFunctionManager(object):
def filter_by_metadata(self, metadata_key, metadata_value):
"""Return IF classes given its metadata key and value.
+ .. example::
+
+ if_manager = ImpactFunctionManager()
+ metadata_key = 'author'
+ metadata_value = 'Akbar Gumbira'
+ ifs = if_manager.filter_by_metadata(metadata_key,
+ metadata_value)
+
:param metadata_key: The key of the metadata e.g 'id', 'name'
:type metadata_key: str
|
Give example on how to deal with IF manager to query IF.
|
inasafe_inasafe
|
train
|
949cde7e55252acab1b7911712eca30d5dec747e
|
diff --git a/lib/genevalidator.rb b/lib/genevalidator.rb
index <HASH>..<HASH> 100644
--- a/lib/genevalidator.rb
+++ b/lib/genevalidator.rb
@@ -393,6 +393,8 @@ module GeneValidator
validations.push OpenReadingFrameValidation.new(@type, prediction, hits, plot_path)
validations.push AlignmentValidation.new(@type, prediction, hits, plot_path, @opt[:raw_sequences], @raw_seq_file_index, @raw_seq_file_load, @opt[:db], @opt[:num_threads])
+ validations = validations.select { |v| @opt[:validations].include? v.cli_name.downcase }
+
# check the class type of the elements in the list
validations.each do |v|
fail ValidationClassError unless v.is_a? ValidationTest
@@ -402,12 +404,11 @@ module GeneValidator
aliases = validations.map(&:cli_name)
fail AliasDuplicationError unless aliases.length == aliases.uniq.length
- desired_validations = validations.select { |v| @opt[:validations].map { |vv| vv.strip.downcase }.include? v.cli_name.downcase }
- desired_validations.each do |v|
+ validations.each do |v|
v.run
fail ReportClassError unless v.validation_report.is_a? ValidationReport
end
- query_output.validations = desired_validations.map(&:validation_report)
+ query_output.validations = validations.map(&:validation_report)
fail NoValidationError if query_output.validations.length == 0
|
refactor the do_validations method
|
wurmlab_genevalidator
|
train
|
46538b17f87d910b92c53379075ce0b5d4c2a13c
|
diff --git a/easyci/commands/test.py b/easyci/commands/test.py
index <HASH>..<HASH> 100644
--- a/easyci/commands/test.py
+++ b/easyci/commands/test.py
@@ -14,7 +14,6 @@ def test(ctx, staged_only, head_only):
git = GitVcs()
known_signatures = get_known_signatures(git)
with git.temp_copy() as copy:
- copy.remove_ignored_files()
if head_only:
copy.clear('HEAD')
elif staged_only:
diff --git a/easyci/vcs/base.py b/easyci/vcs/base.py
index <HASH>..<HASH> 100644
--- a/easyci/vcs/base.py
+++ b/easyci/vcs/base.py
@@ -6,10 +6,9 @@ from __future__ import absolute_import
import os
import os.path
-import shutil
from contextlib import contextmanager
-from subprocess import Popen, PIPE
+from subprocess32 import Popen, PIPE, check_call
from easyci.utils import contextmanagers
@@ -138,17 +137,30 @@ class Vcs(object):
"""
raise NotImplementedError
+ def ignore_patterns_file(self):
+ """The ignore patterns file for this repo type.
+
+ e.g. .gitignore for git
+
+ Returns:
+ str - file name
+ """
+ raise NotImplementedError
+
@contextmanager
def temp_copy(self):
"""Yields a new Vcs object that represents a temporary, disposable
copy of the current repository. The copy is deleted at the end
of the context.
+ Note that ignored files are not copied.
+
Yields:
Vcs
"""
with contextmanagers.temp_dir() as temp_dir:
temp_root_path = os.path.join(temp_dir, 'root')
- shutil.copytree(self.path, temp_root_path)
+ path = os.path.join(self.path, '') # adds trailing slash
+ check_call(['rsync', '-r', "--filter=dir-merge,- {}".format(self.ignore_patterns_file()), path, temp_root_path])
copy = self.__class__(path=temp_root_path)
yield copy
diff --git a/easyci/vcs/git.py b/easyci/vcs/git.py
index <HASH>..<HASH> 100644
--- a/easyci/vcs/git.py
+++ b/easyci/vcs/git.py
@@ -109,3 +109,13 @@ class GitVcs(Vcs):
with open(hook_path, 'w') as f:
f.write(hook_content)
os.chmod(hook_path, stat.S_IEXEC | stat.S_IREAD | stat.S_IWRITE)
+
+ def ignore_patterns_file(self):
+ """The ignore patterns file for this repo type.
+
+ e.g. .gitignore for git
+
+ Returns:
+ str - file name
+ """
+ return '.gitignore'
diff --git a/tests/easyci/vcs/test_base.py b/tests/easyci/vcs/test_base.py
index <HASH>..<HASH> 100644
--- a/tests/easyci/vcs/test_base.py
+++ b/tests/easyci/vcs/test_base.py
@@ -2,6 +2,7 @@ import mock
import os
import pytest
+from easyci.utils import contextmanagers
from easyci.vcs.base import Vcs
@@ -9,6 +10,16 @@ class DummyVcs(Vcs):
def get_working_directory(self):
return os.getcwd()
+ def ignore_patterns_file(self):
+ return '.dummyignore'
+
+
+@pytest.yield_fixture(scope='function')
+def temp_path():
+ with contextmanagers.temp_dir() as temp:
+ with contextmanagers.chdir(temp):
+ yield temp
+
def test_init_with_path():
path = '/dummy'
@@ -36,10 +47,15 @@ def test_init_without_path():
Vcs()
-def test_temp_copy():
+def test_temp_copy(temp_path):
vcs = DummyVcs()
+ assert not os.system("echo '*.txt' > {}".format(vcs.ignore_patterns_file()))
+ assert not os.system("touch a.txt b")
with vcs.temp_copy() as copy:
assert isinstance(copy, DummyVcs)
assert copy.path != vcs.path
assert os.path.exists(copy.path)
+ assert os.path.exists(os.path.join(copy.path, copy.ignore_patterns_file()))
+ assert not os.path.exists(os.path.join(copy.path, 'a.txt'))
+ assert os.path.exists(os.path.join(copy.path, 'b'))
assert not os.path.exists(copy.path)
|
improved copying performance by using rsync
|
naphatkrit_easyci
|
train
|
a5c3e9d413e7ca297b7e433ca05fdf73a46c6b0c
|
diff --git a/lib/couch.js b/lib/couch.js
index <HASH>..<HASH> 100644
--- a/lib/couch.js
+++ b/lib/couch.js
@@ -102,7 +102,7 @@ CouchAdapter.prototype.remove = function(key, revision, callback)
this.db.remove(key, revision, callback);
};
-CouchAdapter.prototype.removeMany = function(objects, callback)
+CouchAdapter.prototype.destroyMany = function(objects, callback)
{
var self = this;
diff --git a/lib/persistence.js b/lib/persistence.js
index <HASH>..<HASH> 100644
--- a/lib/persistence.js
+++ b/lib/persistence.js
@@ -148,7 +148,7 @@ function persist(modelfunc, keyfield, adapter)
if (!objects || !Array.isArray(objects))
return callback(null);
- modelfunc.adapter.removeMany(objects, callback);
+ modelfunc.adapter.destroyMany(objects, callback);
};
// methods on model objects
diff --git a/lib/redis.js b/lib/redis.js
index <HASH>..<HASH> 100644
--- a/lib/redis.js
+++ b/lib/redis.js
@@ -11,6 +11,8 @@ var
var attachpat = /^attach:(.*)/;
function inflate(Modelfunc, payload)
{
+ if (payload === null)
+ return;
var object = new Modelfunc();
var json = {};
var matches;
@@ -177,7 +179,7 @@ RedisAdapter.prototype.remove = function(key, ignored, callback)
});
};
-RedisAdapter.prototype.removeMany = function(objects, callback)
+RedisAdapter.prototype.destroyMany = function(objects, callback)
{
var self = this;
var ids = _.map(objects, function(obj)
@@ -187,12 +189,13 @@ RedisAdapter.prototype.removeMany = function(objects, callback)
return obj.key;
});
+ var idkey = this.idskey();
var chain = this.redis.multi();
- chain.srem(this.idskey(), ids);
+ _.each(ids, function(id) { chain.srem(idkey, id); });
chain.del(_.map(ids, function(key) { return self.hashkey(key); }));
+
chain.exec(function(err, replies)
{
- // TODO process replies
callback(err);
});
};
|
removeMany -> destroyMany
Also, the redis test now cleans up after itself properly.
|
ceejbot_polyclay
|
train
|
67815d19472c02f074a3af8349e2c7a87580e4f6
|
diff --git a/app/Blueprint/Infrastructure/Service/Maker/CustomFiles/CustomFilesMaker.php b/app/Blueprint/Infrastructure/Service/Maker/CustomFiles/CustomFilesMaker.php
index <HASH>..<HASH> 100644
--- a/app/Blueprint/Infrastructure/Service/Maker/CustomFiles/CustomFilesMaker.php
+++ b/app/Blueprint/Infrastructure/Service/Maker/CustomFiles/CustomFilesMaker.php
@@ -31,7 +31,7 @@ class CustomFilesMaker {
foreach($extraFiles as $filePath) {
$fileName = basename($filePath);
- $mainService->addVolume($filePath, '/opt/custom/'.$fileName);
+ $mainService->addVolume(getcwd().DIRECTORY_SEPARATOR.$filePath, '/opt/custom/'.$fileName);
}
}
|
added getcwd().'/' to the extra files volume
The current working dir needs to be present in the volume otherwise the path will be relative to .rancherize instead of the project root
|
ipunkt_rancherize
|
train
|
2fbd5c8d7231b2d18011b28a289460213d34cd83
|
diff --git a/future/__init__.py b/future/__init__.py
index <HASH>..<HASH> 100644
--- a/future/__init__.py
+++ b/future/__init__.py
@@ -85,96 +85,12 @@ On Python 2, the ``from future.builtins import *`` line shadows builtins
to provide their Python 3 semantics. (See below for the explicit import
form.)
+Documentation
+-------------
-Standard library reorganization
--------------------------------
-``future`` supports the standard library reorganization (PEP 3108)
-via import hooks, allowing almost all moved standard library modules to be
-accessed under their Python 3 names and locations in Python 2::
-
- from future import standard_library
-
- import socketserver
- import queue
- import configparser
- import test.support
- from collections import UserList
- from itertools import filterfalse, zip_longest
- # and other moved modules and definitions
-
-It also includes backports for these stdlib packages from Py3 that were
-heavily refactored versus Py2::
-
- import html, html.entities, html.parser
- import http, http.client, http.server
-
-These currently are not supported, but we may support them in the
-future::
-
- import http.cookies, http.cookiejar
- import urllib, urllib.parse, urllib.request, urllib.error
-
-
-Utilities
----------
-``future`` also provides some useful functions and decorators to ease backward
-compatibility with Py2 in the ``future.utils`` module. These are a selection
-of the most useful functions from ``six`` and various home-grown Py2/3
-compatibility modules from various Python projects, such as Jinja2, Pandas,
-IPython, and Django.
-
-Examples::
-
- # Functions like print() expect __str__ on Py2 to return a byte
- string. This decorator maps the __str__ to __unicode__ on Py2 and
- defines __str__ to encode it as utf-8:
-
- from future.utils import python_2_unicode_compatible
-
- @python_2_unicode_compatible
- class MyClass(object):
- def __str__(self):
- return u'Unicode string: \u5b54\u5b50'
- a = MyClass()
-
- # This then prints the Chinese characters for Confucius:
- print(a)
-
-
- # Iterators on Py3 require a __next__() method, whereas on Py2 this
- # is called next(). This decorator allows Py3-style iterators to work
- # identically on Py2:
-
- @implements_iterator
- class Upper(object):
- def __init__(self, iterable):
- self._iter = iter(iterable)
- def __next__(self): # note the Py3 interface
- return next(self._iter).upper()
- def __iter__(self):
- return self
-
- print(list(Upper('hello')))
- # prints ['H', 'E', 'L', 'L', 'O']
-
-On Python 3 these decorators are no-ops.
-
-
-Explicit imports
-----------------
-If you prefer explicit imports, the explicit equivalent of the ``from
-future.builtins import *`` line above is::
-
- from future.builtins.iterators import (filter, map, zip)
- from future.builtins.misc import (ascii, chr, hex, input, int, oct, open)
- from future.builtins.backports import (bytes, range, round, str, super)
- from future.builtins.disabled import (apply, cmp, coerce, execfile,
- file, long, raw_input, reduce, reload, unicode,
- xrange, StandardError)
-
-But please note that the internal API is still evolving.
+See: http://python-future.org
-See the docstrings for each of these modules for more info::
+Also see the docstrings for each of these modules for more info::
- future.standard_library
- future.builtins
@@ -188,12 +104,6 @@ either Python 2 code or Python 3 code compatible with both platforms
using the ``future`` module. See `<http://python-future.org/automatic_conversion.html>`_.
-Documentation
--------------
-
-See http://python-future.org
-
-
Credits
-------
|
Simplify the top-level future package docstring
- Eliminates some redundancy ...
|
PythonCharmers_python-future
|
train
|
dc94ea47e55655e06d99fb0fa0b10dea27312893
|
diff --git a/src/js/videojs.vr.js b/src/js/videojs.vr.js
index <HASH>..<HASH> 100644
--- a/src/js/videojs.vr.js
+++ b/src/js/videojs.vr.js
@@ -436,6 +436,7 @@
}
player.vr = {
+ camera: camera,
cameraVector: cameraVector
};
}
|
Expose ThreeJS perspective camera in player.vr object
exposeThreeJSCamera
|
videojs_videojs-vr
|
train
|
ba004ebf3c4b465e5dbf2fe8bfb3da8bb4c91d03
|
diff --git a/changes.md b/changes.md
index <HASH>..<HASH> 100644
--- a/changes.md
+++ b/changes.md
@@ -6,6 +6,7 @@
- Fixed bug with protected endpoints becoming unprotected when setting protection on both group and route.
- Fixed bug where routes defined before the replacement of the bound router would be lost.
- `AuthFilter` now require authentication for internal requests.
+- Don't catch exceptions and handle them within the `AuthFilter`, this causes issues with internal requests.
#### Added
diff --git a/src/Http/Filter/AuthFilter.php b/src/Http/Filter/AuthFilter.php
index <HASH>..<HASH> 100644
--- a/src/Http/Filter/AuthFilter.php
+++ b/src/Http/Filter/AuthFilter.php
@@ -63,10 +63,6 @@ class AuthFilter extends Filter
$providers = array_merge(array_slice(func_get_args(), 2), $route->getAuthProviders());
- try {
- $this->auth->authenticate($providers);
- } catch (UnauthorizedHttpException $exception) {
- return $this->events->until('router.exception', [$exception]);
- }
+ $this->auth->authenticate($providers);
}
}
diff --git a/tests/Http/Filter/AuthFilterTest.php b/tests/Http/Filter/AuthFilterTest.php
index <HASH>..<HASH> 100644
--- a/tests/Http/Filter/AuthFilterTest.php
+++ b/tests/Http/Filter/AuthFilterTest.php
@@ -46,7 +46,10 @@ class AuthFilterTest extends PHPUnit_Framework_TestCase
}
- public function testAuthFailsAndEventIsFired()
+ /**
+ * @expectedException \Symfony\Component\HttpKernel\Exception\UnauthorizedHttpException
+ */
+ public function testAuthFailsAndExceptionIsThrown()
{
$request = Request::create('test', 'GET');
$route = new Route(['GET'], 'test', ['protected' => true]);
@@ -55,13 +58,7 @@ class AuthFilterTest extends PHPUnit_Framework_TestCase
$this->auth->shouldReceive('check')->once()->andReturn(false);
$this->auth->shouldReceive('authenticate')->once()->with([])->andThrow($exception);
- $this->events->listen('router.exception', function ($thrown) use ($exception) {
- $this->assertSame($thrown, $exception);
-
- return 'caught';
- });
-
- $this->assertEquals('caught', $this->filter->filter($route, $request));
+ $this->filter->filter($route, $request);
}
|
Avoid catching and handling exceptions in AuthFilter.
|
dingo_api
|
train
|
23c71abcd70af866d7752dab91c37cd2e84196e8
|
diff --git a/api/cloudcontroller/ccv2/application.go b/api/cloudcontroller/ccv2/application.go
index <HASH>..<HASH> 100644
--- a/api/cloudcontroller/ccv2/application.go
+++ b/api/cloudcontroller/ccv2/application.go
@@ -123,7 +123,7 @@ func (application Application) MarshalJSON() ([]byte, error) {
DockerCredentials *DockerCredentials `json:"docker_credentials,omitempty"`
DockerImage string `json:"docker_image,omitempty"`
EnvironmentVariables map[string]string `json:"environment_json,omitempty"`
- HealthCheckHTTPEndpoint string `json:"health_check_http_endpoint,omitempty"`
+ HealthCheckHTTPEndpoint *string `json:"health_check_http_endpoint,omitempty"`
HealthCheckTimeout int `json:"health_check_timeout,omitempty"`
HealthCheckType constant.ApplicationHealthCheckType `json:"health_check_type,omitempty"`
Instances *int `json:"instances,omitempty"`
@@ -133,17 +133,16 @@ func (application Application) MarshalJSON() ([]byte, error) {
StackGUID string `json:"stack_guid,omitempty"`
State ApplicationState `json:"state,omitempty"`
}{
- DiskQuota: application.DiskQuota,
- DockerImage: application.DockerImage,
- EnvironmentVariables: application.EnvironmentVariables,
- HealthCheckHTTPEndpoint: application.HealthCheckHTTPEndpoint,
- HealthCheckTimeout: application.HealthCheckTimeout,
- HealthCheckType: application.HealthCheckType,
- Memory: application.Memory,
- Name: application.Name,
- SpaceGUID: application.SpaceGUID,
- StackGUID: application.StackGUID,
- State: application.State,
+ DiskQuota: application.DiskQuota,
+ DockerImage: application.DockerImage,
+ EnvironmentVariables: application.EnvironmentVariables,
+ HealthCheckTimeout: application.HealthCheckTimeout,
+ HealthCheckType: application.HealthCheckType,
+ Memory: application.Memory,
+ Name: application.Name,
+ SpaceGUID: application.SpaceGUID,
+ StackGUID: application.StackGUID,
+ State: application.State,
}
if application.Buildpack.IsSet {
@@ -165,6 +164,10 @@ func (application Application) MarshalJSON() ([]byte, error) {
ccApp.Instances = &application.Instances.Value
}
+ if application.HealthCheckType != "" {
+ ccApp.HealthCheckHTTPEndpoint = &application.HealthCheckHTTPEndpoint
+ }
+
return json.Marshal(ccApp)
}
diff --git a/api/cloudcontroller/ccv2/application_test.go b/api/cloudcontroller/ccv2/application_test.go
index <HASH>..<HASH> 100644
--- a/api/cloudcontroller/ccv2/application_test.go
+++ b/api/cloudcontroller/ccv2/application_test.go
@@ -437,7 +437,7 @@ var _ = Describe("Application", func() {
"detected_buildpack": null,
"health_check_type": "some-health-check-type",
"health_check_http_endpoint": "/",
- "instances": 13,
+ "instances": 7,
"memory": 1024,
"name": "app-name-1",
"package_updated_at": "2015-03-10T23:11:54Z",
@@ -448,7 +448,7 @@ var _ = Describe("Application", func() {
server.AppendHandlers(
CombineHandlers(
VerifyRequest(http.MethodPut, "/v2/apps/some-app-guid"),
- VerifyBody([]byte(`{"health_check_type":"some-health-check-type"}`)),
+ VerifyBody([]byte(`{"instances":7}`)),
RespondWith(http.StatusCreated, response1, http.Header{"X-Cf-Warnings": {"this is a warning"}}),
),
)
@@ -456,8 +456,8 @@ var _ = Describe("Application", func() {
It("returns the updated object and warnings and sends only updated field", func() {
app, warnings, err := client.UpdateApplication(Application{
- GUID: "some-app-guid",
- HealthCheckType: "some-health-check-type",
+ GUID: "some-app-guid",
+ Instances: types.NullInt{IsSet: true, Value: 7},
})
Expect(err).NotTo(HaveOccurred())
@@ -472,7 +472,7 @@ var _ = Describe("Application", func() {
GUID: "some-app-guid",
HealthCheckType: "some-health-check-type",
HealthCheckHTTPEndpoint: "/",
- Instances: types.NullInt{Value: 13, IsSet: true},
+ Instances: types.NullInt{Value: 7, IsSet: true},
Memory: 1024,
Name: "app-name-1",
PackageUpdatedAt: updatedAt,
|
always send health_check_http_endpoint when health_check_type is set
[Finishes #<I>]
|
cloudfoundry_cli
|
train
|
672624669703c68f3bf321d04453af14c47dfc2e
|
diff --git a/core/server/services/themes/storage.js b/core/server/services/themes/storage.js
index <HASH>..<HASH> 100644
--- a/core/server/services/themes/storage.js
+++ b/core/server/services/themes/storage.js
@@ -75,12 +75,14 @@ module.exports = {
name: themeName,
path: checkedTheme.path
});
+
// CASE: loads the theme from the fs & sets the theme on the themeList
const loadedTheme = await themeLoader.loadOneTheme(themeName);
overrideTheme = (themeName === settingsCache.get('active_theme'));
+
// CASE: if this is the active theme, we are overriding
if (overrideTheme) {
- debug('setFromZip Theme is active alreadu');
+ debug('setFromZip Theme is active already');
activator.activateFromAPIOverride(themeName, loadedTheme, checkedTheme);
}
diff --git a/test/api-acceptance/admin/themes.test.js b/test/api-acceptance/admin/themes.test.js
index <HASH>..<HASH> 100644
--- a/test/api-acceptance/admin/themes.test.js
+++ b/test/api-acceptance/admin/themes.test.js
@@ -1,4 +1,5 @@
const should = require('should');
+const sinon = require('sinon');
const path = require('path');
const fs = require('fs');
const _ = require('lodash');
@@ -7,6 +8,8 @@ const nock = require('nock');
const testUtils = require('../../utils');
const config = require('../../../core/shared/config');
const localUtils = require('./utils');
+const settingsCache = require('../../../core/shared/settings-cache');
+const origCache = _.cloneDeep(settingsCache);
describe('Themes API', function () {
let ownerRequest;
@@ -28,6 +31,10 @@ describe('Themes API', function () {
await localUtils.doAuth(ownerRequest);
});
+ after(function () {
+ sinon.restore();
+ });
+
it('Can request all available themes', async function () {
const res = await ownerRequest
.get(localUtils.API.getApiQuery('themes/'))
@@ -92,19 +99,8 @@ describe('Themes API', function () {
jsonResponse.themes[0].name.should.eql('valid');
jsonResponse.themes[0].active.should.be.false();
- // upload same theme again to force override
- const res2 = await uploadTheme({themePath: path.join(__dirname, '..', '..', 'utils', 'fixtures', 'themes', 'valid.zip')});
- const jsonResponse2 = res2.body;
-
- should.not.exist(res2.headers['x-cache-invalidate']);
- should.exist(jsonResponse2.themes);
- localUtils.API.checkResponse(jsonResponse2, 'themes');
- jsonResponse2.themes.length.should.eql(1);
- localUtils.API.checkResponse(jsonResponse2.themes[0], 'theme');
- jsonResponse2.themes[0].name.should.eql('valid');
- jsonResponse2.themes[0].active.should.be.false();
-
- // ensure tmp theme folder contains two themes now
+ // Note: at this point, the tmpFolder can legitimately still contain a valid_34324324 backup
+ // As it is deleted asynchronously
const tmpFolderContents = fs.readdirSync(config.getContentPath('themes'));
tmpFolderContents.forEach((theme, index) => {
if (theme.match(/^\./)) {
@@ -112,8 +108,6 @@ describe('Themes API', function () {
}
});
- // Note: at this point, the tmpFolder can legitimately still contain a valid_34324324 backup
- // As it is deleted asynchronously
tmpFolderContents.should.containEql('valid');
tmpFolderContents.should.containEql('valid.zip');
@@ -307,4 +301,29 @@ describe('Themes API', function () {
.set('Origin', config.get('url'))
.expect(204);
});
+
+ it('Can re-upload the active theme to override', async function () {
+ // The tricky thing about this test is the default active theme is Casper and you're not allowed to override it.
+ // So we upload a valid theme, activate it, and then upload again.
+ sinon.stub(settingsCache, 'get').callsFake(function (key, options) {
+ if (key === 'active_theme') {
+ return 'valid';
+ }
+
+ return origCache.get(key, options);
+ });
+
+ // Upload the valid theme
+ const res = await uploadTheme({themePath: path.join(__dirname, '..', '..', 'utils', 'fixtures', 'themes', 'valid.zip')});
+ const jsonResponse = res.body;
+
+ should.exist(res.headers['x-cache-invalidate']);
+
+ should.exist(jsonResponse.themes);
+ localUtils.API.checkResponse(jsonResponse, 'themes');
+ jsonResponse.themes.length.should.eql(1);
+ localUtils.API.checkResponse(jsonResponse.themes[0], 'theme', 'templates');
+ jsonResponse.themes[0].name.should.eql('valid');
+ jsonResponse.themes[0].active.should.be.true();
+ });
});
|
Fixed test for overriding active theme
refs: <URL> doesn't test the right codepath
- It incorrectly assumes uploading the same theme twice results in an override, but this is only true for the active theme
- This change splits the override test out into it's own test, and only tests overriding by changing the active theme first
- Also fixed a minor comment type whilst here
|
TryGhost_Ghost
|
train
|
d9fada95496d4b92404e5e2f82b7404bb0d28f3e
|
diff --git a/spyderlib/plugins/editor.py b/spyderlib/plugins/editor.py
index <HASH>..<HASH> 100644
--- a/spyderlib/plugins/editor.py
+++ b/spyderlib/plugins/editor.py
@@ -682,8 +682,8 @@ class Editor(SpyderPluginWidget):
# --- Run toolbar ---
run_action = create_action(self, _("&Run"), icon='run.png',
- tip=_("Run selected script in<br> "
- "current console"),
+ tip=_("Run selected script in\n"
+ "current console"),
triggered=self.run_file)
self.register_shortcut(run_action, context="Editor",
name="Run", default="F5")
@@ -695,8 +695,8 @@ class Editor(SpyderPluginWidget):
name="Configure", default="F6")
re_run_action = create_action(self,
_("Re-run &last script"), icon='run_again.png',
- tip=_("Run again last script in<br> "
- "current console with the same options"),
+ tip=_("Run again last script in current\n"
+ "console with the same options"),
triggered=self.re_run_file)
self.register_shortcut(re_run_action, context="Editor",
name="Re-run last script", default="Ctrl+F6")
@@ -704,8 +704,8 @@ class Editor(SpyderPluginWidget):
run_selected_action = create_action(self,
_("Run &selection or current block"),
icon='run_selection.png',
- tip=_("Run selected text or current<br> "
- "block of lines inside current console"),
+ tip=_("Run selected text or current block\n"
+ "of lines inside current console"),
triggered=self.run_selection_or_block)
self.register_shortcut(run_selected_action, context="Editor",
name="Run selection", default="F9")
|
Run Toolbar: Use "\n" instead of "<br>" to break its long tooltips
- Unfortunately "<br>" is printed on the status bar, while "\n" is not.
|
spyder-ide_spyder
|
train
|
e323c3c66ff5c95a00351d892a6838a428c70bd1
|
diff --git a/lib/searchkick/index_options.rb b/lib/searchkick/index_options.rb
index <HASH>..<HASH> 100644
--- a/lib/searchkick/index_options.rb
+++ b/lib/searchkick/index_options.rb
@@ -144,6 +144,15 @@ module Searchkick
}
}
+ if below60
+ # ES docs say standard token filter does nothing in ES 5
+ # (and therefore isn't needed at at), but tests say otherwise
+ # https://www.elastic.co/guide/en/elasticsearch/reference/5.0/analysis-standard-tokenfilter.html
+ [default_analyzer, :searchkick_search, :searchkick_search2].each do |analyzer|
+ settings[:analysis][:analyzer][analyzer][:filter].unshift("standard")
+ end
+ end
+
case language
when "chinese"
settings[:analysis][:analyzer].merge!(
|
Added standard filter back in ES 5
|
ankane_searchkick
|
train
|
89225008d37b186c972fdf779658bbdf8a25ab84
|
diff --git a/src/Select.js b/src/Select.js
index <HASH>..<HASH> 100644
--- a/src/Select.js
+++ b/src/Select.js
@@ -98,6 +98,22 @@ var Select = React.createClass({
this._focusAfterUpdate = false;
}.bind(this), 50);
}
+
+ if (this._focusedOptionReveal) {
+ if (this.refs.selected && this.refs.menu) {
+ var selectedDOM = this.refs.selected.getDOMNode();
+ var menuDOM = this.refs.menu.getDOMNode();
+ var selectedRect = selectedDOM.getBoundingClientRect();
+ var menuRect = menuDOM.getBoundingClientRect();
+
+ if (selectedRect.bottom > menuRect.bottom ||
+ selectedRect.top < menuRect.top) {
+ menuDOM.scrollTop = (selectedDOM.offsetTop + selectedDOM.clientHeight - menuDOM.offsetHeight);
+ }
+ }
+
+ this._focusedOptionReveal = false;
+ }
},
getStateFromValue: function(value, options) {
@@ -369,6 +385,7 @@ var Select = React.createClass({
},
focusAdjacentOption: function(dir) {
+ this._focusedOptionReveal = true;
var ops = this.state.filteredOptions;
@@ -425,17 +442,20 @@ var Select = React.createClass({
var focusedValue = this.state.focusedOption ? this.state.focusedOption.value : null;
var ops = _.map(this.state.filteredOptions, function(op) {
+ var isFocused = focusedValue === op.value;
var optionClass = classes({
'Select-option': true,
- 'is-focused': focusedValue === op.value
+ 'is-focused': isFocused
});
+
+ var ref = isFocused ? 'selected' : null;
var mouseEnter = this.focusOption.bind(this, op),
mouseLeave = this.unfocusOption.bind(this, op),
mouseDown = this.selectValue.bind(this, op);
- return <div key={'option-' + op.value} className={optionClass} onMouseEnter={mouseEnter} onMouseLeave={mouseLeave} onMouseDown={mouseDown}>{op.label}</div>;
+ return <div ref={ref} key={'option-' + op.value} className={optionClass} onMouseEnter={mouseEnter} onMouseLeave={mouseLeave} onMouseDown={mouseDown}>{op.label}</div>;
}, this);
@@ -471,7 +491,7 @@ var Select = React.createClass({
var loading = this.state.isLoading ? <span className="Select-loading" aria-hidden="true" /> : null;
var clear = this.state.value ? <span className="Select-clear" aria-label="Clear value" onMouseDown={this.clearValue} dangerouslySetInnerHTML={{ __html: '×' }} /> : null;
- var menu = this.state.isOpen ? <div className="Select-menu">{this.buildMenu()}</div> : null;
+ var menu = this.state.isOpen ? <div ref="menu" className="Select-menu">{this.buildMenu()}</div> : null;
return (
<div ref="wrapper" className={selectClass}>
|
scroll focused item into view on keyboard navigation
|
HubSpot_react-select-plus
|
train
|
2d2d399ced6261c18b21d6f4e45b9c6d64e8cecd
|
diff --git a/lib/cancan-permits/license/base_license.rb b/lib/cancan-permits/license/base_license.rb
index <HASH>..<HASH> 100644
--- a/lib/cancan-permits/license/base_license.rb
+++ b/lib/cancan-permits/license/base_license.rb
@@ -14,15 +14,16 @@ module License
end
def load_rules name = nil
- return if !licenses || licenses.empty?
+ return if !licenses || licenses.permissions.empty?
- name ||= self.class.to_s.gsub(/License$/, "").underscore.to_sym
+ name ||= self.class.to_s.gsub(/License$/, "").underscore
- licenses[name].can_statement do |permission_statement|
- instance_eval permission_statement
- end
+ return if licenses.permissions[name].nil?
- licenses[name].cannot_statement do |permission_statement|
+ licenses.permissions[name].can_eval do |permission_statement|
+ instance_eval permission_statement
+ end
+ licenses.permissions[name].cannot_eval do |permission_statement|
instance_eval permission_statement
end
end
@@ -39,4 +40,4 @@ module License
permit.owns user, clazz, ownership_relation, user_id_attribute
end
end
-end
\ No newline at end of file
+end
diff --git a/lib/cancan-permits/loader/permission_config.rb b/lib/cancan-permits/loader/permission_config.rb
index <HASH>..<HASH> 100644
--- a/lib/cancan-permits/loader/permission_config.rb
+++ b/lib/cancan-permits/loader/permission_config.rb
@@ -10,14 +10,16 @@ class PermissionConfig
# can(:manage, :all)
# cannot(:update, [User, Profile])
def can_eval &block
+ return nil if !can
statements = [:manage, :read, :update, :create, :write].map do |action|
- targets = can[action]
+ targets = can[action.to_s]
targets ? "can(:#{action}, #{parse_targets(targets)})" : nil
end.compact.join("\n")
yield statements if !statements.empty? && block
end
- def can_eval &block
+ def cannot_eval &block
+ return nil if !cannot
statements = [:manage, :read, :update, :create, :write].map do |action|
targets = cannot[action]
targets ? "cannot(:#{action}, #{parse_targets(targets)})" : nil
@@ -28,14 +30,14 @@ class PermissionConfig
def parse_targets targets
targets.map do |target|
if target == 'all'
- ':all'
+ :all
else
begin
- "#{target.constantize}"
+ target #.constantize
rescue
puts "[permission] target #{target} does not have a class so it was skipped"
end
end
end
end
-end
\ No newline at end of file
+end
diff --git a/lib/cancan-permits/permit/base_permit.rb b/lib/cancan-permits/permit/base_permit.rb
index <HASH>..<HASH> 100644
--- a/lib/cancan-permits/permit/base_permit.rb
+++ b/lib/cancan-permits/permit/base_permit.rb
@@ -6,7 +6,7 @@ module Permit
attr_reader :ability
attr_reader :strategy # this can be used to customize the strategy used by owns to determine ownership, fx to support alternative ORMs
- attr_reader :user_permissions
+ attr_reader :user_permissions, :role_permissions
def licenses *names
names.to_strings.each do |name|
@@ -31,32 +31,32 @@ module Permit
end
def load_role_rules
- return if !role_permissions || role_permissions.empty?
- name ||= self.class.to_s.gsub(/Permit$/, "").underscore.to_sym
+ return if !role_permissions || role_permissions.permissions.empty?
+ name ||= self.class.to_s.gsub(/Permit$/, "").underscore.to_s #ym
- role_permissions[name].can_statement do |permission_statement|
- instance_eval permission_statement
- end
+ return if role_permissions.permissions[name].nil?
- role_permissions[name].cannot_statement do |permission_statement|
+ role_permissions.permissions[name].can_eval do |permission_statement|
+ instance_eval permission_statement
+ end
+ role_permissions.permissions[name].cannot_eval do |permission_statement|
instance_eval permission_statement
end
end
def load_user_rules user
- return if !user_permissions || user_permissions.empty?
+ return if !user_permissions || user_permissions.permissions.empty?
raise "#load_user_rules expects the user to have an email property: #{user.inspect}" if !user || !user.respond_to?(:email)
id = user.email
- return nil if id.strip.empty?
+ return nil if id.strip.empty? || user_permissions.permissions[id].nil?
- user_permissions[id].can_statement do |permission_statement|
+ user_permissions.permissions[id].can_eval do |permission_statement|
instance_eval permission_statement
- end
-
- user_permissions[id].cannot_statement do |permission_statement|
+ end
+ user_permissions.permissions[id].cannot_eval do |permission_statement|
instance_eval permission_statement
- end
+ end
end
def initialize ability, options = {}
@@ -137,4 +137,4 @@ module Permit
end
end
-end
\ No newline at end of file
+end
|
patch on 'yaml files for roles configuration' functionality
|
kristianmandrup_cancan-permits
|
train
|
1cfcc59c058c66364cdbf208a281c48b55a74847
|
diff --git a/lib/methods/drush.py b/lib/methods/drush.py
index <HASH>..<HASH> 100644
--- a/lib/methods/drush.py
+++ b/lib/methods/drush.py
@@ -392,7 +392,7 @@ class DrushMethod(BaseMethod):
pass
time.sleep(5)
- print "Wait another 5 secs for the database ..."
+ print "Wait another 5 secs for the database ({user}@{host}) ...".format(**config['database'])
print red('Database not available!')
return False
|
Display host and user when waiting for the database (Fixes #<I>)
|
factorial-io_fabalicious
|
train
|
6a3e485a9c9619b40a919feecbaeded76c9c6b0e
|
diff --git a/lib/grunt-horde/index.js b/lib/grunt-horde/index.js
index <HASH>..<HASH> 100644
--- a/lib/grunt-horde/index.js
+++ b/lib/grunt-horde/index.js
@@ -66,15 +66,11 @@ var shelljs = require('shelljs');
* - `{object} loadTasks`
* - `{object} registerMultiTask`
* - `{object} registerTask`
- * - `{object} decree` Key/value pairs:
- * - Applied during template variable replacement
- * - Available read-only in config modules under `this.decree`
* - `{object} grunt` Module collected from Gruntfile.js
* - `{string} home` Absolute path to project root dir w/out trailing slash
* - `{string} seek` Absolute path to project-local grunt config dir w/out trailing slash
*/
function GruntHorde() {
- this.decree = null;
this.home = process.cwd();
this.grunt = null;
this.config = {
|
fix(decree): Remove artifacts
|
codeactual_grunt-horde
|
train
|
74f9bbb008c3905c72d1b806bbaa17b90c1aeab6
|
diff --git a/documentation/components/MasterDetails.md b/documentation/components/MasterDetails.md
index <HASH>..<HASH> 100644
--- a/documentation/components/MasterDetails.md
+++ b/documentation/components/MasterDetails.md
@@ -1,6 +1,5 @@
#### Examples:
-
__1:__ A simple master detail example using the dummy <Details> component on the right side.
```jsx
@@ -11,10 +10,29 @@ __1:__ A simple master detail example using the dummy <Details> component
columns={tableData.experts.columns}
rows={tableData.experts.rows}
details={Details}
- />
+ />
+```
+
+__2:__ A master detail example using the dummy <Details> component on the right side,
+multiselect table option, that always requires a table selection.
+
+```jsx
+ const tableData = require('../sampleData/TableData').default;
+ const Details = require('../../src/components/Details').default;
+
+ <MasterDetails
+ columns={tableData.experts.columns}
+ rows={tableData.experts.rows}
+ details={Details}
+ rowComparator={(rowA, rowB) => rowA.id === rowB.id}
+ multiSelect
+ activeRowBackgroundColor="#d08afc"
+ multiSelectBackgroundColor="#dcaef9"
+ noEmptySelection
+ />
```
-__2:__ Another example showing the use of a header and footer and with a 50-50 split, widthwise, between
+__3:__ Another example showing the use of a header and footer and with a 50-50 split, widthwise, between
the table and details, along with 20 pixels of padding between them.
```jsx
@@ -33,10 +51,10 @@ the table and details, along with 20 pixels of padding between them.
)}
split={6}
padding={20}
- />
+ />
```
-__3:__ A multi-select variation with a custom details component. In real life, it's recommended that you
+__4:__ A multi-select variation with a custom details component. In real life, it's recommended that you
create a full class-based component for the details pane.
```jsx
@@ -101,5 +119,5 @@ create a full class-based component for the details pane.
columns={tableData.experts.columns}
rows={tableData.experts.rows}
details={details}
- />
+ />
```
diff --git a/src/components/MasterDetails.js b/src/components/MasterDetails.js
index <HASH>..<HASH> 100644
--- a/src/components/MasterDetails.js
+++ b/src/components/MasterDetails.js
@@ -38,7 +38,7 @@ type MasterDetailsProps = {
*/
multiSelect?: boolean;
/**
- * This callback is called when the user changes the selection in the table. However the
+ * This optional callback is called when the user changes the selection in the table. However the
* Table component is responsible for maintaining the selection in the table; this callback
* is just a "courtesy" so the parent can do something such as change the enablement of buttons,
* etc., based on the selection. See the onSelect property of the Table component for more details.
@@ -105,6 +105,11 @@ type MasterDetailsProps = {
*/
activeRowBackgroundColor?: string;
/**
+ * Optional background color to apply all selected rows except for the active row. Only used if multiSelect is specified as well.
+ * Takes precedence over all other background colors specified through classNames.
+ */
+ multiSelectBackgroundColor?: string;
+ /**
* Row comparator function passed through to the Table component. See Table component for details.
*/
rowComparator: (rowA: {}, rowB: {}) => boolean;
@@ -144,10 +149,10 @@ export default class MasterDetails extends React.Component<MasterDetailsDefaultP
multiSelect: false,
noEmptySelection: false,
padding: 0,
- selectedClassName: 'attivio-table-row-selected',
+ selectedClassName: 'attivio-table-row-selected attivio-table-row',
sortColumn: 0,
split: 8,
- tableClassName: 'table table-striped attivio-table attivio-table-sm',
+ tableClassName: 'table table-striped attivio-table attivio-table-sm attivio-table-no-outline',
tableContainerClassName: '',
};
@@ -203,6 +208,7 @@ export default class MasterDetails extends React.Component<MasterDetailsDefaultP
details: Detail,
detailsProps,
multiSelect,
+ multiSelectBackgroundColor,
noEmptySelection,
onSort,
padding = 0,
@@ -236,6 +242,7 @@ export default class MasterDetails extends React.Component<MasterDetailsDefaultP
selectedClassName={selectedClassName}
tableClassName={tableClassName}
activeRowBackgroundColor={activeRowBackgroundColor}
+ multiSelectBackgroundColor={multiSelectBackgroundColor}
rowComparator={rowComparator}
/>
{this.renderFooter()}
diff --git a/src/components/Table.js b/src/components/Table.js
index <HASH>..<HASH> 100644
--- a/src/components/Table.js
+++ b/src/components/Table.js
@@ -178,7 +178,7 @@ export default class Table extends React.Component<TableDefaultProps, TableProps
rows: [],
selectedClassName: 'attivio-table-row-selected attivio-table-row',
sortColumn: 0,
- tableClassName: 'table table-striped attivio-table attivio-table-sm attivio-table-no-outline ',
+ tableClassName: 'table table-striped attivio-table attivio-table-sm attivio-table-no-outline',
};
static makeCustomRenderer(column) {
|
feat/PLAT-<I>-A: Update Master Details style guide to reflect multiselect Table changes
|
attivio_suit
|
train
|
7c41b5352f80b92fe66188dd3f36268a8a04b60c
|
diff --git a/lib/elasticity/aws_request_v4.rb b/lib/elasticity/aws_request_v4.rb
index <HASH>..<HASH> 100644
--- a/lib/elasticity/aws_request_v4.rb
+++ b/lib/elasticity/aws_request_v4.rb
@@ -51,7 +51,12 @@ module Elasticity
end
def payload
- AwsUtils.convert_ruby_to_aws_v4(@ruby_service_hash).to_json
+ configurations = @ruby_service_hash.delete(:configurations)
+ service_hash = AwsUtils.convert_ruby_to_aws_v4(@ruby_service_hash)
+ return service_hash.to_json if configurations.nil?
+ @ruby_service_hash[:configurations] = configurations
+ service_hash['Configurations'] = configurations
+ service_hash.to_json
end
private
diff --git a/lib/elasticity/aws_utils.rb b/lib/elasticity/aws_utils.rb
index <HASH>..<HASH> 100644
--- a/lib/elasticity/aws_utils.rb
+++ b/lib/elasticity/aws_utils.rb
@@ -33,4 +33,4 @@ module Elasticity
end
-end
\ No newline at end of file
+end
diff --git a/spec/lib/elasticity/aws_request_v4_spec.rb b/spec/lib/elasticity/aws_request_v4_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/lib/elasticity/aws_request_v4_spec.rb
+++ b/spec/lib/elasticity/aws_request_v4_spec.rb
@@ -86,8 +86,29 @@ describe Elasticity::AwsRequestV4 do
end
describe '#payload' do
- it 'should create the proper payload' do
- subject.payload.should == '{"JobFlowIds":["TEST_JOBFLOW_ID"]}'
+ context 'when no configurations are given' do
+ it 'should create the proper payload' do
+ subject.payload.should == '{"JobFlowIds":["TEST_JOBFLOW_ID"]}'
+ end
+ end
+
+ context 'when configurations are given' do
+ subject do
+ Elasticity::AwsRequestV4.new(
+ Elasticity::AwsSession.new,
+ {:operation => 'DescribeJobFlows', :job_flow_ids => ['TEST_JOBFLOW_ID'],
+ :configurations => [
+ 'Classification' => 'capacity-scheduler',
+ 'Properties' => {
+ 'yarn.scheduler.capacity.resource-calculator' =>
+ 'org.apache.hadoop.yarn.util.resource.DominantResourceCalculator'
+ }]
+ }
+ )
+ end
+ it 'should create the proper payload' do
+ subject.payload.should == '{"JobFlowIds":["TEST_JOBFLOW_ID"],"Configurations":[{"Classification":"capacity-scheduler","Properties":{"yarn.scheduler.capacity.resource-calculator":"org.apache.hadoop.yarn.util.resource.DominantResourceCalculator"}}]}'
+ end
end
end
diff --git a/spec/lib/elasticity/aws_utils_spec.rb b/spec/lib/elasticity/aws_utils_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/lib/elasticity/aws_utils_spec.rb
+++ b/spec/lib/elasticity/aws_utils_spec.rb
@@ -56,4 +56,4 @@ describe Elasticity::AwsUtils do
end
end
-end
\ No newline at end of file
+end
|
Don't convert configurations
Assuming configurations are specified in the correct form, pass them to convert_ruby_to_aws_4 will incorrectly upcase the property name in the inner hash
|
rslifka_elasticity
|
train
|
72c0c1ae8bb867112d5f425a67b47d044a08e449
|
diff --git a/cmd/helm/install.go b/cmd/helm/install.go
index <HASH>..<HASH> 100644
--- a/cmd/helm/install.go
+++ b/cmd/helm/install.go
@@ -202,14 +202,17 @@ func (i *installCmd) run() error {
}
// Check chart requirements to make sure all dependencies are present in /charts
- if c, err := chartutil.Load(i.chartPath); err == nil {
- if req, err := chartutil.LoadRequirements(c); err == nil {
- checkDependencies(c, req, i.out)
- }
+ chartRequested, err := chartutil.Load(i.chartPath)
+ if err != nil {
+ return prettyError(err)
+ }
+
+ if req, err := chartutil.LoadRequirements(chartRequested); err == nil {
+ checkDependencies(chartRequested, req, i.out)
}
- res, err := i.client.InstallRelease(
- i.chartPath,
+ res, err := i.client.InstallReleaseFromChart(
+ chartRequested,
i.namespace,
helm.ValueOverrides(rawVals),
helm.ReleaseName(i.name),
|
Replaced InstallRelease with InstallReleaseFromChart in cmd/install.go
Fixes <URL>
|
helm_helm
|
train
|
b231128d181cf4ecbbc70c627b8d2a6e23d593b0
|
diff --git a/runtimes/azure-client-runtime/src/main/java/com/microsoft/azure/PagedList.java b/runtimes/azure-client-runtime/src/main/java/com/microsoft/azure/PagedList.java
index <HASH>..<HASH> 100644
--- a/runtimes/azure-client-runtime/src/main/java/com/microsoft/azure/PagedList.java
+++ b/runtimes/azure-client-runtime/src/main/java/com/microsoft/azure/PagedList.java
@@ -47,7 +47,10 @@ public abstract class PagedList<E> implements List<E> {
*/
public PagedList(Page<E> page) {
this();
- items.addAll(page.getItems());
+ List<E> retrievedItems = page.getItems();
+ if (retrievedItems != null && retrievedItems.size() != 0) {
+ items.addAll(retrievedItems);
+ }
nextPageLink = page.getNextPageLink();
currentPage = page;
}
@@ -138,14 +141,17 @@ public abstract class PagedList<E> implements List<E> {
public E next() {
if (!itemsListItr.hasNext()) {
if (!hasNextPage()) {
- throw new NoSuchElementException();
+ throw new NoSuchElementException();
} else {
int size = items.size();
loadNextPage();
itemsListItr = items.listIterator(size);
}
}
- return itemsListItr.next();
+ if (itemsListItr.hasNext()) {
+ return itemsListItr.next();
+ }
+ return null;
}
@Override
|
fixed page listing for no item lists.
|
Azure_azure-sdk-for-java
|
train
|
0ea4abe8b2e44bdd02308ad590ffb1e846201300
|
diff --git a/terms/sitemaps.py b/terms/sitemaps.py
index <HASH>..<HASH> 100644
--- a/terms/sitemaps.py
+++ b/terms/sitemaps.py
@@ -1,4 +1,5 @@
from django.contrib.sitemaps import Sitemap
+from django.db.models import Q
from .models import Term
@@ -7,4 +8,4 @@ class TermsSitemap(Sitemap):
priority = 0.1
def items(self):
- return Term.objects.all()
+ return Term.objects.filter(Q(url__startswith='/') | Q(url=''))
|
Exclude external urls from the sitemap.
|
BertrandBordage_django-terms
|
train
|
5cd9d5fa93baad644e95c3dc7559a080cbae1cc4
|
diff --git a/lib/modem.js b/lib/modem.js
index <HASH>..<HASH> 100644
--- a/lib/modem.js
+++ b/lib/modem.js
@@ -200,13 +200,15 @@ Modem.prototype.buildPayload = function(err, isStream, statusCodes, openStdin, r
if (err) return cb(err, null);
if (statusCodes[res.statusCode] !== true) {
- var msg = new Error(
- 'HTTP code is ' + res.statusCode + ' which indicates error: ' + statusCodes[res.statusCode] + ' - ' + json
- );
- msg.reason = statusCodes[res.statusCode];
- msg.statusCode = res.statusCode;
- msg.json = json;
- cb(msg, null);
+ getCause(isStream, res, json, function(err, cause) {
+ var msg = new Error(
+ 'HTTP code is ' + res.statusCode + ' which indicates error: ' + statusCodes[res.statusCode] + ' - ' + cause
+ );
+ msg.reason = statusCodes[res.statusCode];
+ msg.statusCode = res.statusCode;
+ msg.json = json;
+ cb(msg, null);
+ });
} else {
if (openStdin) {
cb(null, new HttpDuplex(req, res));
@@ -216,6 +218,20 @@ Modem.prototype.buildPayload = function(err, isStream, statusCodes, openStdin, r
cb(null, json);
}
}
+
+ function getCause(isStream, res, json, callback){
+ var chunks = '';
+ if (isStream) {
+ res.on('data', function(chunk) {
+ chunks += chunk;
+ });
+ res.on('end', function() {
+ callback(null, chunks)
+ });
+ } else {
+ callback(null, json);
+ }
+ }
};
Modem.prototype.demuxStream = function(stream, stdout, stderr) {
|
get error cause from stream response, too
|
apocas_docker-modem
|
train
|
126482884cd4e4bb5d58d1afdb6937b2afcebb70
|
diff --git a/protowhat/Feedback.py b/protowhat/Feedback.py
index <HASH>..<HASH> 100644
--- a/protowhat/Feedback.py
+++ b/protowhat/Feedback.py
@@ -8,7 +8,20 @@ class Feedback:
self.highlighting_disabled = state.highlighting_disabled
def _highlight_data(self):
- return self.highlight.get_position()
+ if hasattr(self.highlight, "get_position"):
+ return self.highlight.get_position()
+ elif hasattr(self.highlight, "first_token") and hasattr(
+ self.highlight, "last_token"
+ ):
+ # used by pythonwhat
+ # a plugin+register system would be better
+ # if many different AST interfaces exist
+ return {
+ "line_start": self.highlight.first_token.start[0],
+ "column_start": self.highlight.first_token.start[1],
+ "line_end": self.highlight.last_token.end[0],
+ "column_end": self.highlight.last_token.end[1],
+ }
def get_highlight_data(self):
result = None
|
Make Feedback compatible with pythonwhat
|
datacamp_protowhat
|
train
|
c90ffff8551b1d4ff4b83aed95226daf8135d14a
|
diff --git a/cmd/influxd/run/config.go b/cmd/influxd/run/config.go
index <HASH>..<HASH> 100644
--- a/cmd/influxd/run/config.go
+++ b/cmd/influxd/run/config.go
@@ -3,6 +3,7 @@ package run
import (
"errors"
"fmt"
+ "os"
"os/user"
"path/filepath"
@@ -73,15 +74,20 @@ func NewConfig() *Config {
func NewDemoConfig() (*Config, error) {
c := NewConfig()
+ var homeDir string
// By default, store meta and data files in current users home directory
u, err := user.Current()
- if err != nil {
+ if err == nil {
+ homeDir = u.HomeDir
+ } else if os.Getenv("HOME") != "" {
+ homeDir = os.Getenv("HOME")
+ } else {
return nil, fmt.Errorf("failed to determine current user for storage")
}
- c.Meta.Dir = filepath.Join(u.HomeDir, ".influxdb/meta")
- c.Data.Dir = filepath.Join(u.HomeDir, ".influxdb/data")
- c.HintedHandoff.Dir = filepath.Join(u.HomeDir, ".influxdb/hh")
+ c.Meta.Dir = filepath.Join(homeDir, ".influxdb/meta")
+ c.Data.Dir = filepath.Join(homeDir, ".influxdb/data")
+ c.HintedHandoff.Dir = filepath.Join(homeDir, ".influxdb/hh")
c.Admin.Enabled = true
c.Monitoring.Enabled = false
|
Fallback to HOME env var to determine home dir
When building a static binary without cgo, user.Current() uses cgo
and always fails. Fallback to to HOME env variable if it exists.
|
influxdata_influxdb
|
train
|
cf246ae123df6eb07608ea2d0a333855fdbbafef
|
diff --git a/osgi/service/src/main/java/org/jboss/as/osgi/service/BundleLifecycleIntegration.java b/osgi/service/src/main/java/org/jboss/as/osgi/service/BundleLifecycleIntegration.java
index <HASH>..<HASH> 100644
--- a/osgi/service/src/main/java/org/jboss/as/osgi/service/BundleLifecycleIntegration.java
+++ b/osgi/service/src/main/java/org/jboss/as/osgi/service/BundleLifecycleIntegration.java
@@ -28,6 +28,7 @@ import static org.jboss.as.server.Services.JBOSS_SERVER_CONTROLLER;
import java.io.InputStream;
import java.net.URI;
import java.net.URISyntaxException;
+import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
@@ -277,12 +278,14 @@ public final class BundleLifecycleIntegration extends BundleLifecyclePlugin {
}
depUnit.getAttachment(Attachments.DEFERRED_ACTIVATION_COUNT).incrementAndGet();
- phaseService.setMode(Mode.ACTIVE);
- final StabilityMonitor monitor = new StabilityMonitor();
+
+ StabilityMonitor monitor = new StabilityMonitor();
monitor.addController(phaseService);
- final Set<ServiceController<?>> failed = new HashSet<ServiceController<?>>();
+ Set<ServiceController<?>> failed = new HashSet<ServiceController<?>>();
+ Set<ServiceController<?>> problems = new HashSet<ServiceController<?>>();
try {
- monitor.awaitStability(failed, null);
+ phaseService.setMode(Mode.ACTIVE);
+ monitor.awaitStability(failed, problems);
} catch (final InterruptedException ex) {
// ignore
} finally {
@@ -291,10 +294,14 @@ public final class BundleLifecycleIntegration extends BundleLifecyclePlugin {
// In case of failure we go back to NEVER
- if (failed.size() > 0) {
+ if (failed.size() > 0 || problems.size() > 0) {
+ List<ServiceController<?>> combined = new ArrayList<ServiceController<?>>();
+ combined.addAll(failed);
+ combined.addAll(problems);
+
// Collect the first start exception
StartException startex = null;
- for (ServiceController<?> aux : failed) {
+ for (ServiceController<?> aux : combined) {
if (aux.getStartException() != null) {
startex = aux.getStartException();
break;
|
[AS7-<I>] Activating deferred module phase may return prematurely
|
wildfly_wildfly
|
train
|
716e220ba1b43a0b992c7b06d2f7b1c8a81bf07f
|
diff --git a/cherrypy/test/test_core.py b/cherrypy/test/test_core.py
index <HASH>..<HASH> 100644
--- a/cherrypy/test/test_core.py
+++ b/cherrypy/test/test_core.py
@@ -691,7 +691,8 @@ hello
self.getPage('/maxrequestsize/upload', h, "POST", b)
self.assertBody('Size: 5')
- if cherrypy.server.httpserverclass.__name__ == "WSGIServer":
+ httpcls = cherrypy.server.httpserverclass
+ if httpcls and httpcls.__name__ == "WSGIServer":
cherrypy.config.update({
'/maxrequestsize': {'server.maxRequestBodySize': 3}})
self.getPage('/maxrequestsize/upload', h, "POST", b)
|
Bah. Missed one.
|
cherrypy_cheroot
|
train
|
fee5db091707ccc85cc56baf53f9f1360cf991be
|
diff --git a/pyimgur/__init__.py b/pyimgur/__init__.py
index <HASH>..<HASH> 100644
--- a/pyimgur/__init__.py
+++ b/pyimgur/__init__.py
@@ -876,6 +876,10 @@ class Imgur:
return self.get_gallery_album(id)
finally:
sys.stdout = original_stdout # turn STDOUT back on
+
+ if not self.is_imgur_url(url):
+ return None
+
objects = {'album': {'regex': "a/(?P<id>[\w.]*?)$",
'method': self.get_album},
'comment': {'regex': "gallery/\w*/comment/(?P<id>[\w.]*?)$",
|
Make behaviour explicit for non-imgur url.
|
Damgaard_PyImgur
|
train
|
8e1636067aab02b10480a3ed4373b2e3310156b6
|
diff --git a/src/Biblys/Isbn/Isbn.php b/src/Biblys/Isbn/Isbn.php
index <HASH>..<HASH> 100644
--- a/src/Biblys/Isbn/Isbn.php
+++ b/src/Biblys/Isbn/Isbn.php
@@ -204,11 +204,7 @@ class Isbn
*/
public function validate()
{
- $errors = $this->_errors;
- if ($errors) {
- throw new \Exception($errors[0]);
- }
-
+ Parser::parse($this->_input);
return true;
}
|
Call Parser::parse in validate method
|
biblys_isbn
|
train
|
dec5b871316b8e5183a08239d6251b163c4f5456
|
diff --git a/config/debugbar.php b/config/debugbar.php
index <HASH>..<HASH> 100644
--- a/config/debugbar.php
+++ b/config/debugbar.php
@@ -92,17 +92,17 @@ return [
'db' => true, // Show database (PDO) queries and bindings
'views' => true, // Views with their data
'route' => true, // Current route information
+ 'auth' => true, // Display Laravel authentication status
+ 'gate' => true, // Display Laravel Gate checks
+ 'session' => true, // Display session data
+ 'symfony_request' => true, // Only one can be enabled..
+ 'mail' => true, // Catch mail messages
'laravel' => false, // Laravel version and environment
'events' => false, // All events fired
'default_request' => false, // Regular or special Symfony request logger
- 'symfony_request' => true, // Only one can be enabled..
- 'mail' => true, // Catch mail messages
'logs' => false, // Add the latest log messages
'files' => false, // Show the included files
'config' => false, // Display config settings
- 'auth' => false, // Display Laravel authentication status
- 'gate' => false, // Display Laravel Gate checks
- 'session' => true, // Display session data
],
/*
@@ -116,13 +116,13 @@ return [
'options' => [
'auth' => [
- 'show_name' => false, // Also show the users name/email in the debugbar
+ 'show_name' => true, // Also show the users name/email in the debugbar
],
'db' => [
'with_params' => true, // Render SQL with the parameters substituted
+ 'backtrace' => true, // Use a backtrace to find the origin of the query in your files.
'timeline' => false, // Add the queries to the timeline
- 'backtrace' => false, // EXPERIMENTAL: Use a backtrace to find the origin of the query in your files.
- 'explain' => [ // EXPERIMENTAL: Show EXPLAIN output on queries
+ 'explain' => [ // Show EXPLAIN output on queries
'enabled' => false,
'types' => ['SELECT'], // ['SELECT', 'INSERT', 'UPDATE', 'DELETE']; for MySQL 5.6.3+
],
|
Enable more collectors + backtrace
|
barryvdh_laravel-debugbar
|
train
|
b484b14be535e0ad30374df9021d321977842b71
|
diff --git a/code/MSSQLDatabase.php b/code/MSSQLDatabase.php
index <HASH>..<HASH> 100644
--- a/code/MSSQLDatabase.php
+++ b/code/MSSQLDatabase.php
@@ -188,12 +188,10 @@ class MSSQLDatabase extends SS_Database {
/**
* Get the version of MSSQL.
- * NOTE: not yet implemented for MSSQL, we just return 2008; the minimum supported version
- * @return float
+ * @return string
*/
public function getVersion() {
- user_error("getVersion not implemented", E_USER_WARNING);
- return 2008;
+ return trim($this->query("SELECT CONVERT(char(15), SERVERPROPERTY('ProductVersion'))")->value());
}
/**
|
ENHANCEMENT Added MSSQLDatabase::getVersion() to determine the current version of MSSQL in use
|
silverstripe_silverstripe-mssql
|
train
|
0648fb06266e1f5c82820f6b0ad182622adc4a14
|
diff --git a/lib/model/folder.go b/lib/model/folder.go
index <HASH>..<HASH> 100644
--- a/lib/model/folder.go
+++ b/lib/model/folder.go
@@ -953,9 +953,13 @@ func (f *folder) updateLocals(fs []protocol.FileInfo) {
f.fset.Update(protocol.LocalDeviceID, fs)
filenames := make([]string, len(fs))
+ f.forcedRescanPathsMut.Lock()
for i, file := range fs {
filenames[i] = file.Name
+ // No need to rescan a file that was changed since anyway.
+ delete(f.forcedRescanPaths, file.Name)
}
+ f.forcedRescanPathsMut.Unlock()
f.evLogger.Log(events.LocalIndexUpdated, map[string]interface{}{
"folder": f.ID,
@@ -1005,6 +1009,9 @@ func (f *folder) handleForcedRescans() {
}
f.forcedRescanPaths = make(map[string]struct{})
f.forcedRescanPathsMut.Unlock()
+ if len(paths) == 0 {
+ return
+ }
batch := newFileInfoBatch(func(fs []protocol.FileInfo) error {
f.fset.Update(protocol.LocalDeviceID, fs)
|
lib/model: Don't force rescan already changed items (#<I>)
|
syncthing_syncthing
|
train
|
004a994025edf1fe2ab1b97edf2b189226371c10
|
diff --git a/lib/linear_expression.rb b/lib/linear_expression.rb
index <HASH>..<HASH> 100644
--- a/lib/linear_expression.rb
+++ b/lib/linear_expression.rb
@@ -37,6 +37,12 @@ module Cassowary
terms.empty?
end
+ def value
+ terms.keys.inject(constant) do |memo, v|
+ memo + coefficient_for(v) * v.value
+ end
+ end
+
def each_variable_and_coefficient(&block)
terms.each_pair(&block)
end
diff --git a/test/test_variables.rb b/test/test_variables.rb
index <HASH>..<HASH> 100644
--- a/test/test_variables.rb
+++ b/test/test_variables.rb
@@ -30,4 +30,19 @@ class VariablesTests < Test::Unit::TestCase
x = Cassowary::SlackVariable.new
assert_equal "<CV#0x" + x.object_id.to_s(16) + ">", x.inspect
end
+
+ def test_evaluating_linear_expressions
+ x = Cassowary::Variable.new name: 'x', value: 20
+ expr = x / 10
+ assert expr.value == 2
+
+ expr *= 2
+ assert expr.value == 4
+
+ expr += 10
+ assert expr.value == 14
+
+ expr -= 3
+ assert expr.value == 11
+ end
end
|
helper method to evaluate linear expressions
|
timfel_cassowary-ruby
|
train
|
7df5e09618feede3052a5a7edfa4570b43cf64b2
|
diff --git a/classes/phing/tasks/ext/coverage/CoverageSetupTask.php b/classes/phing/tasks/ext/coverage/CoverageSetupTask.php
index <HASH>..<HASH> 100644
--- a/classes/phing/tasks/ext/coverage/CoverageSetupTask.php
+++ b/classes/phing/tasks/ext/coverage/CoverageSetupTask.php
@@ -111,10 +111,6 @@ class CoverageSetupTask extends Task
function init()
{
- if (!extension_loaded('xdebug'))
- {
- throw new Exception("CoverageSetupTask depends on Xdebug being installed.");
- }
}
function main()
diff --git a/classes/phing/tasks/ext/phpunit/PHPUnitTask.php b/classes/phing/tasks/ext/phpunit/PHPUnitTask.php
index <HASH>..<HASH> 100644
--- a/classes/phing/tasks/ext/phpunit/PHPUnitTask.php
+++ b/classes/phing/tasks/ext/phpunit/PHPUnitTask.php
@@ -202,6 +202,11 @@ class PHPUnitTask extends Task
*/
function main()
{
+ if ($this->codecoverage && !extension_loaded('xdebug'))
+ {
+ throw new Exception("PHPUnitTask depends on Xdebug being installed to gather code coverage information.");
+ }
+
$tests = array();
if ($this->printsummary)
|
Move dependency on Xdebug to PHPUnitTask
|
phingofficial_phing
|
train
|
b9361ef5bfbf71d876687007ddadb949b6505804
|
diff --git a/js/bcex.js b/js/bcex.js
index <HASH>..<HASH> 100644
--- a/js/bcex.js
+++ b/js/bcex.js
@@ -325,24 +325,28 @@ module.exports = class bcex extends Exchange {
request += '?' + this.urlencode(query);
}
let url = this.urls['api'] + request;
- if (api === 'private') {
- this.checkRequiredCredentials();
- if (method !== 'GET') {
- let messageParts = ['api_key=' + this.encodeURIComponent(this.apiKey)];
- let paramsKeys = Object.keys(params).sort();
- for (let i = 0; i < paramsKeys.length; i++) {
- let paramKey = paramsKeys[i];
- let param = params[paramKey];
- messageParts.push(this.encode(paramKey) + '=' + encodeURIComponent(param));
- }
+ if (method === 'POST') {
+ let messageParts = []
+ let paramsKeys = Object.keys(params).sort();
+ for (let i = 0; i < paramsKeys.length; i++) {
+ let paramKey = paramsKeys[i];
+ let param = params[paramKey];
+ messageParts.push(this.encode(paramKey) + '=' + encodeURIComponent(param));
+ }
+ if (api === 'private') {
+ this.checkRequiredCredentials();
+ messageParts.unshift('api_key=' + this.encodeURIComponent(this.apiKey));
body = messageParts.join('&');
let message = body + "&secret_key=" + this.secret;
let signedMessage = this.hash(message);
body = body + "&sign=" + signedMessage;
params['sign'] = signedMessage;
- headers = {}
- headers['Content-Type'] = 'application/x-www-form-urlencoded';
}
+ else {
+ body = messageParts.join('&');
+ }
+ headers = {}
+ headers['Content-Type'] = 'application/x-www-form-urlencoded';
}
return { 'url': url, 'method': method, 'body': body, 'headers': headers };
}
|
Refactor sign() for non-private POST
|
ccxt_ccxt
|
train
|
b6730c063c3c18bd7b4693974dc462dec86fb080
|
diff --git a/lib/stream/page-crawling-stream.js b/lib/stream/page-crawling-stream.js
index <HASH>..<HASH> 100644
--- a/lib/stream/page-crawling-stream.js
+++ b/lib/stream/page-crawling-stream.js
@@ -1,18 +1,11 @@
-var stream = require('stream'),
+var BufferedReadableStream = require('./buffered-readable-stream'),
request = require('request'),
- util = require('util'),
- Dequeue = require('dequeue');
-
-Dequeue.prototype.isEmpty = function () {
- return this.length == 0;
-}
+ util = require('util');
var PageCrawlingStream = function (options) {
options = options || {};
- options.objectMode = true;
- stream.Readable.call(this, options);
- this._buffer = new Dequeue();
+ BufferedReadableStream.call(this, options);
this._nextUrl = options.start;
if (options.step)
@@ -21,7 +14,7 @@ var PageCrawlingStream = function (options) {
this._parse = options.parse;
};
-util.inherits(PageCrawlingStream, stream.Readable);
+util.inherits(PageCrawlingStream, BufferedReadableStream);
PageCrawlingStream.prototype._parse = function (current, result, push) {};
@@ -29,7 +22,7 @@ PageCrawlingStream.prototype._step = function (current, result) {
return null;
};
-PageCrawlingStream.prototype._fetchNextPage = function () {
+PageCrawlingStream.prototype._more = function (push, callback) {
var url = this._nextUrl,
buffer = this._buffer;
@@ -43,33 +36,15 @@ PageCrawlingStream.prototype._fetchNextPage = function () {
};
// emit chunks
- this._parse(url, ret, buffer.push.bind(buffer));
+ this._parse(url, ret, push);
// determine next url
this._nextUrl = this._step(url, ret);
- if (this._nextUrl == null)
- this._lastUrl = true;
- this._pushBuffered();
+ // done if next URL is null
+ callback(this._nextUrl == null);
}.bind(this));
};
-PageCrawlingStream.prototype._pushBuffered = function () {
- while (!this._buffer.isEmpty()) {
- if (this.push(this._buffer.shift()) === false)
- break;
- }
-};
-
-PageCrawlingStream.prototype._read = function (size) {
- if (!this._buffer.isEmpty()) {
- this._pushBuffered();
- } else if (this._lastUrl) {
- this.push(null);
- } else {
- this._fetchNextPage();
- }
-};
-
module.exports = PageCrawlingStream;
|
PageCrawlingStream inherits BufferedReadableStream.
|
simonpai_streamy-data
|
train
|
0e3e09306e9c4c1d0bc2f1ea4ca1b6fcd517053a
|
diff --git a/src/frontend/org/voltdb/SnapshotDaemon.java b/src/frontend/org/voltdb/SnapshotDaemon.java
index <HASH>..<HASH> 100644
--- a/src/frontend/org/voltdb/SnapshotDaemon.java
+++ b/src/frontend/org/voltdb/SnapshotDaemon.java
@@ -53,6 +53,8 @@ import org.voltdb.client.ClientResponse;
import org.voltdb.client.ProcedureCallback;
import org.voltdb.sysprocs.SnapshotSave;
+import com.google.common.base.Throwables;
+
/**
* A scheduler of automated snapshots and manager of archived and retained snapshots.
* The new functionality for handling truncation snapshots operates separately from
@@ -1411,7 +1413,22 @@ public class SnapshotDaemon implements SnapshotCompletionInterest {
byte blocking;
String format = "native";
if (params.length == 1) {
- JSONObject jsObj = new JSONObject((String)params[0]);
+ JSONObject jsObj;
+ try {
+ jsObj = new JSONObject((String)params[0]);
+ } catch (Exception e) {
+
+ final ClientResponseImpl errorResponse =
+ new ClientResponseImpl(ClientResponseImpl.GRACEFUL_FAILURE,
+ new VoltTable[0],
+ Throwables.getStackTraceAsString(e),
+ invocation.clientHandle);
+ ByteBuffer buf = ByteBuffer.allocate(errorResponse.getSerializedSize() + 4);
+ buf.putInt(buf.capacity() - 4);
+ errorResponse.flattenToBuffer(buf).flip();
+ c.writeStream().enqueue(buf);
+ return;
+ }
path = jsObj.getString("path");
nonce = jsObj.getString("nonce");
blocking = (byte)(jsObj.optBoolean("block", false) ? 1 : 0);
|
Add error handling for parsing user-supplied JSON which can be corrupt.
|
VoltDB_voltdb
|
train
|
bdda64e878c07288cfa64ba206d238c7244f2848
|
diff --git a/src/platforms/mp/compiler/codegen/compile-to-template.js b/src/platforms/mp/compiler/codegen/compile-to-template.js
index <HASH>..<HASH> 100644
--- a/src/platforms/mp/compiler/codegen/compile-to-template.js
+++ b/src/platforms/mp/compiler/codegen/compile-to-template.js
@@ -344,7 +344,12 @@ export class TemplateGenerator {
return `${binder}${mpType}="_pe"`
})
eventAttrs = eventAttrs.join(' ')
- return ` data-cid="{{ ${cid} }}" data-hid="{{ ${this.genHid(el)} }}" ${eventAttrs}`
+
+ /**
+ * when the element is in a slot, it will recieve "$c" as the actual component instance id
+ * othewise, using the current scope which usually the parent component in the template
+ */
+ return ` data-cid="{{ $c || ${cid} }}" data-hid="{{ ${this.genHid(el)} }}" ${eventAttrs}`
}
genIfConditions (el): string {
@@ -418,7 +423,13 @@ export class TemplateGenerator {
tail = `, $t: ${extractHidTail(_hid)}`
}
- return `${defaultSlot}<template is="{{ s_${slotName} || '${defaultSlotName}' }}" data="{{ ...$root[ s ], $root${tail} }}"${this.genFor(el)}/>`
+ /**
+ * use "$c" to passing the actual vdom host component instance id to slot template
+ * because the vdom is actually stored in the component's _vnodes
+ * event hanlders searching depends on this id
+ */
+
+ return `${defaultSlot}<template is="{{ s_${slotName} || '${defaultSlotName}' }}" data="{{ ...$root[ s ], $root${tail}, $c: c }}"${this.genFor(el)}/>`
}
genChildren (el): string {
|
chore: support slot snippet with v-on
|
kaola-fed_megalo
|
train
|
3393c4668a2c1d80b40edd867186b75b9c9bf0ec
|
diff --git a/lahja/endpoint.py b/lahja/endpoint.py
index <HASH>..<HASH> 100644
--- a/lahja/endpoint.py
+++ b/lahja/endpoint.py
@@ -145,8 +145,9 @@ class Endpoint:
if in_futures:
future = self._futures[config.filter_event_id]
- future.set_result(item)
- self._futures.pop(config.filter_event_id)
+ if not future.done():
+ future.set_result(item)
+ self._futures.pop(config.filter_event_id, None)
if in_queue:
for queue in self._queues[event_type]:
|
Do not set results on futures that are cancelled or done
|
ethereum_lahja
|
train
|
a405d7beaf4d2f5cd046732b40fe1ffcce73d7d5
|
diff --git a/PayPalOneTouch/src/main/java/com/paypal/android/sdk/onetouch/core/config/Recipe.java b/PayPalOneTouch/src/main/java/com/paypal/android/sdk/onetouch/core/config/Recipe.java
index <HASH>..<HASH> 100644
--- a/PayPalOneTouch/src/main/java/com/paypal/android/sdk/onetouch/core/config/Recipe.java
+++ b/PayPalOneTouch/src/main/java/com/paypal/android/sdk/onetouch/core/config/Recipe.java
@@ -80,8 +80,7 @@ public abstract class Recipe<T extends Recipe<T>> {
public boolean isValidAppTarget(Context context) {
for (String allowedWalletTarget : getTargetPackagesInReversePriorityOrder()) {
boolean isIntentAvailable = AppHelper.isIntentAvailable(context,
- AppSwitchHelper.createBaseIntent(getTargetIntentAction(), getTargetComponent(),
- allowedWalletTarget));
+ AppSwitchHelper.createBaseIntent(getTargetIntentAction(), allowedWalletTarget));
String locale = Locale.getDefault().toString();
// if no locales are specified, then presumed to be allowed for all
diff --git a/PayPalOneTouch/src/main/java/com/paypal/android/sdk/onetouch/core/sdk/AppSwitchHelper.java b/PayPalOneTouch/src/main/java/com/paypal/android/sdk/onetouch/core/sdk/AppSwitchHelper.java
index <HASH>..<HASH> 100644
--- a/PayPalOneTouch/src/main/java/com/paypal/android/sdk/onetouch/core/sdk/AppSwitchHelper.java
+++ b/PayPalOneTouch/src/main/java/com/paypal/android/sdk/onetouch/core/sdk/AppSwitchHelper.java
@@ -1,6 +1,5 @@
package com.paypal.android.sdk.onetouch.core.sdk;
-import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
@@ -38,16 +37,14 @@ public class AppSwitchHelper {
WALLET_APP_CERT_ISSUER, WALLET_APP_PUBLIC_KEY_HASH_CODE);
}
- public static Intent createBaseIntent(String action, String componentName, String packageName) {
+ public static Intent createBaseIntent(String action, String packageName) {
return new Intent(action)
- .setComponent(ComponentName.unflattenFromString(packageName + "/" + componentName))
.setPackage(packageName);
}
public static Intent getAppSwitchIntent(ContextInspector contextInspector,
ConfigManager configManager, Request request, Recipe recipe) {
- Intent intent = createBaseIntent(recipe.getTargetIntentAction(),
- recipe.getTargetComponent(), WALLET_APP_PACKAGE);
+ Intent intent = createBaseIntent(recipe.getTargetIntentAction(), WALLET_APP_PACKAGE);
intent.putExtra("version", recipe.getProtocol().getVersion());
// app_guid now present on all v1/v2 requests. Deemed not sensitive.
intent.putExtra("app_guid", contextInspector.getInstallationGUID());
diff --git a/PayPalOneTouch/src/test/java/com/paypal/android/sdk/onetouch/core/sdk/AppSwitchHelperTest.java b/PayPalOneTouch/src/test/java/com/paypal/android/sdk/onetouch/core/sdk/AppSwitchHelperTest.java
index <HASH>..<HASH> 100644
--- a/PayPalOneTouch/src/test/java/com/paypal/android/sdk/onetouch/core/sdk/AppSwitchHelperTest.java
+++ b/PayPalOneTouch/src/test/java/com/paypal/android/sdk/onetouch/core/sdk/AppSwitchHelperTest.java
@@ -62,10 +62,9 @@ public class AppSwitchHelperTest {
@Test
public void createBaseIntent_createsIntentCorrectly() {
- Intent intent = AppSwitchHelper.createBaseIntent("action", "component", "package");
+ Intent intent = AppSwitchHelper.createBaseIntent("action", "package");
assertEquals("action", intent.getAction());
- assertEquals("package/component", intent.getComponent().flattenToString());
assertEquals("package", intent.getPackage());
}
@@ -78,8 +77,6 @@ public class AppSwitchHelperTest {
request, getMockRecipe(1));
assertEquals("com.paypal.android.lib.authenticator.activity.v1.TouchActivity", intent.getAction());
- assertEquals("com.paypal.android.p2pmobile/com.paypal.android.lib.authenticator.activity.v1.TouchActivity",
- intent.getComponent().flattenToString());
assertEquals("com.paypal.android.p2pmobile", intent.getPackage());
assertEquals("1.0", intent.getStringExtra("version"));
}
@@ -93,8 +90,6 @@ public class AppSwitchHelperTest {
request, getMockRecipe(2));
assertEquals("com.paypal.android.lib.authenticator.activity.v2.TouchActivity", intent.getAction());
- assertEquals("com.paypal.android.p2pmobile/com.paypal.android.lib.authenticator.activity.v2.TouchActivity",
- intent.getComponent().flattenToString());
assertEquals("com.paypal.android.p2pmobile", intent.getPackage());
assertEquals("2.0", intent.getStringExtra("version"));
}
@@ -108,8 +103,6 @@ public class AppSwitchHelperTest {
request, getMockRecipe(3));
assertEquals("com.paypal.android.lib.authenticator.activity.v3.TouchActivity", intent.getAction());
- assertEquals("com.paypal.android.p2pmobile/com.paypal.android.lib.authenticator.activity.v3.TouchActivity",
- intent.getComponent().flattenToString());
assertEquals("com.paypal.android.p2pmobile", intent.getPackage());
assertEquals("3.0", intent.getStringExtra("version"));
}
|
Stop setting component for PayPal wallet intents
|
braintree_braintree_android
|
train
|
97253621715170608dd4397eb77b6f6af9236596
|
diff --git a/src/discoursegraphs/merging.py b/src/discoursegraphs/merging.py
index <HASH>..<HASH> 100755
--- a/src/discoursegraphs/merging.py
+++ b/src/discoursegraphs/merging.py
@@ -12,6 +12,35 @@ from discoursegraphs.readwrite.anaphoricity import AnaphoraDocumentGraph
from discoursegraphs.readwrite.rst import RSTGraph, rst_tokenlist
from discoursegraphs.readwrite.tiger import TigerDocumentGraph, tiger_tokenlist
+def add_rst_to_tiger(tiger_docgraph, rst_graph):
+ """
+ adds an RSTGraph to a TigerDocumentGraph, thereby adding edges from
+ each RST segment to the (Tiger) tokens they represent.
+
+ Parameters
+ ----------
+ tiger_docgraph : TigerDocumentGraph
+ multidigraph representing a syntax annotated (TigerXML) document
+ rst_graph : RSTGraph
+ multidigraph representing a RST annotated (RS3) document
+ """
+ tiger_tokens = tiger_tokenlist(tiger_docgraph)
+ rst_tokens = rst_tokenlist(rst_graph)
+
+ tiger_docgraph.add_nodes_from(rst_graph.nodes(data=True))
+ tiger_docgraph.add_edges_from(rst_graph.edges(data=True))
+
+ for i, (tiger_tok, tiger_sent_id, tiger_tok_id) in enumerate(tiger_tokens):
+ rst_token, rst_segment_node_id = rst_tokens[i]
+ if tiger_tok == rst_token:
+ tiger_docgraph.add_node(tiger_tok_id, layers={'rst', 'rst:token'},
+ attr_dict={'rst:token': rst_token})
+ tiger_docgraph.add_edge(int(rst_segment_node_id), tiger_tok_id,
+ layers={'rst', 'rst:token'})
+ else: # token mismatch
+ raise ValueError("Tokenization mismatch between:\n" \
+ "{0}\n{1}".format(tiger_filepath, rst_filepath))
+
if __name__ == '__main__':
if len(sys.argv) != 4:
@@ -25,26 +54,11 @@ if __name__ == '__main__':
assert os.path.isfile(tiger_filepath)
tiger_docgraph = TigerDocumentGraph(tiger_filepath)
- tiger_tokens = tiger_tokenlist(tiger_docgraph)
-
assert os.path.isfile(rst_filepath)
rst_graph = RSTGraph(rst_filepath)
- rst_tokens = rst_tokenlist(rst_graph)
-
- tiger_docgraph.add_nodes_from(rst_graph.nodes(data=True))
- tiger_docgraph.add_edges_from(rst_graph.edges(data=True))
-
- for i, (tiger_tok, tiger_sent_id, tiger_tok_id) in enumerate(tiger_tokens):
- rst_token, rst_segment_node_id = rst_tokens[i]
- if tiger_tok == rst_token:
- tiger_docgraph.add_node(tiger_tok_id, layers={'rst', 'rst:token'},
- attr_dict={'rst:token': rst_token})
- tiger_docgraph.add_edge(int(rst_segment_node_id), tiger_tok_id,
- layers={'rst', 'rst:token'})
- else: # token mismatch
- raise ValueError("Tokenization mismatch between:\n" \
- "{0}\n{1}".format(tiger_filepath, rst_filepath))
+
+ add_rst_to_tiger(tiger_docgraph, rst_graph)
for i, node in tiger_docgraph.nodes(data=True):
print i, node
- #~ write_gpickle(tiger_docgraph, pickle_filepath)
+ write_gpickle(tiger_docgraph, pickle_filepath)
|
merging: outsourced rst-tiger merging to function
|
arne-cl_discoursegraphs
|
train
|
25f85485a69cdb0c515cc06516caccdd9bdfa669
|
diff --git a/applications/default/public/js/controllers.js b/applications/default/public/js/controllers.js
index <HASH>..<HASH> 100644
--- a/applications/default/public/js/controllers.js
+++ b/applications/default/public/js/controllers.js
@@ -144,7 +144,7 @@ angular.module('choko')
$http.post(url, $scope.data)
.success(function(data, status, headers, config) {
- $scope.data = data;
+ $scope.data = data.data;
delete $scope.errors;
if (redirect) {
$location.path(redirect);
|
Fixing $scope.data upon user settings saving.
|
recidive_choko
|
train
|
477ee64a35838a344e003f18cecbb5f8412430aa
|
diff --git a/phy/waveform/loader.py b/phy/waveform/loader.py
index <HASH>..<HASH> 100644
--- a/phy/waveform/loader.py
+++ b/phy/waveform/loader.py
@@ -97,7 +97,7 @@ class WaveformLoader(object):
else:
return self.n_channels_traces
- def load_at(self, time):
+ def _load_at(self, time):
"""Load a waveform at a given time."""
time_o = time - self._offset
if not (0 <= time_o < self.n_samples_trace):
@@ -116,7 +116,11 @@ class WaveformLoader(object):
if margin_after > 0:
assert margin_before >= 0
waveforms = waveforms[margin_before:-margin_after, :]
- return waveforms
+ # Make a subselection with the specified channels.
+ if self._channels is not None:
+ return waveforms[..., self._channels]
+ else:
+ return waveforms
def __getitem__(self, item):
"""Load a number of waveforms."""
@@ -135,9 +139,5 @@ class WaveformLoader(object):
waveforms = np.empty(shape, dtype=np.float32)
# Load all spikes.
for i, time in enumerate(spikes):
- waveforms[i:i+1, ...] = self.load_at(time)
- # Make a subselection with the specified channels.
- if self._channels is not None:
- return waveforms[..., self._channels]
- else:
- return waveforms
+ waveforms[i:i+1, ...] = self._load_at(time)
+ return waveforms
diff --git a/phy/waveform/tests/test_loader.py b/phy/waveform/tests/test_loader.py
index <HASH>..<HASH> 100644
--- a/phy/waveform/tests/test_loader.py
+++ b/phy/waveform/tests/test_loader.py
@@ -45,7 +45,7 @@ def test_loader():
# Extract a waveform.
t = spike_times[10]
- waveform = loader.load_at(t)
+ waveform = loader._load_at(t)
assert waveform.shape == (n_samples, n_channels)
assert_array_equal(waveform, traces[t - 20:t + 20, :])
@@ -58,7 +58,26 @@ def test_loader():
# Invalid time.
with raises(ValueError):
- loader.load_at(200000)
+ loader._load_at(200000)
+
+
+def test_loader_channels():
+ n_samples_trace, n_channels = 1000, 50
+ n_samples = 40
+
+ traces = artificial_traces(n_samples_trace, n_channels)
+
+ # Create a loader.
+ loader = WaveformLoader(traces, n_samples=n_samples)
+ loader.traces = traces
+ channels = [10, 20, 30]
+ loader.channels = channels
+ assert loader.channels == channels
+ assert loader[500].shape == (1, n_samples, 3)
+ assert loader[[500, 501, 600, 300]].shape == (4, n_samples, 3)
+
+ with raises(NotImplementedError):
+ loader[500:510]
def test_loader_filter():
@@ -80,7 +99,7 @@ def test_loader_filter():
filter_margin=5)
t = spike_times[5]
- waveform_filtered = loader.load_at(t)
+ waveform_filtered = loader._load_at(t)
traces_filtered = my_filter(traces)
traces_filtered[t - 20:t + 20, :]
assert np.allclose(waveform_filtered, traces_filtered[t - 20:t + 20, :])
|
Increased coverage in waveform loader.
|
kwikteam_phy
|
train
|
e88716f1f9f9e5606e72dfb4a4e70b523bb8637f
|
diff --git a/tests/unit/helpers.js b/tests/unit/helpers.js
index <HASH>..<HASH> 100644
--- a/tests/unit/helpers.js
+++ b/tests/unit/helpers.js
@@ -1,4 +1,4 @@
-/* global suite */
+/* global suite, sinon */
var utils = require('../../src/utils');
@@ -66,4 +66,28 @@ module.exports.getSkipCISuite = function () {
} else {
return suite;
}
+};
+
+module.exports.MockNetworkAdapter = function MockNetworkAdapter() {
+ this.setServerUrl = sinon.stub();
+ this.setApp = sinon.stub();
+ this.setRoom = sinon.stub();
+ this.setWebRtcOptions = sinon.stub();
+
+ this.setServerConnectListeners = sinon.stub();
+ this.setRoomOccupantListener = sinon.stub();
+ this.setDataChannelListeners = sinon.stub();
+
+ this.connect = sinon.stub();
+ this.shouldStartConnectionTo = sinon.stub();
+ this.startStreamConnection = sinon.stub();
+ this.closeStreamConnection = sinon.stub();
+ this.getConnectStatus = sinon.stub();
+
+ this.sendData = sinon.stub();
+ this.sendDataGuaranteed = sinon.stub();
+ this.broadcastData = sinon.stub();
+ this.broadcastDataGuaranteed = sinon.stub();
+
+ this.getServerTime = sinon.stub();
};
\ No newline at end of file
diff --git a/tests/unit/networked.test.js b/tests/unit/networked.test.js
index <HASH>..<HASH> 100644
--- a/tests/unit/networked.test.js
+++ b/tests/unit/networked.test.js
@@ -21,33 +21,9 @@ suite('networked', function() {
naf.utils.whenEntityLoaded(scene, done);
}
- function MockNetworkAdapter() {
- this.setServerUrl = sinon.stub();
- this.setApp = sinon.stub();
- this.setRoom = sinon.stub();
- this.setWebRtcOptions = sinon.stub();
-
- this.setServerConnectListeners = sinon.stub();
- this.setRoomOccupantListener = sinon.stub();
- this.setDataChannelListeners = sinon.stub();
-
- this.connect = sinon.stub();
- this.shouldStartConnectionTo = sinon.stub();
- this.startStreamConnection = sinon.stub();
- this.closeStreamConnection = sinon.stub();
- this.getConnectStatus = sinon.stub();
-
- this.sendData = sinon.stub();
- this.sendDataGuaranteed = sinon.stub();
- this.broadcastData = sinon.stub();
- this.broadcastDataGuaranteed = sinon.stub();
-
- this.getServerTime = sinon.stub();
- }
-
setup(function(done) {
naf.options.compressSyncPackets = false;
- naf.connection.setNetworkAdapter(new MockNetworkAdapter());
+ naf.connection.setNetworkAdapter(new helpers.MockNetworkAdapter());
initScene(function() {
entity = document.querySelector('#test-entity');
networked = entity.components['networked'];
diff --git a/tests/unit/networked_attachLocalTemplate.test.js b/tests/unit/networked_attachLocalTemplate.test.js
index <HASH>..<HASH> 100644
--- a/tests/unit/networked_attachLocalTemplate.test.js
+++ b/tests/unit/networked_attachLocalTemplate.test.js
@@ -1,4 +1,4 @@
-/* global assert, process, setup, suite, test, teardown, sinon */
+/* global assert, process, setup, suite, test, teardown */
require('aframe');
var helpers = require('./helpers');
var naf = require('../../src/NafIndex');
@@ -21,33 +21,9 @@ suite('networked attachLocalTemplate:false', function() {
naf.utils.whenEntityLoaded(scene, done);
}
- function MockNetworkAdapter() {
- this.setServerUrl = sinon.stub();
- this.setApp = sinon.stub();
- this.setRoom = sinon.stub();
- this.setWebRtcOptions = sinon.stub();
-
- this.setServerConnectListeners = sinon.stub();
- this.setRoomOccupantListener = sinon.stub();
- this.setDataChannelListeners = sinon.stub();
-
- this.connect = sinon.stub();
- this.shouldStartConnectionTo = sinon.stub();
- this.startStreamConnection = sinon.stub();
- this.closeStreamConnection = sinon.stub();
- this.getConnectStatus = sinon.stub();
-
- this.sendData = sinon.stub();
- this.sendDataGuaranteed = sinon.stub();
- this.broadcastData = sinon.stub();
- this.broadcastDataGuaranteed = sinon.stub();
-
- this.getServerTime = sinon.stub();
- }
-
setup(function(done) {
naf.options.compressSyncPackets = false;
- naf.connection.setNetworkAdapter(new MockNetworkAdapter());
+ naf.connection.setNetworkAdapter(new helpers.MockNetworkAdapter());
initScene(function() {
entity = document.querySelector('#test-entity');
networked = entity.components['networked'];
|
Move MockNetworkAdapter to helpers.
|
networked-aframe_networked-aframe
|
train
|
bc94bfd89b89b44bd82264dc8f06aadb15e64682
|
diff --git a/master/buildbot/db/pool.py b/master/buildbot/db/pool.py
index <HASH>..<HASH> 100644
--- a/master/buildbot/db/pool.py
+++ b/master/buildbot/db/pool.py
@@ -98,7 +98,9 @@ class DBThreadPool(threadpool.ThreadPool):
def do_with_engine(self, callable, *args, **kwargs):
"""
- Like L{do}, but with an SQLAlchemy Engine as the first argument
+ Like L{do}, but with an SQLAlchemy Engine as the first argument. This
+ is only used for schema manipulation, and is not used at master
+ runtime.
"""
def thd():
if self.__broken_sqlite: # see bug #1810
diff --git a/master/buildbot/test/util/db.py b/master/buildbot/test/util/db.py
index <HASH>..<HASH> 100644
--- a/master/buildbot/test/util/db.py
+++ b/master/buildbot/test/util/db.py
@@ -45,12 +45,12 @@ class RealDatabaseMixin(object):
# - avoids repetitive implementation
# - cooperates better at runtime with thread-sensitive DBAPI's
- def __thd_clean_database(self, engine):
+ def __thd_clean_database(self, conn):
# drop the known tables
- model.Model.metadata.drop_all(bind=engine, checkfirst=True)
+ model.Model.metadata.drop_all(bind=conn, checkfirst=True)
# see if we can find any other tables to drop
- meta = MetaData(bind=engine)
+ meta = MetaData(bind=conn)
meta.reflect()
meta.drop_all()
@@ -94,14 +94,14 @@ class RealDatabaseMixin(object):
self.db_pool = pool.DBThreadPool(self.db_engine)
log.msg("cleaning database %s" % self.db_url)
- d = self.db_pool.do_with_engine(self.__thd_clean_database)
+ d = self.db_pool.do(self.__thd_clean_database)
d.addCallback(lambda _ :
self.db_pool.do(self.__thd_create_tables, table_names))
return d
def tearDownRealDatabase(self):
if self.__want_pool:
- return self.db_pool.do_with_engine(self.__thd_clean_database)
+ return self.db_pool.do(self.__thd_clean_database)
else:
return defer.succeed(None)
|
limit use of do_with_engine to schema functions
|
buildbot_buildbot
|
train
|
523b1213419e4de2cfa0a05f1df5bf5e0fd1f8f1
|
diff --git a/command/get_test.go b/command/get_test.go
index <HASH>..<HASH> 100644
--- a/command/get_test.go
+++ b/command/get_test.go
@@ -5,6 +5,7 @@ import (
"strings"
"testing"
+ "github.com/hashicorp/terraform/helper/copy"
"github.com/mitchellh/cli"
)
@@ -57,14 +58,10 @@ func TestGet_multipleArgs(t *testing.T) {
}
func TestGet_noArgs(t *testing.T) {
- cwd, err := os.Getwd()
- if err != nil {
- t.Fatalf("err: %s", err)
- }
- if err := os.Chdir(testFixturePath("get")); err != nil {
- t.Fatalf("err: %s", err)
- }
- defer os.Chdir(cwd)
+ td := tempDir(t)
+ copy.CopyDir(testFixturePath("get"), td)
+ defer os.RemoveAll(td)
+ defer testChdir(t, td)()
ui := new(cli.MockUi)
c := &GetCommand{
|
fix get test working directory
use a temp dir and cleanup
|
hashicorp_terraform
|
train
|
c75d06e9dd7316dbfa3abb87e8014793c82eb138
|
diff --git a/tests/ActiveDoctrine/Tests/Functional/FunctionalTestCase.php b/tests/ActiveDoctrine/Tests/Functional/FunctionalTestCase.php
index <HASH>..<HASH> 100644
--- a/tests/ActiveDoctrine/Tests/Functional/FunctionalTestCase.php
+++ b/tests/ActiveDoctrine/Tests/Functional/FunctionalTestCase.php
@@ -5,6 +5,7 @@ namespace ActiveDoctrine\Tests\Functional;
use Doctrine\DBAL\DriverManager;
use Doctrine\DBAL\Configuration;
use Doctrine\DBAL\Schema\SchemaException;
+use Doctrine\DBAL\Logging\DebugStack;
/**
* FunctionalTestCase
@@ -15,6 +16,7 @@ abstract class FunctionalTestCase extends \PHPUnit_Framework_TestCase
{
protected static $connection;
+ protected static $logger;
protected $loaded_schemas = [];
public function tearDown()
@@ -37,6 +39,25 @@ abstract class FunctionalTestCase extends \PHPUnit_Framework_TestCase
}
}
+ protected function getSQLLogger()
+ {
+ if (!isset(static::$logger)) {
+ static::$logger = new DebugStack();
+ }
+
+ return static::$logger;
+ }
+
+ protected function resetQueryCount()
+ {
+ $this->getSQLLogger()->queries = [];
+ }
+
+ protected function getQueryCount()
+ {
+ return count($this->getSQLLogger()->queries);
+ }
+
public function getConn()
{
if (isset(static::$connection)) {
@@ -45,6 +66,7 @@ abstract class FunctionalTestCase extends \PHPUnit_Framework_TestCase
//set up a special logger that counts queries here
$configuration = new Configuration();
+ $configuration->setSQLLogger($this->getSQLLogger());
static::$connection = DriverManager::getConnection($this->getConnectionParams(), $configuration);
return static::$connection;
|
Adding logging and helper methods to FunctionalTestCase.
|
glynnforrest_active-doctrine
|
train
|
2d70091b11496c829d0bfccbcfad986034b4437b
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -4,7 +4,7 @@ from setuptools import setup,find_packages
setup(
name='synapse',
- version='0.0.12', # sync with synapse.version!
+ version='0.0.13', # sync with synapse.version!
description='Synapse Distributed Key-Value Hypergraph Analysis Framework',
author='Invisigoth Kenshoto',
author_email='invisigoth.kenshoto@gmail.com',
diff --git a/synapse/__init__.py b/synapse/__init__.py
index <HASH>..<HASH> 100644
--- a/synapse/__init__.py
+++ b/synapse/__init__.py
@@ -14,7 +14,7 @@ if msgpack.version < (0,4,2):
if tornado.version_info < (3,2,2):
raise Exception('synapse requires tornado >= 3.2.2')
-version = (0,0,12)
+version = (0,0,13)
verstring = '.'.join([ str(x) for x in version ])
import synapse.lib.modules as s_modules
|
update versions in prep for <I> tag
|
vertexproject_synapse
|
train
|
88d27165756b36dce9dc485a49ba8fce8415cfc8
|
diff --git a/stream/src/main/java/com/annimon/stream/IntStream.java b/stream/src/main/java/com/annimon/stream/IntStream.java
index <HASH>..<HASH> 100644
--- a/stream/src/main/java/com/annimon/stream/IntStream.java
+++ b/stream/src/main/java/com/annimon/stream/IntStream.java
@@ -3,7 +3,6 @@ package com.annimon.stream;
import com.annimon.stream.function.*;
import java.util.Arrays;
-import java.util.Iterator;
import java.util.NoSuchElementException;
/**
@@ -397,21 +396,10 @@ public final class IntStream {
public IntStream distinct() {
// While functional and quick to implement, this approach is not very efficient.
// An efficient version requires an int-specific map/set implementation.
-
- final Stream<Integer> dist = boxed().distinct();
-
- return new IntStream(new PrimitiveIterator.OfInt() {
-
- Iterator<? extends Integer> inner = dist.getIterator();
-
+ return boxed().distinct().mapToInt(new ToIntFunction<Integer>() {
@Override
- public int nextInt() {
- return inner.next().intValue();
- }
-
- @Override
- public boolean hasNext() {
- return inner.hasNext();
+ public int applyAsInt(Integer t) {
+ return t;
}
});
}
|
Optimize IntStream distinct with mapToInt. Close #<I>
|
aNNiMON_Lightweight-Stream-API
|
train
|
d9fe23f7d472fef1275b1b22d812b83549610a81
|
diff --git a/raft.go b/raft.go
index <HASH>..<HASH> 100644
--- a/raft.go
+++ b/raft.go
@@ -520,6 +520,9 @@ func (r *Raft) leaderLoop() {
}
}
+ var numProcessed int
+ start := time.Now()
+
for {
e := r.leaderState.inflight.Front()
if e == nil {
@@ -532,10 +535,19 @@ func (r *Raft) leaderLoop() {
}
// Measure the commit time
metrics.MeasureSince([]string{"raft", "commitTime"}, commitLog.dispatch)
+
r.processLogs(idx, commitLog)
+
r.leaderState.inflight.Remove(e)
+ numProcessed++
}
+ // Measure the time to enqueue batch of logs for FSM to apply
+ metrics.MeasureSince([]string{"raft", "fsm", "enqueue"}, start)
+
+ // Count the number of logs enqueued
+ metrics.SetGauge([]string{"raft", "commitNumLogs"}, float32(numProcessed))
+
if stepDown {
if r.conf.ShutdownOnRemove {
r.logger.Printf("[INFO] raft: Removed ourself, shutting down")
@@ -848,7 +860,10 @@ func (r *Raft) dispatchLogs(applyLogs []*logFuture) {
term := r.getCurrentTerm()
lastIndex := r.getLastIndex()
- logs := make([]*Log, len(applyLogs))
+
+ n := len(applyLogs)
+ logs := make([]*Log, n)
+ metrics.SetGauge([]string{"raft", "leader", "dispatchNumLogs"}, float32(n))
for idx, applyLog := range applyLogs {
applyLog.dispatch = now
@@ -879,10 +894,10 @@ func (r *Raft) dispatchLogs(applyLogs []*logFuture) {
}
}
-// processLogs is used to apply all the committed entires that haven't been
+// processLogs is used to apply all the committed entries that haven't been
// applied up to the given index limit.
// This can be called from both leaders and followers.
-// Followers call this from AppendEntires, for n entires at a time, and always
+// Followers call this from AppendEntries, for n entries at a time, and always
// pass future=nil.
// Leaders call this once per inflight when entries are committed. They pass
// the future from inflights.
@@ -899,7 +914,6 @@ func (r *Raft) processLogs(index uint64, future *logFuture) {
// Get the log, either from the future or from our log store
if future != nil && future.log.Index == idx {
r.processLog(&future.log, future)
-
} else {
l := new(Log)
if err := r.logs.GetLog(idx, l); err != nil {
|
Add additional metrics regarding log dispatching and committal (#<I>)
|
hashicorp_raft
|
train
|
d39ce4c04650bb359b1f95931b1a083bc3ed8e52
|
diff --git a/src/library.global.php b/src/library.global.php
index <HASH>..<HASH> 100644
--- a/src/library.global.php
+++ b/src/library.global.php
@@ -40,11 +40,6 @@ return array(
'file' => 'boldgrid-inspirations/boldgrid-inspirations.php',
'priority' => 20,
),
- 'boldgrid-staging' => array(
- 'key' => 'staging',
- 'file' => 'boldgrid-staging/boldgrid-staging.php',
- 'priority' => 60,
- ),
'boldgrid-gallery' => array(
'key' => 'gallery-wc-canvas',
'file' => 'boldgrid-gallery/wc-gallery.php',
|
Remove staging from recommend plugins. Resolves #<I>
|
BoldGrid_library
|
train
|
a28b5d295e5df4192da4519f242c4d55c5451828
|
diff --git a/config/interpolate_funcs_test.go b/config/interpolate_funcs_test.go
index <HASH>..<HASH> 100644
--- a/config/interpolate_funcs_test.go
+++ b/config/interpolate_funcs_test.go
@@ -2400,21 +2400,23 @@ type testFunctionCase struct {
}
func testFunction(t *testing.T, config testFunctionConfig) {
- for i, tc := range config.Cases {
- ast, err := hil.Parse(tc.Input)
- if err != nil {
- t.Fatalf("Case #%d: input: %#v\nerr: %v", i, tc.Input, err)
- }
-
- result, err := hil.Eval(ast, langEvalConfig(config.Vars))
- if err != nil != tc.Error {
- t.Fatalf("Case #%d:\ninput: %#v\nerr: %v", i, tc.Input, err)
- }
-
- if !reflect.DeepEqual(result.Value, tc.Result) {
- t.Fatalf("%d: bad output for input: %s\n\nOutput: %#v\nExpected: %#v",
- i, tc.Input, result.Value, tc.Result)
- }
+ t.Helper()
+ for _, tc := range config.Cases {
+ t.Run(tc.Input, func(t *testing.T) {
+ ast, err := hil.Parse(tc.Input)
+ if err != nil {
+ t.Fatalf("unexpected parse error: %s", err)
+ }
+
+ result, err := hil.Eval(ast, langEvalConfig(config.Vars))
+ if err != nil != tc.Error {
+ t.Fatalf("unexpected eval error: %s", err)
+ }
+
+ if !reflect.DeepEqual(result.Value, tc.Result) {
+ t.Errorf("wrong result\ngiven: %s\ngot: %#v\nwant: %#v", tc.Input, result.Value, tc.Result)
+ }
+ })
}
}
|
config: improve interpolation function test output
These tests were written before subtest support was available. By running
them as subtests we can get better output in the event of an error, or
in verbose mode.
|
hashicorp_terraform
|
train
|
ce7fea51f07887b22c09ec9f07ce454557fd79e4
|
diff --git a/lib/bonsai/page.rb b/lib/bonsai/page.rb
index <HASH>..<HASH> 100644
--- a/lib/bonsai/page.rb
+++ b/lib/bonsai/page.rb
@@ -145,7 +145,7 @@ module Bonsai
:navigation => Bonsai::Navigation.tree,
:updated_at => mtime,
:created_at => ctime
- }.merge(formatted_content).merge(disk_assets)
+ }.merge(formatted_content).merge(disk_assets).merge(Bonsai.site)
end
private
diff --git a/lib/bonsai/templates/site.yml b/lib/bonsai/templates/site.yml
index <HASH>..<HASH> 100644
--- a/lib/bonsai/templates/site.yml
+++ b/lib/bonsai/templates/site.yml
@@ -1,4 +1,7 @@
-:site:
- :name: Your website title
- :url: http://yourdomain.com
- :copyright: 2010
\ No newline at end of file
+# The key value pairs found below are available within the templates.
+# {{site_name}}, {{url}}, {{copyright}} and {{analytics_code}}
+
+:site_name: Your website title
+:url: http://yourdomain.com
+:copyright: 2010
+:analytics_code: UA-00000000-0
\ No newline at end of file
diff --git a/spec/bonsai/page_spec.rb b/spec/bonsai/page_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/bonsai/page_spec.rb
+++ b/spec/bonsai/page_spec.rb
@@ -180,6 +180,12 @@ describe Bonsai::Page do
end
end
+ it "should include global site variables from site.yml" do
+ @page.to_hash[:site_name].should == "Bonsai"
+ @page.to_hash[:url].should == "http://tinytree.info"
+ @page.to_hash[:copyright].should == 2010
+ end
+
describe "disk_assets" do
before :all do
@vars = @page.to_hash
|
Push variables from site.yml through to page variables
|
benschwarz_bonsai
|
train
|
bfea1c389b470e703ec6d667134186a5242652fe
|
diff --git a/lib/packages/ios.js b/lib/packages/ios.js
index <HASH>..<HASH> 100644
--- a/lib/packages/ios.js
+++ b/lib/packages/ios.js
@@ -1,5 +1,6 @@
var path = require('path');
var execFile = require('child_process').execFile;
+var fs = require('fs-extra');
var _ = require('underscore');
var copyAppFiles = function (util, callback) {
@@ -23,7 +24,7 @@ var xcrunPackage = function (util, source, destination, callback) {
util.log('Copy to ' + ipaPath + '.');
// Create dist directory.
- util.mkdirpSync('device');
+ fs.mkdirpSync(destination);
// Create ipa file.
var args = [
@@ -44,7 +45,7 @@ var packageIpa = function (util, callback) {
if (file) {
var source = util.source.resolve(file);
- var destination = path.join(util.destination.resolve('device'));
+ var destination = util.destination.resolve();
xcrunPackage(util, source, destination, callback);
}
@@ -66,7 +67,8 @@ module.exports = function (grunt, options, callback) {
var util = require('./util')(grunt,
options,
'ios',
- path.join('build', device));
+ path.join('build', device),
+ device);
if (device === 'device') {
// Device build.
|
iOS emulator app copy to dist/ios/emulator.
|
GrayBullet_grunt-cordova-ng
|
train
|
cdfab681d73927057ec4a352fe4f08a9f677b889
|
diff --git a/tests/force-del-outside-cwd.ava.js b/tests/force-del-outside-cwd.ava.js
index <HASH>..<HASH> 100644
--- a/tests/force-del-outside-cwd.ava.js
+++ b/tests/force-del-outside-cwd.ava.js
@@ -10,7 +10,10 @@ test.before(async () => {
plop = await nodePlop(`${mockPath}/sub/plopfile.js`);
});
-test('Force del outside cwd test', async function (t) {
+// chdir doesn't like to work in modern versions of ava (or many other test frameworks)
+// EG: process.chdir() is not supported in workers
+// We should rewrite this test
+test.skip('Force del outside cwd test', async function (t) {
process.chdir(`${mockPath}/sub`);
fs.mkdirSync(testSrcPath);
fs.writeFileSync(testSrcPath + '/test.txt', 'init content');
|
Skip test that breaks in new ava
|
amwmedia_node-plop
|
train
|
7126ff9c362c33e02b1b3b14f48804db2fe4e475
|
diff --git a/src/views/part/_objectParts.php b/src/views/part/_objectParts.php
index <HASH>..<HASH> 100644
--- a/src/views/part/_objectParts.php
+++ b/src/views/part/_objectParts.php
@@ -10,8 +10,10 @@
*/
/**
- * @var array
+ * @var array $data
*/
+
+use hipanel\helpers\StringHelper;
use yii\helpers\Html;
echo \hipanel\grid\GridView::widget([
@@ -66,5 +68,36 @@ echo \hipanel\grid\GridView::widget([
return implode(', ', $serials);
},
],
+ [
+ 'label' => Yii::t('hipanel:stock', 'Manufacturer'),
+ 'attribute' => 'model_brand_label',
+ 'value' => static function ($models) {
+ return implode(', ', array_unique(array_map(fn ($parts) => reset($parts)->model_brand_label, $models)));
+ }
+ ],
+ [
+ 'label' => Yii::t('hipanel.finance.price', 'Price'),
+ 'attribute' => 'price',
+ 'value' => static function ($models) {
+ return implode(', ', array_map(static function ($parts) {
+ $part = reset($parts);
+ if (empty($part->price)) {
+ return '';
+ }
+ return (count($parts) > 1 ? count($parts) . 'x' : '') . $part->price . StringHelper::getCurrencySymbol($part->currency);
+ }, $models));
+ }
+ ],
+ [
+ 'label' => Yii::t('hipanel:stock', 'Order No.'),
+ 'format' => 'raw',
+ 'attribute' => 'order_no',
+ 'value' => static function ($models) {
+ return implode(', ', array_map(static function ($parts) {
+ $part = reset($parts);
+ return Html::a($part->order_no, ['@order/view', 'id' => $part->order_id]);
+ }, $models));
+ },
+ ],
],
]);
|
added model_brand_label, price and order_no fields on objectParts view (#<I>)
* added model_brand_label, price and order_no fields on objectParts view
* minor
|
hiqdev_hipanel-module-stock
|
train
|
f780e469c1a3cc135c6184e0b18c8251a1af88dc
|
diff --git a/pysat/tests/test_instrument.py b/pysat/tests/test_instrument.py
index <HASH>..<HASH> 100644
--- a/pysat/tests/test_instrument.py
+++ b/pysat/tests/test_instrument.py
@@ -397,7 +397,7 @@ class TestBasics(object):
"""Test that correct day loads (checking down to the sec)."""
self.testInst.load(self.ref_time.year, self.ref_doy)
- assert (self.testInst.index[0] == self.ref_time)
+ self.eval_successful_load()
return
def test_basic_instrument_load_leap_year(self):
|
STY: use eval methods
|
rstoneback_pysat
|
train
|
ea42f97f1b3fcda0d6d02134fd903e40814f8419
|
diff --git a/src/js/SwipeableViews/SwipeableView.js b/src/js/SwipeableViews/SwipeableView.js
index <HASH>..<HASH> 100644
--- a/src/js/SwipeableViews/SwipeableView.js
+++ b/src/js/SwipeableViews/SwipeableView.js
@@ -29,7 +29,7 @@ export default class SwipeableView extends Component {
static defaultProps = {
initialIndex: 0,
- threshold: .4,
+ threshold: .15,
};
componentWillReceiveProps(nextProps) {
@@ -79,7 +79,7 @@ export default class SwipeableView extends Component {
let distance = this.calcSwipeDistance(x, 0);
if(swipeDistance > deltaX && activeIndex + 1 < this.props.children.length) {
activeIndex++;
- } else if(swipeDistance < deltaX && activeIndex - 1 >= 0) {
+ } else if(swipeDistance < -deltaX && activeIndex - 1 >= 0) {
activeIndex--;
}
diff --git a/src/js/Tabs/Tabs.js b/src/js/Tabs/Tabs.js
index <HASH>..<HASH> 100644
--- a/src/js/Tabs/Tabs.js
+++ b/src/js/Tabs/Tabs.js
@@ -91,7 +91,7 @@ export default class Tabs extends Component {
const tabContainer = node.querySelector('.md-tabs-scroll-container');
const tabs = Array.prototype.slice.call(node.querySelectorAll('.md-tab'));
let maxWidth = tabs.reduce((prev, curr) => prev + curr.offsetWidth, 0) + threshold;
- maxWidth -= (tabContainer.offsetWidth - parseInt(this.props.style.marginLeft));
+ maxWidth -= (tabContainer.offsetWidth - parseInt(this.props.style.marginLeft || 0));
if(distance > 0) { // moving content left
distance = Math.min(distance, threshold);
|
Fixed swipable views threshold for changing on swipe and fixed tabs touch scroll
|
mlaursen_react-md
|
train
|
28f287cfb2f8255dfd029e2df6fb8e47218924f7
|
diff --git a/lib/restclient/abstract_response.rb b/lib/restclient/abstract_response.rb
index <HASH>..<HASH> 100644
--- a/lib/restclient/abstract_response.rb
+++ b/lib/restclient/abstract_response.rb
@@ -167,6 +167,11 @@ module RestClient
# parse location header and merge into existing URL
url = headers[:location]
+ # cannot follow redirection if there is no location header
+ unless url
+ raise exception_with_response
+ end
+
# handle relative redirects
unless url.start_with?('http')
url = URI.parse(request.url).merge(url).to_s
diff --git a/spec/unit/abstract_response_spec.rb b/spec/unit/abstract_response_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/unit/abstract_response_spec.rb
+++ b/spec/unit/abstract_response_spec.rb
@@ -104,5 +104,12 @@ describe RestClient::AbstractResponse, :include_helpers do
@response.should_receive(:follow_redirection).and_return('fake-redirection')
@response.return!.should eq 'fake-redirection'
end
+
+ it "should gracefully handle 302 redirect with no location header" do
+ @net_http_res = response_double(code: 302, location: nil)
+ @request = request_double()
+ @response = MyAbstractResponse.new(@net_http_res, @request)
+ lambda { @response.return! }.should raise_error RestClient::Found
+ end
end
end
|
Don't attempt to follow redirection w/o Location.
When there is no Location header, we cannot follow any redirection.
Per RFC <I>, a server SHOULD generate a Location header field in the
response for redirection requests, but servers do not always do this.
<URL>
|
rest-client_rest-client
|
train
|
80ad19d09eaa66c07be80612f33aece81a4cf530
|
diff --git a/lib/multirepo/commands/update-command.rb b/lib/multirepo/commands/update-command.rb
index <HASH>..<HASH> 100644
--- a/lib/multirepo/commands/update-command.rb
+++ b/lib/multirepo/commands/update-command.rb
@@ -1,6 +1,7 @@
require "multirepo/utility/console"
require "multirepo/logic/performer"
require "multirepo/files/tracking-files"
+require "multirepo/git/git-runner"
module MultiRepo
class UpdateCommand < Command
@@ -48,29 +49,34 @@ module MultiRepo
end
def update_tracking_files_step
+ main_changed = false
if @main_only
Console.log_step("Updating main repo...")
- update_main
+ main_changed = update_main
elsif @deps_only
Console.log_step("Updating dependencies...")
update_dependencies
else
Console.log_step("Updating main repo and dependencies...")
update_dependencies
- update_main
+ main_changed = update_main
end
+
+ show_diff(".") if main_changed && Console.ask("Show diff?")
end
def update_dependencies
+ any_changed = false
Performer.dependencies.each do |dependency|
path = dependency.config_entry.path
name = dependency.config_entry.name
- update_tracking_files(path, name) if Utils.multirepo_enabled?(path)
+ any_changed |= update_tracking_files(path, name) if Utils.multirepo_enabled?(path)
end
+ return any_changed
end
def update_main
- update_tracking_files(".", "main repo")
+ return update_tracking_files(".", "main repo")
end
def update_tracking_files(path, name)
@@ -89,6 +95,12 @@ module MultiRepo
committed = tracking_files.commit("[multirepo] Updated tracking files manually")
Console.log_info("Committed tracking files") if committed
end
+
+ return changed
+ end
+
+ def show_diff(path)
+ GitRunner.run_as_system(path, "diff .multirepo.lock")
end
end
end
|
<I> : UpdateCommand now asks to show a lock file diff if the main repo's lock file has changed.
|
fortinmike_git-multirepo
|
train
|
2e8584eb69b3d3788f9c7c0429b827259d0c444b
|
diff --git a/src/Creiwork.php b/src/Creiwork.php
index <HASH>..<HASH> 100644
--- a/src/Creiwork.php
+++ b/src/Creiwork.php
@@ -44,6 +44,7 @@ class Creiwork
{
$this->configPath = $configPath;
$this->configDirectory = dirname($this->configPath) . '/';
+ $this->config = new Config($configPath);
$this->container = $this->buildContainer();
}
@@ -96,14 +97,14 @@ class Creiwork
{
return [
- Routerunner::class => function (ContainerInterface $container, Config $config) {
- $routerunner = new Routerunner($this->generateFilePath($config->get('router-config')), $container);
+ Routerunner::class => function (ContainerInterface $container) {
+ $routerunner = new Routerunner($this->getRouterConfigFile(), $container);
$routerunner->setPostProcessor($container->get(ResponseBuilder::class));
return $routerunner;
},
- Plates\Engine::class => function (Config $config) {
- return new Plates\Engine($this->generateFilePath($config->get('template-dir')));
+ Plates\Engine::class => function () {
+ return new Plates\Engine($this->getTemplateDirectory());
},
LoggerInterface::class => function (StreamHandler $streamHandler) {
@@ -112,8 +113,8 @@ class Creiwork
return $logger;
},
- StreamHandler::class => function (Config $config) {
- return new StreamHandler($this->generateFilePath($config->get('logger-dir') . '/info.log'), Logger::INFO);
+ StreamHandler::class => function () {
+ return new StreamHandler($this->getLoggerDirectory() . '/info.log', Logger::INFO);
},
ServerRequestInterface::class => factory([ServerRequest::class, 'fromGlobals']),
@@ -128,7 +129,6 @@ class Creiwork
return $session->getSegment('Creios\Creiwork');
},
- Config::class => object()->constructor($this->configPath)
];
}
@@ -151,4 +151,28 @@ class Creiwork
return $this->configDirectory . $filePath;
}
+ /**
+ * @return string
+ */
+ private function getLoggerDirectory()
+ {
+ return $this->generateFilePath($this->config->get('logger-dir'));
+ }
+
+ /**
+ * @return string
+ */
+ private function getTemplateDirectory()
+ {
+ return $this->generateFilePath($this->config->get('template-dir'));
+ }
+
+ /**
+ * @return string
+ */
+ private function getRouterConfigFile()
+ {
+ return $this->generateFilePath($this->config->get('router-config'));
+ }
+
}
\ No newline at end of file
|
Added methods for simple config access
|
creios_creiwork-framework
|
train
|
bd19b5610952ffa5b12792019c50c31d58bbf4e0
|
diff --git a/doc/examples/tornado_change_stream_example.py b/doc/examples/tornado_change_stream_example.py
index <HASH>..<HASH> 100644
--- a/doc/examples/tornado_change_stream_example.py
+++ b/doc/examples/tornado_change_stream_example.py
@@ -83,9 +83,15 @@ class ChangesHandler(tornado.websocket.WebSocketHandler):
ChangesHandler.update_cache(change)
+change_stream = None
+
+
async def watch(collection):
- async for change in collection.watch():
- ChangesHandler.on_change(change)
+ global change_stream
+
+ async with collection.watch() as change_stream:
+ async for change in change_stream:
+ ChangesHandler.on_change(change)
def main():
@@ -103,7 +109,13 @@ def main():
loop = tornado.ioloop.IOLoop.current()
# Start watching collection for changes.
loop.add_callback(watch, collection)
- loop.start()
+ try:
+ loop.start()
+ except KeyboardInterrupt:
+ pass
+ finally:
+ if change_stream is not None:
+ change_stream.close()
if __name__ == "__main__":
diff --git a/motor/core.py b/motor/core.py
index <HASH>..<HASH> 100644
--- a/motor/core.py
+++ b/motor/core.py
@@ -462,15 +462,56 @@ class AgnosticCollection(AgnosticBaseProperties):
Returns a :class:`~MotorChangeStream` cursor which iterates over changes
on this collection. Introduced in MongoDB 3.6.
+ A change stream continues waiting indefinitely for matching change
+ events. Code like the following allows a program to cancel the change
+ stream and exit.
+
.. code-block:: python3
- async with db.collection.watch() as stream:
- async for change in stream:
- print(change)
+ change_stream = None
+
+ async def watch_collection():
+ global change_stream
+
+ # Using the change stream in an "async with" block
+ # ensures it is canceled promptly if your code breaks
+ # from the loop or throws an exception.
+ async with db.collection.watch() as change_stream:
+ async for change in stream:
+ print(change)
+
+ # Tornado
+ from tornado.ioloop import IOLoop
- Using the change stream in an "async with" block as shown above ensures
- it is canceled promptly if your code breaks from the loop or throws an
- exception.
+ def main():
+ loop = IOLoop.current()
+ # Start watching collection for changes.
+ loop.add_callback(watch_collection)
+ try:
+ loop.start()
+ except KeyboardInterrupt:
+ pass
+ finally:
+ if change_stream is not None:
+ change_stream.close()
+
+ # asyncio
+ from asyncio import get_event_loop
+
+ def main():
+ loop = get_event_loop()
+ task = loop.create_task(watch_collection)
+
+ try:
+ loop.run_forever()
+ except KeyboardInterrupt:
+ pass
+ finally:
+ if change_stream is not None:
+ change_stream.close()
+
+ # Prevent "Task was destroyed but it is pending!"
+ loop.run_until_complete(task)
The :class:`~MotorChangeStream` async iterable blocks
until the next change document is returned or an error is raised. If
|
MOTOR-<I> Example clean shutdown w/ change stream
|
mongodb_motor
|
train
|
d14b0778f517558cc821451a31bebac17a64c16b
|
diff --git a/client/extensions/woocommerce/app/store-stats/controller.js b/client/extensions/woocommerce/app/store-stats/controller.js
index <HASH>..<HASH> 100644
--- a/client/extensions/woocommerce/app/store-stats/controller.js
+++ b/client/extensions/woocommerce/app/store-stats/controller.js
@@ -50,6 +50,8 @@ export default function StatsController( context ) {
// FIXME: Auto-converted from the Flux setTitle action. Please use <DocumentHead> instead.
context.store.dispatch( setTitle( translate( 'Stats', { textOnly: true } ) ) );
+ analytics.tracks.recordEvent( `calypso_woocommerce_stats_${ props.type }_page`, props );
+
const asyncComponent = ( props.type === 'orders' )
? <AsyncLoad
/* eslint-disable wpcalypso/jsx-classname-namespace */
|
WCS-stats: Add Tracks event for Order and ListView
|
Automattic_wp-calypso
|
train
|
65b9f5fabb92a18f114622536750305f87cd9421
|
diff --git a/massautocomplete.js b/massautocomplete.js
index <HASH>..<HASH> 100644
--- a/massautocomplete.js
+++ b/massautocomplete.js
@@ -12,20 +12,20 @@ angular.module('MassAutoComplete', [])
ESC: 27,
ENTER: 13,
UP: 38,
- DOWN: 40,
+ DOWN: 40
};
config.EVENTS = {
KEYDOWN: 'keydown',
RESIZE: 'resize',
- BLUR: 'blur',
+ BLUR: 'blur'
};
config.DEBOUNCE = {
position: 150,
attach: 300,
suggest: 200,
- blur: 150,
+ blur: 150
};
config.generate_random_id = function(prefix) {
@@ -60,7 +60,7 @@ angular.module('MassAutoComplete', [])
return {
restrict: 'A',
scope: {
- options: '&massAutocomplete',
+ options: '&massAutocomplete'
},
transclude: true,
template:
@@ -434,10 +434,10 @@ angular.module('MassAutoComplete', [])
restrict: 'A',
require: [
'^massAutocomplete',
- 'ngModel',
+ 'ngModel'
],
scope: {
- 'massAutocompleteItem' : '&',
+ 'massAutocompleteItem' : '&'
},
link: function (scope, element, attrs, required) {
// Prevent html5/browser auto completion.
|
Remove trailing commas
IE8 treats trailing commas badly, might as well avoid them.
|
hakib_MassAutocomplete
|
train
|
ab4743186316cf6897dd32d12452d45f746b09f0
|
diff --git a/src/Everzet/Behat/Formatter/PrettyFormatter.php b/src/Everzet/Behat/Formatter/PrettyFormatter.php
index <HASH>..<HASH> 100644
--- a/src/Everzet/Behat/Formatter/PrettyFormatter.php
+++ b/src/Everzet/Behat/Formatter/PrettyFormatter.php
@@ -340,16 +340,18 @@ class PrettyFormatter implements FormatterInterface
foreach ($scenariosStatusesCount as $status => $count) {
$statuses[] = sprintf('<%s>%s %s</%s>', $status, $count, $status, $status);
}
- $this->output->writeln(sprintf('%d scenarios (%s)',
- $scenariosCount, implode(', ', $statuses)
+ $this->output->writeln(sprintf('%d scenarios %s',
+ $scenariosCount
+ , count($statuses) ? sprintf('(%s)', implode(', ', $statuses)) : ''
));
$statuses = array();
foreach ($stepsStatusesCount as $status => $count) {
$statuses[] = sprintf('<%s>%s %s</%s>', $status, $count, $status, $status);
}
- $this->output->writeln(sprintf('%d steps (%s)',
- $stepsCount, implode(', ', $statuses)
+ $this->output->writeln(sprintf('%d steps %s',
+ $stepsCount
+ , count($statuses) ? sprintf('(%s)', implode(', ', $statuses)) : ''
));
$this->output->writeln(sprintf("%.3fs", $runner->getTime()));
|
don't show empty statistics parenths
|
Behat_Behat
|
train
|
9c6e71aa66cbcb5cf3a806d2b8005e53389f8110
|
diff --git a/test/integration-legacy/stop-on-error-request-spec.test.js b/test/integration-legacy/stop-on-error-request-spec.test.js
index <HASH>..<HASH> 100644
--- a/test/integration-legacy/stop-on-error-request-spec.test.js
+++ b/test/integration-legacy/stop-on-error-request-spec.test.js
@@ -150,9 +150,7 @@ describe('Option', function () {
iteration: function (err, cursor) {
check(function () {
expect(err).to.be.null;
- expect(cursor).to.deep.include({
- iteration: runStore.iteration
- });
+ expect(cursor).to.have.property('iteration', runStore.iteration);
testables.iterationsComplete.push(cursor.iteration);
});
@@ -256,7 +254,9 @@ describe('Option', function () {
scriptResult = results[0];
expect(result.error).to.be.undefined;
- expect(scriptResult.result.target).to.eql('test');
+ expect(scriptResult).to.deep.nested.include({
+ 'result.target': 'test'
+ });
// This should never be called for the
// second request.
@@ -299,7 +299,7 @@ describe('Option', function () {
return;
}
expect(err).to.be.null;
- expect(response.code).to.equal(200);
+ expect(response).to.have.property('code', 200);
expect(item.name).to.not.equal('Third Request');
});
|
Update request-spec tests with to.have.property chai helper
|
postmanlabs_postman-runtime
|
train
|
47b8caaf5f7e77045c308a0145e28a3910bdf195
|
diff --git a/openquakeserver/engine/views.py b/openquakeserver/engine/views.py
index <HASH>..<HASH> 100644
--- a/openquakeserver/engine/views.py
+++ b/openquakeserver/engine/views.py
@@ -153,9 +153,6 @@ def run_hazard_calc(request):
)
else:
# POST: run a new calculation
-
-
- # TODO: create temp dir
temp_dir = tempfile.mkdtemp()
files = {}
# Move each file to a new temp dir, using the upload file names
@@ -166,7 +163,6 @@ def run_hazard_calc(request):
files[key] = new_path
job_file = files.pop('job_config')
- # TODO: Get the user from the user authenticated in the `request`.
job = oq_engine.haz_job_from_file(
job_file, request.user.username, DEFAULT_LOG_LEVEL, []
)
|
engine/views:
Removed some TODO comments.
|
gem_oq-engine
|
train
|
4f2bb184f84081735499f797eb7b360b64cabae9
|
diff --git a/aeron-cluster/src/test/java/io/aeron/cluster/TestCluster.java b/aeron-cluster/src/test/java/io/aeron/cluster/TestCluster.java
index <HASH>..<HASH> 100644
--- a/aeron-cluster/src/test/java/io/aeron/cluster/TestCluster.java
+++ b/aeron-cluster/src/test/java/io/aeron/cluster/TestCluster.java
@@ -20,7 +20,6 @@ import io.aeron.Counter;
import io.aeron.archive.ArchiveThreadingMode;
import io.aeron.cluster.client.AeronCluster;
import io.aeron.cluster.client.EgressListener;
-import io.aeron.cluster.service.Cluster;
import io.aeron.driver.MediaDriver;
import io.aeron.driver.MinMulticastFlowControlSupplier;
import io.aeron.driver.ThreadingMode;
@@ -339,7 +338,7 @@ public class TestCluster implements AutoCloseable
continue;
}
- if (Cluster.Role.LEADER == node.role() && null == node.electionState())
+ if (node.isLeader() && null == node.electionState())
{
return node;
}
|
[Java] Make use of isLeader() method in test cluster.
|
real-logic_aeron
|
train
|
b7cce49dbc7d9dd6be1dd49e6764dd6f23f6d758
|
diff --git a/js/src/carousel.js b/js/src/carousel.js
index <HASH>..<HASH> 100644
--- a/js/src/carousel.js
+++ b/js/src/carousel.js
@@ -56,8 +56,8 @@ const CLASS_NAME_NEXT = 'carousel-item-next'
const CLASS_NAME_PREV = 'carousel-item-prev'
const SELECTOR_ACTIVE = '.active'
-const SELECTOR_ACTIVE_ITEM = '.active.carousel-item'
const SELECTOR_ITEM = '.carousel-item'
+const SELECTOR_ACTIVE_ITEM = SELECTOR_ACTIVE + SELECTOR_ITEM
const SELECTOR_ITEM_IMG = '.carousel-item img'
const SELECTOR_NEXT_PREV = '.carousel-item-next, .carousel-item-prev'
const SELECTOR_INDICATORS = '.carousel-indicators'
|
Carousel: use combined selector and drop variable used once
|
twbs_bootstrap
|
train
|
58101206cfbd7e5d7c5cee059cdd654f84810215
|
diff --git a/any_type_assert_impl.go b/any_type_assert_impl.go
index <HASH>..<HASH> 100644
--- a/any_type_assert_impl.go
+++ b/any_type_assert_impl.go
@@ -57,8 +57,11 @@ func (assert *anyTypeAssertImpl) AsInt() IntAssert {
func (assert *anyTypeAssertImpl) AsInt64() Int64Assert {
val, kind := valueWithKind(assert.actual)
- if kind == reflect.Int64 {
+ switch kind {
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
return &int64AssertImpl{assert.logFacade, val.Int()}
+ case reflect.Uint8, reflect.Uint16, reflect.Uint32:
+ return &int64AssertImpl{assert.logFacade, int64(val.Uint())}
}
assert.isTrue(false, "Cannot convert <%v> of type <%T> to <int64>.", assert.actual, assert.actual)
return &int64AssertImpl{}
diff --git a/any_type_assert_test.go b/any_type_assert_test.go
index <HASH>..<HASH> 100644
--- a/any_type_assert_test.go
+++ b/any_type_assert_test.go
@@ -123,6 +123,18 @@ func TestThatAsInt64HasErrorMessages(t *testing.T) {
)
}
+func TestThatAsInt64CanAcceptOtherTypes(t *testing.T) {
+ assert, mockT := setupWithMockT(t)
+ assert.That(int8(-111)).AsInt64()
+ assert.That(uint8(111)).AsInt64()
+ assert.That(int16(-30000)).AsInt64()
+ assert.That(uint16(65535)).AsInt64()
+ assert.That(int32(-10000000)).AsInt64()
+ assert.That(uint32(10000000)).AsInt64()
+ assert.That(2147483647).AsInt64()
+ mockT.HasNoErrors()
+}
+
func TestThatAsStringHasNoErrors(t *testing.T) {
assert, mockT := setupWithMockT(t)
assert.That("").AsString().IsEmpty()
|
accepting other types when converting to int<I>
|
assertgo_assert
|
train
|
0f33ea15f97e270207965cba6dc81362d5fd745f
|
diff --git a/src/Numbers/Client.php b/src/Numbers/Client.php
index <HASH>..<HASH> 100644
--- a/src/Numbers/Client.php
+++ b/src/Numbers/Client.php
@@ -171,7 +171,8 @@ class Client implements ClientAwareInterface
$searchResults = json_decode($response->getBody()->getContents(), true);
if(empty($searchResults)){
- throw new Exception\Request('number not found', 404);
+ // we did not find any results, that's OK
+ return [];
}
if(!isset($searchResults['count']) OR !isset($searchResults['numbers'])){
|
Return empty array if we didn't get any matching numbers
|
Nexmo_nexmo-php
|
train
|
3d2392926225f8ef6af69518ee4c6ab43b0f3b8f
|
diff --git a/lib/config_curator/collection.rb b/lib/config_curator/collection.rb
index <HASH>..<HASH> 100644
--- a/lib/config_curator/collection.rb
+++ b/lib/config_curator/collection.rb
@@ -1,3 +1,4 @@
+require 'active_support/core_ext/hash'
require 'active_support/core_ext/string'
require 'logger'
require 'yaml'
@@ -39,7 +40,8 @@ module ConfigCurator
# @param file [Hash] the yaml file to load
# @return [Hash] the loaded manifest
def load_manifest(file)
- self.manifest = YAML.load_file file
+ self.manifest =
+ ActiveSupport::HashWithIndifferentAccess.new YAML.load_file(file)
end
# Unit objects defined by the manifest and organized by type.
diff --git a/spec/collection_spec.rb b/spec/collection_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/collection_spec.rb
+++ b/spec/collection_spec.rb
@@ -23,7 +23,9 @@ describe ConfigCurator::Collection do
end
describe "#load_manifest" do
- let(:manifest) { {root: 'tmp'} }
+ let(:manifest) do
+ ActiveSupport::HashWithIndifferentAccess.new(root: 'tmp')
+ end
it "loads the manifest" do
path = 'path/to/manifest'
|
Allow manifest keys to be strings or symbols.
|
razor-x_config_curator
|
train
|
130b90a36e6068de38940ce2bed5a69b01de2a76
|
diff --git a/commands/configure_topic.js b/commands/configure_topic.js
index <HASH>..<HASH> 100644
--- a/commands/configure_topic.js
+++ b/commands/configure_topic.js
@@ -1,9 +1,9 @@
'use strict';
let FLAGS = [
- {name: 'retention-time', description: 'The length of time in milliseconds messages in the topic should be retained for.', hasValue: true, optional: true},
- {name: 'compaction', description: 'Enables compaction on the topic if passed', hasValue: false, optional: true},
- {name: 'no-compaction', description: 'Disables compaction on the topic if passed', hasValue: false, optional: true}
+ {name: 'retention-time', description: 'length of time messages in the topic should be retained for', hasValue: true, optional: true},
+ {name: 'compaction', description: 'enables compaction on the topic if passed', hasValue: false, optional: true},
+ {name: 'no-compaction', description: 'disables compaction on the topic if passed', hasValue: false, optional: true}
];
let DOT_WAITING_TIME = 200;
diff --git a/commands/create_topic.js b/commands/create_topic.js
index <HASH>..<HASH> 100644
--- a/commands/create_topic.js
+++ b/commands/create_topic.js
@@ -1,11 +1,11 @@
'use strict';
let FLAGS = [
- {name: 'partitions', description: 'number of partitions to give the topic', hasValue: true, optional: false},
- {name: 'replication-factor', description: 'number of replicas the topic should be created across', hasValue: true, optional: true},
- {name: 'retention-time', description: 'the length of time messages in the topic should be retained for in milliseconds.', hasValue: true, optional: true},
- {name: 'compaction', description: 'whether to use compaction for this topic', hasValue: false, optional: true},
- {name: 'confirm', description: 'override the confirmation prompt. Needs the app name, or the command will fail.', hasValue: true, optional: true}
+ {name: 'partitions', description: 'number of partitions to give the topic', hasValue: true, optional: false},
+ {name: 'replication-factor', description: 'number of replicas the topic should be created across', hasValue: true, optional: true},
+ {name: 'retention-time', description: 'length of time messages in the topic should be retained for', hasValue: true, optional: true},
+ {name: 'compaction', description: 'whether to use compaction for this topic', hasValue: false, optional: true},
+ {name: 'confirm', description: 'override the confirmation prompt (needs the app name, or the command will fail)', hasValue: true, optional: true}
];
let DOT_WAITING_TIME = 200;
|
Clean up docs
* Remove obsolete 'in milliseconds' from relevant options
* Change naming to be more consistent with style guidelines <URL>
|
heroku_heroku-kafka-jsplugin
|
train
|
b57e7f0d0aea772a131b0f85f1848de21329587b
|
diff --git a/src/IpDecoder.js b/src/IpDecoder.js
index <HASH>..<HASH> 100644
--- a/src/IpDecoder.js
+++ b/src/IpDecoder.js
@@ -11,12 +11,12 @@ function IpDecoder() {
}
// @see "Special address blocks" at https://en.wikipedia.org/wiki/Reserved_IP_addresses#IPv6, "IPv4 mapped addresses." and "IPv4 translated addresses."
-IpDecoder.prototype.ipv4HybridTest = /^\:\:ffff\:(0\:){0,1}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$/;
+var ipv4HybridTest = /^\:\:ffff\:(0\:){0,1}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$/;
// Set an IP string and store it in the buffer. This method
// assumes you've already validated the IP and it's sane
IpDecoder.prototype.set = function(ip) {
- var mappedIPv4 = ip.match(IpDecoder.prototype.ipv4HybridTest);
+ var mappedIPv4 = ip.match(ipv4HybridTest);
if (mappedIPv4 && mappedIPv4.length === 6) {
this.set4(mappedIPv4.slice(2).join('.')); // convert back to ipV4
|
No real reason for the ipv4-in-ipv6 regex to live on the IpDecoder proto
|
gosquared_mmdb-reader
|
train
|
892b6e1c021d359929e8d7e24db587da795a69e5
|
diff --git a/lib/node_modules/@stdlib/math/special/abs/test/test.js b/lib/node_modules/@stdlib/math/special/abs/test/test.js
index <HASH>..<HASH> 100644
--- a/lib/node_modules/@stdlib/math/special/abs/test/test.js
+++ b/lib/node_modules/@stdlib/math/special/abs/test/test.js
@@ -80,12 +80,6 @@ tape( 'if a native implementation is not available, the main export is a JavaScr
}
});
-tape( 'attached to the main export is a method for assigning results to a provided output array', function test( t ) {
- t.strictEqual( hasProp( fcn, 'assign' ), true, 'has property' );
- t.strictEqual( typeof fcn.assign, 'function', 'has method' );
- t.end();
-});
-
tape( 'attached to the main export is a property for retrieving the number of input and output arrays', function test( t ) {
t.strictEqual( fcn.nargs, meta.nargs, 'returns expected value' );
t.end();
@@ -105,3 +99,29 @@ tape( 'attached to the main export is a property for retrieving the list of supp
t.deepEqual( fcn.types, dtypes2signatures( types, meta.nin, meta.nout ), 'returns expected value' );
t.end();
});
+
+tape( 'attached to the main export is an `assign` method for assigning results to a provided output array', function test( t ) {
+ t.strictEqual( hasProp( fcn, 'assign' ), true, 'has property' );
+ t.strictEqual( typeof fcn.assign, 'function', 'has method' );
+ t.end();
+});
+
+tape( 'attached to the `assign` method is a property for retrieving the number of input and output arrays', function test( t ) {
+ t.strictEqual( fcn.assign.nargs, meta.nargs, 'returns expected value' );
+ t.end();
+});
+
+tape( 'attached to the `assign` method is a property for retrieving the number of input arrays', function test( t ) {
+ t.strictEqual( fcn.assign.nin, meta.nin, 'returns expected value' );
+ t.end();
+});
+
+tape( 'attached to the `assign` method is a property for retrieving the number of output arrays', function test( t ) {
+ t.strictEqual( fcn.assign.nout, meta.nout, 'returns expected value' );
+ t.end();
+});
+
+tape( 'attached to the `assign` method is a property for retrieving the list of supported array data types', function test( t ) {
+ t.deepEqual( fcn.assign.types, dtypes2signatures( types, meta.nin, meta.nout ), 'returns expected value' );
+ t.end();
+});
|
Update tests to check for `assign` method properties
|
stdlib-js_stdlib
|
train
|
f26f3438338500b6cbf491b71b696d20b7bf1cd8
|
diff --git a/src/api/NB_App.php b/src/api/NB_App.php
index <HASH>..<HASH> 100644
--- a/src/api/NB_App.php
+++ b/src/api/NB_App.php
@@ -63,8 +63,15 @@ trait NB_App {
*
* @param string $endpoint string Endpoint to use
* @param array $data Post data for endpoint
+ *
+ * @throws \NeverBounce\API\NB_Exception
*/
public function request( $endpoint, array $data = [ ] ) {
+
+ if($endpoint == null) {
+ throw new NB_Exception('No endpoint was supplied');
+ }
+
// Add appID and secretKey
$data['app_id'] = NB_Auth::auth()->appID();
$data['key'] = NB_Auth::auth()->secretKey();
|
Added exception if endpoint is not supplied to request
|
NeverBounce_NeverBounceAPI-PHP
|
train
|
d94b1860806e11038086e91c693d86a009dcd3b0
|
diff --git a/api.go b/api.go
index <HASH>..<HASH> 100644
--- a/api.go
+++ b/api.go
@@ -878,23 +878,24 @@ func postContainersCopy(srv *Server, version float64, w http.ResponseWriter, r *
name := vars["name"]
copyData := &APICopy{}
- if r.Header.Get("Content-Type") == "application/json" {
+ contentType := r.Header.Get("Content-Type")
+ if contentType == "application/json" {
if err := json.NewDecoder(r.Body).Decode(copyData); err != nil {
return err
}
} else {
- return fmt.Errorf("Content-Type not supported: %s", r.Header.Get("Content-Type"))
+ return fmt.Errorf("Content-Type not supported: %s", contentType)
}
if copyData.Resource == "" {
return fmt.Errorf("Resource cannot be empty")
}
if copyData.Resource[0] == '/' {
- return fmt.Errorf("Resource cannot contain a leading /")
+ copyData.Resource = copyData.Resource[1:]
}
if err := srv.ContainerCopy(name, copyData.Resource, w); err != nil {
- utils.Debugf("%s", err)
+ utils.Debugf("%s", err.Error())
return err
}
return nil
diff --git a/api_test.go b/api_test.go
index <HASH>..<HASH> 100644
--- a/api_test.go
+++ b/api_test.go
@@ -1181,7 +1181,7 @@ func TestPostContainersCopy(t *testing.T) {
}
r := httptest.NewRecorder()
- copyData := APICopy{HostPath: ".", Resource: "test.txt"}
+ copyData := APICopy{HostPath: ".", Resource: "/test.txt"}
jsonData, err := json.Marshal(copyData)
if err != nil {
diff --git a/commands.go b/commands.go
index <HASH>..<HASH> 100644
--- a/commands.go
+++ b/commands.go
@@ -1492,8 +1492,8 @@ func (cli *DockerCli) CmdCp(args ...string) error {
return err
}
- r := bytes.NewReader(data)
if statusCode == 200 {
+ r := bytes.NewReader(data)
if err := Untar(r, copyData.HostPath); err != nil {
return err
}
diff --git a/container.go b/container.go
index <HASH>..<HASH> 100644
--- a/container.go
+++ b/container.go
@@ -1094,5 +1094,19 @@ func (container *Container) Copy(resource string) (Archive, error) {
if err := container.EnsureMounted(); err != nil {
return nil, err
}
- return TarFilter(container.RootfsPath(), Uncompressed, []string{resource})
+ var filter []string
+ basePath := path.Join(container.RootfsPath(), resource)
+ stat, err := os.Stat(basePath)
+ if err != nil {
+ return nil, err
+ }
+ if !stat.IsDir() {
+ d, f := path.Split(basePath)
+ basePath = d
+ filter = []string{f}
+ } else {
+ filter = []string{path.Base(basePath)}
+ basePath = path.Dir(basePath)
+ }
+ return TarFilter(basePath, Uncompressed, filter)
}
|
Strip leading forward slash from resource
|
containers_storage
|
train
|
29a1551eae05f11fc58b04b20a397fb6d044ef51
|
diff --git a/microsoft-azure-api/src/test/java/com/microsoft/windowsazure/services/serviceBus/ServiceBusIntegrationTest.java b/microsoft-azure-api/src/test/java/com/microsoft/windowsazure/services/serviceBus/ServiceBusIntegrationTest.java
index <HASH>..<HASH> 100644
--- a/microsoft-azure-api/src/test/java/com/microsoft/windowsazure/services/serviceBus/ServiceBusIntegrationTest.java
+++ b/microsoft-azure-api/src/test/java/com/microsoft/windowsazure/services/serviceBus/ServiceBusIntegrationTest.java
@@ -67,6 +67,14 @@ public class ServiceBusIntegrationTest extends IntegrationTestBase {
.setTimeout(5);
static ReceiveMessageOptions PEEK_LOCK_5_SECONDS = new ReceiveMessageOptions().setPeekLock().setTimeout(5);
+ private String createLongString(int length) {
+ String result = new String();
+ for (int i = 0; i < length; i++) {
+ result = result + "a";
+ }
+ return result;
+ }
+
@Before
public void createService() throws Exception {
// reinitialize configuration from known state
@@ -279,6 +287,26 @@ public class ServiceBusIntegrationTest extends IntegrationTestBase {
}
@Test
+ public void receiveLargeMessageWorks() throws Exception {
+ // Arrange
+ String queueName = "TestReceiveLargeMessageWorks";
+ service.createQueue(new QueueInfo(queueName));
+ String expectedBody = createLongString(64000);
+ BrokeredMessage expectedMessage = new BrokeredMessage(expectedBody);
+ service.sendQueueMessage(queueName, expectedMessage);
+
+ // Act
+ BrokeredMessage message = service.receiveQueueMessage(queueName, RECEIVE_AND_DELETE_5_SECONDS).getValue();
+ byte[] data = new byte[64000];
+ int size = message.getBody().read(data);
+
+ // Assert
+ assertEquals(expectedBody.length(), size);
+ assertArrayEquals(expectedBody.getBytes(), Arrays.copyOf(data, size));
+
+ }
+
+ @Test
public void renewSubscriptionMessageLockWorks() throws Exception {
// Arrange
String topicName = "TestRenewSubscriptionLockMessageWorks";
|
a unit test trying to repeat service bus large message failure.
|
Azure_azure-sdk-for-java
|
train
|
98354f240bcacc0424e4b9f39b9b34e7973de0e9
|
diff --git a/vaping/config.py b/vaping/config.py
index <HASH>..<HASH> 100644
--- a/vaping/config.py
+++ b/vaping/config.py
@@ -9,25 +9,28 @@ def parse_interval(val):
"""
converts a string to float of seconds
.5 = 500ms
+ 90 = 1m30s
"""
- re_intv = re.compile(r"(?P<count>\d+)(?P<unit>\w+)")
- match = re_intv.match(val.strip())
- if not match:
- raise ValueError("invalid interval string '%s'" % val)
- unit = match.group('unit')
- count = float(match.group('count'))
- if unit == 's':
- return count
- if unit == 'm':
- return count * 60
- if unit == 'ms':
- return count / 1000
- if unit == 'h':
- return count * 3600
- if unit == 'd':
- return count * 86400
+ re_intv = re.compile(r"([\d\.]+)([a-zA-Z]+)")
+ val = val.strip()
- raise ValueError("unknown unit from interval string '%s'" % val)
+ total = 0.0
+ for match in re_intv.findall(val):
+ unit = match[1]
+ count = float(match[0])
+ if unit == 's':
+ total += count
+ elif unit == 'm':
+ total += count * 60
+ elif unit == 'ms':
+ total += count / 1000
+ elif unit == "h":
+ total += count * 3600
+ elif unit == 'd':
+ total += count * 86400
+ else:
+ raise ValueError("unknown unit from interval string '%s'" % val)
+ return total
class Config(munge.Config):
defaults = {
|
parse_interval support multi-unit strings
|
20c_vaping
|
train
|
4edd23d23f187ef1f377b4f749dbe8c882145170
|
diff --git a/src/main/java/com/turn/ttorrent/client/Client.java b/src/main/java/com/turn/ttorrent/client/Client.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/turn/ttorrent/client/Client.java
+++ b/src/main/java/com/turn/ttorrent/client/Client.java
@@ -24,6 +24,7 @@ import com.turn.ttorrent.client.peer.PeerActivityListener;
import com.turn.ttorrent.client.peer.SharingPeer;
import com.turn.ttorrent.common.*;
import com.turn.ttorrent.common.protocol.PeerMessage;
+import com.turn.ttorrent.common.protocol.TrackerMessage;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -159,11 +160,21 @@ public class Client implements AnnounceResponseListener, PeerActivityListener, T
announceableTorrent.getTorrentStatistic().addLeft(torrent.getLeft());
}
- this.announce.forceAnnounce(torrent, this, finished ? COMPLETED : STARTED);
+ forceAnnounceAndLogError(torrent, finished ? COMPLETED : STARTED, announceableTorrent.getDotTorrentFilePath());
logger.info(String.format("Added torrent %s (%s)", torrent.getName(), torrent.getHexInfoHash()));
return torrent.getHexInfoHash();
}
+ private void forceAnnounceAndLogError(AnnounceableTorrent torrent, TrackerMessage.AnnounceRequestMessage.RequestEvent event,
+ String dotTorrentFilePath) {
+ try {
+ this.announce.forceAnnounce(torrent, this, event);
+ } catch (IOException e) {
+ logger.warn("unable to force announce torrent {}. Dot torrent path is {}", torrent.getHexInfoHash(), dotTorrentFilePath);
+ logger.debug("", e);
+ }
+ }
+
public void removeTorrent(TorrentHash torrentHash) {
logger.info("Stopping seeding " + torrentHash.getHexInfoHash());
final TorrentsPair torrentsPair = torrentsStorage.removeActiveAndAnnounceableTorrent(torrentHash.getHexInfoHash());
@@ -175,15 +186,7 @@ public class Client implements AnnounceResponseListener, PeerActivityListener, T
} else {
logger.warn(String.format("Torrent %s already removed from myTorrents", torrentHash.getHexInfoHash()));
}
- final AnnounceableFileTorrent announceableFileTorrent = torrentsPair.getAnnounceableFileTorrent();
- if (announceableFileTorrent == null) {
- logger.info("Announceable torrent {} not found in storage on removing torrent", torrentHash.getHexInfoHash());
- }
- try {
- this.announce.forceAnnounce(announceableFileTorrent, this, STOPPED);
- } catch (IOException e) {
- LoggerUtils.warnAndDebugDetails(logger, "can not send force stop announce event on delete torrent {}", torrentHash.getHexInfoHash(), e);
- }
+ sendStopEvent(torrentsPair.getAnnounceableFileTorrent(), torrentHash.getHexInfoHash());
}
public void removeAndDeleteTorrent(String torrentHash, SharedTorrent torrent) {
@@ -193,16 +196,15 @@ public class Client implements AnnounceResponseListener, PeerActivityListener, T
sharedTorrent.setClientState(ClientState.DONE);
sharedTorrent.delete();
}
- final AnnounceableFileTorrent announceableFileTorrent = torrentsPair.getAnnounceableFileTorrent();
- if (announceableFileTorrent != null) {
- try {
- this.announce.forceAnnounce(announceableFileTorrent, this, STOPPED);
- } catch (IOException e) {
- LoggerUtils.warnAndDebugDetails(logger, "can not send force stop announce event on delete torrent {}", torrentHash, e);
- }
- } else {
+ sendStopEvent(torrentsPair.getAnnounceableFileTorrent(), torrentHash);
+ }
+
+ private void sendStopEvent(AnnounceableFileTorrent announceableFileTorrent, String torrentHash) {
+ if (announceableFileTorrent == null) {
logger.info("Announceable torrent {} not found in storage after unsuccessful download attempt", torrentHash);
+ return;
}
+ forceAnnounceAndLogError(announceableFileTorrent, STOPPED, announceableFileTorrent.getDotTorrentFilePath());
}
public void setAnnounceInterval(final int announceInterval) {
|
added handling exception on incorrect URI in torrent file. Added logging dot torrent file path for debugging
|
mpetazzoni_ttorrent
|
train
|
b04d0ef377a1557d9d33c1c045065c91ac64a3d1
|
diff --git a/src/main/java/mServer/crawler/sender/srf/parser/SrfFilmJsonDeserializer.java b/src/main/java/mServer/crawler/sender/srf/parser/SrfFilmJsonDeserializer.java
index <HASH>..<HASH> 100644
--- a/src/main/java/mServer/crawler/sender/srf/parser/SrfFilmJsonDeserializer.java
+++ b/src/main/java/mServer/crawler/sender/srf/parser/SrfFilmJsonDeserializer.java
@@ -273,8 +273,8 @@ public class SrfFilmJsonDeserializer implements JsonDeserializer<Optional<DatenF
return url;
}
- private EnumMap readUrls(String aM3U8Url) {
- EnumMap urls = new EnumMap(Qualities.class);
+ private Map<Qualities, String> readUrls(String aM3U8Url) {
+ Map<Qualities, String> urls = new EnumMap<>(Qualities.class);
final String optimizedUrl = getOptimizedUrl(aM3U8Url);
Optional<String> content = loadM3u8(optimizedUrl);
@@ -288,17 +288,29 @@ public class SrfFilmJsonDeserializer implements JsonDeserializer<Optional<DatenF
m3u8Data.forEach(entry -> {
Optional<Qualities> resolution = getResolution(entry);
if (resolution.isPresent()) {
- urls.put(resolution.get(), entry.getUrl());
+ urls.put(resolution.get(), enrichUrl(optimizedUrl, entry.getUrl()));
}
});
} else {
- LOG.error(String.format("SrfFilmJsonDeserializer: Loading m3u8-url failed: %s", aM3U8Url));
+ LOG.error("SrfFilmJsonDeserializer: Loading m3u8-url failed: {}", aM3U8Url);
}
return urls;
}
+ private String enrichUrl(String m3u8Url, String videoUrl) {
+ // some video urls contain only filename
+ if (!UrlUtils.getProtocol(videoUrl).isPresent()) {
+ final String m3u8WithoutParameters = UrlUtils.removeParameters(m3u8Url);
+ final Optional<String> m3u8File = UrlUtils.getFileName(m3u8WithoutParameters);
+ if (m3u8File.isPresent()) {
+ return m3u8WithoutParameters.replace(m3u8File.get(), videoUrl);
+ }
+ }
+ return videoUrl;
+ }
+
private Optional<String> loadM3u8(String aM3U8Url) {
Request request = new Request.Builder()
|
#<I> fix videoUrl contains only filename
|
mediathekview_MServer
|
train
|
d11854b0188d60190a06377044969c7599543a10
|
diff --git a/salt/utils/ssdp.py b/salt/utils/ssdp.py
index <HASH>..<HASH> 100644
--- a/salt/utils/ssdp.py
+++ b/salt/utils/ssdp.py
@@ -185,6 +185,76 @@ class SSDPDiscoveryServer(SSDPBase):
self._config[self.ANSWER] = {}
self._config[self.ANSWER].update({'master': self.get_self_ip()})
+ @staticmethod
+ def create_datagram_endpoint(loop, protocol_factory, local_addr=None, remote_addr=None, family=0, proto=0, flags=0):
+ '''
+ Create datagram connection.
+
+ Based on code from Python 3.5 version, this method is used
+ only in Python 2.7+ versions, since Trollius library did not
+ ported UDP packets broadcast.
+ '''
+ if not (local_addr or remote_addr):
+ if not family:
+ raise ValueError('unexpected address family')
+ addr_pairs_info = (((family, proto), (None, None)),)
+ else:
+ addr_infos = OrderedDict()
+ for idx, addr in ((0, local_addr), (1, remote_addr)):
+ if addr is not None:
+ assert isinstance(addr, tuple) and len(addr) == 2, '2-tuple is expected'
+ infos = yield asyncio.coroutines.From(loop.getaddrinfo(
+ *addr, family=family, type=socket.SOCK_DGRAM, proto=proto, flags=flags))
+ if not infos:
+ raise socket.error('getaddrinfo() returned empty list')
+ for fam, _, pro, _, address in infos:
+ key = (fam, pro)
+ if key not in addr_infos:
+ addr_infos[key] = [None, None]
+ addr_infos[key][idx] = address
+ addr_pairs_info = [
+ (key, addr_pair) for key, addr_pair in addr_infos.items()
+ if not ((local_addr and addr_pair[0] is None) or
+ (remote_addr and addr_pair[1] is None))]
+ if not addr_pairs_info:
+ raise ValueError('can not get address information')
+ exceptions = []
+ for ((family, proto),
+ (local_address, remote_address)) in addr_pairs_info:
+ sock = r_addr = None
+ try:
+ sock = socket.socket(family=family, type=socket.SOCK_DGRAM, proto=proto)
+ for opt in [socket.SO_REUSEADDR, socket.SO_BROADCAST]:
+ sock.setsockopt(socket.SOL_SOCKET, opt, 1)
+ sock.setblocking(False)
+ if local_addr:
+ sock.bind(local_address)
+ if remote_addr:
+ yield asyncio.coroutines.From(loop.sock_connect(sock, remote_address))
+ r_addr = remote_address
+ except socket.error as exc:
+ if sock is not None:
+ sock.close()
+ exceptions.append(exc)
+ except Exception:
+ if sock is not None:
+ sock.close()
+ raise
+ else:
+ break
+ else:
+ raise exceptions[0]
+
+ protocol = protocol_factory()
+ waiter = asyncio.futures.Future(loop=loop)
+ transport = loop._make_datagram_transport(sock, protocol, r_addr, waiter)
+ try:
+ yield asyncio.coroutines.From(waiter)
+ except Exception:
+ transport.close()
+ raise
+ raise asyncio.coroutines.Return(transport, protocol)
+
def run(self):
'''
Run server.
|
Port create_datagram_endpoint UDP broadcast functionality from Python3/asyncio to Python2/trollius
|
saltstack_salt
|
train
|
64300fe73a08bcebdf6d96aaf7d603b914d7d44d
|
diff --git a/creamas/__init__.py b/creamas/__init__.py
index <HASH>..<HASH> 100644
--- a/creamas/__init__.py
+++ b/creamas/__init__.py
@@ -24,4 +24,4 @@ __all__ = [
'expose'
]
-__version__ = '0.4.1'
+__version__ = '0.5.0'
|
Changed version number to <I> to indicate major changes in the library.
|
assamite_creamas
|
train
|
46953c688233eb4f6cca9fa32e90cff16e929c3c
|
diff --git a/p2p/host/autorelay/autorelay.go b/p2p/host/autorelay/autorelay.go
index <HASH>..<HASH> 100644
--- a/p2p/host/autorelay/autorelay.go
+++ b/p2p/host/autorelay/autorelay.go
@@ -168,16 +168,29 @@ func (ar *AutoRelay) background(ctx context.Context) {
return
}
evt := ev.(event.EvtPeerConnectednessChanged)
- if evt.Connectedness != network.NotConnected {
- continue
- }
- ar.mx.Lock()
- if ar.usingRelay(evt.Peer) { // we were disconnected from a relay
- delete(ar.relays, evt.Peer)
- push = true
- continue
+ switch evt.Connectedness {
+ case network.Connected:
+ // If we just connect to one of our static relays, get a reservation immediately.
+ for _, pi := range ar.static {
+ if pi.ID == evt.Peer {
+ rsvp, ok := ar.tryRelay(ctx, pi)
+ if ok {
+ ar.mx.Lock()
+ ar.relays[pi.ID] = rsvp
+ ar.mx.Unlock()
+ }
+ push = true
+ break
+ }
+ }
+ case network.NotConnected:
+ ar.mx.Lock()
+ if ar.usingRelay(evt.Peer) { // we were disconnected from a relay
+ delete(ar.relays, evt.Peer)
+ push = true
+ }
+ ar.mx.Unlock()
}
- ar.mx.Unlock()
case ev, ok := <-subReachability.Out():
if !ok {
return
|
immediately get a reservation when connecting to a static relay
|
libp2p_go-libp2p
|
train
|
b41566ec794e21d0be6dd763555c379ffcf16760
|
diff --git a/SECEdgar/client/network_client.py b/SECEdgar/client/network_client.py
index <HASH>..<HASH> 100644
--- a/SECEdgar/client/network_client.py
+++ b/SECEdgar/client/network_client.py
@@ -27,6 +27,7 @@ class NetworkClient(Client):
@property
def retry_count(self):
+ """int: Number of times to retry request."""
return self._retry_count
@retry_count.setter
@@ -39,6 +40,7 @@ class NetworkClient(Client):
@property
def pause(self):
+ """Amount of time to pause between each unsuccessful request before making another."""
return self._pause
@pause.setter
@@ -51,6 +53,7 @@ class NetworkClient(Client):
@property
def count(self):
+ """Number of results to show per page."""
return self._count
@count.setter
diff --git a/SECEdgar/filings/base.py b/SECEdgar/filings/base.py
index <HASH>..<HASH> 100644
--- a/SECEdgar/filings/base.py
+++ b/SECEdgar/filings/base.py
@@ -46,18 +46,22 @@ class Filing(_EDGARBase):
@property
def path(self):
+ """str: Path added to client base."""
return "cgi-bin/browse-edgar"
@property
def params(self):
+ """:obj:`dict`: Parameters to include in requests."""
return self._params
@property
def client(self):
+ """``SECEdgar.client.base``: Client to use to make requests."""
return self._client
@property
def dateb(self):
+ """Union([datetime.datetime, str]): Date after which no filings are fetched."""
return self._dateb
@dateb.setter
@@ -66,6 +70,7 @@ class Filing(_EDGARBase):
@property
def filing_type(self):
+ """``SECEdgar.filings.FilingType``: FilingType enum of filing."""
return self._filing_type
@filing_type.setter
@@ -76,6 +81,7 @@ class Filing(_EDGARBase):
@property
def ciks(self):
+ """:obj:`list` of :obj:`str`: List of CIK strings."""
return self._ciks
def get_urls(self, **kwargs):
diff --git a/SECEdgar/filings/cik.py b/SECEdgar/filings/cik.py
index <HASH>..<HASH> 100644
--- a/SECEdgar/filings/cik.py
+++ b/SECEdgar/filings/cik.py
@@ -20,8 +20,10 @@ class CIK(object):
@property
def ciks(self):
+ """:obj:`list` of :obj:`str`: List of CIKs (as string of digits) transformed from lookups."""
return self._ciks
@property
def lookup_dict(self):
+ """:obj:`dict`: Dictionary that makes tickers and company names to CIKs."""
return self._lookup_dict
|
DOC: Update docstrings for properties
|
coyo8_sec-edgar
|
train
|
8196af32c5d75e329bf4b1954dcc470ef04bb03c
|
diff --git a/polyaxon/api/build_jobs/views.py b/polyaxon/api/build_jobs/views.py
index <HASH>..<HASH> 100644
--- a/polyaxon/api/build_jobs/views.py
+++ b/polyaxon/api/build_jobs/views.py
@@ -50,7 +50,6 @@ from libs.authentication.internal import InternalAuthentication
from libs.paths.jobs import get_job_logs_path
from libs.permissions.internal import IsAuthenticatedOrInternal
from libs.permissions.projects import get_permissible_project
-from libs.repos.utils import get_project_latest_code_reference
from polyaxon.celery_api import celery_app
from polyaxon.settings import SchedulerCeleryTasks
@@ -92,10 +91,8 @@ class ProjectBuildListView(BookmarkedListMixinView, ListCreateAPIView):
raise ValidationError('ttl must be an integer.')
project = get_permissible_project(view=self)
- code_reference = get_project_latest_code_reference(project=project)
instance = serializer.save(user=self.request.user,
- project=project,
- code_reference=code_reference)
+ project=project)
auditor.record(event_type=BUILD_JOB_CREATED, instance=instance)
if ttl:
RedisTTL.set_for_build(build_id=instance.id, value=ttl)
diff --git a/polyaxon/libs/repos/utils.py b/polyaxon/libs/repos/utils.py
index <HASH>..<HASH> 100644
--- a/polyaxon/libs/repos/utils.py
+++ b/polyaxon/libs/repos/utils.py
@@ -1,12 +1,21 @@
+from django.core.exceptions import ObjectDoesNotExist
+
from db.models.repos import CodeReference
-def get_project_latest_code_reference(project):
+def get_project_code_reference(project, commit=None):
if not project.has_code:
return None
- # Set the code reference to the experiment
repo = project.repo
+
+ if commit:
+ try:
+ return CodeReference.objects.get(repo=repo, commit=commit)
+ except ObjectDoesNotExist:
+ return None
+
+ # If no commit is provided we get the last commit, and save new ref if not found
last_commit = repo.last_commit
if not last_commit:
return None
@@ -15,12 +24,14 @@ def get_project_latest_code_reference(project):
return code_reference
-def get_latest_code_reference(instance):
- return get_project_latest_code_reference(instance.project)
+def get_code_reference(instance, commit):
+ return get_project_code_reference(instance.project, commit=commit)
-def assign_code_reference(instance):
- code_reference = get_latest_code_reference(instance=instance)
+def assign_code_reference(instance, commit=None):
+ if not commit and instance.specification and instance.specification.build:
+ commit = instance.specification.build.commit
+ code_reference = get_code_reference(instance=instance, commit=commit)
if code_reference:
instance.code_reference = code_reference
diff --git a/polyaxon/scheduler/dockerizer_scheduler.py b/polyaxon/scheduler/dockerizer_scheduler.py
index <HASH>..<HASH> 100644
--- a/polyaxon/scheduler/dockerizer_scheduler.py
+++ b/polyaxon/scheduler/dockerizer_scheduler.py
@@ -45,7 +45,7 @@ def create_build_job(user, project, config, code_reference):
# Check if image exists already
return build_job, True, False
- if build_job.succeeded and (now() - build_job.finished_at).seconds < 3600 * 6:
+ if build_job.succeeded and (now() - build_job.finished_at).seconds < 3600 * 24:
# Check if image was built in less than an 6 hours
return build_job, True, False
diff --git a/polyaxon/signals/build_jobs.py b/polyaxon/signals/build_jobs.py
index <HASH>..<HASH> 100644
--- a/polyaxon/signals/build_jobs.py
+++ b/polyaxon/signals/build_jobs.py
@@ -19,6 +19,7 @@ from event_manager.events.build_job import (
BUILD_JOB_SUCCEEDED
)
from libs.paths.jobs import delete_job_logs
+from libs.repos.utils import assign_code_reference
from polyaxon.celery_api import celery_app
from polyaxon.settings import SchedulerCeleryTasks
from signals.run_time import set_job_finished_at, set_job_started_at
@@ -33,6 +34,7 @@ _logger = logging.getLogger('polyaxon.signals.build_jobs')
def build_job_pre_save(sender, **kwargs):
instance = kwargs['instance']
set_tags(instance=instance)
+ assign_code_reference(instance)
@receiver(post_save, sender=BuildJob, dispatch_uid="build_job_post_save")
|
Add support for commits in code ref creation
|
polyaxon_polyaxon
|
train
|
c9930bd1f1bfd2a39daa58dbfb6e737640d65e62
|
diff --git a/code/src/main/com/lmax/disruptor/EventPublisher.java b/code/src/main/com/lmax/disruptor/EventPublisher.java
index <HASH>..<HASH> 100644
--- a/code/src/main/com/lmax/disruptor/EventPublisher.java
+++ b/code/src/main/com/lmax/disruptor/EventPublisher.java
@@ -57,13 +57,43 @@ public class EventPublisher<E>
* @param timeout period to wait
* @param units for the timeout period
* @throws TimeoutException if the timeout period has expired
+ * @deprecated Timeout based methods are a bad idea, if timeout functionality
+ * is required, then it can be implemented on top tryPublishEvent.
*/
+ @Deprecated
public void publishEvent(final EventTranslator<E> translator, long timeout, TimeUnit units) throws TimeoutException
{
final long sequence = ringBuffer.next(timeout, units);
translateAndPublish(translator, sequence);
}
+
+ /**
+ * Attempts to publish an event to the ring buffer. It handles
+ * claiming the next sequence, getting the current (uninitialized)
+ * event from the ring buffer and publishing the claimed sequence
+ * after translation. Will return false if specified capacity
+ * was not available.
+ *
+ * @param translator The user specified translation for the event
+ * @param capacity The capacity that should be available before publishing
+ * @returns true if the value was published, false if there was insufficient
+ * capacity.
+ */
+ public boolean tryPublishEvent(EventTranslator<E> translator, int capacity)
+ {
+ try
+ {
+ final long sequence = ringBuffer.tryNext(capacity);
+ translateAndPublish(translator, sequence);
+ return true;
+ }
+ catch (InsufficientCapacityException e)
+ {
+ return false;
+ }
+ }
+
private void translateAndPublish(final EventTranslator<E> translator, final long sequence)
{
try
diff --git a/code/src/test/com/lmax/disruptor/EventPublisherTest.java b/code/src/test/com/lmax/disruptor/EventPublisherTest.java
index <HASH>..<HASH> 100644
--- a/code/src/test/com/lmax/disruptor/EventPublisherTest.java
+++ b/code/src/test/com/lmax/disruptor/EventPublisherTest.java
@@ -24,10 +24,12 @@ import com.lmax.disruptor.support.LongEvent;
public class EventPublisherTest implements EventTranslator<LongEvent>
{
+ private static final int BUFFER_SIZE = 32;
+
@Test
public void shouldPublishEvent()
{
- RingBuffer<LongEvent> ringBuffer = new RingBuffer<LongEvent>(LongEvent.FACTORY, 32);
+ RingBuffer<LongEvent> ringBuffer = new RingBuffer<LongEvent>(LongEvent.FACTORY, BUFFER_SIZE);
ringBuffer.setGatingSequences(new NoOpEventProcessor(ringBuffer).getSequence());
EventPublisher<LongEvent> eventPublisher = new EventPublisher<LongEvent>(ringBuffer);
@@ -38,6 +40,26 @@ public class EventPublisherTest implements EventTranslator<LongEvent>
assertThat(Long.valueOf(ringBuffer.get(1).get()), is(Long.valueOf(1 + 29L)));
}
+ @Test
+ public void shouldTryPublishEvent() throws Exception
+ {
+ RingBuffer<LongEvent> ringBuffer = new RingBuffer<LongEvent>(LongEvent.FACTORY, BUFFER_SIZE);
+ ringBuffer.setGatingSequences(new Sequence());
+ EventPublisher<LongEvent> eventPublisher = new EventPublisher<LongEvent>(ringBuffer);
+
+ for (int i = 0; i < BUFFER_SIZE; i++)
+ {
+ assertThat(eventPublisher.tryPublishEvent(this, 1), is(true));
+ }
+
+ for (int i = 0; i < BUFFER_SIZE; i++)
+ {
+ assertThat(Long.valueOf(ringBuffer.get(i).get()), is(Long.valueOf(i + 29L)));
+ }
+
+ assertThat(eventPublisher.tryPublishEvent(this, 1), is(false));
+ }
+
@Override
public LongEvent translateTo(LongEvent event, long sequence)
{
|
Added tryPublishEvent() to EventPublisher, deprecate publishEvent() with timeout.
|
LMAX-Exchange_disruptor
|
train
|
8a4bcaa2eacfde97ab7aaa7b4872f0326696e956
|
diff --git a/src/font/font.js b/src/font/font.js
index <HASH>..<HASH> 100644
--- a/src/font/font.js
+++ b/src/font/font.js
@@ -125,7 +125,7 @@
* @memberOf me
* @constructor
* @param {String} font
- * @param {int} size
+ * @param {int/Object} size either an int value, or an object like {x:16,y:16}
* @param {int} [scale="1.0"]
* @param {String} [firstChar="0x20"]
@@ -139,6 +139,9 @@
sSize : null,
// first char in the ascii table
firstChar : 0x20,
+
+ // #char per row
+ charCount : 0,
/** @private */
init : function(font, size, scale, firstChar) {
@@ -175,9 +178,12 @@
this.font = me.loader.getImage(font);
// some cheap metrics
- this.size.x = size;
- this.size.y = this.font.height || 0;
+ this.size.x = size.x || size;
+ this.size.y = size.y || this.font.height;
this.sSize.copy(this.size);
+
+ // #char per row
+ this.charCount = ~~(this.font.width / this.size.x);
},
/**
@@ -232,10 +238,14 @@
if (this.align == this.ALIGN.RIGHT) {
x -= text.length * this.sSize.x;
}
-
+ // draw the text
for ( var i = 0; i < text.length; i++) {
+ // calculate the char index
+ var idx = text.charCodeAt(i) - this.firstChar;
+ // draw it
context.drawImage(this.font,
- (text.charCodeAt(i) - this.firstChar) * this.size.x, 0,
+ this.size.x * (idx % this.charCount),
+ this.size.y * ~~(idx / this.charCount),
this.size.x, this.size.y,
~~x, ~~y,
this.sSize.x, this.sSize.y);
|
Added support for multiline fontsheet (with fixed cell size)
|
melonjs_melonJS
|
train
|
9d255716b307f2e4e9ab7cfbad9c2844ac0d0d8d
|
diff --git a/driver/src/test/java/org/neo4j/driver/v1/integration/CausalClusteringIT.java b/driver/src/test/java/org/neo4j/driver/v1/integration/CausalClusteringIT.java
index <HASH>..<HASH> 100644
--- a/driver/src/test/java/org/neo4j/driver/v1/integration/CausalClusteringIT.java
+++ b/driver/src/test/java/org/neo4j/driver/v1/integration/CausalClusteringIT.java
@@ -21,7 +21,6 @@ package org.neo4j.driver.v1.integration;
import io.netty.channel.Channel;
import org.junit.After;
import org.junit.AfterClass;
-import org.junit.Ignore;
import org.junit.Rule;
import org.junit.Test;
@@ -409,7 +408,6 @@ public class CausalClusteringIT
}
@Test
- @Ignore
public void shouldNotServeWritesWhenMajorityOfCoresAreDead() throws Exception
{
Cluster cluster = clusterRule.getCluster();
@@ -441,7 +439,6 @@ public class CausalClusteringIT
}
@Test
- @Ignore
public void shouldServeReadsWhenMajorityOfCoresAreDead() throws Exception
{
Cluster cluster = clusterRule.getCluster();
|
Unignore couple CC integration tests
|
neo4j_neo4j-java-driver
|
train
|
bfe5a49234a77f90d9d58b4967ce92e31b7e334a
|
diff --git a/api/app/middlewares/app.php b/api/app/middlewares/app.php
index <HASH>..<HASH> 100644
--- a/api/app/middlewares/app.php
+++ b/api/app/middlewares/app.php
@@ -24,7 +24,7 @@ class AppMiddleware extends \Slim\Middleware
// Decode JSON data from query params
if (preg_match('/({[^$]+)/', urldecode($query_string), $query)) {
- $query_data = json_decode(urldecode($query[1]), true) ?: array();
+ $query_data = json_decode($query[1], true) ?: array();
}
// Parse remaining regular string variables
|
fix decoding JSON from query-string. fixes #<I>
|
doubleleft_hook
|
train
|
85c126cedb261e8d1c71c8204c4481bd8e5c7539
|
diff --git a/collections_extended/__init__.py b/collections_extended/__init__.py
index <HASH>..<HASH> 100644
--- a/collections_extended/__init__.py
+++ b/collections_extended/__init__.py
@@ -143,7 +143,7 @@ class _basesetlist(Collection, Sequence, Set):
def __init__(self, iterable=None):
self._list = list()
self._dict = dict()
- if iterable:
+ if iterable is not None:
for value in iterable:
if value not in self:
index = len(self)
@@ -184,7 +184,6 @@ class _basesetlist(Collection, Sequence, Set):
return self._list[index]
def __reversed__(self):
- # TODO this can be done more efficiently
return self._from_iterable(self._list.__reversed__())
def count(self, sub, start=0, end=-1):
@@ -213,18 +212,21 @@ class _basesetlist(Collection, Sequence, Set):
>>> sl.index('f')
5
"""
- start = self._fix_neg_index(start)
- if end == None:
- end = len(self)
- end = self._fix_neg_index(end)
+ # TODO add more tests with start and end
try:
index = self._dict[sub]
+ except KeyError:
+ raise ValueError
+ else:
+ start = self._fix_neg_index(start)
+ if end == None:
+ end = len(self)
+ else:
+ end = self._fix_neg_index(end)
if start <= index and index < end:
return index
else:
raise ValueError
- except KeyError:
- raise ValueError
def sub_index(self, sub, start=0, end=-1):
"""
@@ -617,12 +619,6 @@ class _basebag(Collection):
"""
return self._from_map(self._dict)
- ## Alias methods - these methods are just names for other operations
-
- def cardinality(self): return len(self)
- def underlying_set(self): return self.unique_elements()
- def multiplicity(self, elem): return self.count(elem)
-
## implementing Sized methods
def __len__(self):
@@ -876,6 +872,7 @@ class bag(_basebag, MutableCollection):
"""
def pop(self):
+ # TODO can this be done more efficiently (no need to create an iterator)?
it = iter(self)
try:
value = next(it)
|
Minor refactoring of collections_extended.py
|
mlenzen_collections-extended
|
train
|
ef239d0d84d52a58af3b4cdacc15f61826399ab1
|
diff --git a/src/View/Form/EntityContext.php b/src/View/Form/EntityContext.php
index <HASH>..<HASH> 100644
--- a/src/View/Form/EntityContext.php
+++ b/src/View/Form/EntityContext.php
@@ -19,6 +19,7 @@ use Cake\Datasource\EntityInterface;
use Cake\Http\ServerRequest;
use Cake\ORM\TableRegistry;
use Cake\Utility\Inflector;
+use ArrayAccess;
use RuntimeException;
use Traversable;
@@ -259,7 +260,7 @@ class EntityContext implements ContextInterface
return $this->_schemaDefault($part, $entity);
}
- if (is_array($entity)) {
+ if (is_array($entity) || $entity instanceof ArrayAccess) {
$key = array_pop($parts);
return isset($entity[$key]) ? $entity[$key] : null;
diff --git a/tests/TestCase/View/Form/EntityContextTest.php b/tests/TestCase/View/Form/EntityContextTest.php
index <HASH>..<HASH> 100644
--- a/tests/TestCase/View/Form/EntityContextTest.php
+++ b/tests/TestCase/View/Form/EntityContextTest.php
@@ -461,7 +461,8 @@ class EntityContextTest extends TestCase
'name' => 'Test tag',
],
'author' => new Entity([
- 'roles' => ['admin', 'publisher']
+ 'roles' => ['admin', 'publisher'],
+ 'aliases' => new ArrayObject(['dave', 'david']),
])
]);
$context = new EntityContext($this->request, [
@@ -477,11 +478,12 @@ class EntityContextTest extends TestCase
$result = $context->val('tag.name');
$this->assertEquals($row->tag['name'], $result);
- $result = $context->val('tag.nope');
- $this->assertNull($result);
+ $result = $context->val('author.aliases.0');
+ $this->assertEquals($row->author->aliases[0], $result, 'ArrayAccess can be read');
- $result = $context->val('author.roles.3');
- $this->assertNull($result);
+ $this->assertNull($context->val('author.aliases.3'));
+ $this->assertNull($context->val('tag.nope'));
+ $this->assertNull($context->val('author.roles.3'));
}
/**
|
Enable EntityContext to read from ArrayAccess properties
Traverse into ArrayAccess properties as custom type mappings should be
readable if they quack like arrays.
Refs #<I>
|
cakephp_cakephp
|
train
|
8ddbae358dcbac26a948046675f4a5216fb22a58
|
diff --git a/src/Composer/Command/SelfUpdateCommand.php b/src/Composer/Command/SelfUpdateCommand.php
index <HASH>..<HASH> 100644
--- a/src/Composer/Command/SelfUpdateCommand.php
+++ b/src/Composer/Command/SelfUpdateCommand.php
@@ -380,8 +380,8 @@ TAGSPUBKEY
// see if we can run this operation as an Admin on Windows
if (!is_writable(dirname($localFilename))
&& $io->isInteractive()
- && $this->isWindowsNonAdminUser($isCygwin)) {
- return $this->tryAsWindowsAdmin($localFilename, $newFilename, $isCygwin);
+ && $this->isWindowsNonAdminUser()) {
+ return $this->tryAsWindowsAdmin($localFilename, $newFilename);
}
$action = 'Composer '.($backupTarget ? 'update' : 'rollback');
@@ -467,20 +467,16 @@ TAGSPUBKEY
/**
* Returns true if this is a non-admin Windows user account
*
- * @param null|bool $isCygwin Set by method
* @return bool
*/
- protected function isWindowsNonAdminUser(&$isCygwin)
+ protected function isWindowsNonAdminUser()
{
- $isCygwin = preg_match('/cygwin/i', php_uname());
-
- if (!$isCygwin && !Platform::isWindows()) {
+ if (!Platform::isWindows()) {
return false;
}
// fltmc.exe manages filter drivers and errors without admin privileges
- $command = sprintf('%sfltmc.exe filters', $isCygwin ? 'cmd.exe /c ' : '');
- exec($command, $output, $exitCode);
+ exec('fltmc.exe filters', $output, $exitCode);
return $exitCode !== 0;
}
@@ -492,10 +488,9 @@ TAGSPUBKEY
*
* @param string $localFilename The composer.phar location
* @param string $newFilename The downloaded or backup phar
- * @param bool $isCygwin Whether we are running on Cygwin
* @return bool Whether composer.phar has been updated
*/
- protected function tryAsWindowsAdmin($localFilename, $newFilename, $isCygwin)
+ protected function tryAsWindowsAdmin($localFilename, $newFilename)
{
$io = $this->getIO();
@@ -515,15 +510,9 @@ TAGSPUBKEY
$checksum = hash_file('sha256', $newFilename);
- // format the file names for cmd.exe
- if ($isCygwin) {
- $source = exec(sprintf("cygpath -w '%s'", $newFilename));
- $destination = exec(sprintf("cygpath -w '%s'", $localFilename));
- } else {
- // cmd's internal move is fussy about backslashes
- $source = str_replace('/', '\\', $newFilename);
- $destination = str_replace('/', '\\', $localFilename);
- }
+ // cmd's internal move is fussy about backslashes
+ $source = str_replace('/', '\\', $newFilename);
+ $destination = str_replace('/', '\\', $localFilename);
$vbs = <<<EOT
Set UAC = CreateObject("Shell.Application")
@@ -532,16 +521,7 @@ Wscript.Sleep(300)
EOT;
file_put_contents($script, $vbs);
-
- if ($isCygwin) {
- chmod($script, 0755);
- $cygscript = sprintf('"%s"', exec(sprintf("cygpath -w '%s'", $script)));
- $command = sprintf("cmd.exe /c '%s'", $cygscript);
- } else {
- $command = sprintf('"%s"', $script);
- }
-
- exec($command);
+ exec('"'.$script.'"');
@unlink($script);
// see if the file was moved
|
Remove cygwin php handling in UAC elevation
When called from a native shell and using cygwin PHP, cygpath translates
`/tmp` to `User\AppData\Local\Temp`, rather than `/cygdrive/.../tmp`.
This change does not affect using windows PHP from a Cygwin shell.
|
composer_composer
|
train
|
3c432b9bd6eb91e8fa96f3e34cf66949f0b0c66a
|
diff --git a/src/main/java/org/wildfly/swarm/spi/api/annotations/ConfigurationValue.java b/src/main/java/org/wildfly/swarm/spi/api/annotations/ConfigurationValue.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/wildfly/swarm/spi/api/annotations/ConfigurationValue.java
+++ b/src/main/java/org/wildfly/swarm/spi/api/annotations/ConfigurationValue.java
@@ -31,6 +31,5 @@ import javax.inject.Qualifier;
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.METHOD, ElementType.FIELD, ElementType.PARAMETER, ElementType.TYPE})
public @interface ConfigurationValue {
- @Nonbinding String property() default "";
- @Nonbinding String defaultValue() default "";
+ @Nonbinding String value() default "";
}
|
Re-simplify the @ConfigurationValue to only have a value().
|
thorntail_thorntail
|
train
|
530cd5ce27852adeb94a05ef97c52022af73d8eb
|
diff --git a/src/Configuration/OptionsInterface.php b/src/Configuration/OptionsInterface.php
index <HASH>..<HASH> 100644
--- a/src/Configuration/OptionsInterface.php
+++ b/src/Configuration/OptionsInterface.php
@@ -11,17 +11,23 @@
namespace Predis\Configuration;
+use Predis\Command\FactoryInterface as CommandFactoryInterface;
+use Predis\Command\Processor\ProcessorInterface;
+use Predis\Connection\FactoryInterface as ConnectionFactoryInterface;
+use Predis\Connection\Cluster\ClusterInterface;
+use Predis\Connection\Replication\ReplicationInterface;
+
/**
* Interface defining a container for client options.
*
- * @property-read mixed aggregate Custom connection aggregator.
- * @property-read mixed cluster Aggregate connection for clustering.
- * @property-read mixed connections Connection factory.
- * @property-read mixed exceptions Toggles exceptions in client for -ERR responses.
- * @property-read mixed prefix Key prefixing strategy using the given prefix.
- * @property-read mixed commands Command factory.
- * @property-read mixed replication Aggregate connection for replication.
- * @property-read mixed parameters Default connection parameters for aggregate connections.
+ * @property-read callable aggregate Custom connection aggregator.
+ * @property-read ClusterInterface cluster Aggregate connection for clustering.
+ * @property-read ConnectionFactoryInterface connections Connection factory.
+ * @property-read bool exceptions Toggles exceptions in client for -ERR responses.
+ * @property-read ProcessorInterface prefix Key prefixing strategy using the given prefix.
+ * @property-read CommandFactoryInterface commands Command factory.
+ * @property-read ReplicationInterface replication Aggregate connection for replication.
+ * @property-read array parameters Default connection parameters for aggregate connections.
*
* @author Daniele Alessandri <suppakilla@gmail.com>
*/
|
Fix typehints for OptionsInterface.
Ported from #<I> (which targets <I>) due to changes in <I>.
|
imcj_predis
|
train
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.