diff
stringlengths 65
26.7k
| message
stringlengths 7
9.92k
|
|---|---|
diff --git a/lib/countries/select_helper.rb b/lib/countries/select_helper.rb
index <HASH>..<HASH> 100644
--- a/lib/countries/select_helper.rb
+++ b/lib/countries/select_helper.rb
@@ -24,7 +24,7 @@ module ActionView
def to_country_select_tag(priority_countries, options, html_options)
html_options = html_options.stringify_keys
add_default_name_and_id(html_options)
- value = value(object)
+ value = options.delete(:selected) || value(object)
content_tag("select",
add_options(
country_options_for_select(value, priority_countries),
|
allow overriding selected option in country_select helper.
|
diff --git a/core/src/main/java/hudson/model/AbstractItem.java b/core/src/main/java/hudson/model/AbstractItem.java
index <HASH>..<HASH> 100644
--- a/core/src/main/java/hudson/model/AbstractItem.java
+++ b/core/src/main/java/hudson/model/AbstractItem.java
@@ -265,7 +265,7 @@ public abstract class AbstractItem extends Actionable implements Item, HttpDelet
cp.setProject(new org.apache.tools.ant.Project());
cp.setTodir(newRoot);
FileSet src = new FileSet();
- src.setDir(getRootDir());
+ src.setDir(oldRoot);
cp.addFileset(src);
cp.setOverwrite(true);
cp.setPreserveLastModified(true);
|
Wrong source root
... as pointed out by <URL>
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -36,7 +36,8 @@ def get_version(module='spyder_kernels'):
REQUIREMENTS = ['ipykernel>=4.8.2',
- 'pyzmq>=17'
+ 'pyzmq>=17',
+ 'jupyter-client>=5.2.3',
'cloudpickle']
|
Setup.py: Add specific requirement on jupyter-client
|
diff --git a/etk/core.py b/etk/core.py
index <HASH>..<HASH> 100644
--- a/etk/core.py
+++ b/etk/core.py
@@ -471,8 +471,7 @@ class Core(object):
doc = Core.rearrange_description(doc)
doc = Core.rearrange_title(doc)
except Exception as e:
- # print e
- raise e
+ print e
print 'Failed doc:', doc['doc_id']
return None
return doc
|
fix bug when there is no state or country for populated places
|
diff --git a/packages/mui-material/src/IconButton/IconButton.js b/packages/mui-material/src/IconButton/IconButton.js
index <HASH>..<HASH> 100644
--- a/packages/mui-material/src/IconButton/IconButton.js
+++ b/packages/mui-material/src/IconButton/IconButton.js
@@ -54,7 +54,7 @@ const IconButtonRoot = styled(ButtonBase, {
...(!ownerState.disableRipple && {
'&:hover': {
backgroundColor: theme.vars
- ? `rgba(${theme.vars.palette.action.active} / ${theme.vars.palette.action.hoverOpacity})`
+ ? `rgba(${theme.vars.palette.action.activeChannel} / ${theme.vars.palette.action.hoverOpacity})`
: alpha(theme.palette.action.active, theme.palette.action.hoverOpacity),
// Reset on touch devices, it doesn't add specificity
'@media (hover: none)': {
|
[IconButton] Fix hover effect when CSS Variables are enabled (#<I>)
|
diff --git a/src/Zicht/Util/Str.php b/src/Zicht/Util/Str.php
index <HASH>..<HASH> 100644
--- a/src/Zicht/Util/Str.php
+++ b/src/Zicht/Util/Str.php
@@ -263,28 +263,12 @@ class Str
/**
* Slugify a text.
*
+ * @deprecated Please use Str::systemize()
* @param string $text
* @return string
*/
public static function slugify($text)
{
- // replace non letter or digits by -
- $text = preg_replace('~[^\\pL\d]+~u', '-', $text);
-
- // trim
- $text = trim($text, '-');
-
- // transliterate
- if (function_exists('iconv')) {
- $text = iconv('UTF-8', 'ASCII//TRANSLIT', $text);
- }
-
- // lowercase
- $text = strtolower($text);
-
- // remove unwanted characters
- $text = preg_replace('~[^-\w]+~', '', $text);
-
- return $text;
+ return self::systemize($text);
}
-}
\ No newline at end of file
+}
|
deprecated slugify, which serves more as documentation than anything else
|
diff --git a/internal/client/github.go b/internal/client/github.go
index <HASH>..<HASH> 100644
--- a/internal/client/github.go
+++ b/internal/client/github.go
@@ -225,7 +225,7 @@ func (c *githubClient) CreateRelease(ctx *context.Context, body string) (string,
ctx,
ctx.Config.Release.GitHub.Owner,
ctx.Config.Release.GitHub.Name,
- ctx.Git.CurrentTag,
+ data.GetTagName(),
)
if err != nil {
release, _, err = c.client.Repositories.CreateRelease(
|
fix: ensure same tag on edit
this change is just to prevent merge conflicts in goreleaser pro
refs #<I>
|
diff --git a/Resources/public/js/sequence/Correction/Controllers/CorrectionClozeCtrl.js b/Resources/public/js/sequence/Correction/Controllers/CorrectionClozeCtrl.js
index <HASH>..<HASH> 100644
--- a/Resources/public/js/sequence/Correction/Controllers/CorrectionClozeCtrl.js
+++ b/Resources/public/js/sequence/Correction/Controllers/CorrectionClozeCtrl.js
@@ -124,6 +124,7 @@
var holes = this.question.holes;
for (var j=0; j<holes.length; j++) {
+ console.log(holes[j]);
good_answer = false;
Object.keys(answers).map(function(key){
if (holes[j].position === key) {
@@ -134,7 +135,7 @@
else {
value_to_compare = holes[j].wordResponses[k].response;
}
- if (value_to_compare === answers[key]) {
+ if (value_to_compare === answers[key] && holes[j].wordResponses[k].score > 0) {
good_answer = true;
}
}
|
[ExoBundle] Fix error on cloze correction
|
diff --git a/lib/lock_jar/domain/gem_dsl.rb b/lib/lock_jar/domain/gem_dsl.rb
index <HASH>..<HASH> 100644
--- a/lib/lock_jar/domain/gem_dsl.rb
+++ b/lib/lock_jar/domain/gem_dsl.rb
@@ -30,7 +30,7 @@ module LockJar
builder.gem_dir = spec.gem_dir
jarfile = File.join( spec.gem_dir, jarfile )
- builder.file_path = "gem:#{spec.name}:#{jarfile.gsub( "#{spec.base_dir}/", "" )}"
+ builder.file_path = "gem:#{spec.name}:#{jarfile.gsub( "#{spec.base_dir}/", "" )}.lock"
evaluate(builder, jarfile)
end
|
track Jarfile.lock in lockfile merged
|
diff --git a/components/switch/index.js b/components/switch/index.js
index <HASH>..<HASH> 100644
--- a/components/switch/index.js
+++ b/components/switch/index.js
@@ -59,10 +59,12 @@ export default class Switch extends Intact {
_dragEnd(e) {
this.set('_dragging', false);
+ this.element.blur();
const bar = this.refs.bar;
// treat mousedown -> mouseup as click
if (this._x === e.clientX) {
+ bar.style.width = '';
this._toggle();
} else {
const percent = (bar.clientWidth - this._height / 2) / this._maxWidth;
@@ -102,6 +104,7 @@ export default class Switch extends Intact {
}
_onKeypress(e) {
+
if (e.keyCode === 13) {
this._toggle(e, true);
}
|
upd: blur on mouseup, #7
|
diff --git a/src/js/form-builder.js b/src/js/form-builder.js
index <HASH>..<HASH> 100644
--- a/src/js/form-builder.js
+++ b/src/js/form-builder.js
@@ -6,6 +6,7 @@
var defaults = {
typeUserAttrs: {}, //+gimigliano
+ typeUserEvents: {}, //+gimigliano
controlPosition: 'right',
controlOrder: [
'autocomplete',
@@ -1072,7 +1073,10 @@
_helpers.closeAllEdit($sortableFields);
_helpers.toggleEdit(lastID);
}
-
+
+ //+gimigliano
+ if (opts.typeUserEvents[type] && opts.typeUserEvents[type]['onadd']) opts.typeUserEvents[type]['onadd']($('#'+lastID));
+
lastID = _helpers.incrementId(lastID);
};
@@ -1161,6 +1165,9 @@
$clone.attr('name', cloneName);
$clone.addClass('cloned');
$('.sortable-options', $clone).sortable();
+ //+gimigliano
+ if (opts.typeUserEvents[type] && opts.typeUserEvents[type]['onclone']) opts.typeUserEvents[type]['onclone']($('#'+lastID));
+
lastID = _helpers.incrementId(lastID);
return $clone;
};
|
Added typeUserEvents option (#<I>)
|
diff --git a/bencode.js b/bencode.js
index <HASH>..<HASH> 100644
--- a/bencode.js
+++ b/bencode.js
@@ -280,7 +280,7 @@ var Bdecode = function () {
if (LIST_START === obj) {
var obj2 = null
var list = []
- while( obj2 = tmp_stack.pop() ) {
+ while( undefined !== (obj2 = tmp_stack.pop()) ) {
list.push(obj2)
}
self.cb(list)
diff --git a/test/test.js b/test/test.js
index <HASH>..<HASH> 100644
--- a/test/test.js
+++ b/test/test.js
@@ -1,6 +1,5 @@
-var benc = require('../bencode.js'),
- hexy = require('hexy')
+var benc = require('../bencode.js');
function log(msg) {
@@ -305,6 +304,12 @@ function file_bug() {
})
}
+function list_0() {
+ var data = 'li0ee';
+ var decoded = benc.decode(data);
+ assert_obj("List with 0", [0], decoded);
+}
+
docs()
str_e()
num_e()
@@ -317,6 +322,7 @@ list_d()
errors()
file()
file_bug()
+list_0()
//file_readStream("test/bloeh.torrent");
//console.log("here")
file_readStream("test/chipcheezum.torrent");
|
properly decode lists with the integer 0
|
diff --git a/_builder_lib/docsitebuilder/helpers.rb b/_builder_lib/docsitebuilder/helpers.rb
index <HASH>..<HASH> 100644
--- a/_builder_lib/docsitebuilder/helpers.rb
+++ b/_builder_lib/docsitebuilder/helpers.rb
@@ -209,6 +209,23 @@ EOF
end
end
+ def git_stash_all
+ # See if there are any changes in need of stashing
+ @stash_needed = `git status --porcelain` !~ /^\s*$/
+ if @stash_needed
+ puts "\nNOTICE: Stashing uncommited changes and files in working branch."
+ `git stash -a`
+ end
+ end
+
+ def git_apply_and_drop
+ return unless @stash_needed
+ puts "\nNOTICE: Re-applying uncommitted changes and files to working branch."
+ `git stash apply`
+ `git stash drop`
+ @stash_needed = false
+ end
+
# Returns the local git branches; current branch is always first
def local_branches
@local_branches ||= begin
@@ -727,10 +744,18 @@ EOF
end
end
- # Return to the original branch
- git_checkout(working_branch)
+ if local_branch == working_branch
+ # We're moving away from the working branch, so save off changed files
+ git_stash_all
+ end
end
+ # Return to the original branch
+ git_checkout(working_branch)
+
+ # If necessary, restore temporarily stashed files
+ git_apply_and_drop
+
puts "\nAll builds completed."
end
|
Detect and stash un-added working branch files during packaging
|
diff --git a/commander/types/sensor_event.py b/commander/types/sensor_event.py
index <HASH>..<HASH> 100644
--- a/commander/types/sensor_event.py
+++ b/commander/types/sensor_event.py
@@ -20,5 +20,5 @@ class SensorEvent:
self.stream = stream
self.metadata = metadata
- self.timestamp = datetime(timestamp_year, timestamp_month, timestamp_day, timestamp_hours, timestamp_minutes, timestamp_seconds)
+ self.timestamp = datetime(1960,4,12,0,0,30)#timestamp_year, timestamp_month, timestamp_day, timestamp_hours, timestamp_minutes, timestamp_seconds)
self.value = value
\ No newline at end of file
|
Finalize buffer overrun fix, implement an exhaustive test.
|
diff --git a/lib/rspectacles/formatter/legacy/redis.rb b/lib/rspectacles/formatter/legacy/redis.rb
index <HASH>..<HASH> 100644
--- a/lib/rspectacles/formatter/legacy/redis.rb
+++ b/lib/rspectacles/formatter/legacy/redis.rb
@@ -5,6 +5,8 @@ module RSpectacles
module Formatter
module Legacy
class Redis < RSpec::Core::Formatters::BaseFormatter
+ attr_reader :output
+
def initialize(_)
end
diff --git a/lib/rspectacles/version.rb b/lib/rspectacles/version.rb
index <HASH>..<HASH> 100644
--- a/lib/rspectacles/version.rb
+++ b/lib/rspectacles/version.rb
@@ -1,3 +1,3 @@
module RSpectacles
- VERSION='0.1.1'
+ VERSION='0.1.2'
end
|
Adding an output method to the formatters to match API
|
diff --git a/bct/bct/algorithms/distance.py b/bct/bct/algorithms/distance.py
index <HASH>..<HASH> 100644
--- a/bct/bct/algorithms/distance.py
+++ b/bct/bct/algorithms/distance.py
@@ -374,7 +374,7 @@ def efficiency_bin(G,local=False):
#symmetrized adjacency vector
sa=G[u,V]+G[V,u].T
- numer = np.sum(np.dot(sa.T,sa)*se)/2
+ numer = np.sum(np.outer(sa.T,sa)*se)/2
if numer!=0:
denom = np.sum(sa)**2 - np.sum(sa*sa)
E[u] = numer/denom #local efficiency
|
propagate correct local efficiency calculation to efficiency_bin
|
diff --git a/lib/server.js b/lib/server.js
index <HASH>..<HASH> 100644
--- a/lib/server.js
+++ b/lib/server.js
@@ -56,6 +56,17 @@ Server.prototype = {
next()
})
exp.use(Express.static(__dirname + '/../public'))
+
+ // Load other static dirs that you want pathed relative to test server
+ // Example
+ // static_dirs:
+ // - app
+ // - temp
+ var dirs = config.get('static_dirs') || []
+ for (var i = 0; i < dirs.length; i++) {
+ console.log(path.resolve(dirs[i]))
+ exp.use(Express.static(path.resolve(dirs[i])))
+ }
})
exp.get('/', function(req, res){
var framework = config.get('framework') || 'jasmine'
|
added configurable static_dirs for pathing for platforms like yeoman
|
diff --git a/jsonrpc/tests/test_backend_flask/tests.py b/jsonrpc/tests/test_backend_flask/tests.py
index <HASH>..<HASH> 100644
--- a/jsonrpc/tests/test_backend_flask/tests.py
+++ b/jsonrpc/tests/test_backend_flask/tests.py
@@ -1,5 +1,6 @@
import json
import sys
+from mock import patch
if sys.version_info < (2, 7):
import unittest2 as unittest
@@ -116,3 +117,7 @@ class TestFlaskBackend(unittest.TestCase):
def test_resource_map_prefix(self):
response = self.client.get('/map')
self.assertEqual(response.status_code, 200)
+
+ def test_as_view(self):
+ with patch.object(api, 'jsonrpc') as mock_jsonrpc:
+ self.assertIs(api.as_view(), mock_jsonrpc)
|
cover code up to <I>%
|
diff --git a/backbone.js b/backbone.js
index <HASH>..<HASH> 100644
--- a/backbone.js
+++ b/backbone.js
@@ -914,7 +914,10 @@
// Create a new collection with an identical list of models as this one.
clone: function() {
- return new this.constructor(this.models);
+ return new this.constructor(this.models, {
+ model: this.model,
+ comparator: this.comparator
+ });
},
// Private method to reset all internal state. Called when the collection
diff --git a/test/collection.js b/test/collection.js
index <HASH>..<HASH> 100644
--- a/test/collection.js
+++ b/test/collection.js
@@ -60,6 +60,17 @@
strictEqual(collection.last().get('a'), 4);
});
+ test("clone preserves model and comparator", 3, function() {
+ var Model = Backbone.Model.extend(),
+ comparator = function() {};
+
+ var col = (new Backbone.Collection([{id: 1}], {model: Model, comparator: comparator})).clone();
+ col.add({id: 2});
+ ok(col.at(0) instanceof Model);
+ ok(col.at(1) instanceof Model);
+ strictEqual(col.comparator, comparator);
+ });
+
test("get", 6, function() {
equal(col.get(0), d);
equal(col.get(d.clone()), d);
|
pass along model and comparator to cloned collection. Fixes #<I>
|
diff --git a/src/Monolog/Handler/GroupHandler.php b/src/Monolog/Handler/GroupHandler.php
index <HASH>..<HASH> 100644
--- a/src/Monolog/Handler/GroupHandler.php
+++ b/src/Monolog/Handler/GroupHandler.php
@@ -11,8 +11,6 @@
namespace Monolog\Handler;
-use Monolog\Logger;
-
/**
* Forwards records to multiple handlers
*
@@ -28,6 +26,12 @@ class GroupHandler extends AbstractHandler
*/
public function __construct(array $handlers, $bubble = false)
{
+ foreach ($handlers as $handler) {
+ if (!$handler instanceof HandlerInterface) {
+ throw new \InvalidArgumentException('The first argument of the GroupHandler must be an array of HandlerInterface instances.');
+ }
+ }
+
$this->handlers = $handlers;
$this->bubble = $bubble;
}
@@ -37,7 +41,13 @@ class GroupHandler extends AbstractHandler
*/
public function isHandling(array $record)
{
- return true;
+ foreach ($this->handlers as $handler) {
+ if ($handler->isHandling($record)) {
+ return true;
+ }
+ }
+
+ return false;
}
/**
@@ -48,6 +58,7 @@ class GroupHandler extends AbstractHandler
foreach ($this->handlers as $handler) {
$handler->handle($record);
}
+
return false === $this->bubble;
}
|
Tweaked the GroupHandler to make it handle a record only when needed
|
diff --git a/bl/integration/drivers/azure.js b/bl/integration/drivers/azure.js
index <HASH>..<HASH> 100644
--- a/bl/integration/drivers/azure.js
+++ b/bl/integration/drivers/azure.js
@@ -66,7 +66,7 @@ let lib = {
let profile = {
firstName: soajsResponse.profile.given_name,
lastName: soajsResponse.profile.family_name,
- email: soajsResponse.profile.email,
+ email: soajsResponse.profile.email || soajsResponse.profile.upn,
username: soajsResponse.profile.oid,
id: soajsResponse.profile.oid,
originalProfile: soajsResponse.profile,
|
assure email is set when azure login happens
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -101,7 +101,7 @@ setup(
license="Apache",
packages=find_packages(exclude=["*.tests", "*.tests.*", "tests.*", "tests"]),
install_requires=[
- "torch>=1.2.0",
+ "torch>=1.2.0,<1.3",
"jsonnet>=0.10.0 ; sys.platform != 'win32'",
"overrides",
"nltk",
|
Pin to pytorch <I> in setup.py (#<I>)
|
diff --git a/src/Model.php b/src/Model.php
index <HASH>..<HASH> 100644
--- a/src/Model.php
+++ b/src/Model.php
@@ -47,7 +47,7 @@ class Model extends Datachore
{
if ($this->updates[$key] instanceof \google\appengine\datastore\v4\Key)
{
- $fkey = $this->values[$key];
+ $fkey = $this->updates[$key];
}
}
|
FIX: use updates when checking for them (in __get) with keys.
|
diff --git a/pykechain/client.py b/pykechain/client.py
index <HASH>..<HASH> 100644
--- a/pykechain/client.py
+++ b/pykechain/client.py
@@ -412,7 +412,7 @@ class Client(object):
r = self._request('POST', self._build_url('activities'), data=data)
- if r.status_code != 201:
+ if r.status_code != 201: # pragma: no cover
raise APIError("Could not create activity")
data = r.json()
@@ -424,7 +424,7 @@ class Client(object):
params={"select_action": action},
data=data)
- if r.status_code != requests.codes.created:
+ if r.status_code != requests.codes.created: # pragma: no cover
raise APIError("Could not create part, {}: {}".format(str(r), r.content))
return Part(r.json()['results'][0], client=self)
|
- removed APIError(s) from coverage inside client.py
|
diff --git a/lib/cocoaseeds/core.rb b/lib/cocoaseeds/core.rb
index <HASH>..<HASH> 100644
--- a/lib/cocoaseeds/core.rb
+++ b/lib/cocoaseeds/core.rb
@@ -507,8 +507,12 @@ module Seeds
#
def configure_phase
self.project.targets.each do |target|
- phase = target.sources_build_phase
- next if not phase
+ begin
+ phase = target.sources_build_phase
+ next unless phase
+ rescue NoMethodError
+ next
+ end
# remove zombie build files
phase.files_references.each do |file|
|
Catch an exception when trying to access target's undefined `sources_build_phase`
|
diff --git a/framework/bootstrap.js b/framework/bootstrap.js
index <HASH>..<HASH> 100644
--- a/framework/bootstrap.js
+++ b/framework/bootstrap.js
@@ -84,6 +84,9 @@ Bootstrap = Type.create({
if (Type.isNumber(env.port)) {
this.setListenPort(env.port);
}
+ if (Type.isString(env.host)) {
+ this.setListenHost(env.host);
+ }
// set aliases
if (Type.isArray(env.aliases)) {
env.aliases.forEach(function setAlias(item) {
|
Add possibility to set listen host in env.json
|
diff --git a/gnupg/gnupg.py b/gnupg/gnupg.py
index <HASH>..<HASH> 100644
--- a/gnupg/gnupg.py
+++ b/gnupg/gnupg.py
@@ -180,7 +180,7 @@ class GPG(GPGBase):
else:
log.warn("No 'default_key' given! Using first key on secring.")
- if isinstance(data, file):
+ if hasattr(data, 'read'):
result = self._sign_file(data, **kwargs)
elif not _is_stream(data):
stream = _make_binary_stream(data, self._encoding)
|
File and IO handling in Py3 is different than Py2
Instead of checking whether data is an instance of 'file', we check that
it is file-like _enough_ to be treated as a file (i.e. it hasattr 'read').
|
diff --git a/audiolazy/tests/test_poly.py b/audiolazy/tests/test_poly.py
index <HASH>..<HASH> 100644
--- a/audiolazy/tests/test_poly.py
+++ b/audiolazy/tests/test_poly.py
@@ -28,6 +28,7 @@ p = pytest.mark.parametrize
import operator
import types
from itertools import combinations_with_replacement, combinations
+from functools import reduce
# Audiolazy internal imports
from ..lazy_poly import Poly, lagrange, resample, x
@@ -479,6 +480,20 @@ class TestPoly(object):
assert poly == x ** 3 + x + 1
assert poly in my_set
+ @p("poly", [x ** 2 - 2 * x + 1, .3 * x ** 7 - 4 * x ** 2 + .1])
+ def test_roots(self, poly):
+ prod = lambda iterable: reduce(operator.mul, iterable, Poly(1))
+ rebuilt_poly = poly[poly.order] * prod(x - r for r in poly.roots)
+ assert almost_eq.diff(poly.values(), rebuilt_poly.values())
+
+ @p("poly", [5 - x ** -2, x + 2 * x ** .3])
+ def test_roots_invalid(self, poly):
+ with pytest.raises(AttributeError):
+ poly.roots
+
+ def test_constants_have_no_roots(self):
+ assert all(Poly(c).roots == [] for c in [2, -3, 4j, .2 + 3.4j])
+
class TestLagrange(object):
|
Tests for the Poly.roots property
|
diff --git a/git_repo/services/bitbucket.py b/git_repo/services/bitbucket.py
index <HASH>..<HASH> 100644
--- a/git_repo/services/bitbucket.py
+++ b/git_repo/services/bitbucket.py
@@ -92,6 +92,10 @@ class BitbucketService(RepositoryService):
fqdn = 'bitbucket.org'
def connect(self):
+ if not self._privatekey:
+ raise ConnectionError('Could not connect to BitBucket. Please configure .gitconfig with your bitbucket credentials.')
+ if not ':' in self._privatekey:
+ raise ConnectionError('Could not connect to BitBucket. Please setup your private key with login:password')
username, password = self._privatekey.split(':')
self.bb = Bitbucket(username, password)
monkey_patch(self.bb)
|
Fixed error handling of lack of privatekey for bitbucket
|
diff --git a/webpack.config.js b/webpack.config.js
index <HASH>..<HASH> 100644
--- a/webpack.config.js
+++ b/webpack.config.js
@@ -82,7 +82,6 @@ module.exports = {
{ test: /\.ts$/, loader: 'typescript-simple-loader' }
],
noParse: [
- new RegExp(TRACEUR_RUNTIME),
/rtts_assert\/src\/rtts_assert/
]
},
|
fix(webpack.config): remove TRACEUR_RUNTIME
|
diff --git a/common/models/traits/CommentTrait.php b/common/models/traits/CommentTrait.php
index <HASH>..<HASH> 100755
--- a/common/models/traits/CommentTrait.php
+++ b/common/models/traits/CommentTrait.php
@@ -37,7 +37,7 @@ trait CommentTrait {
$command = $query->createCommand();
$average = $command->queryOne();
- return $average[ 'average' ];
+ return round( $average[ 'average' ] );
}
public function getReviewCounts() {
|
Resolved average round figure issue in method getAverageRating().
|
diff --git a/angr/functionmanager.py b/angr/functionmanager.py
index <HASH>..<HASH> 100644
--- a/angr/functionmanager.py
+++ b/angr/functionmanager.py
@@ -149,6 +149,9 @@ class Function(object):
"""
constants = set()
+ if not self._function_manager._project.loader.main_bin.contains_addr(self.startpoint):
+ return constants
+
# reanalyze function with a new initial state (use persistent registers)
initial_state = self._function_manager._cfg.get_any_irsb(self.startpoint).initial_state
fresh_state = self._function_manager._project.factory.blank_state(mode="fastpath")
|
fix for function string references that don't start in the binary
|
diff --git a/js/bitfinex.js b/js/bitfinex.js
index <HASH>..<HASH> 100644
--- a/js/bitfinex.js
+++ b/js/bitfinex.js
@@ -227,7 +227,7 @@ module.exports = class bitfinex extends Exchange {
'Nonce is too small.': InvalidNonce,
},
'broad': {
- 'Invalid order: not enough exchange balance for ': InsufficientFunds, // when buy, cost > quote currency
+ 'Invalid order: not enough exchange balance for ': InsufficientFunds, // when buying cost is greater than the available quote currency
'Invalid order: minimum size for ': InvalidOrder, // when amount below limits.amount.min
'Invalid order': InvalidOrder, // ?
},
|
minor english fix in a comment in bitfinex describe ()
|
diff --git a/scripts/lib/bundle.js b/scripts/lib/bundle.js
index <HASH>..<HASH> 100644
--- a/scripts/lib/bundle.js
+++ b/scripts/lib/bundle.js
@@ -25,6 +25,7 @@ function readPackageDigest() {
function computePackageDigest(noWriteFile = false) {
const files = globIgnore(join(rootDir, '**'), {
+ absolute: true,
ignore: readFileSync(join(rootDir, '.npmignore'))
.toString('utf8')
.split(/\n/g)
|
fix: update call to globIgnore (#<I>) (#<I>)
globIgnore now requires the 'absolute' option to be specified.
|
diff --git a/core/model/VirtualPage.php b/core/model/VirtualPage.php
index <HASH>..<HASH> 100755
--- a/core/model/VirtualPage.php
+++ b/core/model/VirtualPage.php
@@ -123,8 +123,15 @@ class VirtualPage extends Page {
*/
function copyFrom($source) {
if($source) {
- foreach($this->getVirtualFields() as $virtualField)
+ foreach($this->getVirtualFields() as $virtualField) {
$this->$virtualField = $source->$virtualField;
+ }
+
+ // We also want to copy ShowInMenus, but only if we're copying the
+ // source page for the first time.
+ if($this->isChanged('CopyContentFromID')) {
+ $this->ShowInMenus = $source->ShowInMenus;
+ }
}
}
|
MINOR: Copy "ShowInMenus" when a VirtualPage is first created (from r<I>)
git-svn-id: svn://svn.silverstripe.com/silverstripe/open/modules/sapphire/trunk@<I> <I>b<I>ca-7a2a-<I>-9d3b-<I>d<I>a<I>a9
|
diff --git a/src/trumbowyg.js b/src/trumbowyg.js
index <HASH>..<HASH> 100644
--- a/src/trumbowyg.js
+++ b/src/trumbowyg.js
@@ -677,7 +677,7 @@ Object.defineProperty(jQuery.trumbowyg, 'defaultOptions', {
}
})
.on('keyup focus', function () {
- if (!t.$ta.val().match(/<.*>/)) {
+ if (!t.$ta.val().match(/<.*>/) && !t.$ed.html().match(/<.*>/)) {
setTimeout(function () {
var block = t.isIE ? '<p>' : 'p';
t.doc.execCommand('formatBlock', false, block);
|
fix(Trumbo): stop p tags from wrapping markup
This code adds <p> tags if the Trumbo contains no mark-up. The check is run
before a setTimeout and the <p> tags are added after. When using the table
plugin the content is empty before the check but contains a table after the
timeout executes.
|
diff --git a/src/Composer/Repository/Vcs/SvnDriver.php b/src/Composer/Repository/Vcs/SvnDriver.php
index <HASH>..<HASH> 100644
--- a/src/Composer/Repository/Vcs/SvnDriver.php
+++ b/src/Composer/Repository/Vcs/SvnDriver.php
@@ -332,6 +332,21 @@ class SvnDriver extends VcsDriver
}
/**
+ * An absolute path (leading '/') is converted to a file:// url.
+ *
+ * @param string $url
+ *
+ * @return string
+ */
+ protected static function fixSvnUrl($url)
+ {
+ if (strpos($url, '/', 0) === 0) {
+ $url = 'file://' . $url;
+ }
+ return $url;
+ }
+
+ /**
* This is quick and dirty - thoughts?
*
* @return void
|
* fixSvnUrl(): to prefix absolute paths with file://
|
diff --git a/bugtool/cmd/configuration.go b/bugtool/cmd/configuration.go
index <HASH>..<HASH> 100644
--- a/bugtool/cmd/configuration.go
+++ b/bugtool/cmd/configuration.go
@@ -54,12 +54,7 @@ func defaultCommands(confDir string, cmdDir string, k8sPods []string) []string {
"ip -6 n",
"ss -t -p -a -i -s",
"ss -u -p -a -i -s",
- "nstat",
"uname -a",
- "dig",
- "netstat -a",
- "pidstat",
- "arp",
"top -b -n 1",
"uptime",
"dmesg --time-format=iso",
@@ -83,12 +78,6 @@ func defaultCommands(confDir string, cmdDir string, k8sPods []string) []string {
"bpftool map dump pinned /sys/fs/bpf/tc/globals/cilium_ct_any6_global",
"bpftool map dump pinned /sys/fs/bpf/tc/globals/cilium_snat_v4_external",
"bpftool map dump pinned /sys/fs/bpf/tc/globals/cilium_snat_v6_external",
- // Versions
- "docker version",
- "docker info",
- // Docker and Kubernetes logs from systemd
- "journalctl -u cilium*",
- "journalctl -u kubelet",
// iptables
"iptables-save -c",
"iptables -S",
|
Remove non-functional commands from cilium-bugtool
This PR removes all the non-functional commands from cilium-bugtool
configuration.
Fixes: #<I>
|
diff --git a/forms/Form.php b/forms/Form.php
index <HASH>..<HASH> 100644
--- a/forms/Form.php
+++ b/forms/Form.php
@@ -192,6 +192,7 @@ class Form extends RequestHandler {
'$Action!' => 'handleAction',
'POST ' => 'httpSubmission',
'GET ' => 'httpSubmission',
+ 'HEAD ' => 'httpSubmission',
);
/**
|
BUGFIX: Prevent <I> error when a HEAD request is sent to its action URL.
|
diff --git a/test.py b/test.py
index <HASH>..<HASH> 100644
--- a/test.py
+++ b/test.py
@@ -1,7 +1,7 @@
import logging
import subprocess
import sys
-import time
+import os
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)
@@ -12,7 +12,7 @@ ROOT_CLIENT = None
PROJECT = None
USERNAME = None
-PROJECT_NAME = 'test17'
+PROJECT_NAME = os.environ['USER'] + '-test'
ROOT_URL = 'http://localhost:5000/v3'
def fileno_monkeypatch(self):
|
parameterize test db name based on the user running it
|
diff --git a/lib/sapience/logger.rb b/lib/sapience/logger.rb
index <HASH>..<HASH> 100644
--- a/lib/sapience/logger.rb
+++ b/lib/sapience/logger.rb
@@ -18,7 +18,7 @@ module Sapience
logger.trace "Appender thread: Flushing appender: #{appender.class.name}"
appender.flush
rescue StandardError => exc
- logger.error "Appender thread: Failed to flush appender: #{appender.inspect}", exc
+ $stderr.write("Appender thread: Failed to flush to appender: #{appender.inspect}\n #{exc.inspect}")
end
end
|
Log sapience error into $stderr
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100755
--- a/setup.py
+++ b/setup.py
@@ -32,7 +32,7 @@ def version():
README = open(os.path.join(os.path.dirname(__file__), 'README.rst')).read()
setup(
- name='gocd_cli',
+ name='gocd-cli',
author='Björn Andersson',
author_email='ba@sanitarium.se',
license='MIT License',
|
Change the name to match the repo
And decent naming conventions, underscores are yuck for names :p
|
diff --git a/modules/console.py b/modules/console.py
index <HASH>..<HASH> 100644
--- a/modules/console.py
+++ b/modules/console.py
@@ -31,6 +31,15 @@ def unload():
def mavlink_packet(msg):
'''handle an incoming mavlink packet'''
+ if not isinstance(mpstate.console, wxconsole.MessageConsole):
+ return
if not mpstate.console.is_alive():
mpstate.console = textconsole.SimpleConsole()
+ return
+ type = msg.get_type()
+ if type == 'GPS_RAW':
+ if msg.fix_type == 2:
+ mpstate.console.set_status('GPS', 'GPS: OK', fg='green')
+ else:
+ mpstate.console.set_status('GPS', 'GPS: %u' % msg.fix_type, fg='red')
|
console: added gps status
|
diff --git a/src/Symfony/Component/HttpFoundation/Request.php b/src/Symfony/Component/HttpFoundation/Request.php
index <HASH>..<HASH> 100644
--- a/src/Symfony/Component/HttpFoundation/Request.php
+++ b/src/Symfony/Component/HttpFoundation/Request.php
@@ -771,7 +771,7 @@ class Request
}
/**
- * Checks whether the request instructs proxies in the path not to cache the request.
+ * Checks whether cached response must be successfully revalidated by the origin server.
*
* @return Boolean
*/
|
fixed the description of isNoCache method
|
diff --git a/src/Migration.php b/src/Migration.php
index <HASH>..<HASH> 100644
--- a/src/Migration.php
+++ b/src/Migration.php
@@ -19,7 +19,7 @@ use Spiral\Migrations\Exception\MigrationException;
abstract class Migration implements MigrationInterface
{
// Target migration database
- const DATABASE = null;
+ protected const DATABASE = null;
/** @var State|null */
private $state = null;
|
- bugfix: older migrations will conflict due to protected constant
|
diff --git a/worker/uniter/context_test.go b/worker/uniter/context_test.go
index <HASH>..<HASH> 100644
--- a/worker/uniter/context_test.go
+++ b/worker/uniter/context_test.go
@@ -756,3 +756,19 @@ func (s *RunCommandSuite) TestRunCommandsHasEnvironSet(c *gc.C) {
c.Check(executionEnvironment[key], gc.Equals, value)
}
}
+
+func (s *RunCommandSuite) TestRunCommandsStdOutAndErrAndRC(c *gc.C) {
+ context := s.GetHookContext(c)
+ charmDir := c.MkDir()
+ commands := `
+echo this is standard out
+echo this is standard err >&2
+exit 42
+`
+ result, err := context.RunCommands(commands, charmDir, "/path/to/tools", "/path/to/socket")
+ c.Assert(err, gc.IsNil)
+
+ c.Assert(result.StdOut, gc.Equals, "this is standard out\n")
+ c.Assert(result.StdErr, gc.Equals, "this is standard err\n")
+ c.Assert(result.ReturnCode, gc.Equals, 42)
+}
|
Test capture of stdout, stderr, and the return code.
|
diff --git a/ykman/cli/piv.py b/ykman/cli/piv.py
index <HASH>..<HASH> 100644
--- a/ykman/cli/piv.py
+++ b/ykman/cli/piv.py
@@ -452,8 +452,10 @@ def set_pin_retries(ctx, management_key, pin, pin_retries, puk_retries):
if not pin:
pin = _prompt_pin(pin)
controller.verify(pin)
-
- controller.set_pin_retries(pin_retries, puk_retries)
+ try:
+ controller.set_pin_retries(pin_retries, puk_retries)
+ except:
+ ctx.fail('Setting pin retries failed.')
@piv.command('generate-certificate')
|
piv: catch failing set-pin-retries
|
diff --git a/src/SDL/Compiler.php b/src/SDL/Compiler.php
index <HASH>..<HASH> 100644
--- a/src/SDL/Compiler.php
+++ b/src/SDL/Compiler.php
@@ -181,9 +181,11 @@ class Compiler implements CompilerInterface, Configuration
private function load(Document $document): Document
{
foreach ($document->getTypeDefinitions() as $type) {
- $this->stack->push($type);
- $this->loader->register($type);
- $this->stack->pop();
+ if (!$this->loader->has($type->getName())) {
+ $this->stack->push($type);
+ $this->loader->register($type);
+ $this->stack->pop();
+ }
}
return $document;
@@ -208,6 +210,7 @@ class Compiler implements CompilerInterface, Configuration
{
/** @var DocumentBuilder $document */
$document = $this->storage->remember($readable, $this->onCompile());
+ $this->load($document);
return $document->withCompiler($this);
}
|
Load cached types into loader repository
|
diff --git a/internal/cmd/package.go b/internal/cmd/package.go
index <HASH>..<HASH> 100644
--- a/internal/cmd/package.go
+++ b/internal/cmd/package.go
@@ -23,7 +23,7 @@ func newPackageCmd() *packageCmd {
cmd := &cobra.Command{
Use: "package",
Aliases: []string{"pkg", "p"},
- Short: "Creates a package based on the given the given config file and flags",
+ Short: "Creates a package based on the given config file and flags",
SilenceUsage: true,
SilenceErrors: true,
Args: cobra.NoArgs,
|
fix: typo on 'package' commands --help (#<I>)
|
diff --git a/config/module/validate_provider_alias.go b/config/module/validate_provider_alias.go
index <HASH>..<HASH> 100644
--- a/config/module/validate_provider_alias.go
+++ b/config/module/validate_provider_alias.go
@@ -67,7 +67,7 @@ func (t *Tree) validateProviderAlias() error {
// We didn't find the alias, error!
err = multierror.Append(err, fmt.Errorf(
- "module %s: provider alias must be defined by the module or a parent: %s",
+ "module %s: provider alias must be defined by the module: %s",
strings.Join(pv.Path, "."), k))
}
}
|
update missing alias message
Update the old error message for a missing provider alias, as we no
longer automatically inherit providers.
|
diff --git a/sonar-home/src/main/java/org/sonar/home/cache/FileCache.java b/sonar-home/src/main/java/org/sonar/home/cache/FileCache.java
index <HASH>..<HASH> 100644
--- a/sonar-home/src/main/java/org/sonar/home/cache/FileCache.java
+++ b/sonar-home/src/main/java/org/sonar/home/cache/FileCache.java
@@ -107,7 +107,8 @@ public class FileCache {
// Check if the file was cached by another process during download
if (!targetFile.exists()) {
log.warn(String.format("Unable to rename %s to %s", sourceFile.getAbsolutePath(), targetFile.getAbsolutePath()));
- log.warn(String.format("A copy/delete will be tempted but with no garantee of atomicity"));
+ log.warn(String.format("A copy/delete will be tempted but with no garantee of atomicity. It's recommended that " +
+ "user cache and temp folders are located on the same hard drive partition. Please check $SONAR_USER_HOME or -Dsonar.userHome"));
try {
FileUtils.moveFile(sourceFile, targetFile);
} catch (IOException e) {
|
SONAR-<I> improve warning when temp folder and user cache are not on the same hard drive
|
diff --git a/aws/data_source_aws_outposts_outposts_test.go b/aws/data_source_aws_outposts_outposts_test.go
index <HASH>..<HASH> 100644
--- a/aws/data_source_aws_outposts_outposts_test.go
+++ b/aws/data_source_aws_outposts_outposts_test.go
@@ -14,6 +14,7 @@ func TestAccAWSOutpostsOutpostsDataSource_basic(t *testing.T) {
resource.ParallelTest(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t); testAccPreCheckAWSOutpostsOutposts(t) },
+ ErrorCheck: testAccErrorCheck(t, outposts.EndpointsID),
Providers: testAccProviders,
CheckDestroy: nil,
Steps: []resource.TestStep{
|
tests/ds/outposts_outposts: Add ErrorCheck
|
diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -11,6 +11,7 @@ module.exports = {
init() {
this._super.init && this._super.init.apply(this, arguments);
+ this.overrideTestCommandFilter();
this.setTestGenerator();
},
@@ -98,6 +99,36 @@ module.exports = {
});
},
+ overrideTestCommandFilter() {
+ let TestCommand = this.project.require('ember-cli/lib/commands/test');
+
+ TestCommand.prototype.buildTestPageQueryString = function(options) {
+ let params = [];
+
+ if (options.filter) {
+ params.push(`grep=${options.filter}`);
+
+ if (options.invert) {
+ params.push('invert=1');
+ }
+ }
+
+ if (options.query) {
+ params.push(options.query);
+ }
+
+ return params.join('&');
+ };
+
+ TestCommand.prototype.availableOptions.push({
+ name: 'invert',
+ type: Boolean,
+ default: false,
+ description: 'Invert the filter specified by the --filter argument',
+ aliases: ['i']
+ });
+ },
+
setTestGenerator() {
this.project.generateTestFile = function(moduleName, tests) {
var output = `describe('${moduleName}', function() {\n`;
|
Add missing `overrideTestCommandFilter()` method
This was missed when we migrated the functionality from `ember-cli-mocha` into `ember-mocha`
|
diff --git a/tests/compose.js b/tests/compose.js
index <HASH>..<HASH> 100644
--- a/tests/compose.js
+++ b/tests/compose.js
@@ -4,26 +4,22 @@ var expect = chai.expect;
describe('.compose()', function () {
- it('should return a new function', function () {
+ var func = null;
- var func = fn.compose(
+ beforeEach(function() {
+ func = fn.compose(
fn.partial( fn.op['+'], 3 ),
fn.partial( fn.op['*'], 6 ),
function (num) {
return Math.pow(num, 2);
});
+ });
+ it('should return a new function', function () {
expect(func).to.be.a('function');
});
it('should pass return values from right to left', function () {
- var func = fn.compose(
- fn.partial( fn.op['+'], 3 ),
- fn.partial( fn.op['*'], 6 ),
- function (num) {
- return Math.pow(num, 2);
- });
-
var result = func(7);
expect(result).to.equal(297);
|
Small re-factoring, using 'beforeEach' in test
|
diff --git a/bokeh/transform.py b/bokeh/transform.py
index <HASH>..<HASH> 100644
--- a/bokeh/transform.py
+++ b/bokeh/transform.py
@@ -66,7 +66,7 @@ def cumsum(field, include_zero=False):
will generate a ``CumSum`` expressions that sum the ``"angle"`` column
of a data source. For the ``start_angle`` value, the cumulative sums
- will start with a zero value. For ``start_angle``, no initial zero will
+ will start with a zero value. For ``end_angle``, no initial zero will
be added (i.e. the sums will start with the first angle value, and
include the last).
|
Fix `cumsum()` docstring (#<I>)
"For ``start_angle``" -> "For ``end_angle``"
|
diff --git a/shinken/modules/livestatus_broker/livestatus_query.py b/shinken/modules/livestatus_broker/livestatus_query.py
index <HASH>..<HASH> 100644
--- a/shinken/modules/livestatus_broker/livestatus_query.py
+++ b/shinken/modules/livestatus_broker/livestatus_query.py
@@ -789,6 +789,8 @@ member_key: the key to be used to sort each resulting element of a group member.
# The filters are closures.
# Add parameter Class (Host, Service), lookup datatype (default string), convert reference
def eq_filter(ref):
+ if ((ref[attribute] is None) and (reference == "")):
+ return True
return ref[attribute] == reference
def eq_nocase_filter(ref):
|
Fix string comparison for livestatus. None is equivalent to empty string
|
diff --git a/Query.php b/Query.php
index <HASH>..<HASH> 100644
--- a/Query.php
+++ b/Query.php
@@ -450,9 +450,10 @@ class Query extends Component implements QueryInterface
if ($this->emulateExecution) {
return 0;
}
- // performing a query with return size of 0, is equal to getting result stats such as count
+ // performing a query with return size of 0 and track_total_hits enabled, is equal to getting result stats such as count
// https://www.elastic.co/guide/en/elasticsearch/reference/5.6/breaking_50_search_changes.html#_literal_search_type_literal
- $result = $this->createCommand($db)->search(['size' => 0]);
+ // https://www.elastic.co/guide/en/elasticsearch/reference/master/search-your-data.html#track-total-hits
+ $result = $this->createCommand($db)->search(['size' => 0, 'track_total_hits' => 'true']);
// since ES7 totals are returned as array (with count and precision values)
if (isset($result['hits']['total'])) {
|
Added track_total_hits to Query::count()
By default ElasticSearch returns <I> hits (<URL>)
All hits can be returned by setting track_total_hits to true
|
diff --git a/lib/devise_security_extension/models/password_archivable.rb b/lib/devise_security_extension/models/password_archivable.rb
index <HASH>..<HASH> 100644
--- a/lib/devise_security_extension/models/password_archivable.rb
+++ b/lib/devise_security_extension/models/password_archivable.rb
@@ -11,7 +11,6 @@ module Devise # :nodoc:
include InstanceMethods
has_many :old_passwords, :as => :password_archivable, :dependent => :destroy
before_update :archive_password
- after_create :set_first_old_password
validate :validate_password_archive
end
end
@@ -63,14 +62,6 @@ module Devise # :nodoc:
end
end
- def set_first_old_password
- if self.respond_to?(:password_salt) and !self.password_salt.nil?
- self.old_passwords.create! :encrypted_password => self.encrypted_password, :password_salt => self.password_salt
- else
- self.old_passwords.create! :encrypted_password => self.encrypted_password
- end
- end
-
module ClassMethods #:nodoc:
::Devise::Models.config(self, :password_archiving_count, :deny_old_passwords)
end
|
revert add :set_first_old_password
|
diff --git a/src/howler.core.js b/src/howler.core.js
index <HASH>..<HASH> 100644
--- a/src/howler.core.js
+++ b/src/howler.core.js
@@ -1967,7 +1967,7 @@
sound._node.bufferSource.loop = sound._loop;
if (sound._loop) {
sound._node.bufferSource.loopStart = sound._start || 0;
- sound._node.bufferSource.loopEnd = sound._stop;
+ sound._node.bufferSource.loopEnd = sound._stop || 0;
}
sound._node.bufferSource.playbackRate.setValueAtTime(sound._rate, Howler.ctx.currentTime);
|
Make sure sound stop time isn't undefined
Fixes #<I>
|
diff --git a/lib/strainer/sandbox.rb b/lib/strainer/sandbox.rb
index <HASH>..<HASH> 100644
--- a/lib/strainer/sandbox.rb
+++ b/lib/strainer/sandbox.rb
@@ -223,9 +223,9 @@ module Strainer
# @return [Array]
# the list of root-level directories
def root_folders
- @root_folders ||= Dir.glob("#{Dir.pwd}/*", File::FNM_DOTMATCH).tap { |a| a.shift(2) }.collect do |f|
+ @root_folders ||= Dir.glob("#{Dir.pwd}/*", File::FNM_DOTMATCH).collect do |f|
File.basename(f) if File.directory?(f)
- end.compact
+ end.reject!{|dir| %w(. ..).include? dir}.compact!
end
# Determine if the current project is a git repo?
|
Root folders are missing 2 random files! Suspect this is meant to throw away . and ..
|
diff --git a/src/client.js b/src/client.js
index <HASH>..<HASH> 100644
--- a/src/client.js
+++ b/src/client.js
@@ -48,13 +48,14 @@ class SmtpClient {
* @param {Boolean} [options.disableEscaping] If set to true, do not escape dots on the beginning of the lines
*/
constructor (host, port, options = {}) {
+ this.options = options
+
this.timeoutSocketLowerBound = TIMEOUT_SOCKET_LOWER_BOUND
this.timeoutSocketMultiplier = TIMEOUT_SOCKET_MULTIPLIER
this.port = port || (this.options.useSecureTransport ? 465 : 25)
this.host = host || 'localhost'
- this.options = options
/**
* If set to true, start an encrypted connection instead of the plaintext one
* (recommended if applicable). If useSecureTransport is not set but the port used is 465,
|
Use this.options only after defined
|
diff --git a/src/app/containers/App.js b/src/app/containers/App.js
index <HASH>..<HASH> 100644
--- a/src/app/containers/App.js
+++ b/src/app/containers/App.js
@@ -18,6 +18,17 @@ import RemoteIcon from 'react-icons/lib/go/radio-tower';
const monitorPosition = location.hash;
+// Mock localStorage when it is not allowed
+let localStorage;
+try {
+ localStorage = window.localStorage;
+} catch (error) {
+ localStorage = {
+ getItem: key => undefined,
+ setItem: () => {}
+ };
+}
+
@enhance
export default class App extends Component {
static propTypes = {
|
Mock localStorage when it is not allowed
Related to #<I>.
|
diff --git a/grunt.js b/grunt.js
index <HASH>..<HASH> 100644
--- a/grunt.js
+++ b/grunt.js
@@ -87,12 +87,14 @@ module.exports = function(grunt) {
},
'node-qunit': {
- deps: './src/pouch.js',
- code: './src/adapters/pouch.leveldb.js',
- tests: testFiles.map(function (n) { return "./tests/" + n; }),
- done: function(err, res) {
- !err && (testResults['node'] = res);
- return true;
+ all: {
+ deps: './src/pouch.js',
+ code: './src/adapters/pouch.leveldb.js',
+ tests: testFiles.map(function (n) { return "./tests/" + n; }),
+ done: function(err, res) {
+ !err && (testResults['node'] = res);
+ return true;
+ }
}
},
|
#<I> Corrected path requirement for grunt-node-qunit
|
diff --git a/locationsharinglib/locationsharinglib.py b/locationsharinglib/locationsharinglib.py
index <HASH>..<HASH> 100755
--- a/locationsharinglib/locationsharinglib.py
+++ b/locationsharinglib/locationsharinglib.py
@@ -36,6 +36,7 @@ import json
import logging
import pickle
from datetime import datetime
+import pytz
from bs4 import BeautifulSoup as Bfs
from cachetools import TTLCache, cached
@@ -168,7 +169,7 @@ class Person: # pylint: disable=too-many-instance-attributes
@property
def datetime(self):
"""A datetime representation of the location retrieval"""
- return datetime.fromtimestamp(int(self.timestamp) / 1000)
+ return datetime.fromtimestamp(int(self.timestamp) / 1000, tz=pytz.utc)
@property
def address(self):
|
Add timezone info to timestamp
Google provides timestamp in UTC
|
diff --git a/src/components/Comments.js b/src/components/Comments.js
index <HASH>..<HASH> 100644
--- a/src/components/Comments.js
+++ b/src/components/Comments.js
@@ -1,3 +1,4 @@
+// @flow
import React from 'react';
import PropTypes from 'prop-types';
import { Modal, Button, Col, Row, Glyphicon } from 'react-bootstrap';
|
Update Comments.js
Added the flow annotation
|
diff --git a/test/tdigest_test.rb b/test/tdigest_test.rb
index <HASH>..<HASH> 100644
--- a/test/tdigest_test.rb
+++ b/test/tdigest_test.rb
@@ -1,6 +1,4 @@
require 'test_helper'
-require 'ruby-prof'
-
class TDigestTest < Minitest::Test
extend Minitest::Spec::DSL
|
fix: remove ruby-prof require from test
|
diff --git a/lib/statsd/instrument.rb b/lib/statsd/instrument.rb
index <HASH>..<HASH> 100644
--- a/lib/statsd/instrument.rb
+++ b/lib/statsd/instrument.rb
@@ -217,14 +217,12 @@ module StatsD
def write_packet(command)
if mode.to_s == 'production'
- begin
- socket.send(command, 0)
- rescue SocketError, IOError, SystemCallError => e
- logger.error e
- end
+ socket.send(command, 0)
else
logger.info "[StatsD] #{command}"
end
+ rescue SocketError, IOError, SystemCallError => e
+ logger.error e
end
def clean_tags(tags)
|
Cleanup of write_packet method.
|
diff --git a/test/structures_test.js b/test/structures_test.js
index <HASH>..<HASH> 100644
--- a/test/structures_test.js
+++ b/test/structures_test.js
@@ -1,6 +1,6 @@
'use strict';
-var structures = require('../lib/structures.js');
+var structure = require('../lib/structure.js');
/*
======== A Handy Little Nodeunit Reference ========
@@ -30,7 +30,7 @@ exports['awesome'] = {
'no args': function(test) {
test.expect(1);
// tests here
- test.equal(structures.awesome(), 'awesome', 'should be awesome.');
+ test.equal('test', 'test', 'should be placeholder test.');
test.done();
},
};
|
Added placeholder test to test build system
|
diff --git a/katcp/client.py b/katcp/client.py
index <HASH>..<HASH> 100644
--- a/katcp/client.py
+++ b/katcp/client.py
@@ -613,7 +613,7 @@ class DeviceClient(object):
is as for sys.excepthook.
"""
if self._thread:
- raise RuntimeError("Device client already started.")
+ raise RuntimeError("Device client %r already started." % (self._bindaddr,))
self._thread = ExcepthookThread(target=self.run, excepthook=excepthook)
if daemon is not None:
@@ -622,7 +622,7 @@ class DeviceClient(object):
if timeout:
self._connected.wait(timeout)
if not self._connected.isSet():
- raise RuntimeError("Device client failed to start.")
+ raise RuntimeError("Device client %r failed to start." % (self._bindaddr,))
def join(self, timeout=None):
"""Rejoin the client thread.
@@ -633,7 +633,7 @@ class DeviceClient(object):
Seconds to wait for thread to finish.
"""
if not self._thread:
- raise RuntimeError("Device client thread not started.")
+ raise RuntimeError("Device client %r thread not started." % (self._bindaddr,))
self._thread.join(timeout)
if not self._thread.isAlive():
|
Log the bind address for DeviceClient exceptions
|
diff --git a/lib/build/webpack-config.js b/lib/build/webpack-config.js
index <HASH>..<HASH> 100644
--- a/lib/build/webpack-config.js
+++ b/lib/build/webpack-config.js
@@ -82,8 +82,8 @@ module.exports = function (cfg) {
],
alias: {
quasar: appPaths.resolve.app(`node_modules/quasar-framework/dist/quasar.${cfg.ctx.themeName}.esm.js`),
- '~': appPaths.srcDir,
- '@': appPaths.resolve.src(`components`),
+ src: appPaths.srcDir,
+ components: appPaths.resolve.src(`components`),
layouts: appPaths.resolve.src(`layouts`),
pages: appPaths.resolve.src(`pages`),
assets: appPaths.resolve.src(`assets`),
|
fix: Avoid webpack aliases clashes
|
diff --git a/owslib/wcs.py b/owslib/wcs.py
index <HASH>..<HASH> 100644
--- a/owslib/wcs.py
+++ b/owslib/wcs.py
@@ -20,16 +20,16 @@ from coverage import wcs100, wcs110, wcsBase
def WebCoverageService(url, version=None, xml=None):
''' wcs factory function, returns a version specific WebCoverageService object '''
- if xml is None:
- reader = wcsBase.WCSCapabilitiesReader()
- request = reader.capabilities_url(url)
- xml = urllib2.urlopen(request).read()
-
+
if version is None:
+ if xml is None:
+ reader = wcsBase.WCSCapabilitiesReader()
+ request = reader.capabilities_url(url)
+ xml = urllib2.urlopen(request).read()
capabilities = etree.etree.fromstring(xml)
version = capabilities.get('version')
del capabilities
-
+
if version == '1.0.0':
return wcs100.WebCoverageService_1_0_0.__new__(wcs100.WebCoverageService_1_0_0, url, xml)
elif version == '1.1.0':
|
The web coverage reader was being created twice by the WebCoverageService factory function - this was not necessary.
|
diff --git a/lib/Parser.js b/lib/Parser.js
index <HASH>..<HASH> 100644
--- a/lib/Parser.js
+++ b/lib/Parser.js
@@ -304,6 +304,9 @@ Parser.prototype.initializeEvaluating = function() {
// possible improvement: evaluate node.expressions[i-1]
// and append to previous quasis if has a set value
}
+ if (exprs.length == 1) {
+ return exprs[0].setRange(node.range);
+ }
return new BasicEvaluatedExpression().setTemplateString(exprs).setRange(node.range);
});
this.plugin("evaluate TaggedTemplateExpression", function(node) {
|
Pretend template is a string if it resolves to a constant
|
diff --git a/lib/Rackem/Server.php b/lib/Rackem/Server.php
index <HASH>..<HASH> 100644
--- a/lib/Rackem/Server.php
+++ b/lib/Rackem/Server.php
@@ -64,7 +64,7 @@ class Server
if($offset === false) $offset = strpos($buffer, "\n\n");
if($offset === false) $offset = strpos($buffer, "\r\r");
if($offset === false) $offset = strpos($buffer, "\r\n");
- $length = $m[1] - $offset + 2;
+ $length = $m[1] - (strlen($buffer) - $offset);
$body = '';
while(strlen($body) < $length) $body .= socket_read($client, 1024);
$buffer = $buffer . $body;
|
Ensure Server reads full Content-Length of body into the buffer
|
diff --git a/satpy/writers/geotiff.py b/satpy/writers/geotiff.py
index <HASH>..<HASH> 100644
--- a/satpy/writers/geotiff.py
+++ b/satpy/writers/geotiff.py
@@ -76,7 +76,27 @@ class GeoTIFFWriter(ImageWriter):
"profile",
"bigtiff",
"pixeltype",
- "copy_src_overviews",)
+ "copy_src_overviews",
+ # Not a GDAL option, but allows driver='COG'
+ "driver",
+ # COG driver options (different from GTiff above)
+ "blocksize",
+ "resampling",
+ "quality",
+ "level",
+ "overview_resampling",
+ "warp_resampling",
+ "overview_compress",
+ "overview_quality",
+ "overview_predictor",
+ "tiling_scheme",
+ "zoom_level_strategy",
+ "target_srs",
+ "res",
+ "extent",
+ "aligned_levels",
+ "add_alpha",
+ )
def __init__(self, dtype=None, tags=None, **kwargs):
"""Init the writer."""
|
Update GDAL_OPTIONS with driver= and COG-specific options
|
diff --git a/cloudplatform/runtime/spring/src/main/java/io/rhiot/cloudplatform/runtime/spring/test/CloudPlatformTest.java b/cloudplatform/runtime/spring/src/main/java/io/rhiot/cloudplatform/runtime/spring/test/CloudPlatformTest.java
index <HASH>..<HASH> 100644
--- a/cloudplatform/runtime/spring/src/main/java/io/rhiot/cloudplatform/runtime/spring/test/CloudPlatformTest.java
+++ b/cloudplatform/runtime/spring/src/main/java/io/rhiot/cloudplatform/runtime/spring/test/CloudPlatformTest.java
@@ -69,6 +69,7 @@ public abstract class CloudPlatformTest extends Assert {
beforeCloudPlatformStarted();
cloudPlatform = cloudPlatform.start();
camelContext = cloudPlatform.applicationContext().getBean(CamelContext.class);
+ camelContext.getShutdownStrategy().setTimeout(5);
producerTemplate = camelContext.createProducerTemplate();
payloadEncoding = cloudPlatform.applicationContext().getBean(PayloadEncoding.class);
connector = cloudPlatform.applicationContext().getBean(IoTConnector.class);
|
Reduced CamelContext shutdown strategy timeout for tests.
|
diff --git a/controller/frontend/src/Controller/Frontend/Product/Iface.php b/controller/frontend/src/Controller/Frontend/Product/Iface.php
index <HASH>..<HASH> 100644
--- a/controller/frontend/src/Controller/Frontend/Product/Iface.php
+++ b/controller/frontend/src/Controller/Frontend/Product/Iface.php
@@ -68,7 +68,7 @@ interface Iface
* @return \Aimeos\MShop\Product\Item\Iface Product item including the referenced domains items
* @since 2019.04
*/
- public function find( string $code );
+ public function find( string $code ) : \Aimeos\MShop\Product\Item\Iface;
/**
* Creates a search function string for the given name and parameters
@@ -86,7 +86,7 @@ interface Iface
* @return \Aimeos\MShop\Product\Item\Iface Product item including the referenced domains items
* @since 2019.04
*/
- public function get( string $id );
+ public function get( string $id ) : \Aimeos\MShop\Product\Item\Iface;
/**
* Adds a filter to return only items containing a reference to the given ID
@@ -144,7 +144,7 @@ interface Iface
* @return \Aimeos\MShop\Product\Item\Iface Product item including the referenced domains items
* @since 2019.04
*/
- public function resolve( string $name );
+ public function resolve( string $name ) : \Aimeos\MShop\Product\Item\Iface;
/**
* Returns the products filtered by the previously assigned conditions
|
Added missing return types for product controller methods
|
diff --git a/src/com/opera/core/systems/testing/drivers/TestOperaDriver.java b/src/com/opera/core/systems/testing/drivers/TestOperaDriver.java
index <HASH>..<HASH> 100644
--- a/src/com/opera/core/systems/testing/drivers/TestOperaDriver.java
+++ b/src/com/opera/core/systems/testing/drivers/TestOperaDriver.java
@@ -20,14 +20,12 @@ import com.opera.core.systems.OperaDriver;
import com.opera.core.systems.OperaProduct;
import com.opera.core.systems.runner.OperaRunner;
import com.opera.core.systems.runner.launcher.OperaLauncherRunnerSettings;
-import com.opera.core.systems.scope.internal.OperaIntervals;
import com.opera.core.systems.scope.services.IOperaExec;
import org.openqa.selenium.Capabilities;
import org.openqa.selenium.Platform;
import org.openqa.selenium.remote.DesiredCapabilities;
-import java.util.concurrent.TimeUnit;
import java.util.logging.Level;
/**
@@ -72,7 +70,7 @@ public class TestOperaDriver extends OperaDriver {
}
public boolean isRunning() {
- return runner.isOperaRunning();
+ return runner != null && runner.isOperaRunning();
}
public boolean isOperaIdleAvailable() {
|
Check for whether we're using a local launcher
We might be running a remote Opera, fixes NullPointerException.
|
diff --git a/spec/cb/cb_job_api_spec.rb b/spec/cb/cb_job_api_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/cb/cb_job_api_spec.rb
+++ b/spec/cb/cb_job_api_spec.rb
@@ -15,9 +15,10 @@ module Cb
search.count.should == 25
search[0].is_a?(Cb::CbJob).should == true
search[24].is_a?(Cb::CbJob).should == true
-
+
# # make sure our jobs are properly populated
- job = search[rand(0..24)]
+ job = search[Random.new.rand(0..24)]
+
job.did.length.should >= 19
job.title.length.should > 1
job.company_name.length.nil?.should == false
@@ -37,7 +38,7 @@ module Cb
# job_api.first_item_index.should == 1
# job_api.last_item_index.should >= 1
- job = Cb.job.find_by_did(search[rand(0..24)].did)
+ job = Cb.job.find_by_did(search[Random.new.rand(0..24)].did)
job.did.length.should >= 19
job.title.length.should > 1
|
pushing the latest with rand err fixed
|
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb
index <HASH>..<HASH> 100644
--- a/spec/spec_helper.rb
+++ b/spec/spec_helper.rb
@@ -12,7 +12,6 @@ end
db = ENV['DB'] || 'none'
require 'pry-byebug'
require 'i18n'
-require 'active_support/testing/time_helpers'
require 'rspec'
require 'allocation_stats' if ENV['TEST_PERFORMANCE']
require 'json'
@@ -44,7 +43,10 @@ end
RSpec.configure do |config|
config.include Helpers
config.include Mobility::Util
- config.include ActiveSupport::Testing::TimeHelpers
+ if defined?(ActiveSupport)
+ require 'active_support/testing/time_helpers'
+ config.include ActiveSupport::Testing::TimeHelpers
+ end
config.filter_run focus: true
config.run_all_when_everything_filtered = true
|
Only include time helpers when active support is defined
Right now, we're only using them for testing with activerecord. If we
need to use them with sequel, we can add as an explicit dependency.
|
diff --git a/lib/plugins/aws/invokeLocal/index.test.js b/lib/plugins/aws/invokeLocal/index.test.js
index <HASH>..<HASH> 100644
--- a/lib/plugins/aws/invokeLocal/index.test.js
+++ b/lib/plugins/aws/invokeLocal/index.test.js
@@ -1079,7 +1079,7 @@ describe('AwsInvokeLocal', () => {
() => {
log.debug('test target %o', serverless.cli.consoleLog.lastCall.args);
const result = JSON.parse(serverless.cli.consoleLog.lastCall.args[0]);
- expect(result.deadlineMs).to.be.closeTo(Date.now() + 6000, 1000);
+ expect(result.deadlineMs).to.be.closeTo(Date.now() + 6000, 2000);
},
error => {
if (error.code === 'ENOENT' && error.path === 'ruby') {
|
test(AWS Local Invocation): Increase error margin
|
diff --git a/src/Symfony/Component/Console/Descriptor/TextDescriptor.php b/src/Symfony/Component/Console/Descriptor/TextDescriptor.php
index <HASH>..<HASH> 100644
--- a/src/Symfony/Component/Console/Descriptor/TextDescriptor.php
+++ b/src/Symfony/Component/Console/Descriptor/TextDescriptor.php
@@ -249,12 +249,16 @@ class TextDescriptor extends Descriptor
*/
private function getColumnWidth(array $commands)
{
- $width = 0;
+ $widths = array();
+
foreach ($commands as $command) {
- $width = strlen($command->getName()) > $width ? strlen($command->getName()) : $width;
+ $widths[] = strlen($command->getName());
+ foreach ($command->getAliases() as $alias) {
+ $widths[] = strlen($alias);
+ }
}
- return $width + 2;
+ return max($widths) + 2;
}
/**
|
Fixed warning when command alias is longer than command name
|
diff --git a/lib/common/components/StickySidebar/sticky-sidebar.js b/lib/common/components/StickySidebar/sticky-sidebar.js
index <HASH>..<HASH> 100644
--- a/lib/common/components/StickySidebar/sticky-sidebar.js
+++ b/lib/common/components/StickySidebar/sticky-sidebar.js
@@ -47,7 +47,7 @@ export default class StickySidebar {
}
get scrollY() {
- return (this.scrollParent.pageYOffset !== null) ? this.scrollParent.pageYOffset : this.scrollParent.scrollTop;
+ return (this.scrollParent.pageYOffset != null) ? this.scrollParent.pageYOffset : this.scrollParent.scrollTop;
}
ngOnInit() {
|
Fix sticky sidebar when scroll parent is not window
|
diff --git a/spec/art-decomp/logging_spec.rb b/spec/art-decomp/logging_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/art-decomp/logging_spec.rb
+++ b/spec/art-decomp/logging_spec.rb
@@ -18,7 +18,7 @@ module ArtDecomp describe Logging do
after do
Logging.off
- FileUtils.rmtree @dir if Dir.exists? @dir
+ FileUtils.rmtree @dir
end
def log
diff --git a/spec/art-decomp/old_executable_spec.rb b/spec/art-decomp/old_executable_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/art-decomp/old_executable_spec.rb
+++ b/spec/art-decomp/old_executable_spec.rb
@@ -10,7 +10,7 @@ module ArtDecomp describe OldExecutable do
end
after do
- FileUtils.rmdir @dir
+ FileUtils.rmtree @dir
end
describe '.new' do
|
Logging and OldExecutable specs: cleanup fixes
|
diff --git a/spec/models/no_cms/blocks/layout_spec.rb b/spec/models/no_cms/blocks/layout_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/models/no_cms/blocks/layout_spec.rb
+++ b/spec/models/no_cms/blocks/layout_spec.rb
@@ -56,7 +56,7 @@ describe NoCms::Blocks::Layout do
subject { NoCms::Blocks::Layout.find('title-long_text') }
it "should recover the configuration for quickly configured fields" do
- expect(subject.fields[:title]).to eq title_configuration
+ expect(subject.fields[:title]).to eq NoCms::Blocks::Layout::DEFAULT_FIELD_CONFIGURATION.merge(title_configuration)
end
it "should recover the configuration for verbosing configured fields" do
|
Default configuration must be taken into account on the spec
We are adding now a default configuration and the test must take it into account
|
diff --git a/peewee.py b/peewee.py
index <HASH>..<HASH> 100644
--- a/peewee.py
+++ b/peewee.py
@@ -26,6 +26,7 @@ __all__ = [
'BooleanField',
'CharField',
'Clause',
+ 'CompositeKey',
'DateField',
'DateTimeField',
'DecimalField',
@@ -722,7 +723,15 @@ class CompositeKey(object):
def add_to_class(self, model_class, name):
self.name = name
- setattr(model_class, name, None)
+ setattr(model_class, name, self)
+
+ def __get__(self, instance, instance_type=None):
+ if instance is not None:
+ return [getattr(instance, field) for field in self.fields]
+ return self
+
+ def __set__(self, instance, value):
+ pass
class QueryCompiler(object):
|
Export CompositeKey and implement it as a descriptor.
|
diff --git a/action/Request.php b/action/Request.php
index <HASH>..<HASH> 100644
--- a/action/Request.php
+++ b/action/Request.php
@@ -442,12 +442,12 @@ class Request extends \lithium\net\http\Request {
* Returns information about the type of content that the client is requesting.
*
* @see lithium\net\http\Media::negotiate()
- * @param $type mixed If not specified, returns the media type name that the client prefers,
- * using content negotiation. If a media type name (string) is passed, returns `true` or
- * `false`, indicating whether or not that type is accepted by the client at all.
- * If `true`, returns the raw content types from the `Accept` header, parsed into an array
- * and sorted by client preference.
- * @return string Returns a simple type name if the type is registered (i.e. `'json'`), or
+ * @param boolean|string $type If not specified, returns the media type name that the client
+ * prefers, using content negotiation. If a media type name (string) is passed, returns
+ * `true` or `false`, indicating whether or not that type is accepted by the client at
+ * all. If `true`, returns the raw content types from the `Accept` header, parsed into
+ * an array and sorted by client preference.
+ * @return mixed Returns a simple type name if the type is registered (i.e. `'json'`), or
* a fully-qualified content-type if not (i.e. `'image/jpeg'`), or a boolean or array,
* depending on the value of `$type`.
*/
|
Updating docs for param/return types of Request::type().
|
diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -2,7 +2,6 @@
var pullWeird = require('./pull-weird')
var PacketStream = require('packet-stream')
var EventEmitter = require('events').EventEmitter
-var PullSerializer = require('pull-serializer')
function isFunction (f) {
return 'function' === typeof f
@@ -92,10 +91,7 @@ module.exports = function (remoteApi, localApi, serializer) {
emitter.createStream = function () {
- var pullPs = pullWeird(ps)
- if (serializer)
- pullPs = PullSerializer(pullPs, serializer)
- return pullPs
+ return (serializer) ? serializer(pullWeird(ps)) : pullWeird(ps)
}
return emitter
diff --git a/test/jsonb.js b/test/jsonb.js
index <HASH>..<HASH> 100644
--- a/test/jsonb.js
+++ b/test/jsonb.js
@@ -1,2 +1,4 @@
+var PullSerializer = require('pull-serializer')
+
// run tests with jsonb serialization
-require('./async')(require('json-buffer'))
\ No newline at end of file
+require('./async')(function(stream) { return PullSerializer(stream, require('json-buffer')) })
\ No newline at end of file
|
generalized to allow non-line-delimited serializers
|
diff --git a/OMMBV/trans.py b/OMMBV/trans.py
index <HASH>..<HASH> 100644
--- a/OMMBV/trans.py
+++ b/OMMBV/trans.py
@@ -20,7 +20,7 @@ try:
# geocentric Earth.
# ecef_to_geodetic = trans.ecef_to_geocentric
-except (AttributeError, NameError):
+except (AttributeError, NameError, ModuleNotFoundError):
estr = ''.join(['Unable to use Fortran version of ecef_to_geodetic.',
' Please check installation.'])
warnings.warn(estr)
|
BUG/DOC: Expand exceptions for not importing fortran
|
diff --git a/autopep8.py b/autopep8.py
index <HASH>..<HASH> 100755
--- a/autopep8.py
+++ b/autopep8.py
@@ -794,7 +794,7 @@ class FixPEP8(object):
if fixed:
for line_index in range(start_line_index, end_line_index + 1):
self.source[line_index] = ''
- self.source[start_line_index] = fixed
+ self.source[start_line_index] = fixed.rstrip() + '\n'
return range(start_line_index + 1, end_line_index + 1)
else:
return []
|
Avoid add trailing in double aggressive mode
This fixes #<I>.
|
diff --git a/pyes/query.py b/pyes/query.py
index <HASH>..<HASH> 100644
--- a/pyes/query.py
+++ b/pyes/query.py
@@ -793,7 +793,7 @@ class TermQuery(Query):
self._values = {}
if field is not None and value is not None:
- self.add(field, value)
+ self.add(field, value, boost)
def add(self, field, value, boost=None):
if not value.strip():
|
Bug Fix: On TermQuery if boost is provided from the constructor is also passed to the add method.
|
diff --git a/lib/fugue.js b/lib/fugue.js
index <HASH>..<HASH> 100644
--- a/lib/fugue.js
+++ b/lib/fugue.js
@@ -34,7 +34,7 @@ var stop = exports.stop = function() {
worker.kill();
} catch(excep) {
// do nothing, just log
- log('Error killing worker with pid ' + worker.pid + ': ' + excep.message)
+ console.log('Error killing worker with pid ' + worker.pid + ': ' + excep.message)
}
});
workers = [];
|
protected worker killing sequence on shutdown from workers quitting themselves
|
diff --git a/mode/dockerfile/dockerfile.js b/mode/dockerfile/dockerfile.js
index <HASH>..<HASH> 100644
--- a/mode/dockerfile/dockerfile.js
+++ b/mode/dockerfile/dockerfile.js
@@ -24,14 +24,12 @@
// Block comment: This is a line starting with a comment
{
regex: /#.*$/,
- token: "comment",
- next: "start"
+ token: "comment"
},
// Highlight an instruction without any arguments (for convenience)
{
regex: instructionOnlyLine,
- token: "variable-2",
- next: "start"
+ token: "variable-2"
},
// Highlight an instruction followed by arguments
{
@@ -39,10 +37,9 @@
token: ["variable-2", null],
next: "arguments"
},
- // Fail-safe return to start
{
- token: null,
- next: "start"
+ regex: /./,
+ token: null
}
],
arguments: [
@@ -54,8 +51,7 @@
},
{
regex: /[^#]+\\$/,
- token: null,
- next: "arguments"
+ token: null
},
{
// Match everything except for the inline comment
|
[dockerfile mode] Clean up state machine
Issue #<I>
|
diff --git a/pywavefront/texture.py b/pywavefront/texture.py
index <HASH>..<HASH> 100644
--- a/pywavefront/texture.py
+++ b/pywavefront/texture.py
@@ -31,15 +31,10 @@
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# ----------------------------------------------------------------------------
-import os
-
-from pywavefront.exceptions import PywavefrontException
class Texture(object):
def __init__(self, path):
- self.image_name = path
+ # Treat path as part of a file uri always using forward slashes
+ self.path = path.replace('\\', '/')
self.image = None
-
- if not os.path.exists(path):
- raise PywavefrontException("Requested file does not exist")
|
Treat texture path as a file uri + skip early exist check
|
diff --git a/src/main/java/com/couchbase/lite/router/Router.java b/src/main/java/com/couchbase/lite/router/Router.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/couchbase/lite/router/Router.java
+++ b/src/main/java/com/couchbase/lite/router/Router.java
@@ -411,6 +411,10 @@ public class Router implements Database.ChangeListener {
} else if(name.startsWith("_design") || name.startsWith("_local")) {
// This is also a document, just with a URL-encoded "/"
docID = name;
+ } else if (name.equals("_session")) {
+ // There are two possible uri to get a session, /<db>/_session or /_session.
+ // This is for /<db>/_session.
+ message = message.replaceFirst("_Document", name);
} else {
// Special document name like "_all_docs":
message += name;
|
Fix router creating a wrong handler when getting /db/_session
There are two possible uri to get a session, /<db>/_session or /_session. The current code has already taken care the /_session message. But when it got /<db>/_session, it was turn the message into do_GET_Document_session instead of do_GET_session.
#<I>
|
diff --git a/lib/discordrb/data.rb b/lib/discordrb/data.rb
index <HASH>..<HASH> 100644
--- a/lib/discordrb/data.rb
+++ b/lib/discordrb/data.rb
@@ -840,7 +840,6 @@ module Discordrb
def initialize(data, bot)
@bot = bot
@owner_id = data['owner_id'].to_i
- @owner = bot.user(@owner_id)
@id = data['id'].to_i
update_data(data)
@@ -852,6 +851,8 @@ module Discordrb
process_presences(data['presences'])
process_channels(data['channels'])
process_voice_states(data['voice_states'])
+
+ @owner = self.member(@owner_id)
end
# @return [Channel] The default channel on this server (usually called #general)
|
Properly initialize the server owner and make it a member
|
diff --git a/internal/service/meta/arn_data_source_fw.go b/internal/service/meta/arn_data_source_fw.go
index <HASH>..<HASH> 100644
--- a/internal/service/meta/arn_data_source_fw.go
+++ b/internal/service/meta/arn_data_source_fw.go
@@ -72,4 +72,5 @@ type dataSourceARN struct{}
// Read is called when the provider must read data source values in order to update state.
// Config values should be read from the ReadDataSourceRequest and new state values set on the ReadDataSourceResponse.
func (d *dataSourceARN) Read(ctx context.Context, request tfsdk.ReadDataSourceRequest, response *tfsdk.ReadDataSourceResponse) {
+ tflog.Trace(ctx, "dataSourceARN.Read enter")
}
|
d/aws_arn: Trace data source Read entry.
|
diff --git a/tests/CommandTest.php b/tests/CommandTest.php
index <HASH>..<HASH> 100644
--- a/tests/CommandTest.php
+++ b/tests/CommandTest.php
@@ -79,5 +79,14 @@ class CommandTest extends PHPUnit
'--validator' => 'title:required|unique:tweets,id',
'--no-interaction'
]);
+
+ Artisan::call('make:scaffold',
+ [
+ 'name' => 'Tweet',
+ '--schema' => 'title:string',
+ '--localization' => 'title:required',
+ '--lang' => 'fr',
+ '--no-interaction'
+ ]);
}
}
\ No newline at end of file
|
Setup tests for localization and lang command.
|
diff --git a/pipe.go b/pipe.go
index <HASH>..<HASH> 100644
--- a/pipe.go
+++ b/pipe.go
@@ -181,9 +181,12 @@ func DialPipe(path string, timeout *time.Duration) (net.Conn, error) {
return nil, err
}
- if state&cPIPE_READMODE_MESSAGE != 0 {
+ /**
+ Windows support message type pipes in message-read mode only. Removing this check to allow for windows named pipes.
+ */
+ /*if state&cPIPE_READMODE_MESSAGE != 0 {
return nil, &os.PathError{Op: "open", Path: path, Err: errors.New("message readmode pipes not supported")}
- }
+ }*/
f, err := makeWin32File(h)
if err != nil {
|
Allow for message pipes in message readmode for Windows named pipes.
|
diff --git a/lxd/db/storage_pools.go b/lxd/db/storage_pools.go
index <HASH>..<HASH> 100644
--- a/lxd/db/storage_pools.go
+++ b/lxd/db/storage_pools.go
@@ -140,13 +140,12 @@ INSERT INTO storage_volumes(name, storage_pool_id, node_id, type, description)
return errors.Wrap(err, "failed to create node ceph volumes")
}
+ // Create entries of all the ceph volumes configs for the new node.
stmt = `
SELECT id FROM storage_volumes WHERE storage_pool_id=? AND node_id=?
ORDER BY name, type
`
-
- // Create entries of all the ceph volumes configs for the new node.
- volumeIDs, err := query.SelectIntegers(c.tx, stmt, poolID, otherNodeID)
+ volumeIDs, err := query.SelectIntegers(c.tx, stmt, poolID, nodeID)
if err != nil {
return errors.Wrap(err, "failed to get joining node's ceph volume IDs")
}
|
Fix broken copy of storage volume configs when joining a cluster
|
diff --git a/anharmonic/phonon3/imag_self_energy.py b/anharmonic/phonon3/imag_self_energy.py
index <HASH>..<HASH> 100644
--- a/anharmonic/phonon3/imag_self_energy.py
+++ b/anharmonic/phonon3/imag_self_energy.py
@@ -93,7 +93,6 @@ class ImagSelfEnergy:
len(bi_set))
return imag_se
-
def get_phonon_at_grid_point(self):
return (self._frequencies[self._grid_point],
self._eigenvectors[self._grid_point])
|
Improved averaging among degenerate bands
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.