| diff (string, 65-26.7k chars) | message (string, 7-9.92k chars) |
|---|---|
diff --git a/tests/loaders/http.tests.js b/tests/loaders/http.tests.js
index <HASH>..<HASH> 100644
--- a/tests/loaders/http.tests.js
+++ b/tests/loaders/http.tests.js
@@ -68,7 +68,7 @@ describe('http', function() {
it('should report errors', function(done) {
http({ url: 'http://localhost:9999/config' })(confabulous, function(err, config) {
assert(err)
- assert.equal(err.message, 'connect ECONNREFUSED 127.0.0.1:9999')
+ assert.ok(/connect ECONNREFUSED/.test(err.message))
done()
})
})
|
Working around differences between local and Travis
|
diff --git a/spec/fixtures/movie.rb b/spec/fixtures/movie.rb
index <HASH>..<HASH> 100644
--- a/spec/fixtures/movie.rb
+++ b/spec/fixtures/movie.rb
@@ -1,5 +1,5 @@
class Movie
- attr_accessor :title, :rating, :year, :country, :seen, :star_rating
+ attr_accessor :title, :rating, :year, :country, :seen, :star_rating, :home_formats
def self.random_collection(count = 100)
(1...count).map { |_| Movie.random }
@@ -16,6 +16,7 @@ class Movie
self.country = random_country
self.seen = [true, false].sample
self.star_rating = [1, 2, 3, 4, 5].sample
+ self.home_formats = %w(BD DVD Hulu Amazon Netflix).sample(2)
end
def seen?
|
Update fixture to give it a collection attribute
home_formats will return an array of 2 of the available home formats
|
diff --git a/pyecoregen/ecore.py b/pyecoregen/ecore.py
index <HASH>..<HASH> 100644
--- a/pyecoregen/ecore.py
+++ b/pyecoregen/ecore.py
@@ -93,7 +93,7 @@ class EcorePackageModuleTask(EcoreTask):
attributes = itertools.chain(*(c.eAttributes for c in classes))
attributes_types = (a.eType for a in attributes)
- imported |= {t for t in attributes_types if t.ePackage not in {p, ecore.eClass}}
+ imported |= {t for t in attributes_types if t.ePackage not in {p, ecore.eClass, None}}
imported_dict = {}
for classifier in imported:
|
Fix generation when attribute's type is not contained in a package
In some cases, an attribute's type can be outside a Resource and an
EPackage (when built in memory). In those cases, as the EPackage is
'None', the code generation would raise an error.
|
diff --git a/packages/openneuro-server/datalad/draft.js b/packages/openneuro-server/datalad/draft.js
index <HASH>..<HASH> 100644
--- a/packages/openneuro-server/datalad/draft.js
+++ b/packages/openneuro-server/datalad/draft.js
@@ -22,6 +22,7 @@ const draftFilesKey = (datasetId, revision) => {
export const getDraftFiles = async (datasetId, revision, options = {}) => {
// If untracked is set and true
const untracked = 'untracked' in options && options.untracked
+ const query = untracked ? { untracked: true } : {}
const filesUrl = `${uri}/datasets/${datasetId}/files`
const key = draftFilesKey(datasetId, revision)
return redis.get(key).then(data => {
@@ -29,7 +30,7 @@ export const getDraftFiles = async (datasetId, revision, options = {}) => {
else
return request
.get(filesUrl)
- .query({ untracked })
+ .query(query)
.set('Accept', 'application/json')
.then(({ body: { files } }) => {
const filesWithUrls = files.map(addFileUrl(datasetId))
|
Fix query string generation for getDraftFiles.
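For illustration, a minimal Python analogue of why the conditional query object matters (the endpoint's exact parameter handling is assumed): serializing `{"untracked": false}` still puts the parameter on the URL, whereas an empty mapping omits it entirely.

```python
# Minimal sketch, assuming the server treats an absent parameter differently
# from an explicit untracked=false; the endpoint behaviour itself is assumed.
from urllib.parse import urlencode

print(urlencode({"untracked": False}))  # 'untracked=False' -> still sent
print(urlencode({}))                    # ''                -> parameter omitted

untracked = False
query = {"untracked": True} if untracked else {}
print(urlencode(query))                 # '' unless untracked was requested
```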
|
diff --git a/examples/with-webassembly/next.config.js b/examples/with-webassembly/next.config.js
index <HASH>..<HASH> 100644
--- a/examples/with-webassembly/next.config.js
+++ b/examples/with-webassembly/next.config.js
@@ -1,6 +1,10 @@
module.exports = {
webpack(config) {
config.output.webassemblyModuleFilename = 'static/wasm/[modulehash].wasm'
+
+ // Since Webpack 5 doesn't enable WebAssembly by default, we should do it manually
+ config.experiments = { asyncWebAssembly: true }
+
return config
},
}
|
(examples/with-webassembly) fixed for webpack 5 (#<I>)
## Documentation / Examples
- [x] Make sure the linting passes
Fixes #<I>
As mentioned in #<I>, the `with-webassembly` example doesn't work anymore after switching to webpack 5.
This PR adds webpack experimental configuration.
|
diff --git a/test/test_helper.rb b/test/test_helper.rb
index <HASH>..<HASH> 100644
--- a/test/test_helper.rb
+++ b/test/test_helper.rb
@@ -38,6 +38,12 @@ class JSISpec < Minitest::Spec
matcher = Regexp.new Regexp.escape matcher if String === matcher
assert matcher =~ obj, msg
end
+
+ def assert_is_a mod, obj, msg = nil
+ msg = message(msg) { "Expected instance of #{mod}. received #{obj.class}: #{mu_pp(obj)}" }
+
+ assert obj.is_a?(mod), msg
+ end
end
# register this to be the base class for specs instead of Minitest::Spec
|
JSISpec#assert_is_a, because apparently that's not something minitest feels like including
|
diff --git a/src/core/cb.run.project/project.js b/src/core/cb.run.project/project.js
index <HASH>..<HASH> 100644
--- a/src/core/cb.run.project/project.js
+++ b/src/core/cb.run.project/project.js
@@ -55,7 +55,7 @@ ProjectRunner.prototype.runScript = function(projectType, port) {
env: _.defaults({
PORT: port,
HTTP_PORT: port
- })
+ }, process.env)
});
// Id of our harbor port (to release)
|
Mix in current env variables when running a project
|
diff --git a/models/ticket.php b/models/ticket.php
index <HASH>..<HASH> 100644
--- a/models/ticket.php
+++ b/models/ticket.php
@@ -1,7 +1,7 @@
<?php
class Ticket extends TORM\Model {
public static function getNewPKValue() {
- return mktime();
+ return time();
}
}
diff --git a/test/modelTest.php b/test/modelTest.php
index <HASH>..<HASH> 100644
--- a/test/modelTest.php
+++ b/test/modelTest.php
@@ -392,7 +392,7 @@
$this->assertTrue($ticket->save());
$ticket = Ticket::last();
- $this->assertTrue($ticket->id>=mktime()-1000);
+ $this->assertTrue($ticket->id>=time()-1000);
$this->assertTrue($ticket->destroy());
}
}
|
Changed mktime to time
|
diff --git a/pkg/kubelet/apis/config/types.go b/pkg/kubelet/apis/config/types.go
index <HASH>..<HASH> 100644
--- a/pkg/kubelet/apis/config/types.go
+++ b/pkg/kubelet/apis/config/types.go
@@ -291,12 +291,12 @@ type KubeletConfiguration struct {
/* the following fields are meant for Node Allocatable */
- // A set of ResourceName=ResourceQuantity (e.g. cpu=200m,memory=150G,pids=100) pairs
+ // A set of ResourceName=ResourceQuantity (e.g. cpu=200m,memory=150G,pid=100) pairs
// that describe resources reserved for non-kubernetes components.
// Currently only cpu and memory are supported.
// See http://kubernetes.io/docs/user-guide/compute-resources for more detail.
SystemReserved map[string]string
- // A set of ResourceName=ResourceQuantity (e.g. cpu=200m,memory=150G,pids=100) pairs
+ // A set of ResourceName=ResourceQuantity (e.g. cpu=200m,memory=150G,pid=100) pairs
// that describe resources reserved for kubernetes system components.
// Currently cpu, memory and local ephemeral storage for root file system are supported.
// See http://kubernetes.io/docs/user-guide/compute-resources for more detail.
|
Fix typo in comments on SystemReserved and KubeReserved
|
diff --git a/test/Component.test.js b/test/Component.test.js
index <HASH>..<HASH> 100644
--- a/test/Component.test.js
+++ b/test/Component.test.js
@@ -1652,6 +1652,29 @@ function ComponentTests (debug, memory) {
t.end()
})
+ test('[Forwarding Component] updating attributes of a forwarded component', t => {
+ class Parent extends TestComponent {
+ render ($$) {
+ let el = $$(Child)
+ if (this.props.mode === 1) {
+ el.attr('disabled', true)
+ }
+ return el
+ }
+ }
+ class Child extends TestComponent {
+ render ($$) {
+ return $$('div').addClass('sc-child')
+ }
+ }
+ let parent = Parent.render({ mode: 0 })
+ parent.setProps({ mode: 1 })
+ t.ok(parent.el.hasAttribute('disabled'), 'forwarded element should have attribute "disabled"')
+ parent.setProps({ mode: 0 })
+ t.notOk(parent.el.hasAttribute('disabled'), 'forwarded element should not have attribute "disabled"')
+ t.end()
+ })
+
test('[Preserving] components that do not change the structure preserve child components', t => {
class MyComponent extends Component {
render ($$) {
|
Add a test revealing an issue related to Forwarding Components.
|
diff --git a/satpy/tests/writer_tests/test_mitiff.py b/satpy/tests/writer_tests/test_mitiff.py
index <HASH>..<HASH> 100644
--- a/satpy/tests/writer_tests/test_mitiff.py
+++ b/satpy/tests/writer_tests/test_mitiff.py
@@ -405,7 +405,7 @@ class TestMITIFFWriter(unittest.TestCase):
w.save_dataset(dataset)
tif = TIFF.open(os.path.join(self.base_dir, os.listdir(self.base_dir)[0]))
IMAGEDESCRIPTION = 270
- imgdesc = str(tif.GetField(IMAGEDESCRIPTION)).split('\\n')
+ imgdesc = (tif.GetField(IMAGEDESCRIPTION)).decode('utf-8').split('\n')
for key in imgdesc:
if 'In this file' in key:
self.assertEqual(key, ' Channels: 1 In this file: 1')
|
Need to decode to 'utf-8' to handle this correctly in both <I> and <I>
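A minimal Python sketch of why decoding helps (the `<I>` placeholders presumably refer to the two Python versions): on Python 3, `str()` on bytes returns the repr, so the old code had to split on a literal backslash-n, while decoding yields real text that splits the same way everywhere.

```python
raw = b"first line\nsecond line"

print(str(raw))                          # "b'first line\\nsecond line'" -- the repr
print(str(raw).split('\\n'))             # splits the repr; only works on Python 3
print(raw.decode('utf-8').split('\n'))   # ['first line', 'second line'] on 2 and 3
```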
|
diff --git a/blocks/loops.js b/blocks/loops.js
index <HASH>..<HASH> 100644
--- a/blocks/loops.js
+++ b/blocks/loops.js
@@ -257,7 +257,7 @@ Blockly.Blocks['controls_flow_statements'] = {
* @this Blockly.Block
*/
onchange: function(e) {
- if (this.workspace.isDragging()) {
+ if (!this.workspace.isDragging || this.workspace.isDragging()) {
return; // Don't change state at the start of a drag.
}
var legal = false;
diff --git a/blocks/procedures.js b/blocks/procedures.js
index <HASH>..<HASH> 100644
--- a/blocks/procedures.js
+++ b/blocks/procedures.js
@@ -843,7 +843,7 @@ Blockly.Blocks['procedures_ifreturn'] = {
* @this Blockly.Block
*/
onchange: function(e) {
- if (this.workspace.isDragging()) {
+ if (!this.workspace.isDragging || this.workspace.isDragging()) {
return; // Don't change state at the start of a drag.
}
var legal = false;
|
Stop some blocks from throwing errors in headless workspaces.
|
diff --git a/unifiedpush-test-extension/unifiedpush-test-extension-server/src/main/java/org/jboss/aerogear/unifiedpush/test/ProxySetup.java b/unifiedpush-test-extension/unifiedpush-test-extension-server/src/main/java/org/jboss/aerogear/unifiedpush/test/ProxySetup.java
index <HASH>..<HASH> 100644
--- a/unifiedpush-test-extension/unifiedpush-test-extension-server/src/main/java/org/jboss/aerogear/unifiedpush/test/ProxySetup.java
+++ b/unifiedpush-test-extension/unifiedpush-test-extension-server/src/main/java/org/jboss/aerogear/unifiedpush/test/ProxySetup.java
@@ -162,9 +162,9 @@ public class ProxySetup {
if (backgroundThread.isAlive() && !backgroundThread.isInterrupted()) {
backgroundThread.closeChannel();
backgroundThread.interrupt();
- backgroundThread = null;
logger.log(Level.INFO, "Background thread interrupted in ProxySetup.");
}
+ backgroundThread = null;
}
}
|
proxy background thread is set to null after the server stops
|
diff --git a/backend/server.js b/backend/server.js
index <HASH>..<HASH> 100644
--- a/backend/server.js
+++ b/backend/server.js
@@ -303,7 +303,7 @@ if (macros.DEV) {
port = 5000;
}
else {
- port = 80;
+ port = 5000;
}
|
"Changed port to <I> for prod because of nginx is now used"
|
diff --git a/src/FieldHandlers/Renderer/TextRenderer.php b/src/FieldHandlers/Renderer/TextRenderer.php
index <HASH>..<HASH> 100644
--- a/src/FieldHandlers/Renderer/TextRenderer.php
+++ b/src/FieldHandlers/Renderer/TextRenderer.php
@@ -17,7 +17,7 @@ class TextRenderer extends BaseRenderer
*/
public function renderValue($value, array $options = [])
{
- $result = (string)$value;
+ $result = parent::renderValue($value, $options);
if (empty($result)) {
return $result;
|
TextRenderer uses BaseRenderer for sanitization (task #<I>)
|
diff --git a/src/AccessControl/AccessChecker.php b/src/AccessControl/AccessChecker.php
index <HASH>..<HASH> 100644
--- a/src/AccessControl/AccessChecker.php
+++ b/src/AccessControl/AccessChecker.php
@@ -73,6 +73,9 @@ class AccessChecker
$this->permissions = $permissions;
$this->randomGenerator = $randomGenerator;
$this->cookieOptions = $cookieOptions;
+
+ // Disable password saves by default
+ $this->repositoryUsers->getPersister()->disableField('password');
}
/**
diff --git a/src/Users.php b/src/Users.php
index <HASH>..<HASH> 100644
--- a/src/Users.php
+++ b/src/Users.php
@@ -35,6 +35,8 @@ class Users
{
$this->app = $app;
$this->repository = $this->app['storage']->getRepository('Bolt\Storage\Entity\Users');
+ // Disable password saves by default
+ $this->repository->getPersister()->disableField('password');
/** @deprecated Will be removed in Bolt 3.0 */
$this->usertable = $this->app['storage']->getTablename('users');
|
Set the persister to disable password fields by default
|
diff --git a/hot_redis.py b/hot_redis.py
index <HASH>..<HASH> 100644
--- a/hot_redis.py
+++ b/hot_redis.py
@@ -104,6 +104,12 @@ class Iterable(Base):
elif t != self.type:
raise TypeError("%s != %s" % (t, self.type))
+ def __eq__(self, value):
+ return self.value == self._to_value(value)
+
+ def __iter__(self):
+ return iter(self.value)
+
class List(Iterable):
@@ -121,12 +127,6 @@ class List(Iterable):
def value(self):
return self[:]
- def __eq__(self, l):
- return self.value == self._to_value(l)
-
- def __iter__(self):
- return iter(self.value)
-
def __add__(self, l):
return List(self.value + self._to_value(l))
|
Generic iter/cmp for iterables
|
diff --git a/lib/client.js b/lib/client.js
index <HASH>..<HASH> 100644
--- a/lib/client.js
+++ b/lib/client.js
@@ -242,6 +242,8 @@ Client.prototype.connect = function(cfg) {
self.emit('drain');
}).once('header', function(header) {
self._remoteVer = header.versions.software;
+ }).on('continue', function() {
+ self.emit('continue');
});
if (typeof cfg.hostVerifier === 'function'
diff --git a/lib/server.js b/lib/server.js
index <HASH>..<HASH> 100644
--- a/lib/server.js
+++ b/lib/server.js
@@ -168,6 +168,8 @@ function Client(stream, socket) {
self.emit('error', err);
}).on('drain', function() {
self.emit('drain');
+ }).on('continue', function() {
+ self.emit('continue');
});
var exchanges = 0,
|
lib: re-emit continue event for client and server
|
diff --git a/pyghmi/ipmi/private/session.py b/pyghmi/ipmi/private/session.py
index <HASH>..<HASH> 100644
--- a/pyghmi/ipmi/private/session.py
+++ b/pyghmi/ipmi/private/session.py
@@ -315,7 +315,8 @@ class Session(object):
if sockaddr in cls.bmc_handlers:
self = cls.bmc_handlers[sockaddr]
if (self.bmc == bmc and self.userid == userid and
- self.password == password and self.kgo == kg):
+ self.password == password and self.kgo == kg and
+ self.logged):
trueself = self
else:
del cls.bmc_handlers[sockaddr]
|
Do not reuse a session that is not logged in
If a session was not logged in, it would still be considered a candidate for
new session objects. Disqualify such sessions so that new session
objects created after a 'logout' or similar will be fulfilled.
Change-Id: I7af<I>a8a<I>b7aedcadcec<I>d<I>e3b<I>f<I>d
|
diff --git a/package.php b/package.php
index <HASH>..<HASH> 100644
--- a/package.php
+++ b/package.php
@@ -4,7 +4,7 @@
require_once 'PEAR/PackageFileManager2.php';
-$version = '1.4.108';
+$version = '1.4.109';
$notes = <<<EOT
No release notes for you!
EOT;
|
prepare for release of <I>
svn commit r<I>
|
diff --git a/gae_memcache_store.go b/gae_memcache_store.go
index <HASH>..<HASH> 100644
--- a/gae_memcache_store.go
+++ b/gae_memcache_store.go
@@ -190,7 +190,7 @@ func (s *memcacheStore) Get(id string) Session {
// Service error? Retry..
continue
}
- if e.Expires.After(time.Now()) {
+ if e.Expires.Before(time.Now()) {
// Session expired.
datastore.Delete(s.ctx, key) // Omitting error check...
return nil
|
Fixed timeout check logic in the Datastore. Issue #2.
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -10,7 +10,7 @@ METADATA = dict(
long_description=open('README.rst').read(),
url='http://github.com/flashingpumpkin/django-socialregistration',
keywords='django facebook twitter oauth openid registration',
- install_requires=['django', 'oauth2', 'python-openid'],
+ install_requires=['oauth2', 'python-openid'],
include_package_data=True,
classifiers=[
'Development Status :: 4 - Beta',
|
Remove django requirement to prevent version conflicts when using pip
|
diff --git a/ccxt/async/exchange.py b/ccxt/async/exchange.py
index <HASH>..<HASH> 100644
--- a/ccxt/async/exchange.py
+++ b/ccxt/async/exchange.py
@@ -81,7 +81,7 @@ class Exchange(BaseExchange):
# if self.verbose:
# print('Waiting for tokens: Exchange: {0}'.format(self.id))
self.add_new_tokens()
- seconds_delays = [0.01, 0.1, 0.7, 1, 1.5, 2]
+ seconds_delays = [0.001, 0.005, 0.022, 0.106, 0.5]
delay = random.choice(seconds_delays)
await asyncio.sleep(delay)
self.rateLimitTokens -= 1
diff --git a/ccxt/async/exchanges.py b/ccxt/async/exchanges.py
index <HASH>..<HASH> 100644
--- a/ccxt/async/exchanges.py
+++ b/ccxt/async/exchanges.py
@@ -14675,6 +14675,8 @@ class kraken (Exchange):
'hasFetchClosedOrders': True,
'hasFetchMyTrades': True,
'hasWithdraw': True,
+ 'rateLimitTokens': 8,
+ 'rateLimitMaxTokens': 8,
'marketsByAltname': {},
'timeframes': {
'1m': '1',
|
Implemented fixes for issues raised on Pull Request #<I>: proper exponential-backoff random selection times (in seconds) and lowering Kraken's starting tokens, to avoid being rate limited on Kraken
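A rough Python sketch of the backoff idea (the ladder below is illustrative; ccxt's exact delay values and token accounting are not reproduced):

```python
import asyncio
import random

async def wait_for_token(base=0.001, factor=5, rungs=5):
    # Delays grow roughly geometrically (0.001, 0.005, 0.025, 0.125, 0.625 s);
    # a random rung is chosen so concurrent clients don't retry in lockstep.
    ladder = [base * factor ** i for i in range(rungs)]
    await asyncio.sleep(random.choice(ladder))

asyncio.run(wait_for_token())
```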
|
diff --git a/framework/web/CHttpRequest.php b/framework/web/CHttpRequest.php
index <HASH>..<HASH> 100644
--- a/framework/web/CHttpRequest.php
+++ b/framework/web/CHttpRequest.php
@@ -93,7 +93,7 @@ class CHttpRequest extends CApplicationComponent
private $_hostInfo;
private $_baseUrl;
private $_cookies;
- private $_preferredLanguage;
+ private $_preferredLanguages;
private $_csrfToken;
private $_restParams;
|
Update framework/web/CHttpRequest.php
Updated the name of the private property used to store the list of accepted languages (no further need for a store for the preferred language since we just take the first value from this list).
|
diff --git a/lib/assets/javascripts/unobtrusive_flash.js b/lib/assets/javascripts/unobtrusive_flash.js
index <HASH>..<HASH> 100644
--- a/lib/assets/javascripts/unobtrusive_flash.js
+++ b/lib/assets/javascripts/unobtrusive_flash.js
@@ -53,7 +53,7 @@ $(function() {
}
}
- $(function() {
+ $(window).load(function() {
UnobtrusiveFlash.showFlashFromCookies();
});
|
Transfer flash rendering from document load to window load [#<I>]
|
diff --git a/spec/chicanery/state_comparison_spec.rb b/spec/chicanery/state_comparison_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/chicanery/state_comparison_spec.rb
+++ b/spec/chicanery/state_comparison_spec.rb
@@ -1,6 +1,10 @@
describe Chicanery::StateComparison do
include Chicanery::StateComparison
+ it 'should fail' do
+ 1.should == 2
+ end
+
describe '#compare_jobs' do
let(:current_jobs) { {} }
let(:previous_jobs) { {} }
|
added a failing spec to break the build
|
diff --git a/uncompyle6/semantics/pysource.py b/uncompyle6/semantics/pysource.py
index <HASH>..<HASH> 100644
--- a/uncompyle6/semantics/pysource.py
+++ b/uncompyle6/semantics/pysource.py
@@ -650,6 +650,7 @@ class SourceWalker(GenericASTTraversal, object):
out = out[:-self.pending_newlines]
if (isinstance(out, str) and
not (PYTHON3 or self.FUTURE_UNICODE_LITERALS)):
+ from trepan.api import debug; debug()
out = unicode(out, 'utf-8')
self.f.write(out)
@@ -854,7 +855,7 @@ class SourceWalker(GenericASTTraversal, object):
# strings are interpreted:
# u'xxx' -> 'xxx'
# xxx' -> b'xxx'
- if isinstance(data, unicode):
+ if not PYTHON3 and isinstance(data, unicode):
try:
try:
data = str(data)
|
unicode bug fix try #2...
this time, for sure!
|
diff --git a/lib/fake_stripe/stub_app.rb b/lib/fake_stripe/stub_app.rb
index <HASH>..<HASH> 100644
--- a/lib/fake_stripe/stub_app.rb
+++ b/lib/fake_stripe/stub_app.rb
@@ -74,6 +74,10 @@ module FakeStripe
json_response 200, fixture('list_cards')
end
+ get '/v1/customers/:customer_id/sources/:id' do
+ json_response 200, fixture('retrieve_card')
+ end
+
# Subscriptions
post '/v1/customers/:customer_id/subscriptions' do
FakeStripe.subscription_count += 1
|
Successfully retrieve a source
Assume that the `source` (which could be a credit card or a bank account) is in
fact a card, since the `create_customer.json` lists the source as a card.
Without this, Stripe <I>s when it tries to retrieve a customer's source.
|
diff --git a/lib/controllers/media.js b/lib/controllers/media.js
index <HASH>..<HASH> 100644
--- a/lib/controllers/media.js
+++ b/lib/controllers/media.js
@@ -74,6 +74,9 @@ MediaController.prototype.load = function(media, options, callback) {
if(response.type === 'LOAD_FAILED') {
return callback(new Error('Load failed'));
}
+ if(response.type === 'LOAD_CANCELLED'){
+ return callback(new Error('Load cancelled'));
+ }
var status = response.status[0];
callback(null, status);
});
@@ -113,4 +116,4 @@ MediaController.prototype.seek = function(currentTime, callback) {
this.sessionRequest(data, callback);
};
-module.exports = MediaController;
\ No newline at end of file
+module.exports = MediaController;
|
added LOAD_CANCELLED error to stop a crash
sometimes the load responds with LOAD_CANCELLED, and this causes a crash since there isn't a response.status
|
diff --git a/canvasapi/communication_channel.py b/canvasapi/communication_channel.py
index <HASH>..<HASH> 100644
--- a/canvasapi/communication_channel.py
+++ b/canvasapi/communication_channel.py
@@ -161,7 +161,7 @@ class CommunicationChannel(CanvasObject):
try:
if not bool(value['frequency']):
return False
- except:
+ except KeyError:
return False
kwargs['notification_preferences'] = notification_preferences
|
added KeyError specific exception to make linters happy
|
diff --git a/lib/phusion_passenger/packaging.rb b/lib/phusion_passenger/packaging.rb
index <HASH>..<HASH> 100644
--- a/lib/phusion_passenger/packaging.rb
+++ b/lib/phusion_passenger/packaging.rb
@@ -63,8 +63,8 @@ module Packaging
'lib/phusion_passenger/templates/*',
'lib/phusion_passenger/templates/apache2/*',
'lib/phusion_passenger/templates/nginx/*',
- 'lib/phusion_passenger/templates/lite/*',
- 'lib/phusion_passenger/templates/lite_default_root/*',
+ 'lib/phusion_passenger/templates/standalone/*',
+ 'lib/phusion_passenger/templates/standalone_default_root/*',
'bin/*',
'doc/**/*',
'man/*',
|
Oops, fix packaging of Phusion Passenger Standalone.
|
diff --git a/examples/nautilus.py b/examples/nautilus.py
index <HASH>..<HASH> 100755
--- a/examples/nautilus.py
+++ b/examples/nautilus.py
@@ -37,7 +37,7 @@ class ChooseHandler(object):
def __init__(self, config, video, subtitles):
self.config = config
self.video = video
- self.subtitles = {s.id: s for s in subtitles}
+ self.subtitles = {s.provider_name + '-' + s.id: s for s in subtitles}
def on_subtitles_treeview_row_activated(self, treeview, path, view_column):
model = treeview.get_model()
@@ -48,7 +48,7 @@ class ChooseHandler(object):
return
# get the subtitle object
- subtitle = self.subtitles[model.get_value(iter, 0)]
+ subtitle = self.subtitles[model.get_value(iter, 3).lower() + '-' + model.get_value(iter, 0)]
# download the subtitle
with ProviderPool(providers=self.config.providers, provider_configs=self.config.provider_configs) as pool:
|
Make sure dict keys are unique in nautilus
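A toy Python illustration of the collision being avoided (provider names and ids are made up): subtitle ids are only unique per provider, so keying the dict by id alone silently drops entries.

```python
class Sub:
    """Stand-in for a subtitle object; the attributes are assumed."""
    def __init__(self, provider_name, id):
        self.provider_name = provider_name
        self.id = id

subtitles = [Sub("opensubtitles", "42"), Sub("podnapisi", "42")]

by_id = {s.id: s for s in subtitles}                                   # collides
by_provider_id = {s.provider_name + "-" + s.id: s for s in subtitles}  # unique keys
print(len(by_id), len(by_provider_id))                                 # 1 2
```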
|
diff --git a/src/Hashing/BcryptHashing.php b/src/Hashing/BcryptHashing.php
index <HASH>..<HASH> 100644
--- a/src/Hashing/BcryptHashing.php
+++ b/src/Hashing/BcryptHashing.php
@@ -47,7 +47,7 @@ class BcryptHashing extends AbstractHashing
{
$this->setCost($options['cost']);
- $this->verifyAlgorithm = $options['verify'];
+ $this->verifyAlgorithm = (isset($options['verify'])) ? $options['verify'] : false;
}
/**
|
Update BcryptHashing.php
|
diff --git a/app/Module/StoriesModule.php b/app/Module/StoriesModule.php
index <HASH>..<HASH> 100644
--- a/app/Module/StoriesModule.php
+++ b/app/Module/StoriesModule.php
@@ -168,7 +168,7 @@ class StoriesModule extends AbstractModule implements ModuleConfigInterface, Mod
*/
public function isGrayedOut(Individual $individual): bool
{
- return $this->getStoriesForIndividual($individual) !== [];
+ return $this->getStoriesForIndividual($individual) === [];
}
/**
|
Tab should be grayed out when the array is empty (#<I>)
|
diff --git a/cmd/env.go b/cmd/env.go
index <HASH>..<HASH> 100644
--- a/cmd/env.go
+++ b/cmd/env.go
@@ -857,7 +857,8 @@ func printPropertyValueDiff(b *bytes.Buffer, title func(string), diff resource.V
printPropertyValue(b, delete, deleteIndent(newIndent))
b.WriteString(colors.Reset)
} else if update, isupdate := a.Updates[i]; isupdate {
- printPropertyValueDiff(b, title, update, causedReplace, indent)
+ title(indent)
+ printPropertyValueDiff(b, func(string) {}, update, causedReplace, newIndent)
} else {
title(indent)
printPropertyValue(b, a.Sames[i], newIndent)
|
Fix a slight diffing formatting bug
|
diff --git a/tests/bootstrap.php b/tests/bootstrap.php
index <HASH>..<HASH> 100644
--- a/tests/bootstrap.php
+++ b/tests/bootstrap.php
@@ -5,6 +5,7 @@ $loader = require dirname(__DIR__) . '/vendor/autoload.php';
/** @var $loader \Composer\Autoload\ClassLoader */
$loader->addPsr4('BEAR\Package\\', __DIR__);
$loader->addPsr4('FakeVendor\HelloWorld\\', __DIR__ . '/Fake/FakeVendor/HelloWorld/src');
+\Doctrine\Common\Annotations\AnnotationRegistry::registerLoader([$loader, 'loadClass']);
$_ENV['TEST_DIR'] = __DIR__;
$_ENV['TMP_DIR'] = __DIR__ . '/tmp';
|
add annotation loader at tests/bootstrap
|
diff --git a/spec/lib/matchers/include.rb b/spec/lib/matchers/include.rb
index <HASH>..<HASH> 100644
--- a/spec/lib/matchers/include.rb
+++ b/spec/lib/matchers/include.rb
@@ -4,7 +4,7 @@ module Matchers; module Include
matcher :include_in_any_order do |*matchers|
match do |enumerable|
@not_matched = []
- expected.each do |matcher|
+ expected_as_array.each do |matcher|
if enumerable.empty?
break
end
|
(PUP-<I>) Update to use expected_as_array in rspec 3
|
diff --git a/values.js b/values.js
index <HASH>..<HASH> 100644
--- a/values.js
+++ b/values.js
@@ -12,7 +12,7 @@ ValueStream.prototype.resume = function () {
while(!this.sink.paused && !(this.ended = this._i >= this._values.length))
this.sink.write(this._values[this._i++])
- if(this.ended && !this.sink.paused && !this.sink.ended)
+ if(this.ended && !this.sink.ended)
this.sink.end()
}
|
ended does not wait for unpaused
|
diff --git a/documentation-website/src/client/Guides/GettingStarted.js b/documentation-website/src/client/Guides/GettingStarted.js
index <HASH>..<HASH> 100644
--- a/documentation-website/src/client/Guides/GettingStarted.js
+++ b/documentation-website/src/client/Guides/GettingStarted.js
@@ -27,17 +27,16 @@ export default ({ name }) => (
</p>
<PrismBlock lang='javascript'>
{
-`import Browser from '@hickory/browser';
-import Hash from '@hickory/hash';
-import InMemory from '@hickory/in-memory';
-
-// Use Browser when your website has a dynamic server
+`// Use Browser when your website has a dynamic server
+import Browser from '@hickory/browser';
const browserHistory = Browser();
// Use Hash when your website uses a static file server
+import Hash from '@hickory/hash';
const hashHistory = Hash();
// Use InMemory when your application doesn't run in a browser
+import InMemory from '@hickory/in-memory';
const memoryHistory = InMemory();`
}
</PrismBlock>
|
(Docs) Tweak Getting started guide [ci skip]
|
diff --git a/lib/foreigner/connection_adapters/sql2003.rb b/lib/foreigner/connection_adapters/sql2003.rb
index <HASH>..<HASH> 100644
--- a/lib/foreigner/connection_adapters/sql2003.rb
+++ b/lib/foreigner/connection_adapters/sql2003.rb
@@ -47,10 +47,7 @@ module Foreigner
def proper_table_name(to_table)
if ActiveRecord::Migration.instance_methods(false).include? :proper_table_name
- ActiveRecord::Migration.new.proper_table_name(to_table, options = {
- table_name_prefix: ActiveRecord::Base.table_name_prefix,
- table_name_suffix: ActiveRecord::Base.table_name_suffix
- })
+ ActiveRecord::Migration.new.proper_table_name(to_table)
else
ActiveRecord::Migrator.proper_table_name(to_table)
end
|
(maybe I don't need to explicitly send in the prefix and suffix?)
|
diff --git a/tests/Token/IncludeTest.php b/tests/Token/IncludeTest.php
index <HASH>..<HASH> 100644
--- a/tests/Token/IncludeTest.php
+++ b/tests/Token/IncludeTest.php
@@ -73,8 +73,8 @@ class PHP_Reflect_Token_IncludeTest extends PHPUnit_Framework_TestCase
}
/**
- * @covers PHP_TokenIncludes::getName
- * @covers PHP_TokenIncludes::getType
+ * @covers PHP_Reflect_Token_Includes::getName
+ * @covers PHP_Reflect_Token_Includes::getType
*/
public function testGetIncludes()
{
@@ -85,8 +85,8 @@ class PHP_Reflect_Token_IncludeTest extends PHPUnit_Framework_TestCase
}
/**
- * @covers PHP_TokenIncludes::getName
- * @covers PHP_TokenIncludes::getType
+ * @covers PHP_Reflect_Token_Includes::getName
+ * @covers PHP_Reflect_Token_Includes::getType
*/
public function testGetIncludesCategorized()
{
|
Fix typos in classname @covers annotations
|
diff --git a/galpy/potential/NumericalPotentialDerivativesMixin.py b/galpy/potential/NumericalPotentialDerivativesMixin.py
index <HASH>..<HASH> 100644
--- a/galpy/potential/NumericalPotentialDerivativesMixin.py
+++ b/galpy/potential/NumericalPotentialDerivativesMixin.py
@@ -10,9 +10,11 @@ class NumericalPotentialDerivativesMixin(object):
def _Rforce(self,R,z,phi=0.,t=0.):
# Do forward difference because R cannot be negative
RplusdR= R+self._dR
- dR= RplusdR-R
- return (self._evaluate(R,z,phi=phi,t=t)
- -self._evaluate(RplusdR,z,phi=phi,t=t))/dR
+ Rplus2dR= R+2.*self._dR
+ dR= (Rplus2dR-R)/2.
+ return (1.5*self._evaluate(R,z,phi=phi,t=t)
+ -2.*self._evaluate(RplusdR,z,phi=phi,t=t)
+ +0.5*self._evaluate(Rplus2dR,z,phi=phi,t=t))/dR
def _zforce(self,R,z,phi=0.,t=0.):
# Central difference to get derivative at z=0 right
|
Switch to 2nd-order forward difference method for numerical R derivative
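For reference, the stencil in the new code is the standard second-order forward difference, written here for a generic potential Φ with step h = dR (the sign flip gives the radial force):

```latex
\Phi'(R) \;\approx\; \frac{-3\,\Phi(R) + 4\,\Phi(R+h) - \Phi(R+2h)}{2h} + \mathcal{O}(h^2),
\qquad
F_R = -\Phi'(R) \;\approx\; \frac{1.5\,\Phi(R) - 2\,\Phi(R+h) + 0.5\,\Phi(R+2h)}{h}.
```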
|
diff --git a/hazelcast/src/main/java/com/hazelcast/examples/TestApp.java b/hazelcast/src/main/java/com/hazelcast/examples/TestApp.java
index <HASH>..<HASH> 100644
--- a/hazelcast/src/main/java/com/hazelcast/examples/TestApp.java
+++ b/hazelcast/src/main/java/com/hazelcast/examples/TestApp.java
@@ -1152,12 +1152,12 @@ public class TestApp implements EntryListener, ItemListener, MessageListener {
silent = silentBefore;
}
- void println(Object obj) {
+ public void println(Object obj) {
if (!silent)
System.out.println(obj);
}
- void print(Object obj) {
+ public void print(Object obj) {
if (!silent)
System.out.print(obj);
}
|
made print methods public in order to be able to override them
git-svn-id: <URL>
|
diff --git a/simulator/src/main/java/com/hazelcast/simulator/agent/workerjvm/WorkerJvmFailureMonitor.java b/simulator/src/main/java/com/hazelcast/simulator/agent/workerjvm/WorkerJvmFailureMonitor.java
index <HASH>..<HASH> 100644
--- a/simulator/src/main/java/com/hazelcast/simulator/agent/workerjvm/WorkerJvmFailureMonitor.java
+++ b/simulator/src/main/java/com/hazelcast/simulator/agent/workerjvm/WorkerJvmFailureMonitor.java
@@ -229,7 +229,7 @@ public class WorkerJvmFailureMonitor {
Response response = agentConnector.write(SimulatorAddress.COORDINATOR, operation);
if (response.getFirstErrorResponseType() != ResponseType.SUCCESS) {
LOGGER.error(format("Could not send failure to coordinator! %s", operation));
- } else {
+ } else if (isFailure) {
LOGGER.info("Failure successfully sent to Coordinator!");
}
} catch (SimulatorProtocolException e) {
|
Suppressed "Failure successfully sent to Coordinator" Agent logs for normally finished workers.
|
diff --git a/travis_docs_builder.py b/travis_docs_builder.py
index <HASH>..<HASH> 100644
--- a/travis_docs_builder.py
+++ b/travis_docs_builder.py
@@ -183,3 +183,26 @@ def commit_docs(*, built_docs='docs/_build/html', gh_pages_docs='docs', tmp_dir=
run(['git', 'push', '-q', 'origin_token', 'gh-pages'])
else:
print("The docs have not changed. Not updating")
+
+if __name__ == '__main__':
+ on_travis = os.environ.get("TRAVIS_JOB_NUMBER", '')
+
+ if on_travis:
+ # TODO: Get this automatically
+ repo = sys.argv[1]
+ setup_GitHub_push()
+ commit_docs()
+ else:
+ repo = input("What repo to you want to build the docs for? ")
+ username = input("What is your GitHub username? ")
+
+ token = generate_GitHub_token(username)
+ encrypted_variable = encrypt_variable("GH_TOKEN={token}".format(token=token), repo=repo)
+ travis_content = """
+env:
+ global:
+ secure: "{encrypted_variable}
+
+""".format(encrypted_variable=encrypted_variable)
+
+ print("Put", travis_content, "in your .travis.yml")
|
First pass at running the command (not tested yet)
|
diff --git a/bcbio/install.py b/bcbio/install.py
index <HASH>..<HASH> 100644
--- a/bcbio/install.py
+++ b/bcbio/install.py
@@ -212,7 +212,8 @@ def _update_conda_packages():
"""If installed in an anaconda directory, upgrade conda packages.
"""
conda_bin = _get_conda_bin()
- assert conda_bin, "Could not find anaconda distribution for upgrading bcbio"
+ assert conda_bin, ("Could not find anaconda distribution for upgrading bcbio.\n"
+ "Using python at %s but could not find conda." % (os.path.realpath(sys.executable)))
req_file = "bcbio-update-requirements.txt"
if os.path.exists(req_file):
os.remove(req_file)
|
Better error message for not finding conda install
Tries to provide better clues for debugging install problems #<I>
|
diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -3,6 +3,7 @@
let Service, Characteristic;
const request = require("request");
const async = require("async");
+const packageJSON = require('./package.json');
module.exports = function (homebridge) {
Service = homebridge.hap.Service;
@@ -61,7 +62,7 @@ HTTP_SWITCH.prototype = {
.setCharacteristic(Characteristic.Manufacturer, "Andreas Bauer")
.setCharacteristic(Characteristic.Model, "HTTP Switch")
.setCharacteristic(Characteristic.SerialNumber, "SW01")
- .setCharacteristic(Characteristic.FirmwareRevision, "0.3.1");
+ .setCharacteristic(Characteristic.FirmwareRevision, packageJSON.version);
return [informationService, this.homebridgeService];
},
|
Automatically reflect current package version in FirmwareRevision characteristic
|
diff --git a/app/src/Bolt/CronEvent.php b/app/src/Bolt/CronEvent.php
index <HASH>..<HASH> 100644
--- a/app/src/Bolt/CronEvent.php
+++ b/app/src/Bolt/CronEvent.php
@@ -50,7 +50,6 @@ class CronEvent extends Event
}
}
-
/**
* Hourly jobs
*/
@@ -58,7 +57,6 @@ class CronEvent extends Event
{
}
-
/**
* Daily jobs
*/
@@ -67,7 +65,6 @@ class CronEvent extends Event
// Check for Bolt updates
}
-
/**
* Weekly jobs
*/
@@ -82,7 +79,6 @@ class CronEvent extends Event
$this->notify("Trimming logs");
}
-
/**
* Monthly jobs
*/
@@ -90,7 +86,6 @@ class CronEvent extends Event
{
}
-
/**
* Yearly jobs
*/
@@ -98,7 +93,6 @@ class CronEvent extends Event
{
}
-
/**
* If we're passed an OutputInterface, we're called from Nut and can notify
* the end user
|
PSR-2 clean up of CronEvent.php
|
diff --git a/lib/Drawer/Item.js b/lib/Drawer/Item.js
index <HASH>..<HASH> 100644
--- a/lib/Drawer/Item.js
+++ b/lib/Drawer/Item.js
@@ -59,7 +59,6 @@ const styles = {
},
icon: {
position: 'relative',
- top: -1
},
value: {
flex: 1,
|
Final fix: Drawer menu items are not centered
|
diff --git a/pypd/mixins.py b/pypd/mixins.py
index <HASH>..<HASH> 100644
--- a/pypd/mixins.py
+++ b/pypd/mixins.py
@@ -92,7 +92,7 @@ class ClientMixin(object):
if add_headers is not None:
headers.update(**add_headers)
- for k, v in query_params.items():
+ for k, v in query_params.copy().items():
if isinstance(v, stringtype):
continue
elif isinstance(v, Number):
diff --git a/test/unit/clientmixin.py b/test/unit/clientmixin.py
index <HASH>..<HASH> 100644
--- a/test/unit/clientmixin.py
+++ b/test/unit/clientmixin.py
@@ -83,5 +83,15 @@ class ClientMixinTestCase(unittest.TestCase):
headers=''
)
+ def test_statuses_array(self, m):
+ method = 'GET'
+ body = {'status': 'OK'}
+ url = '%s?statuses[]=triggered&statuses[]=acknowledged' % self.url
+ m.register_uri(method, url, json=body)
+ result = self.requester.request(method, self.endpoint,
+ query_params={'statuses': ['triggered',
+ 'acknowledged']})
+ self.assertEqual(body, result)
+
if __name__ == '__main__':
unittest.main()
|
Python 3 re-evaluates the map at each iteration of the loop; use a copy of query_params to avoid reprocessing already-seen items
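A minimal Python illustration of the difference (the exact loop body in pypd is assumed): in Python 3, `dict.items()` is a live view rather than the snapshot list Python 2 returned, so mutations made while looping are visible to the view being iterated; copying first pins down the items to process.

```python
params = {"statuses": ["triggered", "acknowledged"]}

view = params.items()                          # live view in Python 3
params["statuses[]"] = params.pop("statuses")  # mutate after taking the view
print(("statuses[]", ["triggered", "acknowledged"]) in view)  # True: view tracks changes

# Iterating over a shallow copy processes each original item exactly once,
# even if the loop body rewrites keys on the real dict.
for key, value in params.copy().items():
    print(key, value)
```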
|
diff --git a/staging/src/k8s.io/kubelet/config/v1beta1/types.go b/staging/src/k8s.io/kubelet/config/v1beta1/types.go
index <HASH>..<HASH> 100644
--- a/staging/src/k8s.io/kubelet/config/v1beta1/types.go
+++ b/staging/src/k8s.io/kubelet/config/v1beta1/types.go
@@ -327,10 +327,10 @@ type KubeletConfiguration struct {
// status to master if node status does not change. Kubelet will ignore this
// frequency and post node status immediately if any change is detected. It is
// only used when node lease feature is enabled. nodeStatusReportFrequency's
- // default value is 1m. But if nodeStatusUpdateFrequency is set explicitly,
+ // default value is 5m. But if nodeStatusUpdateFrequency is set explicitly,
// nodeStatusReportFrequency's default value will be set to
// nodeStatusUpdateFrequency for backward compatibility.
- // Default: "1m"
+ // Default: "5m"
// +optional
NodeStatusReportFrequency metav1.Duration `json:"nodeStatusReportFrequency,omitempty"`
// nodeLeaseDurationSeconds is the duration the Kubelet will set on its corresponding Lease,
|
bump NodeStatusReportFrequency default value to 5min in comment
|
diff --git a/src/Sylius/Bundle/CoreBundle/Command/InstallSampleDataCommand.php b/src/Sylius/Bundle/CoreBundle/Command/InstallSampleDataCommand.php
index <HASH>..<HASH> 100644
--- a/src/Sylius/Bundle/CoreBundle/Command/InstallSampleDataCommand.php
+++ b/src/Sylius/Bundle/CoreBundle/Command/InstallSampleDataCommand.php
@@ -52,7 +52,7 @@ EOT
$outputStyle->writeln(sprintf(
'Loading sample data for environment <info>%s</info> from suite <info>%s</info>.',
$this->getEnvironment(),
- $suite
+ $suite ?? 'default'
));
$outputStyle->writeln('<error>Warning! This action will erase your database.</error>');
@@ -72,10 +72,10 @@ EOT
return 1;
}
- $parameters = ['--no-interaction' => true];
- if (null !== $suite) {
- $parameters['suite'] = $suite;
- }
+ $parameters = [
+ 'suite' => $suite,
+ '--no-interaction' => true,
+ ];
$commands = [
'sylius:fixtures:load' => $parameters,
|
Added: Suggested fixes
|
diff --git a/lib/outputrenderers.php b/lib/outputrenderers.php
index <HASH>..<HASH> 100644
--- a/lib/outputrenderers.php
+++ b/lib/outputrenderers.php
@@ -2511,7 +2511,10 @@ EOD;
if ($item->hidden) {
$link->add_class('dimmed');
}
- $link->text = $content.$link->text; // add help icon
+ if (!empty($content)) {
+ // Providing there is content we will use that for the link content.
+ $link->text = $content;
+ }
$content = $this->render($link);
} else if ($item->action instanceof moodle_url) {
$attributes = array();
@@ -2896,4 +2899,4 @@ class core_renderer_ajax extends core_renderer {
* @param string $id
*/
public function heading($text, $level = 2, $classes = 'main', $id = null) {}
-}
\ No newline at end of file
+}
|
MDL-<I> navigation: Fixed up issue when rendering action_link instances for the navigation
|
diff --git a/Helper/Logger.php b/Helper/Logger.php
index <HASH>..<HASH> 100644
--- a/Helper/Logger.php
+++ b/Helper/Logger.php
@@ -69,7 +69,7 @@ class Logger
$writer = new \Zend\Log\Writer\Stream(BP . '/var/log/checkoutcom_magento2.log');
$logger = new \Zend\Log\Logger();
$logger->addWriter($writer);
- $logger->info(json_encode($msg, JSON_PRETTY_PRINT));
+ $logger->info($msg);
}
}
@@ -93,7 +93,13 @@ class Logger
);
if ($debug && $gatewayResponses) {
- $output = json_encode($response, JSON_PRETTY_PRINT);
+ // Prepare the output
+ $output = json_encode(
+ json_decode($response),
+ JSON_PRETTY_PRINT
+ );
+
+ // Display the content
$this->messageManager->addComplexSuccessMessage(
'ckoMessages',
['output' => $output]
|
Updated the logger class outputs
|
diff --git a/dragonlib/utils/logging_utils.py b/dragonlib/utils/logging_utils.py
index <HASH>..<HASH> 100755
--- a/dragonlib/utils/logging_utils.py
+++ b/dragonlib/utils/logging_utils.py
@@ -66,7 +66,7 @@ def setup_logging(level, logger_name=None, handler=None, log_formatter=None):
root_logger.info("Set %i level to logger %r", level, logger_name)
if level == 100:
- logger.handlers = ()
+ logger.handlers = (logging.NullHandler(),)
logger.disabled = True
return
|
Bugfix for disabling logging:
the stdlib logging module does something like:
if len(root.handlers) == 0:
    basicConfig()
;)
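A minimal sketch of the workaround (the logger name is chosen for this sketch): as the quote above notes, the module-level logging helpers call `basicConfig()` when the root logger has no handlers, so leaving the handler tuple empty invites handlers to come back; a `NullHandler` keeps the list non-empty while still discarding every record.

```python
import logging

logger = logging.getLogger("dragonlib.example")  # illustrative name
logger.handlers = [logging.NullHandler()]        # non-empty, but swallows records
logger.propagate = False                         # keep records away from the root logger
logger.disabled = True

logger.warning("this is discarded silently")
```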
|
diff --git a/lib/Doctrine/Common/Proxy/ProxyGenerator.php b/lib/Doctrine/Common/Proxy/ProxyGenerator.php
index <HASH>..<HASH> 100644
--- a/lib/Doctrine/Common/Proxy/ProxyGenerator.php
+++ b/lib/Doctrine/Common/Proxy/ProxyGenerator.php
@@ -7,7 +7,6 @@ use Doctrine\Common\Proxy\Exception\UnexpectedValueException;
use Doctrine\Common\Util\ClassUtils;
use Doctrine\Persistence\Mapping\ClassMetadata;
use ReflectionMethod;
-use ReflectionNamedType;
use ReflectionParameter;
use ReflectionProperty;
use ReflectionType;
@@ -1110,7 +1109,7 @@ EOT;
* @return string
*/
private function formatType(
- ReflectionNamedType $type,
+ ReflectionType $type,
ReflectionMethod $method,
?ReflectionParameter $parameter = null
) {
|
Loosen up type constraint from ReflectionNamedType to ReflectionType.
|
diff --git a/src/biojs-io-biom.js b/src/biojs-io-biom.js
index <HASH>..<HASH> 100644
--- a/src/biojs-io-biom.js
+++ b/src/biojs-io-biom.js
@@ -590,5 +590,6 @@ export class Biom {
throw new Error('The given biomString is not in json format and no conversion server is specified.\n' + e.message);
}
}
+ return new Biom(json_obj);
}
}
|
Add return of the newly created biom object
|
diff --git a/scapy/layers/dhcp6.py b/scapy/layers/dhcp6.py
index <HASH>..<HASH> 100644
--- a/scapy/layers/dhcp6.py
+++ b/scapy/layers/dhcp6.py
@@ -1169,7 +1169,7 @@ class DHCP6_RelayReply(DHCP6_RelayForward):
return inet_pton(socket.AF_INET6, self.peeraddr)
def answers(self, other):
return (isinstance(other, DHCP6_RelayForward) and
- self.count == other.count and
+ self.hopcount == other.hopcount and
self.linkaddr == other.linkaddr and
self.peeraddr == other.peeraddr )
|
Use "hopcount" instead of "count" for DHCPv6 RelayForward msgs.
|
diff --git a/src/python/twitter/pants/commands/goal.py b/src/python/twitter/pants/commands/goal.py
index <HASH>..<HASH> 100644
--- a/src/python/twitter/pants/commands/goal.py
+++ b/src/python/twitter/pants/commands/goal.py
@@ -452,7 +452,8 @@ if NailgunTask.killall:
# TODO(John Sirois): Resolve eggs
goal(
name='ivy',
- action=IvyResolve
+ action=IvyResolve,
+ dependencies=['gen']
).install('resolve').with_description('Resolves jar dependencies and produces dependency reports.')
|
Make resolve depend on gen.
This is so any deps injected into synthetic codegen targets
participate in the resolve.
(sapling split of 9bc6d<I>b<I>bc8d<I>f<I>bf<I>fc<I>cf<I>d<I>)
|
diff --git a/structr-core/src/main/java/org/structr/core/script/polyglot/PolyglotWrapper.java b/structr-core/src/main/java/org/structr/core/script/polyglot/PolyglotWrapper.java
index <HASH>..<HASH> 100644
--- a/structr-core/src/main/java/org/structr/core/script/polyglot/PolyglotWrapper.java
+++ b/structr-core/src/main/java/org/structr/core/script/polyglot/PolyglotWrapper.java
@@ -139,17 +139,6 @@ public abstract class PolyglotWrapper {
}
}
- protected static List<Object> wrapIterable(ActionContext actionContext, final Iterable<Object> iterable) {
-
- final List<Object> wrappedList = new ArrayList<>();
-
- for (Object o : iterable) {
-
- wrappedList.add(wrap(actionContext, o));
- }
- return wrappedList;
- }
-
protected static List<Object> unwrapIterable(final ActionContext actionContext, final Iterable<Object> iterable) {
final List<Object> unwrappedList = new ArrayList<>();
@@ -208,6 +197,8 @@ public abstract class PolyglotWrapper {
public FunctionWrapper(final ActionContext actionContext, final Value func) {
+ this.actionContext = actionContext;
+
if (func.canExecute()) {
this.func = func;
|
Fixes minor issue in polyglot FunctionWrapper.
|
diff --git a/backbone.js b/backbone.js
index <HASH>..<HASH> 100644
--- a/backbone.js
+++ b/backbone.js
@@ -826,6 +826,7 @@
// Get the model at the given index.
at: function(index) {
+ if (index < 0) index += this.length;
return this.models[index];
},
|
Allow Collection.at to accept negative indexes
This allows you to do things like
```javascript
mycollection.at(-2) // get second to last, etc.
```
|
diff --git a/pyemma/_base/serialization/h5file.py b/pyemma/_base/serialization/h5file.py
index <HASH>..<HASH> 100644
--- a/pyemma/_base/serialization/h5file.py
+++ b/pyemma/_base/serialization/h5file.py
@@ -72,7 +72,7 @@ class H5File(object):
# used during saving.
if name in self._parent:
if overwrite:
- logger.info('overwriting model "%s" in file %s', name, self._file.name)
+ logger.info('overwriting model "%s" in file %s', name, self._file.filename)
self.__group = self._parent[name]
del self._current_model_group
else:
@@ -96,7 +96,7 @@ class H5File(object):
# not existent and read only
if model_name not in self._parent and self._file.mode == 'r':
- raise ValueError('model_name "{n}" not found in file {f}'.format(n=model_name, f=self._file.name))
+ raise ValueError('model_name "{n}" not found in file {f}'.format(n=model_name, f=self._file.filename))
self.__group = self._parent.require_group(model_name)
@_current_model_group.deleter
|
[serialization] fix file name in exceptions
|
diff --git a/stories/DynamicWidgets.stories.js b/stories/DynamicWidgets.stories.js
index <HASH>..<HASH> 100644
--- a/stories/DynamicWidgets.stories.js
+++ b/stories/DynamicWidgets.stories.js
@@ -22,15 +22,4 @@ storiesOf('ais-dynamic-widgets', module)
],
};
},
- methods: {
- transformItems(_attributes, { results }) {
- if (results._state.query === 'dog') {
- return ['categories'];
- }
- if (results._state.query === 'lego') {
- return ['categories', 'brand'];
- }
- return ['brand', 'hierarchicalCategories.lvl0', 'categories'];
- },
- },
}));
|
chore(stories): dynamic widgets dashboard usage
|
diff --git a/test/pogoplug/client_test.rb b/test/pogoplug/client_test.rb
index <HASH>..<HASH> 100644
--- a/test/pogoplug/client_test.rb
+++ b/test/pogoplug/client_test.rb
@@ -188,7 +188,6 @@ module PogoPlug
file_to_download = @fileListing.files.select { |f| f.file? }.first
if file_to_download
io = @client.download(@device.id, @service, file_to_download)
- file = ::File.write(file_to_download.name, io, nil, mode: 'wb')
assert_equal(file_to_download.size, io.size, "File should be the same size as the descriptor said it would be")
end
end
|
no need to write the downloaded file to disk
|
diff --git a/ImagePanel.py b/ImagePanel.py
index <HASH>..<HASH> 100644
--- a/ImagePanel.py
+++ b/ImagePanel.py
@@ -469,7 +469,7 @@ class InfoOverlayCanvasItem(CanvasItem.AbstractCanvasItem):
drawing_context.fill_text(self.data_item.calibrations[0].convert_to_calibrated_size_str(scale_marker_image_width), origin[1], origin[0] - scale_marker_height - 4)
data_item_properties = self.data_item.properties
info_items = list()
- voltage = data_item_properties.get("voltage", 0)
+ voltage = data_item_properties.get("extra_high_tension", 0)
if voltage:
units = "V"
if voltage % 1000 == 0:
|
Rename 'voltage' property to 'extra_high_tension'.
svn r<I>
|
diff --git a/spacy/cli/train.py b/spacy/cli/train.py
index <HASH>..<HASH> 100644
--- a/spacy/cli/train.py
+++ b/spacy/cli/train.py
@@ -165,12 +165,9 @@ def train(cmd, lang, output_dir, train_data, dev_data, n_iter=30, n_sents=0,
gpu_wps=gpu_wps)
finally:
print("Saving model...")
- try:
- with (output_path / 'model-final.pickle').open('wb') as file_:
- with nlp.use_params(optimizer.averages):
- dill.dump(nlp, file_, -1)
- except:
- print("Error saving model")
+ with (output_path / 'model-final.pickle').open('wb') as file_:
+ with nlp.use_params(optimizer.averages):
+ dill.dump(nlp, file_, -1)
def _render_parses(i, to_render):
|
Make cli/train.py not eat exception
|
diff --git a/lib/taskHandlers/action.js b/lib/taskHandlers/action.js
index <HASH>..<HASH> 100644
--- a/lib/taskHandlers/action.js
+++ b/lib/taskHandlers/action.js
@@ -5,7 +5,7 @@ var configStore = require("../configStore");
var {dbErrors} = require("../const");
class PerformAction extends TaskHandlerBase {
- __run(cb, actionDesc, user, args, item) {
+ __run(actionDesc, user, args, item, cb) {
if (actionDesc.hidden(user, item)) {
return cb(new Error("Action is hidden"), null);
}
@@ -45,7 +45,7 @@ class PerformAction extends TaskHandlerBase {
return cb(new Error("Invalid args: request"), null);
}
if (actionDesc.storeAction || storeName === "_nav") {
- return this.__run(cb, actionDesc, user, args);
+ return this.__run(actionDesc, user, args, null, cb);
}
let storeDesc = configStore.getStoreDesc(storeName, user), createBaseIfNotFound = false;
if (storeDesc.type === "single" || storeDesc.display === "single") {
@@ -67,7 +67,7 @@ class PerformAction extends TaskHandlerBase {
return;
}
}
- this.__run(cb, actionDesc, user, args, item);
+ this.__run(actionDesc, user, args, item, cb);
});
}
}
|
moved cb to the last argument in PerformAction.__run
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -23,13 +23,14 @@ def readme():
def version():
- return '0.1.5'
+ return '0.1.6'
setuptools.setup(
name='qj',
description='qj: logging designed for debugging.',
long_description=readme(),
+ long_description_content_type='text/markdown',
version=version(),
url='https://github.com/iansf/qj',
download_url='https://github.com/iansf/qj/archive/%s.tar.gz' % version(),
@@ -39,5 +40,5 @@ setuptools.setup(
license='Apache 2.0',
install_requires=[],
test_suite='nose.collector',
- tests_require=['nose'],
+ tests_require=['nose', 'mock'],
)
|
Update setup.py for new pypi requirements. Update version to <I>. All tests pass on <I> and <I>.
|
diff --git a/tests/comment_test.py b/tests/comment_test.py
index <HASH>..<HASH> 100644
--- a/tests/comment_test.py
+++ b/tests/comment_test.py
@@ -38,7 +38,7 @@ class CommentTest(PyPumpTest):
},
"url": "https://example.com/testuser/comment/UOsxSKbITXixW5r_HAyO2A",
"id": "https://example.com/api/comment/UOsxSKbITXixW5r_HAyO2A",
- "liked": False,
+ "liked": True,
"pump_io": {
"shared": False
}
|
change Comment.liked test to expose a bug
|
diff --git a/tests/test_auto_fit.py b/tests/test_auto_fit.py
index <HASH>..<HASH> 100644
--- a/tests/test_auto_fit.py
+++ b/tests/test_auto_fit.py
@@ -282,14 +282,14 @@ class TestAutoFit(unittest.TestCase):
xx, yy = np.meshgrid(xcentres, ycentres, sparse=False, indexing='ij')
x0 = Parameter(value=1.1 * mean[0], min=0.0, max=1.0)
- sig_x = Parameter(1.1 * 0.2, min=0.0, max=0.3)
+ sig_x = Parameter(value=1.1 * 0.2, min=0.0, max=0.3)
y0 = Parameter(value=1.1 * mean[1], min=0.0, max=1.0)
- sig_y = Parameter(1.1 * 0.1, min=0.0, max=0.3)
+ sig_y = Parameter(value=1.1 * 0.1, min=0.0, max=0.3)
A = Parameter(value=1.1 * np.mean(ydata), min=0.0)
b = Parameter(value=1.2 * background, min=0.0)
- x = Variable()
- y = Variable()
- g = Variable()
+ x = Variable('x')
+ y = Variable('y')
+ g = Variable('g')
model = Model({g: A * Gaussian(x, x0, sig_x) * Gaussian(y, y0, sig_y) + b})
|
Updated tests to reflect inspectless style
|
diff --git a/table/policy.go b/table/policy.go
index <HASH>..<HASH> 100644
--- a/table/policy.go
+++ b/table/policy.go
@@ -1100,6 +1100,16 @@ func NewExtCommunitySet(c config.ExtCommunitySet) (*ExtCommunitySet, error) {
}, nil
}
+func (s *ExtCommunitySet) Append(arg DefinedSet) error {
+ err := s.regExpSet.Append(arg)
+ if err != nil {
+ return err
+ }
+ sList := arg.(*ExtCommunitySet).subtypeList
+ s.subtypeList = append(s.subtypeList, sList...)
+ return nil
+}
+
type LargeCommunitySet struct {
regExpSet
}
|
policy: avoid crash when getting ext-community
|
diff --git a/allauth/socialaccount/providers/discord/views.py b/allauth/socialaccount/providers/discord/views.py
index <HASH>..<HASH> 100644
--- a/allauth/socialaccount/providers/discord/views.py
+++ b/allauth/socialaccount/providers/discord/views.py
@@ -10,9 +10,9 @@ from allauth.socialaccount.providers.oauth2.views import (
class DiscordOAuth2Adapter(OAuth2Adapter):
provider_id = DiscordProvider.id
- access_token_url = 'https://discordapp.com/api/oauth2/token'
- authorize_url = 'https://discordapp.com/api/oauth2/authorize'
- profile_url = 'https://discordapp.com/api/users/@me'
+ access_token_url = 'https://discord.com/api/oauth2/token'
+ authorize_url = 'https://discord.com/api/oauth2/authorize'
+ profile_url = 'https://discord.com/api/users/@me'
def complete_login(self, request, app, token, **kwargs):
headers = {
|
fix(discord): Switch to new API domain
|
diff --git a/lib/objectFactory.js b/lib/objectFactory.js
index <HASH>..<HASH> 100644
--- a/lib/objectFactory.js
+++ b/lib/objectFactory.js
@@ -14,27 +14,39 @@ var create = function (params) {
*/
var extendThese = params.extends,
- implementsInterfaces = params.implements || [];
+ implementsInterfaces = params.implements || [],
+ constructor = params.constructor;
if (params.extends) {
delete params.extends;
- }
+ };
if (params.implements) {
delete params.implements
- }
+ };
+
+ if (params.constructor) {
+ // Rename the constructor param so it can be added with the
+ // other params
+ params._constructor = params.constructor;
+ delete params.constructor;
+ };
var outp = function (data) {
+ // Run the constructor
+ this._constructor && this._constructor(data);
+
+ // Then add passed params/data
for (var key in data) {
this[key] = data[key];
};
- };
-
+ };
+
outp.prototype._implements = []
// If extends other do first so they get overridden by those passed as params
// Inehrited prototypes with lower index have precedence
- common.extendPrototypeWithThese(outp, extendThese)
+ common.extendPrototypeWithThese(outp, extendThese);
// The rest of the params are added as methods, overriding previous
common.addMembers(outp, params);
|
Allow passing and inheriting a constructor method, stored as _constructor
|
diff --git a/src/fn/DI/Container.php b/src/fn/DI/Container.php
index <HASH>..<HASH> 100644
--- a/src/fn/DI/Container.php
+++ b/src/fn/DI/Container.php
@@ -32,6 +32,8 @@ class Container extends \DI\Container implements MutableDefinitionSource
);
}
parent::__construct($this->definitionSource = $definitionSource, $proxyFactory, $wrapperContainer);
+ $this->resolvedEntries[self::class] = $this;
+ $this->resolvedEntries[static::class] = $this;
}
/**
diff --git a/tests/fixtures/extra-empty/test.php b/tests/fixtures/extra-empty/test.php
index <HASH>..<HASH> 100644
--- a/tests/fixtures/extra-empty/test.php
+++ b/tests/fixtures/extra-empty/test.php
@@ -5,6 +5,10 @@
fn\Composer\di() instanceof fn\Composer\DI || fn\fail(__LINE__);
+call_user_func(require 'vendor/autoload.php', function(fn\Composer\DI $composer, fn\DI\Container $di) {
+ $composer === $di || fn\fail(__LINE__);
+});
+
echo call_user_func(require 'vendor/autoload.php', function(Psr\Container\ContainerInterface $container) {
return get_class($container);
});
|
add class fn\DI\Container and every derived class to resolved entries
|
diff --git a/patroni/ctl.py b/patroni/ctl.py
index <HASH>..<HASH> 100644
--- a/patroni/ctl.py
+++ b/patroni/ctl.py
@@ -1101,7 +1101,7 @@ def edit_config(obj, cluster_name, force, quiet, kvpairs, pgkvpairs, apply_filen
@ctl.command('show-config', help="Show cluster configuration")
-@click.argument('cluster_name')
+@arg_cluster_name
@click.pass_obj
def show_config(obj, cluster_name):
cluster = get_dcs(obj, cluster_name).get_cluster()
|
Make show-config work with cluster_name from config file (#<I>)
similar to edit-config, list and so on
|
diff --git a/generators/cucumber/templates/env.rb b/generators/cucumber/templates/env.rb
index <HASH>..<HASH> 100644
--- a/generators/cucumber/templates/env.rb
+++ b/generators/cucumber/templates/env.rb
@@ -1,5 +1,5 @@
require 'cucumber'
-require 'capybara/cucumber'
+require 'capybara-screenshot/cucumber'
require_relative '../../config/boot'
require_relative '../../config/capybara'
@@ -12,6 +12,7 @@ Howitzer::Cache.store(:cloud, :start_time, Time.now.utc)
Howitzer::Cache.store(:cloud, :status, true)
Before do |scenario|
+ Capybara.use_default_driver
Howitzer::Log.print_feature_name(scenario.feature.name)
Howitzer::Log.print_scenario_name(scenario.name)
@session_start = CapybaraHelpers.duration(Time.now.utc - Howitzer::Cache.extract(:cloud, :start_time))
@@ -28,6 +29,7 @@ After do |scenario|
page.execute_script("void(document.execCommand('ClearAuthenticationCache', false));")
end
Howitzer::Cache.clear_all_ns
+ Capybara.reset_sessions!
end
at_exit do
|
removed extra Capybara DSL include
|
diff --git a/core/src/main/java/hudson/util/AtomicFileWriter.java b/core/src/main/java/hudson/util/AtomicFileWriter.java
index <HASH>..<HASH> 100644
--- a/core/src/main/java/hudson/util/AtomicFileWriter.java
+++ b/core/src/main/java/hudson/util/AtomicFileWriter.java
@@ -130,7 +130,7 @@ public class AtomicFileWriter extends Writer {
try {
// Try to make an atomic move.
Files.move(tmpFile, destFile, StandardCopyOption.ATOMIC_MOVE);
- } catch (IOException e) {
+ } catch (AtomicMoveNotSupportedException e) {
// If it falls here that means that Atomic move is not supported by the OS.
// In this case we need to fall-back to a copy option which is supported by all OSes.
Files.move(tmpFile, destFile, StandardCopyOption.REPLACE_EXISTING);
|
Only catch AtomicMoveNotSupportedException.
|
diff --git a/js/lib/mediawiki.Util.js b/js/lib/mediawiki.Util.js
index <HASH>..<HASH> 100644
--- a/js/lib/mediawiki.Util.js
+++ b/js/lib/mediawiki.Util.js
@@ -877,7 +877,7 @@ normalizeOut = function ( out ) {
// http://www.whatwg.org/specs/web-apps/current-work/multipage/the-end.html#serializing-html-fragments
out = normalizeNewlines( out );
return out
- .replace(/<span typeof="mw:(?:(?:Placeholder|Nowiki|Object\/Template|Entity))"(?:\s+[^\s"'>\/=]+(?:\s*=\s*"[^"]*")?)*\s*>((?:[^<]+|(?!<\/span).)*)<\/span>/g, '$1')
+ .replace(/<span typeof="mw:(?:(?:Placeholder|Nowiki|Object\/Template|Entity))"(?:\s+[^\s\"\'>\/=]+(?:\s*=\s*"[^"]*")?)*\s*>((?:[^<]+|(?!<\/span).)*)<\/span>/g, '$1')
// Ignore these attributes for now
.replace(/ (data-parsoid|typeof|resource|rel|prefix|about|rev|datatype|inlist|property|vocab|content|title|class)="[^"]*"/g, '')
// replace mwt ids
|
Escape a single quote so we don't screw up emacs' syntax coloring.
Change-Id: I<I>eca7b0f<I>ac8d<I>a8d<I>e<I>a8f<I>bb5d
|
diff --git a/controller/src/main/java/org/jboss/as/controller/AttributeDefinition.java b/controller/src/main/java/org/jboss/as/controller/AttributeDefinition.java
index <HASH>..<HASH> 100644
--- a/controller/src/main/java/org/jboss/as/controller/AttributeDefinition.java
+++ b/controller/src/main/java/org/jboss/as/controller/AttributeDefinition.java
@@ -415,7 +415,8 @@ public abstract class AttributeDefinition {
}
private ModelNode convertToExpectedType(final ModelNode node) {
- if (node.getType() == type || node.getType() == ModelType.EXPRESSION || Util.isExpression(node.asString()) || !node.isDefined()) {
+ ModelType nodeType = node.getType();
+ if (nodeType == type || nodeType == ModelType.UNDEFINED || nodeType == ModelType.EXPRESSION || Util.isExpression(node.asString())) {
return node;
}
switch (type) {
|
Check for undefined early to improve perf
|
diff --git a/spec/unit/mixin/shell_out_spec.rb b/spec/unit/mixin/shell_out_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/unit/mixin/shell_out_spec.rb
+++ b/spec/unit/mixin/shell_out_spec.rb
@@ -33,12 +33,13 @@ describe Ohai::Mixin::ShellOut, "shell_out" do
else
# this just replicates the behavior of default_paths in chef-utils
default_paths = ( [ ENV['PATH'] ? ENV['PATH'].split(':').reverse : nil, RbConfig::CONFIG["bindir"] ].uniq.reverse + [ "/usr/local/sbin", "/usr/local/bin", "/usr/sbin", "/usr/bin", "/sbin", "/bin" ] ).compact.uniq.join(":")
+ default_locale = ChefConfig::Config.guess_internal_locale
{
timeout: timeout,
environment: {
- "LANG" => "en_US.UTF-8",
- "LANGUAGE" => "en_US.UTF-8",
- "LC_ALL" => "en_US.UTF-8",
+ "LANG" => default_locale,
+ "LANGUAGE" => default_locale,
+ "LC_ALL" => default_locale,
"PATH" => default_paths,
},
}
|
fix the default locale
we need to call out to the ChefConfig helper to set this
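A hedged Python stand-in for the helper the commit calls (the logic below is assumed, not ChefConfig's actual implementation): prefer whatever UTF-8 locale the environment already exports rather than hard-coding en_US.UTF-8.

```python
import os


def guess_internal_locale():
    # Assumed logic: take the first UTF-8 locale the environment provides.
    for var in ("LC_ALL", "LC_CTYPE", "LANG"):
        value = os.environ.get(var, "")
        if value.upper().endswith(("UTF-8", "UTF8")):
            return value
    return "C.UTF-8"  # conservative fallback on modern systems


env = {name: guess_internal_locale() for name in ("LANG", "LANGUAGE", "LC_ALL")}
```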
|
diff --git a/webpype/client.py b/webpype/client.py
index <HASH>..<HASH> 100644
--- a/webpype/client.py
+++ b/webpype/client.py
@@ -26,10 +26,17 @@ class WebPypeBaseClient(object):
resp = urlopen(request)
return resp.read()
- def _wrapinput(self, inputs, array_wrap=False):
+ def _validate_input(self, inputs):
+ if isinstance(inputs, unicode):
+ inputs = str(inputs)
if not isinstance(inputs, dict) and not isinstance(inputs, str):
raise TypeError('''Your input value must be a dictionary or
string. Got: %s''' % inputs)
+ return inputs
+
+ def _wrapinput(self, inputs, array_wrap=False):
+        inputs = self._validate_input(inputs)
+
if array_wrap:
if isinstance(inputs, dict):
data = json.dumps({'inputs': inputs})
|
Added _validate_input(), and moved input validation away
from _wrapinput(). Now it can also handle unicode
|
diff --git a/src/index.js b/src/index.js
index <HASH>..<HASH> 100644
--- a/src/index.js
+++ b/src/index.js
@@ -136,7 +136,7 @@ export default class OneSignal {
static sendTag(key, value) {
if (!checkIfInitialized(RNOneSignal)) return;
- if (!key || !value) {
+ if (!key || (!value && value !== "")) {
console.error("OneSignal: sendTag: must include a key and a value");
}
|
Small Nit: We Should Allow for the setting of blank strings as values
Motivation: tags can be removed by sending a blank string. Here we're updating the logic to allow for that
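The pitfall generalises beyond JavaScript: an empty string is falsy in Python too, so a naive truthiness check would also reject the "delete this tag" value. A small illustrative sketch (hypothetical function, not the SDK):

```python
def send_tag(key, value):
    # `not value` would reject "" as well, but a blank string is the
    # documented way to remove a tag, so only None is treated as missing.
    if not key or value is None:
        raise ValueError("send_tag: must include a key and a value")
    return {key: value}


assert send_tag("vip", "") == {"vip": ""}   # allowed: clears the tag
```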
|
diff --git a/test/test_shell.rb b/test/test_shell.rb
index <HASH>..<HASH> 100644
--- a/test/test_shell.rb
+++ b/test/test_shell.rb
@@ -47,9 +47,13 @@ class ShellTest < Test::Unit::TestCase
end
def test_parse_input
- cmdpath, args = RVC::Shell.parse_input "module.cmd --longarg -s vm1 vm2"
+ cmdpath, args = RVC::Shell.parse_input 'module.cmd --longarg -s vm1 vm2 "spacy vm"'
assert_equal [:module, :cmd], cmdpath
- assert_equal ['--longarg', '-s', 'vm1', 'vm2'], args
+ assert_equal ['--longarg', '-s', 'vm1', 'vm2', 'spacy vm'], args
+
+ cmdpath, args = RVC::Shell.parse_input 'module.cmd --longarg -s vm1 vm2 "spacy vm'
+ assert_equal [:module, :cmd], cmdpath
+ assert_equal ['--longarg', '-s', 'vm1', 'vm2', 'spacy vm'], args
end
def test_lookup_cmd
|
add tests for Shell.parse_input with quotes
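A rough Python equivalent of the behaviour the new test expects (a shlex-based sketch, not RVC's parser): quoted arguments stay together, and an unterminated quote is tolerated by closing it and retrying.

```python
import shlex


def parse_args(line):
    try:
        return shlex.split(line)
    except ValueError:
        # Unterminated quote: close it and retry instead of failing outright.
        return shlex.split(line + '"')


assert parse_args('--longarg -s vm1 vm2 "spacy vm"') == \
    ['--longarg', '-s', 'vm1', 'vm2', 'spacy vm']
assert parse_args('--longarg -s vm1 vm2 "spacy vm') == \
    ['--longarg', '-s', 'vm1', 'vm2', 'spacy vm']
```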
|
diff --git a/src/Probability/Distribution/StudentTDistribution.php b/src/Probability/Distribution/StudentTDistribution.php
index <HASH>..<HASH> 100644
--- a/src/Probability/Distribution/StudentTDistribution.php
+++ b/src/Probability/Distribution/StudentTDistribution.php
@@ -2,7 +2,7 @@
namespace Math\Probability\Distribution;
use Math\Functions\Special;
-class TDistribution extends ContinuousNew {
+class TDistribution extends Continuous {
public static function PDF($t, $ν){
if(!is_int($ν)) return false;
$π = \M_PI;
|
Update StudentTDistribution.php
|
diff --git a/pandas/tests/test_series.py b/pandas/tests/test_series.py
index <HASH>..<HASH> 100644
--- a/pandas/tests/test_series.py
+++ b/pandas/tests/test_series.py
@@ -2603,8 +2603,8 @@ class TestSeries(unittest.TestCase, CheckNameIntegration):
self.assertEquals(a.dot(b['2'].values), expected['2'])
#Check series argument
- self.assertEquals(a.dot(b['1']), expected['1'])
- self.assertEquals(a.dot(b2['1']), expected['1'])
+ assert_almost_equal(a.dot(b['1']), expected['1'])
+ assert_almost_equal(a.dot(b2['1']), expected['1'])
self.assertRaises(Exception, a.dot, a.values[:3])
self.assertRaises(ValueError, a.dot, b.T)
|
BUG: tests should use almost_equal when comparing floats for equality
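Why the change matters, in a few lines of Python: exact equality on floats is brittle, while a tolerance-based comparison is not.

```python
import math

total = 0.1 + 0.2
assert total != 0.3                # exact comparison fails (0.30000000000000004)
assert math.isclose(total, 0.3)    # tolerance-based comparison passes
```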
|
diff --git a/progressbar/__about__.py b/progressbar/__about__.py
index <HASH>..<HASH> 100644
--- a/progressbar/__about__.py
+++ b/progressbar/__about__.py
@@ -19,7 +19,7 @@ A Python Progressbar library to provide visual (yet text based) progress to
long running operations.
'''.strip()
__email__ = 'wolph@wol.ph'
-__version__ = '3.5.0'
+__version__ = '3.5.1'
__license__ = 'BSD'
__copyright__ = 'Copyright 2015 Rick van Hattem (Wolph)'
__url__ = 'https://github.com/WoLpH/python-progressbar'
diff --git a/progressbar/bar.py b/progressbar/bar.py
index <HASH>..<HASH> 100644
--- a/progressbar/bar.py
+++ b/progressbar/bar.py
@@ -57,7 +57,7 @@ class ResizableMixin(DefaultFdMixin):
signal.signal(signal.SIGWINCH, self._handle_resize)
self.signal_set = True
except: # pragma: no cover
- raise
+ pass
def _handle_resize(self, signum=None, frame=None):
'Tries to catch resize signals sent from the terminal.'
|
removed debug code, the exception shouldn't raise
|
diff --git a/index.js b/index.js
index <HASH>..<HASH> 100755
--- a/index.js
+++ b/index.js
@@ -12,6 +12,7 @@ var execSync = require('child_process').execSync;
var execFile = require('child_process').execFile;
var spawn = require('child_process').spawn;
var os = require('os');
+var node_path = require('path');
// ****************************************************************************
// NodeClam class definition
@@ -531,10 +532,18 @@ NodeClam.prototype.scan_dir = function(path, end_cb, file_cb) {
(function get_file_stats() {
if (files.length > 0) {
var file = files.pop();
+ file = node_path.join(path, file);
fs.stat(file, function(err, info) {
- if (info.isFile()) good_files.push(file);
+ if (!err) {
+ if (info.isFile()) {
+ good_files.push(file);
+ }
+ } else {
+ if (self.settings.debug_mode)
+ console.log("node-clam: Error scanning file in directory: ", err);
+ }
get_file_stats();
- });
+ });
} else {
self.scan_files(good_files, end_file, file_cb);
}
|
Fixed a bug caused when scanning directories with clamdscan with scan_recursively set to false.
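The underlying bug and its fix, sketched in Python (hypothetical helper, not node-clam): directory listings return bare names, so each entry must be joined with the directory before stat'ing, and stat errors should be reported rather than crashing the walk.

```python
import os
import stat


def collect_regular_files(directory, debug=False):
    good_files = []
    for name in os.listdir(directory):
        path = os.path.join(directory, name)  # the missing join was the bug
        try:
            info = os.stat(path)
        except OSError as err:
            if debug:
                print("error scanning file in directory:", err)
            continue
        if stat.S_ISREG(info.st_mode):
            good_files.append(path)
    return good_files
```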
|
diff --git a/src/ProxyManager/ProxyGenerator/AccessInterceptorScopeLocalizer/MethodGenerator/BindProxyProperties.php b/src/ProxyManager/ProxyGenerator/AccessInterceptorScopeLocalizer/MethodGenerator/BindProxyProperties.php
index <HASH>..<HASH> 100644
--- a/src/ProxyManager/ProxyGenerator/AccessInterceptorScopeLocalizer/MethodGenerator/BindProxyProperties.php
+++ b/src/ProxyManager/ProxyGenerator/AccessInterceptorScopeLocalizer/MethodGenerator/BindProxyProperties.php
@@ -64,6 +64,10 @@ class BindProxyProperties extends MethodGenerator
foreach ($originalClass->getProperties() as $originalProperty) {
$propertyName = $originalProperty->getName();
+ if ($originalProperty->isStatic()) {
+ continue;
+ }
+
if ($originalProperty->isPrivate()) {
$localizedProperties[] = "\\Closure::bind(function () use (\$localizedObject) {\n "
. '$this->' . $propertyName . ' = & $localizedObject->' . $propertyName . ";\n"
|
Skipping static properties when binding properties in a scope localizer
|
diff --git a/lib/active_scaffold.rb b/lib/active_scaffold.rb
index <HASH>..<HASH> 100644
--- a/lib/active_scaffold.rb
+++ b/lib/active_scaffold.rb
@@ -286,7 +286,7 @@ module ActiveScaffold
as_path = File.join(ActiveScaffold::Config::Core.plugin_directory, 'app', 'views')
index = view_paths.find_index { |p| p.to_s == as_path }
if index
- view_paths.insert index, path
+ self.view_paths = view_paths[0..index-1] + Array(path) + view_paths[index..-1]
else
append_view_path path
end
|
fix add_active_scaffold_path, it was changing view_paths for all controllers
|
diff --git a/modules/activiti-engine/src/main/java/org/activiti/engine/impl/cfg/ProcessEngineConfigurationImpl.java b/modules/activiti-engine/src/main/java/org/activiti/engine/impl/cfg/ProcessEngineConfigurationImpl.java
index <HASH>..<HASH> 100644
--- a/modules/activiti-engine/src/main/java/org/activiti/engine/impl/cfg/ProcessEngineConfigurationImpl.java
+++ b/modules/activiti-engine/src/main/java/org/activiti/engine/impl/cfg/ProcessEngineConfigurationImpl.java
@@ -604,14 +604,14 @@ public abstract class ProcessEngineConfigurationImpl extends ProcessEngineConfig
log.debug("using database type: {}", databaseType);
} catch (SQLException e) {
- e.printStackTrace();
+ log.error("Exception while initializing Database connection", e);
} finally {
try {
if (connection!=null) {
connection.close();
}
} catch (SQLException e) {
- e.printStackTrace();
+ log.error("Exception while closing the Database connection", e);
}
}
}
|
Changed to log SQLException via the configured logger. Printing the stack trace may not be optimal.
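The Python analogue of the same clean-up (the failing function below is a dummy, only there to exercise the path): route the stack trace through the configured logger instead of printing it straight to stderr.

```python
import logging

log = logging.getLogger(__name__)


def open_database_connection():
    # Hypothetical stand-in that always fails, purely to demonstrate logging.
    raise ConnectionError("database unreachable")


try:
    open_database_connection()
except Exception:
    # logger.exception records the message plus the stack trace via the
    # configured handlers, unlike an unconditional print of the traceback.
    log.exception("Exception while initializing Database connection")
```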
|
diff --git a/command/v2/bind_security_group_command.go b/command/v2/bind_security_group_command.go
index <HASH>..<HASH> 100644
--- a/command/v2/bind_security_group_command.go
+++ b/command/v2/bind_security_group_command.go
@@ -97,7 +97,7 @@ func (cmd BindSecurityGroupCommand) Execute(args []string) error {
}
for _, space := range spacesToBind {
- cmd.UI.DisplayText("Assigning security group {{.security_group}} to space {{.space}} in org {{.organization}} as {{.username}}...", map[string]interface{}{
+ cmd.UI.DisplayTextWithFlavor("Assigning security group {{.security_group}} to space {{.space}} in org {{.organization}} as {{.username}}...", map[string]interface{}{
"security_group": securityGroup.Name,
"space": space.Name,
"organization": org.Name,
|
display assigning text with flavor
[finishes #<I>]
|
diff --git a/src/java/org/apache/cassandra/io/util/MmappedSegmentedFile.java b/src/java/org/apache/cassandra/io/util/MmappedSegmentedFile.java
index <HASH>..<HASH> 100644
--- a/src/java/org/apache/cassandra/io/util/MmappedSegmentedFile.java
+++ b/src/java/org/apache/cassandra/io/util/MmappedSegmentedFile.java
@@ -54,10 +54,10 @@ public class MmappedSegmentedFile extends SegmentedFile
*/
private Segment floor(long position)
{
- assert 0 <= position && position < length: position + " vs " + length;
+ assert 0 <= position && position < length: String.format("%d >= %d in %s", position, length, path);
Segment seg = new Segment(position, null);
int idx = Arrays.binarySearch(segments, seg);
- assert idx != -1 : "Bad position " + position + " in segments " + Arrays.toString(segments);
+ assert idx != -1 : String.format("Bad position %d for segments %s in %s", position, Arrays.toString(segments), path);
if (idx < 0)
// round down to entry at insertion point
idx = -(idx + 2);
|
add path to MmappedSegmentedFile assertions
|
diff --git a/src/funnies.js b/src/funnies.js
index <HASH>..<HASH> 100644
--- a/src/funnies.js
+++ b/src/funnies.js
@@ -151,4 +151,5 @@ export default [
"Please hold on as we reheat our coffee",
"Kindly hold on as we convert this bug to a feature...",
"Kindly hold on as our intern quits vim...",
+ "Winter is coming...",
];
|
Winter is coming... added to funny phrases (#<I>)
|
diff --git a/integrated-build.js b/integrated-build.js
index <HASH>..<HASH> 100644
--- a/integrated-build.js
+++ b/integrated-build.js
@@ -3,6 +3,7 @@
const addCacheBusting = require('./add-cache-busting.js');
const addCspCompliance = require('./add-csp-compliance.js');
const injectCustomElementsEs5Adapter = require('./inject-custom-elements-es5-adapter.js');
+const lazypipe = require('lazypipe');
const optimizeAssets = require('./optimize-assets.js');
const polymerBuild = require('./polymer-build.js');
@@ -10,11 +11,16 @@ const polymerBuild = require('./polymer-build.js');
* Best practice build pipeline. This only only chains up the various build steps.
*
* @param {Object} config Content of polymer.json
+ * @return
*/
-exports.build = function build(config) {
- return polymerBuild(config)
- .pipe(addCspCompliance())
- .pipe(addCacheBusting())
- .pipe(optimizeAssets())
- .pipe(injectCustomElementsEs5Adapter());
+function build(config) {
+ return lazypipe()
+ .pipe(() => polymerBuild(config))
+ .pipe(() => addCspCompliance())
+ .pipe(() => addCacheBusting())
+ .pipe(() => optimizeAssets())
+ .pipe(() => injectCustomElementsEs5Adapter());
}
+
+module.exports = build;
+
|
Plugin needs to expose a lazypipe for later initialization
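The idea behind exposing a lazy pipeline, in a small Python sketch (hypothetical helper, unrelated to the actual lazypipe API): store step factories and only build the concrete steps when the pipeline is run, so it can be composed now and instantiated later, once per invocation.

```python
def lazy_pipeline(*step_factories):
    def run(value):
        for make_step in step_factories:
            value = make_step()(value)   # each step is created fresh per run
        return value
    return run


pipeline = lazy_pipeline(lambda: str.strip, lambda: str.upper)
assert pipeline("  hello ") == "HELLO"
```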
|
diff --git a/spec/a9n_spec.rb b/spec/a9n_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/a9n_spec.rb
+++ b/spec/a9n_spec.rb
@@ -97,7 +97,7 @@ describe A9n do
its(:app_url) { should_not be_nil }
its(:app_url) { should == subject.fetch(:app_url) }
- its(:page_title) { should == 'Base Kiełbasa' }
+ its(:page_title) { should == 'Base Kielbasa' }
its(:api_key) { should == 'base1234' }
specify {
expect { subject.app_host }.to raise_error(described_class::NoSuchConfigurationVariable)
@@ -115,7 +115,7 @@ describe A9n do
end
its(:app_host) { should_not be_nil }
- its(:page_title) { should == 'Local Kiełbasa' }
+ its(:page_title) { should == 'Local Kielbasa' }
its(:api_key) { should == 'local1234' }
specify {
expect { subject.app_url }.to raise_error(described_class::NoSuchConfigurationVariable)
|
Remove non-ASCII chars
|
diff --git a/lib/patron/request.rb b/lib/patron/request.rb
index <HASH>..<HASH> 100644
--- a/lib/patron/request.rb
+++ b/lib/patron/request.rb
@@ -88,7 +88,7 @@ module Patron
end
def timeout=(new_timeout)
- if new_timeout.to_i < 1
+ if new_timeout && new_timeout.to_i < 1
raise ArgumentError, "Timeout must be a positive integer greater than 0"
end
@@ -96,7 +96,7 @@ module Patron
end
def connect_timeout=(new_timeout)
- if new_timeout.to_i < 1
+ if new_timeout && new_timeout.to_i < 1
raise ArgumentError, "Timeout must be a positive integer greater than 0"
end
|
Allow nil timeout to turn off the timeout feature
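The same guard expressed in Python terms (a sketch, not the patron gem): None switches the timeout off entirely, while anything else still has to be a positive integer.

```python
class Request:
    def __init__(self):
        self._timeout = None

    @property
    def timeout(self):
        return self._timeout

    @timeout.setter
    def timeout(self, value):
        # None means "no timeout"; only validate when a value is supplied.
        if value is not None and int(value) < 1:
            raise ValueError("Timeout must be a positive integer greater than 0")
        self._timeout = value


req = Request()
req.timeout = None   # allowed: disables the timeout
req.timeout = 30     # allowed: positive integer
```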
|
diff --git a/tests/base.py b/tests/base.py
index <HASH>..<HASH> 100644
--- a/tests/base.py
+++ b/tests/base.py
@@ -15,13 +15,14 @@ class PrioCORO:
prio = priority.CORO
priorities = (PrioCORO, PrioOP, PrioFIRST, PrioLAST)
-from cogen.core.proactors import has_iocp, \
+from cogen.core.proactors import has_iocp, has_ctypes_iocp, \
has_kqueue, has_stdlib_kqueue, \
has_epoll, has_stdlib_epoll, \
has_poll, has_select
proactors_available = [
j for j in [
i() for i in (
+ has_ctypes_iocp,
has_iocp,
has_stdlib_kqueue,
has_kqueue,
|
added ctypes iocp to tests
|
diff --git a/pkg/volume/vsphere_volume/vsphere_volume_util.go b/pkg/volume/vsphere_volume/vsphere_volume_util.go
index <HASH>..<HASH> 100644
--- a/pkg/volume/vsphere_volume/vsphere_volume_util.go
+++ b/pkg/volume/vsphere_volume/vsphere_volume_util.go
@@ -94,11 +94,12 @@ func (util *VsphereDiskUtil) CreateVolume(v *vsphereVolumeProvisioner, selectedZ
}
capacity := v.options.PVC.Spec.Resources.Requests[v1.ResourceName(v1.ResourceStorage)]
- // vSphere works with kilobytes, convert to KiB with rounding up
- volSizeKiB, err := volumehelpers.RoundUpToKiBInt(capacity)
+ // vSphere works with KiB, but its minimum allocation unit is 1 MiB
+ volSizeMiB, err := volumehelpers.RoundUpToMiBInt(capacity)
if err != nil {
return nil, err
}
+ volSizeKiB := volSizeMiB * 1024
name := volumeutil.GenerateVolumeName(v.options.ClusterName, v.options.PVName, 255)
volumeOptions := &vclib.VolumeOptions{
CapacityKB: volSizeKiB,
|
Fix rounding-up of vSphere volume size
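The arithmetic being fixed, as a hedged Python sketch (hypothetical helper): round the request up to whole MiB first, then express the result in KiB so it is always a multiple of 1024.

```python
def requested_size_kib(size_bytes):
    mib = 1024 * 1024
    vol_size_mib = -(-size_bytes // mib)   # ceiling division to whole MiB
    return vol_size_mib * 1024             # KiB, guaranteed multiple of 1024


assert requested_size_kib(1) == 1024               # 1 B     -> 1 MiB -> 1024 KiB
assert requested_size_kib(1536 * 1024) == 2048     # 1.5 MiB -> 2 MiB -> 2048 KiB
```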
|
diff --git a/plexapi/myplex.py b/plexapi/myplex.py
index <HASH>..<HASH> 100644
--- a/plexapi/myplex.py
+++ b/plexapi/myplex.py
@@ -634,15 +634,10 @@ class MyPlexAccount(PlexObject):
"""
return self.batchingItems(self.NEWS, maxresults)
- return items
-
- def podcasts(self):
+ def podcasts(self, maxresults=50):
""" Returns a list of Podcasts Hub items :class:`~plexapi.library.Hub`
"""
- items = []
- data = self.query(url=self.PODCASTS)
- for elem in data:
- items.append(Hub(server=self._server, data=elem))
+ return self.batchingItems(self.PODCASTS, maxresults)
return items
|
update podcasts to use batchingItems
|
diff --git a/itests/standalone/src/test/java/org/wildfly/camel/test/csv/CSVIntegrationTest.java b/itests/standalone/src/test/java/org/wildfly/camel/test/csv/CSVIntegrationTest.java
index <HASH>..<HASH> 100644
--- a/itests/standalone/src/test/java/org/wildfly/camel/test/csv/CSVIntegrationTest.java
+++ b/itests/standalone/src/test/java/org/wildfly/camel/test/csv/CSVIntegrationTest.java
@@ -35,6 +35,7 @@ import org.jboss.arquillian.junit.Arquillian;
import org.jboss.shrinkwrap.api.ShrinkWrap;
import org.jboss.shrinkwrap.api.spec.JavaArchive;
import org.junit.Assert;
+import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.wildfly.camel.test.core.subA.Customer;
@@ -53,6 +54,7 @@ public class CSVIntegrationTest {
}
@Test
+ @Ignore("[FIXME #464] Object may get marshalled to wrong CSV order")
public void testMarshal() throws Exception {
CamelContext camelctx = new DefaultCamelContext();
|
[FIXME #<I>] Object may get marshalled to wrong CSV order
|