| diff (stringlengths 65-26.7k) | message (stringlengths 7-9.92k) |
|---|---|
diff --git a/lib/OpenLayers/Map.js b/lib/OpenLayers/Map.js
index <HASH>..<HASH> 100644
--- a/lib/OpenLayers/Map.js
+++ b/lib/OpenLayers/Map.js
@@ -172,9 +172,8 @@ OpenLayers.Map.prototype = {
// Workaround for the fact that hidden elements return 0 for size.
if (size.w == 0 && size.h == 0) {
- var elementSize = Element.getDimensions(this.div);
- size.w = elementSize.width;
- size.h = elementSize.height;
+ size.w = parseInt(this.div.style.width);
+ size.h = parseInt(this.div.style.height);
}
return size;
},
|
Element.getDimensions doesn't check styles of parent elements, so when an element is hidden because of a parent, this breaks. Fall back to style.width/height in cases where we have a 0,0 size.
git-svn-id: <URL>
|
diff --git a/lib/copy-file.js b/lib/copy-file.js
index <HASH>..<HASH> 100644
--- a/lib/copy-file.js
+++ b/lib/copy-file.js
@@ -1,7 +1,7 @@
'use strict';
const fs = require('fs');
-const {lstat, unlink} = fs.promises;
+const {lstat, unlink} = require('fs/promises');
const pipe = require('pipe-io');
const tryToCatch = require('try-to-catch');
const copySymlink = require('copy-symlink');
|
chore(copy-file) fs.promises -> fs/promises
|
diff --git a/lib/fog/atmos/storage.rb b/lib/fog/atmos/storage.rb
index <HASH>..<HASH> 100644
--- a/lib/fog/atmos/storage.rb
+++ b/lib/fog/atmos/storage.rb
@@ -155,8 +155,10 @@ module Fog
signature = Base64.encode64( digest ).chomp()
params[:headers]["x-emc-signature"] = signature
- parse = params[:parse]
-
+ params.delete(:host) #invalid excon request parameter
+
+ parse = params.delete(:parse)
+
begin
response = @connection.request(params, &block)
rescue Excon::Errors::HTTPStatusError => error
|
Delete invalid connection keys before request is made
|
diff --git a/OpenPNM/Algorithms/__LinearSolver__.py b/OpenPNM/Algorithms/__LinearSolver__.py
index <HASH>..<HASH> 100644
--- a/OpenPNM/Algorithms/__LinearSolver__.py
+++ b/OpenPNM/Algorithms/__LinearSolver__.py
@@ -324,7 +324,8 @@ class LinearSolver(GenericAlgorithm):
self._logger.warning('The outlet pores have too many neighbors. Internal pores appear to be selected.')
#Fetch area and length of domain
- A = self._net.domain_area(face=inlets)
+ #A = self._net.domain_area(face=inlets)
+ A = (40.5e-6*10)**2
L = self._net.domain_length(face_1=inlets,face_2=outlets)
x = self._result
#Find flow through inlet face
|
temporary patch job (domain_size doesn't work)
Former-commit-id: <I>bf<I>a<I>a<I>f<I>b<I>d<I>d<I>baf3
Former-commit-id: <I>c8e<I>f<I>e<I>c<I>c2ea7dc<I>c3bba<I>bc8
|
diff --git a/src/you_get/extractors/imgur.py b/src/you_get/extractors/imgur.py
index <HASH>..<HASH> 100644
--- a/src/you_get/extractors/imgur.py
+++ b/src/you_get/extractors/imgur.py
@@ -65,7 +65,7 @@ class Imgur(VideoExtractor):
'container': 'jpg'
}
}
- self.title = image['title']
+ self.title = image['title'] or image['hash']
def extract(self, **kwargs):
if 'stream_id' in kwargs and kwargs['stream_id']:
|
[imgur] use hash when title not present
|
diff --git a/lib/Cake/Test/Case/Event/CakeEventManagerTest.php b/lib/Cake/Test/Case/Event/CakeEventManagerTest.php
index <HASH>..<HASH> 100644
--- a/lib/Cake/Test/Case/Event/CakeEventManagerTest.php
+++ b/lib/Cake/Test/Case/Event/CakeEventManagerTest.php
@@ -235,7 +235,7 @@ class CakeEventManagerTest extends CakeTestCase {
*/
public function testDispatchReturnValue() {
$this->skipIf(
- version_compare(PHPUnit_Runner_Version::VERSION, '3.7', '<'),
+ version_compare(PHPUnit_Runner_Version::id(), '3.7', '<'),
'These tests fail in PHPUnit 3.6'
);
$manager = new CakeEventManager;
|
Fix missed use of VERSION.
|
diff --git a/openquake/calculators/base.py b/openquake/calculators/base.py
index <HASH>..<HASH> 100644
--- a/openquake/calculators/base.py
+++ b/openquake/calculators/base.py
@@ -194,7 +194,7 @@ class BaseCalculator(with_metaclass(abc.ABCMeta)):
elif exports: # is a string
fmts = exports.split(',')
else: # use passed values
- fmts = self.oqparam.exports
+ fmts = self.oqparam.exports.split(',')
for fmt in fmts:
if not fmt:
continue
|
Fixed a small bug with exports
Former-commit-id: <I>adeff7e<I>f3c<I>f1a<I>f4fe3a<I>d4edd<I>
|
diff --git a/loader.js b/loader.js
index <HASH>..<HASH> 100644
--- a/loader.js
+++ b/loader.js
@@ -17,7 +17,12 @@ module.exports.pitch = function(request, preReq, data) {
var query = loaderUtils.parseQuery(this.query);
this.addDependency(this.resourcePath);
// We already in child compiler, return empty bundle
- if(this[__dirname] === false) {
+ if(this[__dirname] === undefined) {
+ throw new Error(
+ '"extract-text-webpack-plugin" loader is used without the corresponding plugin, ' +
+ 'refer to https://github.com/webpack/extract-text-webpack-plugin for the usage example'
+ );
+ } else if(this[__dirname] === false) {
return "";
} else if(this[__dirname](null, query)) {
if(query.omit) {
|
Check if loader is used without plugin
|
diff --git a/lib/userlist/push/strategies.rb b/lib/userlist/push/strategies.rb
index <HASH>..<HASH> 100644
--- a/lib/userlist/push/strategies.rb
+++ b/lib/userlist/push/strategies.rb
@@ -11,9 +11,20 @@ module Userlist
def self.lookup_strategy(strategy)
return strategy unless strategy.is_a?(Symbol) || strategy.is_a?(String)
- name = strategy.to_s.capitalize
- require("userlist/push/strategies/#{strategy}") unless const_defined?(name, false)
- const_get(name, false)
+ require_strategy(strategy)
+ const_get(strategy.to_s.capitalize, false)
+ end
+
+ def self.strategy_defined?(strategy)
+ return true unless strategy.is_a?(Symbol) || strategy.is_a?(String)
+
+ const_defined?(strategy.to_s.capitalize, false)
+ end
+
+ def self.require_strategy(strategy)
+ return unless strategy.is_a?(Symbol) || strategy.is_a?(String)
+
+ require("userlist/push/strategies/#{strategy}") unless strategy_defined?(strategy)
end
end
end
|
Extracts methods to check and require strategies
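A rough Python analogue of this lazy lookup, for readers less used to Ruby's `require`/`const_get` (the `userlist_push.strategies` package path and the capitalized-class convention are assumptions for illustration, not the gem's actual layout):

```python
import importlib

def lookup_strategy(strategy):
    """Resolve a strategy given by name, importing its module only on demand."""
    if not isinstance(strategy, str):
        return strategy  # already a strategy object/class, pass it through
    # e.g. "threaded" -> module userlist_push.strategies.threaded, class Threaded
    module = importlib.import_module(f"userlist_push.strategies.{strategy}")
    return getattr(module, strategy.capitalize())
```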
|
diff --git a/spyder/plugins/profiler/plugin.py b/spyder/plugins/profiler/plugin.py
index <HASH>..<HASH> 100644
--- a/spyder/plugins/profiler/plugin.py
+++ b/spyder/plugins/profiler/plugin.py
@@ -42,7 +42,7 @@ class Profiler(SpyderDockablePlugin):
NAME = 'profiler'
REQUIRES = [Plugins.Preferences, Plugins.Editor]
OPTIONAL = [Plugins.MainMenu]
- TABIFY = Plugins.Help
+ TABIFY = [Plugins.Help]
WIDGET_CLASS = ProfilerWidget
CONF_SECTION = NAME
CONF_WIDGET_CLASS = ProfilerConfigPage
diff --git a/spyder/plugins/pylint/plugin.py b/spyder/plugins/pylint/plugin.py
index <HASH>..<HASH> 100644
--- a/spyder/plugins/pylint/plugin.py
+++ b/spyder/plugins/pylint/plugin.py
@@ -39,6 +39,7 @@ class Pylint(SpyderDockablePlugin):
CONF_WIDGET_CLASS = PylintConfigPage
REQUIRES = [Plugins.Preferences, Plugins.Editor]
OPTIONAL = [Plugins.MainMenu, Plugins.Projects]
+ TABIFY = [Plugins.Help]
CONF_FILE = False
DISABLE_ACTIONS_WHEN_HIDDEN = False
|
Pylint/Profiler: Add/fix their TABIFY class attribute respectively
|
diff --git a/test/integration/tst_regression.py b/test/integration/tst_regression.py
index <HASH>..<HASH> 100644
--- a/test/integration/tst_regression.py
+++ b/test/integration/tst_regression.py
@@ -2,8 +2,7 @@ import shutil
import os
import autofit as af
-from autolens.data.instrument import ccd
-from autolens.data import ccd as ccd
+from autolens.data.instrument import ccd as ccd
from autolens.data.array import grids
from autolens.data.array.util import array_util
from autolens.lens import ray_tracing
diff --git a/test/simulation/makers.py b/test/simulation/makers.py
index <HASH>..<HASH> 100644
--- a/test/simulation/makers.py
+++ b/test/simulation/makers.py
@@ -1,6 +1,5 @@
import autofit as af
from autolens.data.instrument import ccd
-from autolens.data import ccd
from autolens.data.array import grids
from autolens.lens import ray_tracing
from autolens.model.galaxy import galaxy as g
|
Refactored data to use instrument module with abstract_data,
ready to add interferometry data class.
|
diff --git a/routes/web.php b/routes/web.php
index <HASH>..<HASH> 100644
--- a/routes/web.php
+++ b/routes/web.php
@@ -62,7 +62,7 @@ Route::middleware(['web', 'admin.auth:admin', 'permission'])
Route::post('attribute/upload', [\AvoRed\Framework\Catalog\Controllers\AttributeController::class, 'upload'])
->name('attribute.upload');
- Route::post('admin-user-image', [\AvoRed\Framework\System\Controllers\AdminUserController::class, 'upload'])
+ Route::post('admin-user-image', [\AvoRed\Framework\User\Controllers\AdminUserController::class, 'upload'])
->name('admin-user-image-upload');
Route::post(
|
fixed Issue #<I> AdminUserController doesn't exist
|
diff --git a/java-wrapper/src/test/java/com/codedisaster/steamworks/test/SteamTestApp.java b/java-wrapper/src/test/java/com/codedisaster/steamworks/test/SteamTestApp.java
index <HASH>..<HASH> 100644
--- a/java-wrapper/src/test/java/com/codedisaster/steamworks/test/SteamTestApp.java
+++ b/java-wrapper/src/test/java/com/codedisaster/steamworks/test/SteamTestApp.java
@@ -55,7 +55,7 @@ public abstract class SteamTestApp {
System.out.println("Initialise Steam client API ...");
- if (!SteamAPI.init()) {
+ if (!SteamAPI.init("../natives/libs")) {
return false;
}
|
Test applications load libraries from ./natives/libs instead of Maven package.
|
diff --git a/lib/escobar/heroku/pipeline.rb b/lib/escobar/heroku/pipeline.rb
index <HASH>..<HASH> 100644
--- a/lib/escobar/heroku/pipeline.rb
+++ b/lib/escobar/heroku/pipeline.rb
@@ -65,10 +65,14 @@ module Escobar
end
def default_branch
- github_client.default_branch
+ @default_branch ||= github_client.default_branch
end
def required_commit_contexts(forced = false)
+ @required_commit_contexts ||= fetch_required_commit_contexts(forced)
+ end
+
+ def fetch_required_commit_contexts(forced = false)
return [] if forced
github_client.required_contexts.map do |context|
if context == "continuous-integration/travis-ci"
|
Cache calls to GitHub in Pipeline
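Both changes are plain memoization: compute the value once per Pipeline instance and reuse it on later calls. A minimal Python sketch of the same idea (the `github_client` shape is illustrative, not Escobar's API):

```python
class Pipeline:
    def __init__(self, github_client):
        self.github_client = github_client
        self._default_branch = None

    @property
    def default_branch(self):
        # First access hits the GitHub API; later accesses reuse the cached value.
        if self._default_branch is None:
            self._default_branch = self.github_client.default_branch()
        return self._default_branch
```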
|
diff --git a/spec/controllers/spree/adyen_redirect_controller_spec.rb b/spec/controllers/spree/adyen_redirect_controller_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/controllers/spree/adyen_redirect_controller_spec.rb
+++ b/spec/controllers/spree/adyen_redirect_controller_spec.rb
@@ -32,16 +32,18 @@ module Spree
and_return payment_method
end
- it "creates a payment for the order" do
+ it "creates a payment for the current order" do
expect{ subject }.to change { order.payments.count }.from(0).to(1)
end
- it "sets the payment attributes with the response" do
- subject
- expect(order.payments.last).to have_attributes(
- amount: order.total,
- payment_method: payment_method,
- response_code: psp_reference)
+ describe "created payment" do
+ it "has attributes from the request" do
+ subject
+ expect(order.payments.last).to have_attributes(
+ amount: order.total,
+ payment_method: payment_method,
+ response_code: psp_reference)
+ end
end
it "redirects to order complete page" do
|
Restructure portion of test
Just put extra describe blocks to specify what the test is talking
about, i.e. the created payment.
|
diff --git a/demo/start.rb b/demo/start.rb
index <HASH>..<HASH> 100644
--- a/demo/start.rb
+++ b/demo/start.rb
@@ -46,18 +46,15 @@ class App
dt = Rufus::Decision::Table.new(json.last)
- #out = dt.transform!(in_to_h(json.first))
- keys = json.first.first
- rows = json.first[1..-1]
- results = rows.collect do |row|
- h = keys.inject({}) { |h, k| h[k] = row.shift; h }
- h = dt.transform(h)
- keys = (keys + h.keys).sort.uniq
- keys.inject([]) { |a, k| a << h[k] }
- end
- results.unshift(keys)
-
- [ 200, {}, results.to_json ]
+ input = Rufus::Decision.transpose(json.first)
+ # from array of arrays to array of hashes
+
+ output = input.inject([]) { |a, hash| a << dt.transform(hash); a }
+
+ output = Rufus::Decision.transpose(output)
+ # from array of hashes to array of arrays
+
+ [ 200, {}, output.to_json ]
end
end
|
leveraging Rufus::Decision.transpose()
|
diff --git a/pkg/cloudprovider/gce/token_source.go b/pkg/cloudprovider/gce/token_source.go
index <HASH>..<HASH> 100644
--- a/pkg/cloudprovider/gce/token_source.go
+++ b/pkg/cloudprovider/gce/token_source.go
@@ -31,7 +31,7 @@ import (
const (
// Max QPS to allow through to the token URL.
- tokenURLQPS = 1
+ tokenURLQPS = .05 // back off to once every 20 seconds when failing
// Maximum burst of requests to token URL before limiting.
tokenURLBurst = 3
)
|
Increase the rate limiting of GCE's token source. The burst being at 3
means transient errors won't incur such long waits, but repeated
failures shouldn't be retrying every second.
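To make the numbers concrete, a quick back-of-the-envelope in Python (not the Kubernetes limiter itself): a refill rate of 0.05 QPS with a burst of 3 allows a few immediate retries, then settles to one request every 20 seconds.

```python
token_url_qps = 0.05  # tokens refilled per second
token_url_burst = 3   # up to 3 quick requests before the bucket is empty

# Once the burst is spent, a new token only arrives every 1 / 0.05 seconds.
seconds_between_requests = 1 / token_url_qps
print(seconds_between_requests)  # 20.0
```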
|
diff --git a/spec/models/profile.rb b/spec/models/profile.rb
index <HASH>..<HASH> 100644
--- a/spec/models/profile.rb
+++ b/spec/models/profile.rb
@@ -1,5 +1,5 @@
class Profile
include Mongoid::Document
- field :name, type: String
+ field :name, :type => String
shard_key :name
end
|
Let tests pass for <I> and others.
|
diff --git a/lib/Condorcet/Condorcet.php b/lib/Condorcet/Condorcet.php
index <HASH>..<HASH> 100644
--- a/lib/Condorcet/Condorcet.php
+++ b/lib/Condorcet/Condorcet.php
@@ -1650,9 +1650,12 @@ class Vote
use CandidateVote_CondorcetLink ;
private $_ranking = array();
+ private $_rankingHistory = array();
+
private $_tags = array();
private $_id;
private $_createdAt;
+ private $updatedAt;
///
@@ -1682,8 +1685,16 @@ class Vote
// SETTERS
- public function setRanking ($ranking)
+ public function setRanking ($rankingCandidate)
{
+ if (empty($this->_link))
+ {
+ $this->archiveRanking();
+ }
+ else
+ {
+
+ }
}
@@ -1696,6 +1707,14 @@ class Vote
///
// INTERNAL
+
+ private function archiveRanking ()
+ {
+ if (!empty($this->_ranking))
+ {
+ $this->_rankingHistory[] = array('ranking' => $this->_ranking, 'timestamp' => microtime(true));
+ }
+ }
}
@@ -1737,5 +1756,4 @@ trait CandidateVote_CondorcetLink
else
{ return false ; }
}
-
}
\ No newline at end of file
|
Class Vote => setRanking() && archiveRanking()
|
diff --git a/src/js/components/Clock/Clock.js b/src/js/components/Clock/Clock.js
index <HASH>..<HASH> 100644
--- a/src/js/components/Clock/Clock.js
+++ b/src/js/components/Clock/Clock.js
@@ -157,14 +157,11 @@ class Clock extends Component {
this.setState({ elements: nextElements }, () => {
if (onChange) {
+ const { elements: e2 } = this.state;
if (elements.duration) {
- onChange(
- `P${elements.hours}H${elements.minutes}M${elements.seconds}S`,
- );
+ onChange(`P${e2.hours}H${e2.minutes}M${e2.seconds}S`);
} else {
- onChange(
- `T${elements.hours}:${elements.minutes}:${elements.seconds}`,
- );
+ onChange(`T${e2.hours}:${e2.minutes}:${e2.seconds}`);
}
}
});
|
Fix Clock.onChange bug (#<I>)
|
diff --git a/concrete/src/Entity/File/Version.php b/concrete/src/Entity/File/Version.php
index <HASH>..<HASH> 100644
--- a/concrete/src/Entity/File/Version.php
+++ b/concrete/src/Entity/File/Version.php
@@ -1649,6 +1649,12 @@ class Version implements ObjectInterface
if ($type->shouldExistFor($imageWidth, $imageHeight, $file)) {
$path_resolver = $app->make(Resolver::class);
$path = $path_resolver->getPath($this, $type);
+ if ($path) {
+ $url = $app->make('site')->getSite()->getSiteCanonicalURL();
+ if ($url) {
+ $path = $url . $path;
+ }
+ }
}
}
} else {
|
adding canonical url to thumbnail paths since the method implies we're getting a url back
|
diff --git a/generators/docker-base.js b/generators/docker-base.js
index <HASH>..<HASH> 100644
--- a/generators/docker-base.js
+++ b/generators/docker-base.js
@@ -68,10 +68,10 @@ function checkImages() {
this.appsFolders.forEach((appsFolder, index) => {
const appConfig = this.appConfigs[index];
if (appConfig.buildTool === 'maven') {
- imagePath = this.destinationPath(`${this.directoryPath + appsFolder}/target/docker/${_.kebabCase(appConfig.baseName)}-*.war`);
+ imagePath = this.destinationPath(`${this.directoryPath + appsFolder}/target/docker`);
runCommand = './mvnw package -Pprod dockerfile:build';
} else {
- imagePath = this.destinationPath(`${this.directoryPath + appsFolder}/build/docker/${_.kebabCase(appConfig.baseName)}-*.war`);
+ imagePath = this.destinationPath(`${this.directoryPath + appsFolder}/build/docker`);
runCommand = './gradlew -Pprod bootRepackage buildDocker';
}
if (shelljs.ls(imagePath).length === 0) {
|
Modify the check to use the new Docker plugin
Not a perfect check, but similar to the previous one - this is just to help people when we can
|
diff --git a/hazelcast/src/main/java/com/hazelcast/util/executor/DelegatingFuture.java b/hazelcast/src/main/java/com/hazelcast/util/executor/DelegatingFuture.java
index <HASH>..<HASH> 100644
--- a/hazelcast/src/main/java/com/hazelcast/util/executor/DelegatingFuture.java
+++ b/hazelcast/src/main/java/com/hazelcast/util/executor/DelegatingFuture.java
@@ -109,7 +109,7 @@ public class DelegatingFuture<V> implements Future<V> {
}
public final boolean isDone() {
- return done;
+ return done ? done : future.isDone();
}
protected void setError(Throwable error) {
|
DelegatingFuture isDone
|
diff --git a/lib/GetStream/Stream/Client.php b/lib/GetStream/Stream/Client.php
index <HASH>..<HASH> 100644
--- a/lib/GetStream/Stream/Client.php
+++ b/lib/GetStream/Stream/Client.php
@@ -141,17 +141,20 @@ class Client
*/
public function getBaseUrl()
{
- $api_endpoint = static::API_ENDPOINT;
- $localPort = getenv('STREAM_LOCAL_API_PORT');
- if ($localPort) {
- $baseUrl = "http://localhost:$localPort/api";
- } else {
- if ($this->location) {
- $subdomain = "{$this->location}-api";
+ $baseUrl = getenv('STREAM_BASE_URL');
+ if (!$baseUrl) {
+ $api_endpoint = static::API_ENDPOINT;
+ $localPort = getenv('STREAM_LOCAL_API_PORT');
+ if ($localPort) {
+ $baseUrl = "http://localhost:$localPort/api";
} else {
- $subdomain = 'api';
+ if ($this->location) {
+ $subdomain = "{$this->location}-api";
+ } else {
+ $subdomain = 'api';
+ }
+ $baseUrl = "{$this->protocol}://{$subdomain}." . $api_endpoint;
}
- $baseUrl = "{$this->protocol}://{$subdomain}." . $api_endpoint;
}
return $baseUrl;
}
|
Added support for STREAM_BASE_URL environment variable
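The precedence is: an explicit STREAM_BASE_URL wins, then the local-port shortcut, then the regional (or default) API subdomain. A hedged Python sketch of that fallback chain (the default endpoint below is a placeholder, not the library's real constant):

```python
import os

def get_base_url(location=None, protocol="https", api_endpoint="example-api-host.com"):
    # 1. Full override via environment variable.
    base_url = os.environ.get("STREAM_BASE_URL")
    if base_url:
        return base_url
    # 2. Local development shortcut.
    local_port = os.environ.get("STREAM_LOCAL_API_PORT")
    if local_port:
        return f"http://localhost:{local_port}/api"
    # 3. Regional subdomain, falling back to the plain "api" subdomain.
    subdomain = f"{location}-api" if location else "api"
    return f"{protocol}://{subdomain}.{api_endpoint}"

print(get_base_url(location="us-east"))  # https://us-east-api.example-api-host.com
```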
|
diff --git a/app/controllers/storytime/subscriptions_controller.rb b/app/controllers/storytime/subscriptions_controller.rb
index <HASH>..<HASH> 100644
--- a/app/controllers/storytime/subscriptions_controller.rb
+++ b/app/controllers/storytime/subscriptions_controller.rb
@@ -6,6 +6,7 @@ module Storytime
def create
@subscription = Storytime::Subscription.find_by(permitted_attributes) || Storytime::Subscription.new(permitted_attributes)
+ @subscription.site = Storytime::Site.first if @subscription.site.nil? # if we ever go multi-site, this would likely become current_site
@subscription.subscribed = true if @subscription.subscribed == false
if @subscription.save
@@ -30,7 +31,7 @@ module Storytime
private
def permitted_attributes
- params.require(:subscription).permit(:email, :t, :site_id)
+ params.require(:subscription).permit(:email, :t)
end
def set_subscription
|
Remove site_id from permitted_attributes
|
diff --git a/elasticstack/utils.py b/elasticstack/utils.py
index <HASH>..<HASH> 100644
--- a/elasticstack/utils.py
+++ b/elasticstack/utils.py
@@ -23,7 +23,6 @@
from haystack import connections
from importlib import import_module
-from django.apps import apps
def prepare_object(obj, using='default'):
@@ -45,13 +44,23 @@ def prepare_object(obj, using='default'):
def get_model(app_label, model_name):
"""
Fetches a Django model using the app registry.
- This doesn't require that an app with the given app label exists,
- which makes it safe to call when the registry is being populated.
- All other methods to access models might raise an exception about the
- registry not being ready yet.
+
+ This doesn't require that an app with the given app label exists, which
+ makes it safe to call when the registry is being populated. All other
+ methods to access models might raise an exception about the registry not
+ being ready yet.
+
Raises LookupError if model isn't found.
"""
try:
+ from django.apps import apps
+ from django.core.exceptions import AppRegistryNotReady
+ except ImportError:
+ # Older Django version!
+ from django.db import models
+ return models.get_model(app_label, model_name)
+
+ try:
return apps.get_model(app_label, model_name)
except AppRegistryNotReady:
if apps.apps_ready and not apps.models_ready:
|
Import old get_model for older Django
Includes compulsive docstring formatting
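The shim boils down to: try the Django >= 1.7 app registry, fall back to the old module-level helper on ImportError. A condensed sketch of that compatibility pattern (it needs Django installed to actually run):

```python
def get_model(app_label, model_name):
    """Fetch a model via the app registry, or the pre-1.7 helper on old Django."""
    try:
        from django.apps import apps
    except ImportError:
        # Older Django: django.apps does not exist yet.
        from django.db import models
        return models.get_model(app_label, model_name)
    return apps.get_model(app_label, model_name)
```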
|
diff --git a/model/QueueDispatcher.php b/model/QueueDispatcher.php
index <HASH>..<HASH> 100644
--- a/model/QueueDispatcher.php
+++ b/model/QueueDispatcher.php
@@ -450,6 +450,8 @@ class QueueDispatcher extends ConfigurableService implements QueueDispatcherInte
*
* It will be deprecated once we have the general GUI for displaying different info of a task for the user.
*
+ * @deprecated
+ *
* @param \core_kernel_classes_Resource $resource
* @return null|\core_kernel_classes_Resource
*/
@@ -464,6 +466,8 @@ class QueueDispatcher extends ConfigurableService implements QueueDispatcherInte
/**
* It will be deprecated once we have the general GUI for displaying different info of a task for the user.
*
+ * @deprecated
+ *
* @param \core_kernel_classes_Resource $resource
* @return Report
*/
@@ -496,6 +500,8 @@ class QueueDispatcher extends ConfigurableService implements QueueDispatcherInte
*
* It will be deprecated once we have the general GUI for displaying different info of a task for the user.
*
+ * @deprecated
+ *
* @param TaskInterface $task
* @param \core_kernel_classes_Resource|null $resource - placeholder resource to be linked with task.
* @return \core_kernel_classes_Resource
|
TAO-<I> Mark some dispatcher methods as deprecated
|
diff --git a/packages/vuetify/src/components/VSelect/VSelect.js b/packages/vuetify/src/components/VSelect/VSelect.js
index <HASH>..<HASH> 100644
--- a/packages/vuetify/src/components/VSelect/VSelect.js
+++ b/packages/vuetify/src/components/VSelect/VSelect.js
@@ -448,10 +448,16 @@ export default {
if (onlyBools) {
replacement = Object.keys(replacement).join(', ')
} else {
- replacement = JSON.stringify(replacement, null, multiple ? 2 : 0).replace(/"([^(")"]+)":/g, '$1:').replace(/"/g, '\'')
+ replacement = JSON.stringify(replacement, null, multiple ? 2 : 0)
+ .replace(/"([^(")"]+)":/g, '$1:')
+ .replace(/"/g, '\'')
}
- consoleWarn(`${props} ${multiple ? 'are' : 'is'} deprecated, use ${separator}:menu-props="${replacement}"${separator} instead`, this)
+ consoleWarn(
+ `${props} ${multiple ? 'are' : 'is'} deprecated, use ` +
+ `${separator}${onlyBools ? '' : ':'}menu-props="${replacement}"${separator} instead`,
+ this
+ )
}
}
|
fix(Select): remove colon from menu-props suggestion with booleans only
|
diff --git a/lib/zmachine/connection.rb b/lib/zmachine/connection.rb
index <HASH>..<HASH> 100644
--- a/lib/zmachine/connection.rb
+++ b/lib/zmachine/connection.rb
@@ -173,8 +173,14 @@ module ZMachine
def readable!
ZMachine.logger.debug("zmachine:connection:#{__method__}", connection: self) if ZMachine.debug
mark_active!
- data = @channel.read_inbound_data
- receive_data(data) if data
+ loop do
+ data = @channel.read_inbound_data
+ if data
+ receive_data(data)
+ else
+ break
+ end
+ end
nil
end
diff --git a/lib/zmachine/tcp_msg_channel.rb b/lib/zmachine/tcp_msg_channel.rb
index <HASH>..<HASH> 100644
--- a/lib/zmachine/tcp_msg_channel.rb
+++ b/lib/zmachine/tcp_msg_channel.rb
@@ -80,14 +80,7 @@ module ZMachine
end
end
- # clear buffer
- if @buffer.remaining
- bytes = java.util.Arrays.copyOfRange(@buffer.array, @buffer.position, @buffer.position+@buffer.remaining)
- @buffer.clear
- @buffer.put(bytes)
- else
- @buffer.clear
- end
+ @buffer.compact
data
end
|
allow for more than one receive_data per read_inbound_data
|
diff --git a/provider/godaddy/godaddy.go b/provider/godaddy/godaddy.go
index <HASH>..<HASH> 100644
--- a/provider/godaddy/godaddy.go
+++ b/provider/godaddy/godaddy.go
@@ -550,7 +550,7 @@ func maxOf(vars ...int64) int64 {
func toString(obj interface{}) string {
b, err := json.MarshalIndent(obj, "", " ")
- if e != nil {
+ if err != nil {
return fmt.Sprintf("<%v>", e)
}
|
Update provider/godaddy/godaddy.go
Typo: rename e to err
|
diff --git a/src/test/org/openscience/cdk/interfaces/AbstractAtomContainerTest.java b/src/test/org/openscience/cdk/interfaces/AbstractAtomContainerTest.java
index <HASH>..<HASH> 100644
--- a/src/test/org/openscience/cdk/interfaces/AbstractAtomContainerTest.java
+++ b/src/test/org/openscience/cdk/interfaces/AbstractAtomContainerTest.java
@@ -77,7 +77,7 @@ public abstract class AbstractAtomContainerTest extends AbstractChemObjectTest {
}
@Test public void testCloneButKeepOriginalsIntact() throws Exception {
- IMolecule molecule = (IMolecule)newChemObject();
+ IAtomContainer molecule = (IAtomContainer)newChemObject();
IAtom atom = molecule.getBuilder().newAtom();
molecule.addAtom(atom);
Assert.assertEquals(atom, molecule.getAtom(0));
|
Fixed a ClassCastException in a unit test; I messed up (mea culpa)
|
diff --git a/saltcloud/config.py b/saltcloud/config.py
index <HASH>..<HASH> 100644
--- a/saltcloud/config.py
+++ b/saltcloud/config.py
@@ -313,6 +313,10 @@ def apply_cloud_providers_config(overrides, defaults=None):
'single entry for EC2, Joyent, Openstack, and so '
'forth.'
)
+ raise saltcloud.exceptions.SaltCloudConfigError(
+ 'The cloud provider alias {0!r} has multiple entries '
+ 'for the {1[provider]!r} driver.'.format(key, details)
+ )
handled_providers.add(
details['provider']
)
|
Fail soon with an improper cloud providers alias setup.
|
diff --git a/ailment/analyses/propagator.py b/ailment/analyses/propagator.py
index <HASH>..<HASH> 100644
--- a/ailment/analyses/propagator.py
+++ b/ailment/analyses/propagator.py
@@ -83,8 +83,8 @@ def get_engine(base_engine):
super(SimEnginePropagator, self).__init__()
self._stack_pointer_tracker = stack_pointer_tracker
- def _process(self, state, successors, block=None):
- super(SimEnginePropagator, self)._process(state, successors, block=block)
+ def _process(self, state, successors, block=None, whitelist=None, **kwargs):
+ super(SimEnginePropagator, self)._process(state, successors, block=block, whitelist=whitelist, **kwargs)
#
# VEX statement handlers
|
Propagator: Support statement whitelists.
|
diff --git a/src/rzslider.js b/src/rzslider.js
index <HASH>..<HASH> 100644
--- a/src/rzslider.js
+++ b/src/rzslider.js
@@ -284,6 +284,7 @@ function throttle(func, wait, options) {
self = this;
this.initElemHandles();
+ this.addAccessibility();
this.calcViewDimensions();
this.setMinAndMax();
@@ -587,6 +588,18 @@ function throttle(func, wait, options) {
this.selBar.addClass('rz-draggable');
}
},
+
+ /**
+ * Adds accessibility atributes
+ *
+ * Run only once during initialization
+ *
+ * @returns {undefined}
+ */
+ addAccessibility: function ()
+ {
+ this.sliderElem.attr("role", "slider");
+ },
/**
* Calculate dimensions that are dependent on view port size
|
Added role=slider for better accessibility.
|
diff --git a/test/test_listplugin.rb b/test/test_listplugin.rb
index <HASH>..<HASH> 100755
--- a/test/test_listplugin.rb
+++ b/test/test_listplugin.rb
@@ -584,7 +584,7 @@ module ListPluginTests
nid,d = @rc.rttable.search_node("aa")
vn = @rc.rttable.get_vnode_id(d)
- st = Roma::Storage::TCMemStorage.new
+ st = Roma::Storage::RubyHashStorage.new
st.vn_list = [vn]
st.storage_path = 'storage_test'
st.opendb
|
Use Ruby Hash storage instead of TokyoCabinet
|
diff --git a/tests/Database/Ddd/EntityTest.php b/tests/Database/Ddd/EntityTest.php
index <HASH>..<HASH> 100644
--- a/tests/Database/Ddd/EntityTest.php
+++ b/tests/Database/Ddd/EntityTest.php
@@ -1101,7 +1101,7 @@ class EntityTest extends TestCase
'delete_at' => 0,
]));
- $post = Post::connectSandbox(['password' => '123456'], function () {
+ $post = Post::connectSandbox(['password' => $GLOBALS['LEEVEL_ENV']['DATABASE']['MYSQL']['PASSWORD']], function () {
return Post::find()->where('id', 1)->findOne();
});
|
tests(entity): fix tests of connectSandbox
|
diff --git a/plexapi/audio.py b/plexapi/audio.py
index <HASH>..<HASH> 100644
--- a/plexapi/audio.py
+++ b/plexapi/audio.py
@@ -186,12 +186,12 @@ class Track(Audio):
def build_item(server, elem, initpath):
- VIDEOCLS = {Movie.TYPE:Movie, Show.TYPE:Show, Season.TYPE:Season, Episode.TYPE:Episode}
- vtype = elem.attrib.get('type')
- if vtype in VIDEOCLS:
- cls = VIDEOCLS[vtype]
+ AUDIOCLS = {Artist.TYPE:Artist, Album.TYPE:Album, Track.TYPE:Track}
+ atype = elem.attrib.get('type')
+ if atype in AUDIOCLS:
+ cls = AUDIOCLS[atype]
return cls(server, elem, initpath)
- raise UnknownType('Unknown video type: %s' % vtype)
+ raise UnknownType('Unknown audio type: %s' % atype)
def find_key(server, key):
|
Fix build_item() to deal with audio
|
diff --git a/doc.go b/doc.go
index <HASH>..<HASH> 100644
--- a/doc.go
+++ b/doc.go
@@ -181,7 +181,7 @@ Conn.PgTypes.
See example_custom_type_test.go for an example of a custom type for the
PostgreSQL point type.
-[]byte Mapping
+Raw Bytes Mapping
[]byte passed as arguments to Query, QueryRow, and Exec are passed unmodified
to PostgreSQL. In like manner, a *[]byte passed to Scan will be filled with
|
Tweak doc.go so section head is detected
|
diff --git a/src/Helper/DrushHelper.php b/src/Helper/DrushHelper.php
index <HASH>..<HASH> 100644
--- a/src/Helper/DrushHelper.php
+++ b/src/Helper/DrushHelper.php
@@ -59,7 +59,8 @@ class DrushHelper extends Helper
if (!$reset && isset($version)) {
return $version;
}
- exec($this->getDrushExecutable() . ' --version', $drushVersion, $returnCode);
+ $command = $this->getDrushExecutable() . ' --version';
+ exec($command, $drushVersion, $returnCode);
if ($returnCode > 0) {
$message = $returnCode == 127 ? 'Error finding Drush version' : 'Drush is not installed';
throw new \Exception($message, $returnCode);
@@ -68,7 +69,7 @@ class DrushHelper extends Helper
// Parse the version from the Drush output. It should be a string a bit
// like " Drush Version : 8.0.0-beta14 ".
if (!preg_match('/:\s*([0-9]+\.[a-z0-9\-\.]+)\s*$/', $drushVersion[0], $matches)) {
- throw new \Exception("Unexpected 'drush --version' output: \n" . implode("\n", $drushVersion));
+ throw new \Exception("Unexpected output from command '$command': \n" . implode("\n", $drushVersion));
}
$version = $matches[1];
|
Even clearer drush version error output
We need to see the exact command being run
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -174,9 +174,6 @@ setup_config = dict(
'multitaper', 'seismology', 'signal processing'],
packages=find_packages(),
zip_safe=False,
- # this is needed for "pip install instaseis==dev"
- download_url=("https://github.com/krischer/mtspect/zipball/master"
- "#egg=instaseis=dev"),
include_package_data=True
)
|
Removing unnecessary download_url line.
Those who want the dev version can just checkout from github.
|
diff --git a/api/api.go b/api/api.go
index <HASH>..<HASH> 100644
--- a/api/api.go
+++ b/api/api.go
@@ -20,7 +20,6 @@ import (
"net/http"
"net/url"
"os"
- "regexp"
"strings"
"sync"
"time"
@@ -241,7 +240,7 @@ func (a *API) apiRequest(reqMethod string, reqPath string, data []byte) ([]byte,
if !a.useExponentialBackoff {
break
}
- if matched, _ := regexp.MatchString("code 403", err.Error()); matched {
+ if strings.Contains(err.Error(), "code 403") {
break
}
}
|
fix: simplify check for <I> on api call error
|
diff --git a/src/danog/MadelineProto/Magic.php b/src/danog/MadelineProto/Magic.php
index <HASH>..<HASH> 100644
--- a/src/danog/MadelineProto/Magic.php
+++ b/src/danog/MadelineProto/Magic.php
@@ -94,7 +94,7 @@ class Magic
self::$revision = 'Revision: '.self::$revision.$latest;
}
self::$can_parallel = false;
- if (php_sapi_name() === 'cli') {
+ if (php_sapi_name() === 'cli' && !(class_exists('\\Phar') && \Phar::running())) {
try {
$back = debug_backtrace(0);
$promise = \Amp\File\get(end($back)['file']);
|
Disable parallel processing on phar
|
diff --git a/lib/todos.js b/lib/todos.js
index <HASH>..<HASH> 100644
--- a/lib/todos.js
+++ b/lib/todos.js
@@ -200,9 +200,7 @@ function getLanguage (filename /*, content */) {
}
function fromDiff (repo, diff, sha, conf) {
- conf = _.merge({
- "onProgress": _.noop
- }, conf || {}, config.defaults);
+ conf = _.merge({ "onProgress": _.noop }, config.defaults, conf || {});
var todos = _.flatten(_.map(diff, function (file) {
var addedLines = _.filter(file.lines, "add");
|
FIX always using config defaults in fromDiff
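The bug was argument order: with lodash's `_.merge`, later sources win, so merging `config.defaults` last let the defaults silently overwrite whatever `conf` the caller passed. The same precedence rule, shown with Python dicts (keys are illustrative):

```python
defaults = {"label": "TODO", "ignore": ["node_modules"]}
conf = {"label": "FIXME"}  # caller's override

# Wrong order: defaults applied last clobber the caller's setting.
wrong = {**conf, **defaults}
assert wrong["label"] == "TODO"

# Fixed order: start from defaults, let conf win.
right = {**defaults, **conf}
assert right["label"] == "FIXME"
```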
|
diff --git a/delay.js b/delay.js
index <HASH>..<HASH> 100644
--- a/delay.js
+++ b/delay.js
@@ -6,7 +6,7 @@ var isReduced = require("./is-reduced")
var end = require("./end")
function delay(source, ms) {
- ms = ms || 3 // Minimum 3ms, as on less dispatch order becomes unreliable
+ ms = ms || 6 // Minimum 6ms, as on less dispatch order becomes unreliable
return reducible(function(next, result) {
var timeout = 0
var ended = false
|
Increase default delay from 3 to 6 as phantom
seems to act strange on less than that.
|
diff --git a/src/plugins/pouchdb.gql.js b/src/plugins/pouchdb.gql.js
index <HASH>..<HASH> 100644
--- a/src/plugins/pouchdb.gql.js
+++ b/src/plugins/pouchdb.gql.js
@@ -287,7 +287,7 @@
symbol("identifier", function (tok) {
//here we allow for identifiers with the same name as functions
- if (isfunction (tok.value) && peekToken().type === "(") {
+ if (isFunction (tok.value) && peekToken().type === "(") {
var args= [];
while(advance()){
if (token.type === ")"){
|
(#<I>) - fixed a typo in gql plugin
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -5,7 +5,7 @@ here = os.path.abspath(os.path.dirname(__file__))
setup(
name='django-tilebundler',
- version='0.1-alpha6',
+ version='0.1-beta2',
author='Syrus Mesdaghi',
author_email='geoshape.org@gmail.com',
url='https://github.com/ROGUE-JCTD/django-tilebundler',
@@ -27,11 +27,11 @@ setup(
'Programming Language :: Python :: 2.7'
],
install_requires=[
- 'Django==1.6.10',
+ 'Django>=1.6.10',
'MapProxy==1.8.0',
- 'PyYAML==3.10',
- 'django-tastypie==0.12.1',
- 'Shapely==1.5.9',
+ 'PyYAML>=3.10',
+ 'django-tastypie>=0.12.1',
+ 'Shapely>=1.5.9',
'psutil==3.0.1'
]
)
|
allow >= on some dependencies.
|
diff --git a/src/reporter/Coverage.php b/src/reporter/Coverage.php
index <HASH>..<HASH> 100644
--- a/src/reporter/Coverage.php
+++ b/src/reporter/Coverage.php
@@ -182,6 +182,11 @@ class Coverage extends Terminal
}
}
+ /**
+ * Output the coverage report of a metrics instance.
+ *
+ * @param Metrics $metrics A metrics instance.
+ */
protected function _renderCoverage($metrics)
{
$stats = $metrics->data();
diff --git a/src/reporter/coverage/Collector.php b/src/reporter/coverage/Collector.php
index <HASH>..<HASH> 100644
--- a/src/reporter/coverage/Collector.php
+++ b/src/reporter/coverage/Collector.php
@@ -388,6 +388,15 @@ class Collector
return $this->_methodMetrics($node, $metrics);
}
+ /**
+ * Helper for `Collector::metrics()`.
+ *
+ * @param string $type The metric type.
+ * @param integer $index The line index.
+ * @param integer $value The value to update.
+ * @param integer $increment The increment to perform if the line has not already been processed.
+ * @return integer The metric value.
+ */
protected function _lineMetric($type, $index, $value, $increment = 1) {
if ($this->_processed[$type] >= $index) {
return $value;
|
Adds missing docblocks.
|
diff --git a/calculation-engine/engine-tests/engine-it-performance/src/test/java/com/dataart/spreadsheetanalytics/BenchmarkTestParent.java b/calculation-engine/engine-tests/engine-it-performance/src/test/java/com/dataart/spreadsheetanalytics/BenchmarkTestParent.java
index <HASH>..<HASH> 100644
--- a/calculation-engine/engine-tests/engine-it-performance/src/test/java/com/dataart/spreadsheetanalytics/BenchmarkTestParent.java
+++ b/calculation-engine/engine-tests/engine-it-performance/src/test/java/com/dataart/spreadsheetanalytics/BenchmarkTestParent.java
@@ -60,7 +60,7 @@ public class BenchmarkTestParent {
public void startAllBenchmarks() throws Exception {
ChainedOptionsBuilder opts = new OptionsBuilder()
.mode(Mode.SampleTime)
- .timeUnit(TimeUnit.MILLISECONDS)
+ .timeUnit(TimeUnit.NANOSECONDS)
.warmupTime(TimeValue.seconds(1))
.warmupIterations(8)
.measurementTime(TimeValue.seconds(1))
|
perf tests: time unit changed to nanoseconds
|
diff --git a/i3ipc/connection.py b/i3ipc/connection.py
index <HASH>..<HASH> 100644
--- a/i3ipc/connection.py
+++ b/i3ipc/connection.py
@@ -15,6 +15,8 @@ import os
import subprocess
from threading import Timer, Lock
import time
+import Xlib
+import Xlib.display
python2 = sys.version_info[0] < 3
@@ -49,17 +51,10 @@ class Connection(object):
if not socket_path:
try:
- socket_path = subprocess.check_output(['i3', '--get-socketpath'],
- close_fds=True,
- universal_newlines=True).strip()
- except Exception:
- pass
-
- if not socket_path:
- try:
- socket_path = subprocess.check_output(['sway', '--get-socketpath'],
- close_fds=True,
- universal_newlines=True).strip()
+ disp = Xlib.display.Display()
+ root = disp.screen().root
+ i3atom = disp.intern_atom("I3_SOCKET_PATH")
+ socket_path = root.get_full_property(i3atom, Xlib.X.AnyPropertyType).value.decode()
except Exception:
pass
|
Use Xlib directly instead of calling i3 in a subprocess to get the socket path
|
diff --git a/test/jade.test.js b/test/jade.test.js
index <HASH>..<HASH> 100644
--- a/test/jade.test.js
+++ b/test/jade.test.js
@@ -645,18 +645,6 @@ module.exports = {
assert.equal(html, render(str));
- // Non-Enumerable
- var str = [
- '- each val in 1',
- ' li= val'
- ].join('\n');
-
- var html = [
- '<li>1</li>'
- ].join('');
-
- assert.equal(html, render(str));
-
// Complex
var str = [
'- var obj = { foo: "bar", baz: "raz" };',
|
Each no longer works with non-enumerables
|
diff --git a/src/Watchers/ExceptionWatcher.php b/src/Watchers/ExceptionWatcher.php
index <HASH>..<HASH> 100644
--- a/src/Watchers/ExceptionWatcher.php
+++ b/src/Watchers/ExceptionWatcher.php
@@ -49,7 +49,9 @@ class ExceptionWatcher extends Watcher
'message' => $exception->getMessage(),
'trace' => $trace,
'line_preview' => ExceptionContext::get($exception),
- 'context' => Arr::except($event->context, ['exception', 'telescope']),
+ 'context' => transform(Arr::except($event->context, ['exception', 'telescope']), function ($context) {
+ return ! empty($context) ? $context : null;
+ }),
])->tags($this->tags($event))
);
}
|
Only send context if it has value, otherwise set it to null.
|
diff --git a/amino/do.py b/amino/do.py
index <HASH>..<HASH> 100644
--- a/amino/do.py
+++ b/amino/do.py
@@ -1,5 +1,5 @@
from types import GeneratorType
-from typing import TypeVar, Callable, Any, Generator, cast, Type
+from typing import TypeVar, Callable, Any, Generator, Type
import functools
from amino.tc.base import F
@@ -13,6 +13,8 @@ Do = Generator
# NOTE ostensibly, this cannot be tailrecced without separating strictly evaluated monadic composition from lazy ones.
# itr.gi_frame.f_lasti is the instruction pointer and could be used to detect laziness.
+# NOTE due to the nature of generators, a do with a lazily evaluated monad cannot be executed twice.
+# NOTE Lists don't work properly because the generator will be consumed by the first element
def untyped_do(f: Callable[..., Generator[G, B, None]]) -> Callable[..., G]:
@functools.wraps(f)
def do_loop(*a: Any, **kw: Any) -> F[B]:
@@ -36,7 +38,7 @@ def do(tpe: Type[A]) -> Callable[[Callable[..., Generator]], Callable[..., A]]:
f.tpe = tpe
f.__do = None
f.__do_original = f
- return cast(Callable[[Callable[..., Generator]], Callable[..., A]], functools.wraps(f)(untyped_do))(f)
+ return functools.wraps(f)(untyped_do)(f)
return deco
tdo = do
|
remove cast from `do`
|
diff --git a/lib/roar/representer.rb b/lib/roar/representer.rb
index <HASH>..<HASH> 100644
--- a/lib/roar/representer.rb
+++ b/lib/roar/representer.rb
@@ -17,12 +17,6 @@ module Roar
end
end
- attr_reader :represented
-
- def initialize(represented=nil) # FIXME!
- @represented = represented
- end
-
# DISCUSS: serialize on instance?
def serialize(represented, mime_type)
|
removing @represented as we all hate ivars.
|
diff --git a/lib/Drupal/AppConsole/Command/GeneratorFormCommand.php b/lib/Drupal/AppConsole/Command/GeneratorFormCommand.php
index <HASH>..<HASH> 100644
--- a/lib/Drupal/AppConsole/Command/GeneratorFormCommand.php
+++ b/lib/Drupal/AppConsole/Command/GeneratorFormCommand.php
@@ -163,12 +163,12 @@ class GeneratorFormCommand extends GeneratorCommand {
// TODO: validate
$input_type = $d->askAndValidate(
$output,
- $dialog->getQuestion(' Type'),
+ $dialog->getQuestion(' Type', 'textfield',':'),
function($input) use ($input_types){
return $input;
},
false,
- null,
+ 'textfield',
$input_types
);
|
Set textfield as default on field generation
|
diff --git a/src/java/org/apache/cassandra/gms/Gossiper.java b/src/java/org/apache/cassandra/gms/Gossiper.java
index <HASH>..<HASH> 100644
--- a/src/java/org/apache/cassandra/gms/Gossiper.java
+++ b/src/java/org/apache/cassandra/gms/Gossiper.java
@@ -817,6 +817,8 @@ public class Gossiper implements IFailureDetectionEventListener, GossiperMBean
return;
}
+ localState.markDead();
+
MessageOut<EchoMessage> echoMessage = new MessageOut<EchoMessage>(MessagingService.Verb.ECHO, new EchoMessage(), EchoMessage.serializer);
logger.trace("Sending a EchoMessage to {}", addr);
IAsyncCallback echoHandler = new IAsyncCallback()
|
Mark EPS as dead before sending echo command.
Patch by Sankalp Kohli, reviewed by brandonwilliams for CASSANDRA-<I>
|
diff --git a/demos/modal.php b/demos/modal.php
index <HASH>..<HASH> 100644
--- a/demos/modal.php
+++ b/demos/modal.php
@@ -61,4 +61,5 @@ $t->on('click', 'tr', new \atk4\ui\jsModal(
]
)
));
+$t->addStyle('cursor', 'pointer');
|
imply ability to click on table with mouse pointer.
|
diff --git a/src/attributes.js b/src/attributes.js
index <HASH>..<HASH> 100644
--- a/src/attributes.js
+++ b/src/attributes.js
@@ -317,12 +317,12 @@ jQuery.extend({
return ret;
} else {
- var attr = elem.getAttribute( name );
+ ret = elem.getAttribute( name );
// Non-existent attributes return null, we normalize to undefined
- return attr === null || attr === "undefined" || attr === "null" ?
+ return ret === null || ret === "null" ?
undefined :
- attr;
+ ret;
}
}
},
@@ -336,7 +336,7 @@ jQuery.extend({
// set property to null if getSetAttribute not supported (IE6-7)
// setting className to null makes the class "null"
if ( name === "className" ) {
- elem.className = "";
+ elem[ name ] = "";
} else {
elem.setAttribute( name, null );
}
|
Non-existent attribute for jQuery.attr no longer needs to check for "undefined"
- Remove an unnecessary var
- Use variable in removeAttr for better minification
|
diff --git a/go/dhcp/api.go b/go/dhcp/api.go
index <HASH>..<HASH> 100644
--- a/go/dhcp/api.go
+++ b/go/dhcp/api.go
@@ -5,7 +5,6 @@ import (
"fmt"
"io"
"io/ioutil"
- "log"
"net"
"net/http"
"strconv"
@@ -208,11 +207,11 @@ func decodeOptions(b string) map[dhcp.OptionCode][]byte {
var options []Options
_, value := etcdGet(b)
decodedValue := converttobyte(value)
+ var dhcpOptions = make(map[dhcp.OptionCode][]byte)
if err := json.Unmarshal(decodedValue, &options); err != nil {
- log.Fatal(err)
+ return dhcpOptions
}
- // spew.Dump(options)
- var dhcpOptions = make(map[dhcp.OptionCode][]byte)
+
for _, option := range options {
var Value interface{}
switch option.Type {
|
Do not crash when there is nothing in the cache
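The fix swaps a hard `log.Fatal` for returning an empty option map when the cached value can't be decoded. The same defensive shape in Python (function and field names are illustrative, not the PacketFence API):

```python
import json

def decode_options(raw):
    """Return a dict of options, or an empty dict when the cache is empty/corrupt."""
    options = {}
    try:
        entries = json.loads(raw) if raw else []
    except ValueError:
        return options  # nothing usable cached: degrade gracefully, don't abort
    for entry in entries:
        options[entry["option"]] = entry["value"]
    return options

print(decode_options(""))                                    # {}
print(decode_options('[{"option": 3, "value": "router"}]'))  # {3: 'router'}
```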
|
diff --git a/lib/webpack.config.js b/lib/webpack.config.js
index <HASH>..<HASH> 100644
--- a/lib/webpack.config.js
+++ b/lib/webpack.config.js
@@ -109,13 +109,6 @@ const plugins = [
}),
];
-if (isDev) {
- plugins.push(
- new webpack.HotModuleReplacementPlugin(),
- new webpack.NoEmitOnErrorsPlugin()
- );
-}
-
if (!isDev) {
plugins.push(
new CompressionWebpackPlugin({
|
PWA-<I> Removed unnecessary plugins.
|
diff --git a/gym-unity/setup.py b/gym-unity/setup.py
index <HASH>..<HASH> 100755
--- a/gym-unity/setup.py
+++ b/gym-unity/setup.py
@@ -38,6 +38,6 @@ setup(
author_email="ML-Agents@unity3d.com",
url="https://github.com/Unity-Technologies/ml-agents",
packages=find_packages(),
- install_requires=["gym==0.20.0", f"mlagents_envs=={VERSION}"],
+ install_requires=["gym==0.21.0", f"mlagents_envs=={VERSION}"],
cmdclass={"verify": VerifyVersionCommand},
)
|
upgrade gym to <I> (#<I>)
|
diff --git a/lib/travis/model/job/test.rb b/lib/travis/model/job/test.rb
index <HASH>..<HASH> 100644
--- a/lib/travis/model/job/test.rb
+++ b/lib/travis/model/job/test.rb
@@ -17,7 +17,7 @@ class Job
class << self
def append_log!(id, chars)
- job = find(id, :select => [:id, :repository_id, :source_id, :source_type, :state, :config]) # is this still needed? we introduced this as an optimization when the log was still on the jobs table
+ job = find(id)
job.append_log!(chars) unless job.finished?
end
end
|
no need to select a reduced set of fields any more
|
diff --git a/src/main/java/org/asteriskjava/manager/internal/AbstractBuilder.java b/src/main/java/org/asteriskjava/manager/internal/AbstractBuilder.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/asteriskjava/manager/internal/AbstractBuilder.java
+++ b/src/main/java/org/asteriskjava/manager/internal/AbstractBuilder.java
@@ -54,15 +54,6 @@ abstract class AbstractBuilder
setterName = "clazz";
}
- /*
- * The class property needs to be renamed. It is used in
- * MusicOnHoldEvent.
- */
- if ("class".equals(setterName))
- {
- setterName = "classname";
- }
-
setter = setters.get(setterName);
if (setter == null && !setterName.endsWith("s")) // no exact match
|
Remove dead code from AbstractBuilder.setAttributes()
The lines above this already change 'class' -> 'clazz' so this block
can never be executed.
|
diff --git a/src/DependencyInjection/Configuration.php b/src/DependencyInjection/Configuration.php
index <HASH>..<HASH> 100644
--- a/src/DependencyInjection/Configuration.php
+++ b/src/DependencyInjection/Configuration.php
@@ -18,8 +18,9 @@ class Configuration implements ConfigurationInterface
public function getConfigTreeBuilder()
{
$treeBuilder = new TreeBuilder('ashley_dawson_glide');
-
- $rootNode = $treeBuilder->getRootNode();
+ $rootNode = \method_exists($treeBuilder, 'getRootNode')
+ ? $treeBuilder->getRootNode()
+ : $treeBuilder->root('ashley_dawson_glide');
$rootNode
->children()
|
Make treebuilder configuration backwards compatible with older Symfony versions
|
diff --git a/tests/test_mock_solver_loading.py b/tests/test_mock_solver_loading.py
index <HASH>..<HASH> 100644
--- a/tests/test_mock_solver_loading.py
+++ b/tests/test_mock_solver_loading.py
@@ -202,6 +202,8 @@ alpha|file-alpha-url,file-alpha-token,,alpha-solver
"""
+# patch the new config loading mechanism, to test only legacy config loading
+@mock.patch("dwave.cloud.config.detect_configfile_path", lambda: None)
class MockConfiguration(unittest.TestCase):
"""Ensure that the precedence of configuration sources is followed."""
|
Fix legacy config loading tests
While testing legacy config loading, completely disable the new config
loading.
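`mock.patch` used as a class decorator wraps every `test_*` method, which is what lets one line disable the new detection path for the whole legacy suite. A self-contained illustration of that mechanism (patching `os.getcwd` here, not the dwave module):

```python
import os
import unittest
from unittest import mock

# Applied at class level, the patch is active inside every test_* method.
@mock.patch("os.getcwd", lambda: "/patched")
class ClassLevelPatchExample(unittest.TestCase):
    def test_first(self):
        self.assertEqual(os.getcwd(), "/patched")

    def test_second(self):
        self.assertEqual(os.getcwd(), "/patched")

if __name__ == "__main__":
    unittest.main()
```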
|
diff --git a/axes/decorators.py b/axes/decorators.py
index <HASH>..<HASH> 100644
--- a/axes/decorators.py
+++ b/axes/decorators.py
@@ -176,9 +176,13 @@ def get_user_attempts(request):
if COOLOFF_TIME:
for attempt in attempts:
- if attempt.attempt_time + COOLOFF_TIME < datetime.now() \
- and attempt.trusted is False:
- attempt.delete()
+ if attempt.attempt_time + COOLOFF_TIME < datetime.now():
+ if attempt.trusted:
+ attempt.failures_since_start = 0
+ attempt.save()
+ else:
+ attempt.delete()
+ objects_deleted = True
return attempts
|
When the cooloff period has expired: if the user is trusted, reset the
failure counter; otherwise obliterate the user.
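Spelled out, the rule for each expired attempt is: trusted, keep the record but zero its failure counter; untrusted, delete it. A compact sketch of that branch with plain dicts standing in for the Django model:

```python
from datetime import datetime, timedelta

COOLOFF_TIME = timedelta(hours=1)

def apply_cooloff(attempts, now=None):
    """Reset trusted attempts and drop untrusted ones once the cooloff has passed."""
    now = now or datetime.now()
    kept = []
    for attempt in attempts:
        if attempt["attempt_time"] + COOLOFF_TIME < now:
            if attempt["trusted"]:
                attempt["failures_since_start"] = 0  # reset, keep the record
                kept.append(attempt)
            # expired and untrusted: dropped ("obliterated")
        else:
            kept.append(attempt)
    return kept
```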
|
diff --git a/request.go b/request.go
index <HASH>..<HASH> 100644
--- a/request.go
+++ b/request.go
@@ -52,6 +52,7 @@ func (a *Request) GetRequestedScopes() Arguments {
}
func (a *Request) SetRequestedScopes(s Arguments) {
+ a.Scopes = nil
for _, scope := range s {
a.AppendRequestedScope(scope)
}
|
request: fix SetRequestedScopes (#<I>)
|
diff --git a/lib/Minify/HTML.php b/lib/Minify/HTML.php
index <HASH>..<HASH> 100644
--- a/lib/Minify/HTML.php
+++ b/lib/Minify/HTML.php
@@ -135,7 +135,7 @@ class Minify_HTML
.'|canvas|caption|center|col(?:group)?|dd|dir|div|dl|dt|fieldset|figcaption|figure|footer|form'
.'|frame(?:set)?|h[1-6]|head|header|hgroup|hr|html|legend|li|link|main|map|menu|meta|nav'
.'|ol|opt(?:group|ion)|output|p|param|section|t(?:able|body|head|d|h||r|foot|itle)'
- .'|ul|video)\\b[^>]*>)/i', '$1', $this->_html);
+ .'|ul|video)\\b[^>]*>)/iu', '$1', $this->_html);
// remove ws outside of all elements
$this->_html = preg_replace(
|
Fix issue where minify() would corrupt Unicode characters (such as ) in some environments
Adding the `u` flag there fixes an issue in my environment (MAMP <I> with PHP <I>) where `minify()` would corrupt the ` ` character (it would get replaced by the replacement character).
|
diff --git a/avatar/views.py b/avatar/views.py
index <HASH>..<HASH> 100644
--- a/avatar/views.py
+++ b/avatar/views.py
@@ -50,17 +50,20 @@ def _notification_updated(request, avatar):
def _get_avatars(user):
# Default set. Needs to be sliced, but that's it. Keep the natural order.
- avatars = user.avatar_set.all()
+ avatars = user.avatar_set.all()
# Current avatar
- avatar = avatars.filter(primary=True)[:1]
- if avatar:
- avatar = avatar[0]
+ primary_avatar = avatars.order_by('-primary')[:1]
+ if primary_avatar:
+ avatar = primary_avatar[0]
else:
avatar = None
- # Slice the default set now that we used the queryset for the primary avatar
- avatars = avatars[:AVATAR_MAX_AVATARS_PER_USER]
+ if AVATAR_MAX_AVATARS_PER_USER == 1:
+ avatars = primary_avatar
+ else:
+ # Slice the default set now that we used the queryset for the primary avatar
+ avatars = avatars[:AVATAR_MAX_AVATARS_PER_USER]
return (avatar, avatars)
def add(request, extra_context={}, next_override=None):
|
This method is closer to the original and should be more robust even if you change the max number of avatars
|
diff --git a/lib/billy/proxy.rb b/lib/billy/proxy.rb
index <HASH>..<HASH> 100644
--- a/lib/billy/proxy.rb
+++ b/lib/billy/proxy.rb
@@ -70,7 +70,7 @@ module Billy
EM.run do
EM.error_handler do |e|
Billy.log :error, "#{e.class} (#{e.message}):"
- Billy.log :error, e.backtrace.join("\n")
+ Billy.log :error, e.backtrace.join("\n") unless e.backtrace.nil?
end
@signature = EM.start_server(host, Billy.config.proxy_port, ProxyConnection) do |p|
|
Don't print backtrace when it's not available
Causing an exception in the EM error handler terminates the
thread, which in turn makes subsequent specs fail
|
diff --git a/app/models/organization.rb b/app/models/organization.rb
index <HASH>..<HASH> 100644
--- a/app/models/organization.rb
+++ b/app/models/organization.rb
@@ -48,7 +48,9 @@ class Organization < ApplicationRecord
has_many :ip_pool_rules, :dependent => :destroy, :as => :owner
after_create do
- self.ip_pools << IPPool.transactional.default
+ if pool = IPPool.transactional.default
+ self.ip_pools << IPPool.transactional.default
+ end
end
def status
diff --git a/db/seeds.rb b/db/seeds.rb
index <HASH>..<HASH> 100644
--- a/db/seeds.rb
+++ b/db/seeds.rb
@@ -3,4 +3,4 @@ user = User.create!(:first_name => "Example", :last_name => "Admin", :email_addr
org = Organization.create!(:name => "Acme Inc", :permalink => "acme", :time_zone => "London", :owner => user)
org.users << user
-server = Server.create!(:ip_pool => ip_pool, :organization => org, :name => "Example Server", :permalink => "example", :mode => "Live")
+server = Server.create!(:organization => org, :name => "Example Server", :permalink => "example", :mode => "Live")
|
fixes issue with creating new organizations when there are no ip pools
|
diff --git a/lib/comfortable_mexican_sofa/seeds/page.rb b/lib/comfortable_mexican_sofa/seeds/page.rb
index <HASH>..<HASH> 100644
--- a/lib/comfortable_mexican_sofa/seeds/page.rb
+++ b/lib/comfortable_mexican_sofa/seeds/page.rb
@@ -170,7 +170,6 @@ module ComfortableMexicanSofa::Seeds::Page
attrs = {
"label" => page.label,
"layout" => page.layout.try(:identifier),
- "parent" => page.parent && (page.parent.slug.present?? page.parent.slug : 'index'),
"target_page" => page.target_page.try(:full_path),
"categories" => page.categories.map{|c| c.label},
"is_published" => page.is_published,
diff --git a/test/lib/seeds/pages_test.rb b/test/lib/seeds/pages_test.rb
index <HASH>..<HASH> 100644
--- a/test/lib/seeds/pages_test.rb
+++ b/test/lib/seeds/pages_test.rb
@@ -156,7 +156,6 @@ class SeedsPagesTest < ActiveSupport::TestCase
---
label: Default Page
layout: default
- parent:\s
target_page: "/child-page"
categories:
- Default
@@ -181,7 +180,6 @@ class SeedsPagesTest < ActiveSupport::TestCase
---
label: Child Page
layout: default
- parent: index
target_page:\s
categories: []
is_published: true
|
we don't export page parent data. that's folders
|
diff --git a/app/models/foreman_tasks/recurring_logic.rb b/app/models/foreman_tasks/recurring_logic.rb
index <HASH>..<HASH> 100644
--- a/app/models/foreman_tasks/recurring_logic.rb
+++ b/app/models/foreman_tasks/recurring_logic.rb
@@ -31,12 +31,12 @@ module ForemanTasks
if value
task.update!(:start_at => next_occurrence_time) if task.start_at < Time.zone.now
update(:state => 'active')
- else
- update(:state => 'disabled')
end
- else
+ elsif value
raise RecurringLogicCancelledException
end
+
+ update(:state => 'disabled') unless value
end
def enabled?
diff --git a/test/unit/recurring_logic_test.rb b/test/unit/recurring_logic_test.rb
index <HASH>..<HASH> 100644
--- a/test/unit/recurring_logic_test.rb
+++ b/test/unit/recurring_logic_test.rb
@@ -148,6 +148,12 @@ class RecurringLogicsTest < ActiveSupport::TestCase
assert ForemanTasks.dynflow.world.persistence.load_delayed_plan(task.execution_plan.id).frozen
end
+ it 'handles if the task has been deleted' do
+ logic.tasks.find_by(:state => 'scheduled').destroy
+ logic.update!(:enabled => false)
+ assert_equal 'disabled', logic.state
+ end
+
it 'properly re-enables on disable' do
logic.update!(:enabled => false)
logic.update!(:enabled => true)
|
Fixes #<I> - fix disable on rec logic with deleted task
|
diff --git a/test/resharding.py b/test/resharding.py
index <HASH>..<HASH> 100755
--- a/test/resharding.py
+++ b/test/resharding.py
@@ -561,6 +561,13 @@ primary key (name)
# check the binlog players are running and exporting vars
self.check_destination_master(shard_2_master, ['test_keyspace/80-'])
self.check_destination_master(shard_3_master, ['test_keyspace/80-'])
+ # When the binlog players/filtered replication is turned on, the query
+ # service must be turned off on the destination masters.
+ # The tested behavior is a safeguard to prevent that somebody can
+ # accidentally modify data on the destination masters while they are not
+ # migrated yet and the source shards are still the source of truth.
+ shard_2_master.wait_for_vttablet_state('NOT_SERVING')
+ shard_3_master.wait_for_vttablet_state('NOT_SERVING')
# check that binlog server exported the stats vars
self.check_binlog_server_vars(shard_1_slave1, horizontal=True)
|
test/resharding.py: Add check that query service is turned off after filtered replication was turned on.
The tested behavior is a safeguard to prevent that somebody can accidentally modify data on the destination masters while they are not migrated yet and the source shards are still the source of truth.
|
diff --git a/tests/contrib/openstack/test_neutron_utils.py b/tests/contrib/openstack/test_neutron_utils.py
index <HASH>..<HASH> 100644
--- a/tests/contrib/openstack/test_neutron_utils.py
+++ b/tests/contrib/openstack/test_neutron_utils.py
@@ -86,6 +86,7 @@ class NeutronTests(unittest.TestCase):
'neutron.plugins.midonet.plugin.MidonetPluginV2')
self.os_release.return_value = 'liberty'
+ self.config.return_value = 'mem-1.9'
plugins = neutron.neutron_plugins()
self.assertEquals(plugins['midonet']['driver'],
'midonet.neutron.plugin_v1.MidonetPluginV2')
|
fix lack of midonet-origin for liberty specific behavior
|
diff --git a/tensorflow_datasets/video/moving_mnist.py b/tensorflow_datasets/video/moving_mnist.py
index <HASH>..<HASH> 100644
--- a/tensorflow_datasets/video/moving_mnist.py
+++ b/tensorflow_datasets/video/moving_mnist.py
@@ -82,7 +82,6 @@ class MovingMnist(tfds.core.GeneratorBasedBuilder):
return [
tfds.core.SplitGenerator(
name=tfds.Split.TEST,
- num_shards=5,
gen_kwargs=dict(data_path=data_path)),
]
@@ -99,5 +98,5 @@ class MovingMnist(tfds.core.GeneratorBasedBuilder):
images = np.load(fp)
images = np.transpose(images, (1, 0, 2, 3))
images = np.expand_dims(images, axis=-1)
- for sequence in images:
- yield dict(image_sequence=sequence)
+ for i, sequence in enumerate(images):
+ yield i, dict(image_sequence=sequence)
|
Fixing moving-mnist that was broken after S3 migration.
PiperOrigin-RevId: <I>
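After the S3 migration, split generators no longer take `num_shards` and `_generate_examples` must yield `(key, example)` pairs with a unique key per example. A stripped-down sketch of the generator shape (array shapes are made up for illustration):

```python
import numpy as np

def generate_examples(images):
    """Yield (key, example) pairs, as the post-S3 tfds pipeline expects."""
    for i, sequence in enumerate(images):
        yield i, {"image_sequence": sequence}  # the index doubles as a unique key

fake = np.zeros((3, 20, 64, 64, 1), dtype=np.uint8)  # 3 sequences of 20 frames
for key, example in generate_examples(fake):
    print(key, example["image_sequence"].shape)
```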
|
diff --git a/salt/cloud/__init__.py b/salt/cloud/__init__.py
index <HASH>..<HASH> 100644
--- a/salt/cloud/__init__.py
+++ b/salt/cloud/__init__.py
@@ -231,7 +231,7 @@ class CloudClient(object):
if a.get('provider', '')]
if providers:
_providers = opts.get('providers', {})
- for provider in _providers:
+ for provider in list(_providers):
if provider not in providers:
_providers.pop(provider)
return opts
|
Use list() on providers
This is needed because we are modifying the size of the dictionary
inside the loop.
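A standalone Python sketch of the pitfall being fixed (provider names are illustrative): mutating a dict while iterating it directly raises "RuntimeError: dictionary changed size during iteration", while iterating a list() snapshot of the keys is safe.

_providers = {'ec2': {}, 'gce': {}, 'azure': {}}
wanted = ['ec2']

for provider in list(_providers):  # iterate over a snapshot of the keys
    if provider not in wanted:
        _providers.pop(provider)

assert list(_providers) == ['ec2']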
|
diff --git a/packages/aws-lambda-create-request-response/src/index.js b/packages/aws-lambda-create-request-response/src/index.js
index <HASH>..<HASH> 100644
--- a/packages/aws-lambda-create-request-response/src/index.js
+++ b/packages/aws-lambda-create-request-response/src/index.js
@@ -4,9 +4,10 @@ const Stream = require('stream')
const queryString = require('querystring')
module.exports = (event, callback) => {
+ const base64Support = process.env.BINARY_SUPPORT !== 'no'
const response = {
body: Buffer.from(''),
- isBase64Encoded: true,
+ isBase64Encoded: base64Support,
statusCode: 200,
headers: {}
}
@@ -61,7 +62,9 @@ module.exports = (event, callback) => {
}
res.end = text => {
if (text) res.write(text)
- response.body = Buffer.from(response.body).toString('base64')
+ response.body = Buffer.from(response.body).toString(
+ base64Support ? 'base64' : undefined
+ )
response.headers = res.headers || {}
callback(null, response)
}
|
Added binary support config via process.env.BINARY_SUPPORT
|
diff --git a/src/org/opencms/i18n/A_CmsMessageBundle.java b/src/org/opencms/i18n/A_CmsMessageBundle.java
index <HASH>..<HASH> 100644
--- a/src/org/opencms/i18n/A_CmsMessageBundle.java
+++ b/src/org/opencms/i18n/A_CmsMessageBundle.java
@@ -1,7 +1,7 @@
/*
* File : $Source: /alkacon/cvs/opencms/src/org/opencms/i18n/A_CmsMessageBundle.java,v $
- * Date : $Date: 2005/05/02 15:13:40 $
- * Version: $Revision: 1.16 $
+ * Date : $Date: 2005/05/03 13:02:44 $
+ * Version: $Revision: 1.17 $
*
* This library is part of OpenCms -
* the Open Source Content Mananagement System
@@ -54,6 +54,7 @@ public abstract class A_CmsMessageBundle implements I_CmsMessageBundle {
org.opencms.file.collectors.Messages.get(),
org.opencms.flex.Messages.get(),
org.opencms.importexport.Messages.get(),
+ org.opencms.jsp.Messages.get(),
org.opencms.loader.Messages.get(),
org.opencms.lock.Messages.get(),
org.opencms.mail.Messages.get(),
|
Added org.opencms.jsp.Messages bundle test.
|
diff --git a/opf.py b/opf.py
index <HASH>..<HASH> 100644
--- a/opf.py
+++ b/opf.py
@@ -2,11 +2,17 @@ import os
import os.path
import utils
import dublincore
+from xml.dom.minidom import getDOMImplementation
+
+class contentOPF(object):
+ '''A class to represent the OPF document.'''
+
+ def __init__(self):
+ pass
def generateOPF(article, dirname):
'''Creates the content.opf document from an Article instance issued as
input'''
- from xml.dom.minidom import getDOMImplementation
#Initiate a DOMImplementation for the OPF
impl = getDOMImplementation()
|
Beginnings of new contentOPF class
|
diff --git a/js/okcoinusd.js b/js/okcoinusd.js
index <HASH>..<HASH> 100644
--- a/js/okcoinusd.js
+++ b/js/okcoinusd.js
@@ -282,8 +282,10 @@ module.exports = class okcoinusd extends Exchange {
let tickers_result = {};
for (let i = 0; i < tickers.length; i++) {
let market = undefined;
- if (('symbol' in tickers[i]) && (tickers[i]['symbol'] in this.markets_by_id)) {
- market = this.markets_by_id[tickers[i]['symbol']];
+ if ('symbol' in tickers[i]) {
+ if (tickers[i]['symbol'] in this.markets_by_id) {
+ market = this.markets_by_id[tickers[i]['symbol']];
+ }
}
let ticker = this.extend (tickers[i], { 'timestamp': timestamp });
tickers_result[market['symbol']] = this.parseTicker (ticker, market);
|
Yet another attempt to fix the PHP transpile:
separated if (A && B) into nested if (A) { if (B) { ... } }
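The equivalence being relied on, shown as a small Python sketch rather than the actual JavaScript/PHP (data is illustrative): when the second test has no side effects, the nested form behaves the same as the short-circuit conjunction, and it is apparently easier for the transpiler to handle.

ticker = {'symbol': 'BTC/USD'}
markets_by_id = {'BTC/USD': {'symbol': 'BTC/USD'}}

market = None
if 'symbol' in ticker:
    if ticker['symbol'] in markets_by_id:
        market = markets_by_id[ticker['symbol']]

assert market is not None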
|
diff --git a/src/Blueprint.php b/src/Blueprint.php
index <HASH>..<HASH> 100644
--- a/src/Blueprint.php
+++ b/src/Blueprint.php
@@ -2,11 +2,12 @@
namespace Rougin\Blueprint;
-use Auryn\InjectionException;
use Auryn\Injector;
-use Symfony\Component\Console\Application;
+use ReflectionClass;
use Twig_Environment;
use Twig_Loader_Filesystem;
+use Auryn\InjectionException;
+use Symfony\Component\Console\Application;
/**
* Blueprint
@@ -122,12 +123,16 @@ class Blueprint
substr($file, $commandPath)
);
+ $className = $this->paths['namespace'] . '\\' . $className;
+
try {
- $command = $this->injector->make(
- $this->paths['namespace'] . '\\' . $className
- );
+ $reflection = new ReflectionClass($className);
+
+ if ( ! $reflection->isAbstract()) {
+ $command = $this->injector->make($className);
- $this->console->add($command);
+ $this->console->add($command);
+ }
} catch (InjectionException $exception) {
echo $exception->getMessage() . PHP_EOL;
|
Fixed issue when a class is abstract in Blueprint.php
|
diff --git a/certinfo/certinfo.go b/certinfo/certinfo.go
index <HASH>..<HASH> 100644
--- a/certinfo/certinfo.go
+++ b/certinfo/certinfo.go
@@ -100,7 +100,7 @@ func ParseCertificatePEM(certPEM []byte) (*Certificate, error) {
return ParseCertificate(cert), nil
}
-// Uses the helper to parse an x509 CSR PEM.
+// ParseCSRPEM uses the helper to parse an x509 CSR PEM.
func ParseCSRPEM(csrPEM []byte) (*x509.CertificateRequest, error) {
csrObject, err := helpers.ParseCSRPEM(csrPEM)
if err != nil {
@@ -110,7 +110,7 @@ func ParseCSRPEM(csrPEM []byte) (*x509.CertificateRequest, error) {
return csrObject, nil
}
-// Uses the helper to parse an x509 CSR PEM file.
+// ParseCSRFile uses the helper to parse an x509 CSR PEM file.
func ParseCSRFile(csrFile string) (*x509.CertificateRequest, error) {
csrPEM, err := ioutil.ReadFile(csrFile)
if err != nil {
|
Correct comments to be godoc-compliant.
|
diff --git a/healthcheck/healthcheck.go b/healthcheck/healthcheck.go
index <HASH>..<HASH> 100644
--- a/healthcheck/healthcheck.go
+++ b/healthcheck/healthcheck.go
@@ -215,7 +215,7 @@ func (o *Observation) retryCheck(quit chan struct{}, shutdown shutdownFunc) {
return
}
- log.Debugf("Health check: %v, call: %v failed with: %v, "+
+ log.Infof("Health check: %v, call: %v failed with: %v, "+
"backing off for: %v", o, count, err, o.Backoff)
// If we are still within the number of calls allowed for this
|
healthcheck: bump logging for failed healthchecks to info
In this commit, we bump up the logging for failed healthchecks from
`debug` to `info`. This should help us get to the bottom of the current
set of reported false positives that are causing `lnd` nodes to
erroneously shutdown.
|
diff --git a/libgreader/__init__.py b/libgreader/__init__.py
index <HASH>..<HASH> 100644
--- a/libgreader/__init__.py
+++ b/libgreader/__init__.py
@@ -8,7 +8,13 @@ __author__ = "Matt Behrens <askedrelic@gmail.com>"
__version__ = "0.8.0"
__copyright__ = "Copyright (C) 2012 Matt Behrens"
-from .googlereader import GoogleReader
-from .auth import AuthenticationMethod, ClientAuthMethod, OAuthMethod, OAuth2Method
-from .items import *
-from .url import ReaderUrl
+try:
+ import requests
+except ImportError:
+ # Will occur during setup.py install
+ pass
+else:
+ from .googlereader import GoogleReader
+ from .auth import AuthenticationMethod, ClientAuthMethod, OAuthMethod, OAuth2Method
+ from .items import *
+ from .url import ReaderUrl
|
Fix import error during setup.py install
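A standalone sketch of the guarded-import pattern applied here, assuming (as the message implies) that the package is imported during setup.py before requests is installed; fetch is a hypothetical helper, not the library's API.

# Tolerate a missing runtime dependency at import time (e.g. while setup.py
# imports the package only to read its metadata), and expose the dependent
# API only when the import succeeds.
try:
    import requests
except ImportError:        # occurs during `setup.py install`
    requests = None
else:
    def fetch(url):
        # hypothetical helper standing in for the package's real API
        return requests.get(url)

print('requests available:', requests is not None)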
|
diff --git a/lib/editor.js b/lib/editor.js
index <HASH>..<HASH> 100644
--- a/lib/editor.js
+++ b/lib/editor.js
@@ -394,6 +394,7 @@ Editor.prototype._initHandlers = function () {
var self = this;
self.on('keypress', function (ch, key) {
+ if (key.name !== 'mouse') { self.data.mouseDown = false; }
var selection = self.select();
var binding = util.getBinding(self.options.bindings, key);
|
Bugfix: keypresses other than mouse events now clear the mouseDown state
|
diff --git a/src/core/utils_old.py b/src/core/utils_old.py
index <HASH>..<HASH> 100644
--- a/src/core/utils_old.py
+++ b/src/core/utils_old.py
@@ -843,7 +843,7 @@ from src.widgets.matplotlibwidget import MatplotlibWidget
def test():
import sys
- from PyQt4.QtGui import QMainWindow, QApplication
+ from src.qt.QtGui import QMainWindow, QApplication
class ApplicationWindow(QMainWindow):
def __init__(self):
diff --git a/src/parametres/paramData.py b/src/parametres/paramData.py
index <HASH>..<HASH> 100644
--- a/src/parametres/paramData.py
+++ b/src/parametres/paramData.py
@@ -23,11 +23,11 @@ This file is part of openFisca.
from xml.etree.ElementTree import ElementTree, SubElement, Element
from xml.dom import minidom
-from src.core.utils_old import Bareme
-from datetime import datetime
+from src.qt.compat import from_qvariant
from src.core.config import CONF
+from datetime import datetime
-from src.qt.compat import from_qvariant
+from src.core.utils_old import Bareme
class Tree2Object(object):
|
Strange bug solved by reordering imports in paramData
|
diff --git a/cdm/src/test/java/thredds/crawlabledataset/TestAllCrawlableDataset.java b/cdm/src/test/java/thredds/crawlabledataset/TestAllCrawlableDataset.java
index <HASH>..<HASH> 100644
--- a/cdm/src/test/java/thredds/crawlabledataset/TestAllCrawlableDataset.java
+++ b/cdm/src/test/java/thredds/crawlabledataset/TestAllCrawlableDataset.java
@@ -26,6 +26,7 @@ public class TestAllCrawlableDataset extends TestCase
suite.addTestSuite( thredds.crawlabledataset.TestCrawlableDatasetFilter.class );
suite.addTestSuite( thredds.crawlabledataset.filter.TestRegExpMatchOnNameFilter.class );
suite.addTestSuite( thredds.crawlabledataset.filter.TestWildcardMatchOnNameFilter.class );
+ suite.addTestSuite( thredds.crawlabledataset.filter.TestLogicalCompFilterFactory.class );
return suite;
}
|
More work on logical composition of filters.
|
diff --git a/lib/hirb/helpers/table.rb b/lib/hirb/helpers/table.rb
index <HASH>..<HASH> 100644
--- a/lib/hirb/helpers/table.rb
+++ b/lib/hirb/helpers/table.rb
@@ -147,7 +147,8 @@ module Hirb
end
rows = filter_values(rows)
rows.each_with_index {|e,i| e[:hirb_number] = (i + 1).to_s} if @options[:number]
- methods.grep(/_callback$/).sort.each do |meth|
+ deleted_callbacks = Array(@options[:delete_callbacks]).map {|e| "#{e}_callback" }
+ (methods.grep(/_callback$/) - deleted_callbacks).sort.each do |meth|
rows = send(meth, rows, @options.dup)
end
validate_values(rows)
|
Add delete_callbacks option for Boson
|
diff --git a/lib/adminable/resource.rb b/lib/adminable/resource.rb
index <HASH>..<HASH> 100644
--- a/lib/adminable/resource.rb
+++ b/lib/adminable/resource.rb
@@ -1,5 +1,7 @@
module Adminable
class Resource
+ include Comparable
+
attr_reader :name, :model, :attributes
# @param name [String] resource name, usually same as the model name
@@ -27,8 +29,8 @@ module Adminable
@attributes ||= Adminable::Attributes::Collection.new(@model)
end
- def ==(other)
- other.is_a?(Adminable::Resource) && name == other.name
+ def <=>(other)
+ other.is_a?(Adminable::Resource) && name <=> other.name
end
end
end
|
Replace resource == method with <=> and Comparable module
|
diff --git a/v1/tsdb/config.go b/v1/tsdb/config.go
index <HASH>..<HASH> 100644
--- a/v1/tsdb/config.go
+++ b/v1/tsdb/config.go
@@ -14,7 +14,7 @@ const (
DefaultEngine = "tsm1"
// DefaultIndex is the default index for new shards
- DefaultIndex = InmemIndexName
+ DefaultIndex = TSI1IndexName
// tsdb/engine/wal configuration options
|
fix: Default to TSI1 index for new shards
|
diff --git a/src/Session/Timer.php b/src/Session/Timer.php
index <HASH>..<HASH> 100644
--- a/src/Session/Timer.php
+++ b/src/Session/Timer.php
@@ -94,7 +94,7 @@ class Timer
public function setIdleTtl($idle_ttl)
{
if ($this->ini_gc_maxlifetime < $idle_ttl) {
- throw new Exception('session.gc_maxlifetime less than idle time');
+ throw new Exception("session.gc_maxlifetime $this->ini_gc_maxlifetime less than idle time $idle_ttl");
}
$this->idle_ttl = $idle_ttl;
}
|
Improved exception message string.
The new message helps pinpoint the problem and its cause.
|
diff --git a/tests/test_init.py b/tests/test_init.py
index <HASH>..<HASH> 100644
--- a/tests/test_init.py
+++ b/tests/test_init.py
@@ -126,6 +126,9 @@ class InitTests(unittest.TestCase):
else:
modname = '%s.%s' % (module.__name__, modname)
+ if sys.version_info < (3,) and sys.platform == 'win32' and b'\r\n' in full_code:
+ full_code = full_code.replace(b'\r\n', b'\n')
+
imports = set([])
module_node = ast.parse(full_code, filename=full_path)
walk_ast(module_node, modname, imports)
|
Handle test environments on Windows with Python 2.x using git autocrlf
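A hedged, standalone sketch of the normalization step (the exact failure mode on Python 2 under Windows is inferred from the diff; the byte string and filename are illustrative):

import ast

full_code = b"x = 1\r\ny = 2\r\n"  # what a git autocrlf checkout may produce

# Normalize CRLF to LF before parsing; harmless when no CRLF is present.
if b'\r\n' in full_code:
    full_code = full_code.replace(b'\r\n', b'\n')

module_node = ast.parse(full_code, filename='example.py')
assert len(module_node.body) == 2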
|
diff --git a/blockstore/lib/operations/nameimport.py b/blockstore/lib/operations/nameimport.py
index <HASH>..<HASH> 100644
--- a/blockstore/lib/operations/nameimport.py
+++ b/blockstore/lib/operations/nameimport.py
@@ -128,8 +128,11 @@ def make_outputs( data, inputs, recipient_address, sender_address, update_hash_b
]
-def broadcast(name, recipient_address, update_hash, private_key, blockchain_client, testset=False):
+def broadcast(name, recipient_address, update_hash, private_key, blockchain_client, blockchain_broadcaster=None, testset=False):
+ if blockchain_broadcaster is None:
+ blockchain_broadcaster = blockchain_client
+
nulldata = build(name, testset=testset)
# get inputs and from address
@@ -142,7 +145,7 @@ def broadcast(name, recipient_address, update_hash, private_key, blockchain_clie
outputs = make_outputs(nulldata, inputs, recipient_address, from_address, update_hash_b58, format='hex')
# serialize, sign, and broadcast the tx
- response = serialize_sign_and_broadcast(inputs, outputs, private_key_obj, blockchain_client)
+ response = serialize_sign_and_broadcast(inputs, outputs, private_key_obj, blockchain_broadcaster)
# response = {'success': True }
response.update({'data': nulldata})
|
TEST: Separate broadcaster from utxo provider for imports
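The None-default fallback introduced above, as a small self-contained sketch (the payload and return value are illustrative; only the fallback logic mirrors the diff):

# Default the broadcaster to the UTXO client unless the caller supplies a
# separate one; the fallback is resolved at call time, not in the signature.
def broadcast(payload, blockchain_client, blockchain_broadcaster=None):
    if blockchain_broadcaster is None:
        blockchain_broadcaster = blockchain_client
    return blockchain_broadcaster, payload

# Same client for reading UTXOs and broadcasting...
assert broadcast('tx', 'client')[0] == 'client'
# ...or a dedicated broadcaster when one is provided.
assert broadcast('tx', 'client', 'broadcaster')[0] == 'broadcaster'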
|
diff --git a/allennlp/modules/elmo.py b/allennlp/modules/elmo.py
index <HASH>..<HASH> 100644
--- a/allennlp/modules/elmo.py
+++ b/allennlp/modules/elmo.py
@@ -440,7 +440,7 @@ class _ElmoCharacterEncoder(torch.nn.Module):
# create the layers, and load the weights
self._highways = Highway(n_filters, n_highway, activation=torch.nn.functional.relu)
for k in range(n_highway):
- # The AllenNLP highway is one matrix multplication with concatenation of
+ # The AllenNLP highway is one matrix multiplication with concatenation of
# transform and carry weights.
with h5py.File(cached_path(self._weight_file), "r") as fin:
# The weights are transposed due to multiplication order assumptions in tf
|
docs: fix simple typo, multplication -> multiplication (#<I>)
There is a small typo in allennlp/modules/elmo.py.
Should read `multiplication` rather than `multplication`.
|
diff --git a/test/unit/helpers/builders.js b/test/unit/helpers/builders.js
index <HASH>..<HASH> 100644
--- a/test/unit/helpers/builders.js
+++ b/test/unit/helpers/builders.js
@@ -35,6 +35,25 @@ export function buildField(textFieldClass, options) {
}
}
+ // This is necessary because of a Chrome "feature" where it won't do any focusing
+ // or blurring if the browser window not in focus itself. Otherwise running Karma
+ // testing in the background is impossible.
+ if (field) {
+ let hasFocus = false;
+
+ field.hasFocus = () => hasFocus;
+
+ field.element.focus = function () {
+ hasFocus = true;
+ field.element.dispatchEvent(new UIEvent('focus'));
+ }
+
+ field.element.blur = function () {
+ hasFocus = false;
+ field.element.dispatchEvent(new UIEvent('blur'));
+ }
+ }
+
return field;
}
|
Fixes focus/blur issues with Chrome.
|
diff --git a/tests/unit/utf8Test.php b/tests/unit/utf8Test.php
index <HASH>..<HASH> 100644
--- a/tests/unit/utf8Test.php
+++ b/tests/unit/utf8Test.php
@@ -1428,7 +1428,7 @@ class Unit_utf8Test extends OxidTestCase
{
$sValue = 'agentūЛитовfür';
- $aFields = array( 'oxuser__oxusername', 'oxuser__oxpassword',
+ $aFields = array( 'oxuser__oxusername',
'oxuser__oxustid', 'oxuser__oxcompany', 'oxuser__oxfname',
'oxuser__oxlname', 'oxuser__oxstreet', 'oxuser__oxstreetnr',
'oxuser__oxaddinfo', 'oxuser__oxcity', 'oxuser__oxzip',
|
<I>: Login doesn't work with MySQL <I> and iUtfMode=1; unified default collation with salt field
|
diff --git a/ontrack-extension-github/src/main/resources/static/extension/github/module.js b/ontrack-extension-github/src/main/resources/static/extension/github/module.js
index <HASH>..<HASH> 100644
--- a/ontrack-extension-github/src/main/resources/static/extension/github/module.js
+++ b/ontrack-extension-github/src/main/resources/static/extension/github/module.js
@@ -105,11 +105,12 @@ angular.module('ontrack.extension.github', [
$scope.toggleAutoReload = () => {
$scope.autoReload = !$scope.autoReload;
if ($scope.autoReload) {
+ localStorage.setItem(autoReloadKey, $scope.autoReload);
registerReload();
} else {
+ localStorage.removeItem(autoReloadKey);
otTaskService.stop(taskName);
}
- localStorage.setItem(autoReloadKey, $scope.autoReload);
};
$scope.topPayloads = () => {
|
#<I> Persistence of the auto reload being disabled
|
diff --git a/libtmux/server.py b/libtmux/server.py
index <HASH>..<HASH> 100644
--- a/libtmux/server.py
+++ b/libtmux/server.py
@@ -264,7 +264,13 @@ class Server(TmuxRelationalObject, EnvironmentMixin):
# clear up empty dict
panes = [
- dict((k, v) for k, v in window.items() if v) for window in panes
+ dict(
+ (k, v) for k, v in window.items()
+ if v or
+ k == 'pane_current_path'
+ ) # preserve pane_current_path, in case it entered a new process
+ # where we may not get a cwd from.
+ for window in panes
]
if self._panes:
|
Don't empty pane_current_path if the new one is empty
Sometimes even repeated polling for a new pane path over 1,
3, and 5 seconds won't return anything in some instances.
Previously, we'd remove pane_current_path from the pane's
_info; now we keep the last non-empty result libtmux found until
a new one replaces it.
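A self-contained sketch of the filter in the diff (pane data is illustrative): empty values are dropped except for pane_current_path, which is kept even when a fresh poll returns an empty string.

panes = [
    {'pane_id': '%1', 'pane_current_path': '', 'pane_title': ''},
]

# Drop empty values, but always preserve pane_current_path.
panes = [
    {k: v for k, v in pane.items() if v or k == 'pane_current_path'}
    for pane in panes
]

assert panes == [{'pane_id': '%1', 'pane_current_path': ''}]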
|
diff --git a/Swat/SwatApplication.php b/Swat/SwatApplication.php
index <HASH>..<HASH> 100644
--- a/Swat/SwatApplication.php
+++ b/Swat/SwatApplication.php
@@ -1,7 +1,6 @@
<?php
require_once 'Swat/SwatObject.php';
-require_once 'Swat/SwatLayout.php';
require_once 'Swat/SwatApplicationModule.php';
/**
diff --git a/Swat/SwatPage.php b/Swat/SwatPage.php
index <HASH>..<HASH> 100644
--- a/Swat/SwatPage.php
+++ b/Swat/SwatPage.php
@@ -1,6 +1,7 @@
<?php
require_once 'Swat/SwatObject.php';
+require_once 'Swat/SwatLayout.php';
/**
* Base class for a page
|
Move require SwatLayout to page from application.
svn commit r<I>
|
diff --git a/web/src/main/webapp/resources/js/jquery.arrayExpress-autocomplete-1.1.0.130305.js b/web/src/main/webapp/resources/js/jquery.arrayExpress-autocomplete-1.1.0.130305.js
index <HASH>..<HASH> 100644
--- a/web/src/main/webapp/resources/js/jquery.arrayExpress-autocomplete-1.1.0.130305.js
+++ b/web/src/main/webapp/resources/js/jquery.arrayExpress-autocomplete-1.1.0.130305.js
@@ -595,7 +595,7 @@ $.Autocompleter.defaults = {
if ("f" == data.data[1]) {
value = value + "<span class=\"ac_field\">Filter " + data.data[2] + "</span>";
} else if ("o" == data.data[1]) {
- value = value + "<span class=\"ac_efo\">EFO</span>";
+ //value = value + "<span class=\"ac_efo\">EFO</span>";
if (null != data.treeLevel) {
if (data.treeId) {
|
Removed EFO term from autocomplete suggestions
|