diff
stringlengths 65
26.7k
| message
stringlengths 7
9.92k
|
|---|---|
diff --git a/common/src/main/java/com/turn/ttorrent/common/TorrentGeneralMetadata.java b/common/src/main/java/com/turn/ttorrent/common/TorrentGeneralMetadata.java
index <HASH>..<HASH> 100644
--- a/common/src/main/java/com/turn/ttorrent/common/TorrentGeneralMetadata.java
+++ b/common/src/main/java/com/turn/ttorrent/common/TorrentGeneralMetadata.java
@@ -56,4 +56,9 @@ public interface TorrentGeneralMetadata {
*/
boolean isPrivate();
+ /**
+ * @return SHA-1 hash of info dictionary
+ */
+ String getHexInfoHash();
+
}
|
added getHexInfoHash method to torrent general metadata interface.
|
diff --git a/db/db.py b/db/db.py
index <HASH>..<HASH> 100644
--- a/db/db.py
+++ b/db/db.py
@@ -1235,7 +1235,7 @@ class DB(object):
sys.stderr.write("Refreshing schema. Please wait...")
if self.schemas is not None and isinstance(self.schemas, list) and 'schema_specified' in self._query_templates['system']:
schemas_str = ','.join([repr(schema) for schema in self.schemas])
- q = self._query_templates['system']['schema_specified'] % str(self.schemas)
+ q = self._query_templates['system']['schema_specified'] % schemas_str
elif exclude_system_tables==True:
q = self._query_templates['system']['schema_no_system']
else:
|
Fix for schema_specified case in refresh_schema
|
diff --git a/pysoa/client/client.py b/pysoa/client/client.py
index <HASH>..<HASH> 100644
--- a/pysoa/client/client.py
+++ b/pysoa/client/client.py
@@ -358,11 +358,12 @@ class Client(object):
context['correlation_id'] = correlation_id
elif 'correlation_id' not in context:
context['correlation_id'] = six.u(uuid.uuid1().hex)
- # Optionally add switches
- if switches is not None:
- context['switches'] = list(switches)
- elif 'switches' not in context:
- context['switches'] = []
+ # Switches can come from three different places, so merge them
+ # and ensure that they are unique
+ switches = set(switches or [])
+ if context_extra:
+ switches |= set(context_extra.pop('switches', []))
+ context['switches'] = list(set(context.get('switches', [])) | switches)
# Add any extra stuff
if context_extra:
context.update(context_extra)
|
Ensure that switches from Client.context are correctly merged with the switches passed to each request.
|
diff --git a/paramiko/server.py b/paramiko/server.py
index <HASH>..<HASH> 100644
--- a/paramiko/server.py
+++ b/paramiko/server.py
@@ -179,6 +179,35 @@ class ServerInterface (object):
@rtype: int
"""
return AUTH_FAILED
+
+ def check_global_request(self, kind, msg):
+ """
+ Handle a global request of the given C{kind}. This method is called
+ in server mode and client mode, whenever the remote host makes a global
+ request. If there are any arguments to the request, they will be in
+ C{msg}.
+
+ There aren't any useful global requests defined, aside from port
+ forwarding, so usually this type of request is an extension to the
+ protocol.
+
+ If the request was successful and you would like to return contextual
+ data to the remote host, return a tuple. Items in the tuple will be
+ sent back with the successful result. (Note that the items in the
+ tuple can only be strings, ints, longs, or bools.)
+
+ The default implementation always returns C{False}, indicating that it
+ does not support any global requests.
+
+ @param kind: the kind of global request being made.
+ @type kind: str
+ @param msg: any extra arguments to the request.
+ @type msg: L{Message}
+ @return: C{True} or a tuple of data if the request was granted;
+ C{False} otherwise.
+ @rtype: bool
+ """
+ return False
### Channel requests
|
[project @ Arch-1:<EMAIL><I>-public%secsh--dev--<I>--patch-<I>]
oops (continued)
er, part 2 of that.
|
diff --git a/kuyruk/worker.py b/kuyruk/worker.py
index <HASH>..<HASH> 100644
--- a/kuyruk/worker.py
+++ b/kuyruk/worker.py
@@ -106,12 +106,13 @@ class Worker:
try:
signals.worker_start.send(self.kuyruk, worker=self)
self._consume_messages()
- signals.worker_shutdown.send(self.kuyruk, worker=self)
finally:
self.shutdown_pending.set()
for t in self._threads:
t.join()
+ signals.worker_shutdown.send(self.kuyruk, worker=self)
+
logger.debug("End run worker")
def _consume_messages(self) -> None:
|
always send worker_shutdown signal
|
diff --git a/grip/browser.py b/grip/browser.py
index <HASH>..<HASH> 100644
--- a/grip/browser.py
+++ b/grip/browser.py
@@ -1,5 +1,6 @@
import socket
import webbrowser
+import time
def is_server_running(host, port):
@@ -8,7 +9,8 @@ def is_server_running(host, port):
host and port.
"""
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- return not sock.connect_ex((host, port)) == 0
+ rc = sock.connect_ex((host, port))
+ return rc == 0
def wait_for_server(host, port):
@@ -20,7 +22,7 @@ def wait_for_server(host, port):
the Flask server.
"""
while not is_server_running(host, port):
- pass
+ time.sleep(0.1)
def start_browser(url):
|
Stop browser opening consuming all the sockets
|
diff --git a/mongorest/resource.py b/mongorest/resource.py
index <HASH>..<HASH> 100644
--- a/mongorest/resource.py
+++ b/mongorest/resource.py
@@ -3,19 +3,17 @@
import six
from werkzeug.routing import Map, Rule
-from werkzeug.wrappers import Response
from .collection import Collection
-from .utils import deserialize
from .wsgi import WSGIWrapper
__all__ = [
'Resource',
- 'ListResourceMixin',
- 'CreateResourceMixin',
- 'RetrieveResourceMixin',
- 'UpdateResourceMixin',
- 'DeleteResourceMixin',
+ # 'ListResourceMixin',
+ # 'CreateResourceMixin',
+ # 'RetrieveResourceMixin',
+ # 'UpdateResourceMixin',
+ # 'DeleteResourceMixin',
]
|
removing mixins cause they need fixing
|
diff --git a/lib/devise/rails/routes.rb b/lib/devise/rails/routes.rb
index <HASH>..<HASH> 100644
--- a/lib/devise/rails/routes.rb
+++ b/lib/devise/rails/routes.rb
@@ -94,10 +94,24 @@ module ActionDispatch::Routing
#
# devise_for :users, path: 'accounts'
#
- # * singular: setup the singular name for the given resource. This is used as the instance variable
- # name in controller, as the name in routes and the scope given to warden.
+ # * singular: setup the singular name for the given resource. This is used as the helper methods
+ # names in controller ("authenticate_#{singular}!", "#{singular}_signed_in?", "current_#{singular}"
+ # and "#{singular}_session"), as the scope name in routes and as the scope given to warden.
#
- # devise_for :users, singular: :user
+ # devise_for :admins, singular: :manager
+ #
+ # devise_scope :manager do
+ # ...
+ # end
+ #
+ # class ManagerController < ApplicationController
+ # before_filter authenticate_manager!
+ #
+ # def show
+ # @manager = current_manager
+ # ...
+ # end
+ # end
#
# * path_names: configure different path names to overwrite defaults :sign_in, :sign_out, :sign_up,
# :password, :confirmation, :unlock.
|
[ci skip] Write how to use `singular` option of `ActionDispatch::Routing::Mapper#devise_for`
* Replace "the instance variable name in controller" with "the helper methods
names in controller".
Devise does not define instance variables for controllers but defines helper
methods for controllers.
* Replace "the name in routes" with "the scope name in routes".
`singular` is used as an argument of `devise_scope`.
* Add sample codes of routing and controller.
|
diff --git a/lib/main.js b/lib/main.js
index <HASH>..<HASH> 100644
--- a/lib/main.js
+++ b/lib/main.js
@@ -222,7 +222,7 @@ function sendAction(cmd, type, msg, options) {
})
.buffer("dataPacketType", 1)
.tap(function (vars) {
- var responseType = parseUtil.int8(vars.dataPacketType);
+ var responseType = parseUtil.int8(vars.dataPacketType) & 0x0F;
if (responseType !== 10) {
errorString = "Invalid response (expecting SEXP)";
|
Mask response type with 0x0F.
See #<I>.
|
diff --git a/tests/MockTest.php b/tests/MockTest.php
index <HASH>..<HASH> 100644
--- a/tests/MockTest.php
+++ b/tests/MockTest.php
@@ -63,4 +63,17 @@ class MockTest extends PHPUnit_Framework_TestCase {
$this->assertEquals($result, 'hello["world"]');
$server->close();
}
+ public function testMissingMethod2() {
+ $service = new Service();
+ $service->addMissingMethod(function (string $name, array $args, Context $context): string {
+ return $name . json_encode($args) . $context->remoteAddress['address'];
+ });
+ $server = new MockServer('testMissingMethod2');
+ $service->bind($server);
+ $client = new Client(['mock://testMissingMethod2']);
+ $proxy = $client->useService();
+ $result = $proxy->hello('world');
+ $this->assertEquals($result, 'hello["world"]testMissingMethod2');
+ $server->close();
+ }
}
\ No newline at end of file
|
Added testMissingMethod2
|
diff --git a/gwpy/data/array.py b/gwpy/data/array.py
index <HASH>..<HASH> 100644
--- a/gwpy/data/array.py
+++ b/gwpy/data/array.py
@@ -165,6 +165,17 @@ class Array(Quantity):
prefixstr, arrstr, indent, metadata)
# -------------------------------------------
+ # Pickle helpers
+
+ def dumps(self, order='C'):
+ return super(Quantity, self).dumps()
+ dumps.__doc__ = numpy.ndarray.dumps.__doc__
+
+ def tostring(self, order='C'):
+ return super(Quantity, self).tostring()
+ tostring.__doc__ = numpy.ndarray.tostring.__doc__
+
+ # -------------------------------------------
# array methods
def median(self, axis=None, out=None, overwrite_input=False):
|
Array: override dumps and tostring to enable pickling
|
diff --git a/examples/create_invite_link.py b/examples/create_invite_link.py
index <HASH>..<HASH> 100644
--- a/examples/create_invite_link.py
+++ b/examples/create_invite_link.py
@@ -1,7 +1,7 @@
-import telebot, threading
+import telebot
from time import sleep, time
-from telebot import InlineKeyboardMarkup as ikm #Only for creating Inline Buttons, not necessary for creating Invite Links
-from telebot import InlineKeyboardButton as ikb #Only for creating Inline Buttons, not necessary for creating Invite Links
+from telebot.types import InlineKeyboardMarkup as ikm #Only for creating Inline Buttons, not necessary for creating Invite Links
+from telebot.types import InlineKeyboardButton as ikb #Only for creating Inline Buttons, not necessary for creating Invite Links
Token = "api_token" #Your Bot Access Token
Group_ID = -1234567890 #Group ID for which invite link is to be created
|
Update create_invite_link.py
Added .types while importing inline markup keyboards (fix)
Removed threading import since message is not to be deleted
|
diff --git a/lib/stripe_mock/api/client.rb b/lib/stripe_mock/api/client.rb
index <HASH>..<HASH> 100644
--- a/lib/stripe_mock/api/client.rb
+++ b/lib/stripe_mock/api/client.rb
@@ -3,8 +3,10 @@ module StripeMock
def self.client; @client; end
def self.start_client(port=4999)
+ return @client unless @client.nil?
+
alias_stripe_method :request, StripeMock.method(:redirect_to_mock_server)
- @client = Client.new(port)
+ @client = StripeMock::Client.new(port)
@state = 'remote'
@client
end
diff --git a/spec/server_spec.rb b/spec/server_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/server_spec.rb
+++ b/spec/server_spec.rb
@@ -73,6 +73,12 @@ describe 'StripeMock Server' do
end
+ it "doesn't create multiple clients" do
+ result = StripeMock.start_client
+ expect(result.__id__).to eq(@client.__id__)
+ end
+
+
it "raises an error when client is stopped" do
expect(@client).to be_a StripeMock::Client
expect(@client.state).to eq('ready')
|
Don't create new clients when calling start_client multiple times
|
diff --git a/test/test.js b/test/test.js
index <HASH>..<HASH> 100644
--- a/test/test.js
+++ b/test/test.js
@@ -292,12 +292,3 @@ test.skip('skip test with `.skip()`', function (t) {
t.end();
});
});
-
-test.skip('throwing in a test should emit the error', function (t) {
- ava(function (a) {
- throw new Error('unicorn');
- }).run(function (err) {
- t.is(err.message, 'unicornn');
- t.end();
- });
-});
|
we shouldn't catch thrown user errors
this also aligns with `tape` behaviour
|
diff --git a/azurerm/internal/services/dns/dns_zone_data_source.go b/azurerm/internal/services/dns/dns_zone_data_source.go
index <HASH>..<HASH> 100644
--- a/azurerm/internal/services/dns/dns_zone_data_source.go
+++ b/azurerm/internal/services/dns/dns_zone_data_source.go
@@ -90,7 +90,11 @@ func dataSourceArmDnsZoneRead(d *schema.ResourceData, meta interface{}) error {
resp = *zone
}
+ if resp.ID == nil || *resp.ID == "" {
+ return fmt.Errorf("failed reading ID for DNS Zone %q (Resource Group %q)", name, resourceGroup)
+ }
d.SetId(*resp.ID)
+
d.Set("name", name)
d.Set("resource_group_name", resourceGroup)
|
added nil and empty check for id
|
diff --git a/lib/event_sourcery/event_store/event_builder.rb b/lib/event_sourcery/event_store/event_builder.rb
index <HASH>..<HASH> 100644
--- a/lib/event_sourcery/event_store/event_builder.rb
+++ b/lib/event_sourcery/event_store/event_builder.rb
@@ -1,13 +1,12 @@
module EventSourcery
module EventStore
class EventBuilder
-
def initialize(event_type_serializer:)
@event_type_serializer = event_type_serializer
end
def build(event_data)
- @event_type_serializer.deserialize(event_data[:type]).new(event_data)
+ @event_type_serializer.deserialize(event_data.fetch(:type)).new(event_data)
end
end
end
|
Type is a requirement so use fetch
|
diff --git a/lib/unitwise/scale.rb b/lib/unitwise/scale.rb
index <HASH>..<HASH> 100644
--- a/lib/unitwise/scale.rb
+++ b/lib/unitwise/scale.rb
@@ -94,6 +94,10 @@ module Unitwise
end
memoize :simplified_value
+ def expression
+ unit.expression
+ end
+
# Convert to a simple string representing the scale.
# @api public
def to_s
diff --git a/test/unitwise/measurement_test.rb b/test/unitwise/measurement_test.rb
index <HASH>..<HASH> 100644
--- a/test/unitwise/measurement_test.rb
+++ b/test/unitwise/measurement_test.rb
@@ -13,7 +13,7 @@ describe Unitwise::Measurement do
describe "#convert_to" do
it "must convert to a similar unit code" do
- mph.convert_to('km/h').value.must_almost_equal 96.56063
+ mph.convert_to('km/h').value.must_almost_equal(96.56063)
end
it "must raise an error if the units aren't similar" do
lambda { mph.convert_to('N') }.must_raise Unitwise::ConversionError
@@ -33,6 +33,9 @@ describe Unitwise::Measurement do
it "must convert derived units to special units" do
r.convert_to("Cel").value.must_almost_equal(0)
end
+ it "must convert to a unit of another measurement" do
+ mph.convert_to(kmh).value.must_almost_equal(96.56064)
+ end
end
describe "#*" do
|
May now convert measurements to units of other measurements.
|
diff --git a/lib/helper/WebDriverIO.js b/lib/helper/WebDriverIO.js
index <HASH>..<HASH> 100644
--- a/lib/helper/WebDriverIO.js
+++ b/lib/helper/WebDriverIO.js
@@ -170,7 +170,7 @@ class WebDriverIO extends Helper {
// set defaults
this.options = {
- waitforTimeout: 1000, // ms
+ waitForTimeout: 1000, // ms
desiredCapabilities: {},
restart: true
};
@@ -180,7 +180,7 @@ class WebDriverIO extends Helper {
this.options.baseUrl = this.options.url || this.options.baseUrl;
this.options.desiredCapabilities.browserName = this.options.browser || this.options.desiredCapabilities.browserName;
- this.options.waitforTimeout /= 1000; // convert to seconds
+ this.options.waitForTimeout /= 1000; // convert to seconds
if (!this.options.url || !this.options.browser) {
|
Fix typo in webdriverio config timeout (#<I>)
|
diff --git a/closure/goog/labs/useragent/util.js b/closure/goog/labs/useragent/util.js
index <HASH>..<HASH> 100644
--- a/closure/goog/labs/useragent/util.js
+++ b/closure/goog/labs/useragent/util.js
@@ -12,12 +12,6 @@
// See the License for the specific language governing permissions and
// limitations under the License.
-/**
- * @fileoverview Utilities used by goog.labs.userAgent tools. These functions
- * should not be used outside of goog.labs.userAgent.*.
- *
- * @author nnaze@google.com (Nathan Naze)
- */
goog.provide('goog.labs.userAgent.util');
|
RELNOTES: n/a
-------------
Created by MOE: <URL>
|
diff --git a/src/test/java/strman/StrmanTest.java b/src/test/java/strman/StrmanTest.java
index <HASH>..<HASH> 100644
--- a/src/test/java/strman/StrmanTest.java
+++ b/src/test/java/strman/StrmanTest.java
@@ -942,4 +942,10 @@ public class StrmanTest {
public void isEnclosedBetween_shouldThrowIllegalArgumentExceptionWhenEncloserIsNull() throws Exception {
assertThat(isEnclosedBetween("shekhar", null), is(false));
}
+
+ @Test
+ public void words_shouldConvertTextToWords() throws Exception {
+ final String line = "This is a string, with words!";
+ assertThat(words(line), is(new String[]{"This", "is", "a", "string", "with", "words"}));
+ }
}
\ No newline at end of file
|
Resolved #<I>
|
diff --git a/bug_test.go b/bug_test.go
index <HASH>..<HASH> 100644
--- a/bug_test.go
+++ b/bug_test.go
@@ -574,15 +574,8 @@ func Test_issue80(t *testing.T) {
14018689590001,
140186895900001,
1401868959000001,
- 1401868959000001.5,
- 14018689590000001,
- 140186895900000001,
- 1401868959000000001,
- 14018689590000000001,
- 140186895900000000001,
- 140186895900000000001.5
]);
- `, "[1401868959,14018689591,140186895901,1401868959001,14018689590001,140186895900001,1401868959000001,1.4018689590000015e+15,14018689590000001,140186895900000001,1401868959000000001,1.401868959e+19,1.401868959e+20,1.401868959e+20]")
+ `, "[1401868959,14018689591,140186895901,1401868959001,14018689590001,140186895900001,1401868959000001]")
})
}
|
Fix Test_issue<I> on Go <I>
An upstream change (<URL>) makes the Go encoder
more compliant with the ES6 standard. Ironically, this change
causes Test_issue<I> to fail on the larger number ranges.
To make this test work on both Go <I> and Go <I>, we delete the larger
value tests, which are arguably locking in the wrong behavior.
|
diff --git a/packages/cli/src/link/ios/getTargets.js b/packages/cli/src/link/ios/getTargets.js
index <HASH>..<HASH> 100644
--- a/packages/cli/src/link/ios/getTargets.js
+++ b/packages/cli/src/link/ios/getTargets.js
@@ -40,4 +40,4 @@ export default function getTargets(project) {
false,
};
});
-};
+}
|
fix: make prettier happy (#<I>)
|
diff --git a/lnetatmo.py b/lnetatmo.py
index <HASH>..<HASH> 100644
--- a/lnetatmo.py
+++ b/lnetatmo.py
@@ -320,10 +320,10 @@ class WeatherStationData:
self.stations = { d['station_name'] : d for d in self.rawData }
self.homes = { d['home_name'] : d["station_name"] for d in self.rawData }
# Keeping the old behavior for default station name
- if station and station not in self.stations: raise NoDevice("No station with name %s" % station)
- self.default_station = station or list(self.stations.keys())[0]
if home and home not in self.homes : raise NoHome("No home with name %s" % home)
self.default_home = home or list(self.homes.keys())[0]
+ if station and station not in self.stations: raise NoDevice("No station with name %s" % station)
+ self.default_station = station or [v["station_name"] for k,v in self.stations.items() if v["home_name"] == self.default_home][0]
self.modules = dict()
self.default_station_data = self.stationByName(self.default_station)
if 'modules' in self.default_station_data:
|
[Fix] #<I> wrong default station selected for multi-homes setup
|
diff --git a/src/Composer/Compiler.php b/src/Composer/Compiler.php
index <HASH>..<HASH> 100644
--- a/src/Composer/Compiler.php
+++ b/src/Composer/Compiler.php
@@ -107,6 +107,7 @@ class Compiler
$this->addFile($phar, $file, false);
}
$this->addFile($phar, new \SplFileInfo(__DIR__ . '/../../vendor/symfony/console/Resources/bin/hiddeninput.exe'), false);
+ $this->addFile($phar, new \SplFileInfo(__DIR__ . '/../../vendor/symfony/polyfill-mbstring/Resources/mb_convert_variables.php8'), false);
$finder = new Finder();
$finder->files()
|
Add missing file to v1 phar
|
diff --git a/confidence.py b/confidence.py
index <HASH>..<HASH> 100644
--- a/confidence.py
+++ b/confidence.py
@@ -227,7 +227,8 @@ def loadf(*fnames, default=_NoDefault):
if default is _NoDefault or path.exists(fname):
# (attempt to) open fname if it exists OR if we're expected to raise an error on a missing file
with open(fname, 'r') as fp:
- return yaml.load(fp.read())
+ # default to empty dict, yaml.load will return None for an empty document
+ return yaml.load(fp.read()) or {}
else:
return default
|
Default to empty dict for falsy yaml sources
|
diff --git a/tests/test-timber-post.php b/tests/test-timber-post.php
index <HASH>..<HASH> 100644
--- a/tests/test-timber-post.php
+++ b/tests/test-timber-post.php
@@ -20,6 +20,17 @@
$this->assertEquals($firstPost->next()->ID, $nextPost->ID);
}
+ function testPrev(){
+ $posts = array();
+ for($i = 0; $i<2; $i++){
+ $posts[] = $this->factory->post->create();
+ sleep(1);
+ }
+ $lastPost = new TimberPost($posts[1]);
+ $prevPost = new TimberPost($posts[0]);
+ $this->assertEquals($lastPost->prev()->ID, $prevPost->ID);
+ }
+
function testNextWithDraftAndFallover(){
$posts = array();
for($i = 0; $i<3; $i++){
|
wrote test for TimberPost::prev
|
diff --git a/file_system.go b/file_system.go
index <HASH>..<HASH> 100644
--- a/file_system.go
+++ b/file_system.go
@@ -218,9 +218,12 @@ type FileSystem interface {
//
// * (http://goo.gl/IQkWZa) sys_fsync calls do_fsync, calls vfs_fsync, calls
// vfs_fsync_range.
+ //
// * (http://goo.gl/5L2SMy) vfs_fsync_range calls f_op->fsync.
//
- // Note that this is also called by fdatasync(2) (cf. http://goo.gl/01R7rF).
+ // Note that this is also called by fdatasync(2) (cf. http://goo.gl/01R7rF),
+ // and may be called for msync(2) with the MS_SYNC flag (see the notes on
+ // FlushFile).
//
// See also: FlushFile, which may perform a similar purpose when closing a
// file (but which is not used in "real" file systems).
|
Added a callout to msync for SyncFile.
|
diff --git a/resources/lang/ja-JP/cachet.php b/resources/lang/ja-JP/cachet.php
index <HASH>..<HASH> 100644
--- a/resources/lang/ja-JP/cachet.php
+++ b/resources/lang/ja-JP/cachet.php
@@ -53,9 +53,9 @@ return [
// Service Status
'service' => [
- 'good' => '[0,1]正常に稼動しています|[2,Inf]全システムが正常に稼動しています',
- 'bad' => '[0,1]問題が発生しています|[2,Inf]一部システムにて問題が発生しています',
- 'major' => '[0, 1]システムで大きな問題が発生 |[2、*]いくつかのシステムの主要な問題が発生しています。',
+ 'good' => '全システムが正常に稼動しています',
+ 'bad' => '[0,1]問題が発生しています|[2,*]一部システムに問題が発生しています',
+ 'major' => '[0,1]システムに大きな問題が発生|[2,*]いくつかのシステムに大きな問題が発生しています。',
],
'api' => [
|
New translations cachet.php (Japanese)
|
diff --git a/test/youtube-dl/video_test.rb b/test/youtube-dl/video_test.rb
index <HASH>..<HASH> 100644
--- a/test/youtube-dl/video_test.rb
+++ b/test/youtube-dl/video_test.rb
@@ -7,22 +7,22 @@ describe YoutubeDL::Video do
end
it 'should download videos without options' do
- YoutubeDL.download TEST_URL
+ YoutubeDL::Video.download TEST_URL
assert_equal 1, Dir.glob(TEST_GLOB).length
end
it 'should download videos with options' do
- YoutubeDL.download TEST_URL, output: TEST_FILENAME, format: TEST_FORMAT
+ YoutubeDL::Video.download TEST_URL, output: TEST_FILENAME, format: TEST_FORMAT
assert File.exist? TEST_FILENAME
end
it 'should download multiple videos without options' do
- YoutubeDL.download [TEST_URL, TEST_URL2]
+ YoutubeDL::Video.download [TEST_URL, TEST_URL2]
assert_equal 2, Dir.glob(TEST_GLOB).length
end
it 'should download multiple videos with options' do
- YoutubeDL.download [TEST_URL, TEST_URL2], output: 'test_%(title)s-%(id)s.%(ext)s'
+ YoutubeDL::Video.download [TEST_URL, TEST_URL2], output: 'test_%(title)s-%(id)s.%(ext)s'
assert_equal 2, Dir.glob('test_' + TEST_GLOB).length
end
end
@@ -33,7 +33,7 @@ describe YoutubeDL::Video do
end
it 'should download videos, exactly like .download' do
- YoutubeDL.get TEST_URL
+ YoutubeDL::Video.get TEST_URL
assert_equal Dir.glob(TEST_GLOB).length, 1
end
end
|
Changed Video test to actually test Video and not just the YoutubeDL module.
|
diff --git a/activerecord/lib/active_record/database_configurations/database_config.rb b/activerecord/lib/active_record/database_configurations/database_config.rb
index <HASH>..<HASH> 100644
--- a/activerecord/lib/active_record/database_configurations/database_config.rb
+++ b/activerecord/lib/active_record/database_configurations/database_config.rb
@@ -7,7 +7,7 @@ module ActiveRecord
# as this is the parent class for the types of database configuration objects.
class DatabaseConfig # :nodoc:
attr_reader :env_name, :name, :spec_name
- deprecate :spec_name, "spec_name accessors are deprecated and will be removed in Rails 6.2, please use name instead."
+ deprecate spec_name: "please use name instead"
attr_accessor :owner_name
diff --git a/activerecord/lib/active_record/tasks/database_tasks.rb b/activerecord/lib/active_record/tasks/database_tasks.rb
index <HASH>..<HASH> 100644
--- a/activerecord/lib/active_record/tasks/database_tasks.rb
+++ b/activerecord/lib/active_record/tasks/database_tasks.rb
@@ -106,7 +106,7 @@ module ActiveRecord
def spec
@spec ||= "primary"
end
- deprecate :spec, "spec_name accessors are deprecated and will be removed in Rails 6.2, please use name instead."
+ deprecate spec: "please use name instead"
def name
@name ||= "primary"
|
Remove duplicate part from deprecation warning
Before:
```
DEPRECATION WARNING: spec_name is deprecated and will be removed from Rails <I> (spec_name accessors are deprecated and will be removed in Rails <I>, please use name instead.)
```
After:
```
DEPRECATION WARNING: spec_name is deprecated and will be removed from Rails <I> (please use name instead)
```
Follow up of #<I>.
|
diff --git a/thrifty-schema/src/main/java/com/microsoft/thrifty/schema/Constant.java b/thrifty-schema/src/main/java/com/microsoft/thrifty/schema/Constant.java
index <HASH>..<HASH> 100644
--- a/thrifty-schema/src/main/java/com/microsoft/thrifty/schema/Constant.java
+++ b/thrifty-schema/src/main/java/com/microsoft/thrifty/schema/Constant.java
@@ -54,6 +54,7 @@ public class Constant implements UserElement {
this.element = builder.element;
this.namespaces = builder.namespaces;
this.mixin = builder.mixin;
+ this.type = builder.type;
}
public ThriftType type() {
@@ -129,11 +130,13 @@ public class Constant implements UserElement {
private ConstElement element;
private ImmutableMap<NamespaceScope, String> namespaces;
+ private final ThriftType type;
Builder(Constant constant) {
super(constant.mixin);
this.element = constant.element;
this.namespaces = constant.namespaces;
+ this.type = constant.type;
}
public Builder namespaces(Map<NamespaceScope, String> namespaces) {
|
Pass type through constant builder (#<I>)
|
diff --git a/tools/humanize/humanize.go b/tools/humanize/humanize.go
index <HASH>..<HASH> 100644
--- a/tools/humanize/humanize.go
+++ b/tools/humanize/humanize.go
@@ -58,16 +58,16 @@ func ParseBytes(str string) (uint64, error) {
return 0, err
}
- unit := strings.ToLower(strings.TrimSpace(str[sep:]))
+ m, err := ParseByteUnit(str[sep:])
+ if err != nil {
+ return 0, err
+ }
- if m, ok := bytesTable[unit]; ok {
- f = f * float64(m)
- if f >= math.MaxUint64 {
- return 0, errors.New("number of bytes too large")
- }
- return uint64(f), nil
+ f = f * float64(m)
+ if f >= math.MaxUint64 {
+ return 0, errors.New("number of bytes too large")
}
- return 0, errors.Errorf("unknown unit: %q", unit)
+ return uint64(f), nil
}
// ParseByteUnit returns the number of bytes in a given unit of storage, or an
|
tools/humanize: use ParseByteUnit from ParseBytes
|
diff --git a/src/Gordalina/Mangopay/Model/User.php b/src/Gordalina/Mangopay/Model/User.php
index <HASH>..<HASH> 100644
--- a/src/Gordalina/Mangopay/Model/User.php
+++ b/src/Gordalina/Mangopay/Model/User.php
@@ -278,7 +278,7 @@ class User extends TimestampableModel
*/
public function setNationality($Nationality)
{
- if (!Utils::isISO3166($Nationality)) {
+ if ($Nationality !== null && !Utils::isISO3166($Nationality)) {
throw new \InvalidArgumentException(sprintf('Invalid nationality iso code: %s', $Nationality));
}
|
Prevent blowing up when creating a user without nationality
|
diff --git a/plugins/commands/serve/mappers/direct.rb b/plugins/commands/serve/mappers/direct.rb
index <HASH>..<HASH> 100644
--- a/plugins/commands/serve/mappers/direct.rb
+++ b/plugins/commands/serve/mappers/direct.rb
@@ -37,8 +37,12 @@ module VagrantPlugins
def converter(direct, mappers)
args = direct.arguments.map do |v|
- logger.trace("converting direct argument #{v} to something useful")
- mappers.map(v)
+ begin
+ mappers.map(v)
+ rescue => err
+ logger.debug("Failed to map value #{v} - #{err}\n#{err.backtrace.join("\n")}")
+ raise
+ end
end
Type::Direct.new(arguments: args)
end
@@ -58,10 +62,10 @@ module VagrantPlugins
def converter(d, mappers)
args = d.args.map do |a|
begin
- logger.trace("direct argument list item map to any: #{a.pretty_inspect}")
mappers.map(a, to: Google::Protobuf::Any)
rescue => err
- raise "Failed to map value #{a} - #{err}\n#{err.backtrace.join("\n")}"
+ logger.debug("Failed to map value #{a} - #{err}\n#{err.backtrace.join("\n")}")
+ raise
end
end
SDK::Args::Direct.new(arguments: args)
|
Log errors from submapping on direct type
|
diff --git a/generators/server/templates/src/main/java/package/config/locale/_AngularCookieLocaleResolver.java b/generators/server/templates/src/main/java/package/config/locale/_AngularCookieLocaleResolver.java
index <HASH>..<HASH> 100644
--- a/generators/server/templates/src/main/java/package/config/locale/_AngularCookieLocaleResolver.java
+++ b/generators/server/templates/src/main/java/package/config/locale/_AngularCookieLocaleResolver.java
@@ -44,9 +44,8 @@ public class AngularCookieLocaleResolver extends CookieLocaleResolver {
@Override
public void addCookie(HttpServletResponse response, String cookieValue) {
- // Mandatory cookie modification for angular to support the locale switching on the server side.
- cookieValue = "%22" + cookieValue + "%22";
- super.addCookie(response, cookieValue);
+ // Mandatory cookie modification for AngularJS to support the locale switching on the server side.
+ super.addCookie(response, "%22" + cookieValue + "%22");
}
private void parseLocaleCookieIfNecessary(HttpServletRequest request) {
|
[Sonar] remove variable intermediary change
|
diff --git a/ast_test.go b/ast_test.go
index <HASH>..<HASH> 100644
--- a/ast_test.go
+++ b/ast_test.go
@@ -283,7 +283,7 @@ var astTests = []testCase{
},
},
{
- []string{"foo | bar", "foo|bar"},
+ []string{"foo | bar", "foo|bar", "foo |\n#etc\nbar"},
BinaryExpr{
Op: OR,
X: litStmt("foo"),
diff --git a/parse.go b/parse.go
index <HASH>..<HASH> 100644
--- a/parse.go
+++ b/parse.go
@@ -654,6 +654,8 @@ func (p *parser) gotStmt(s *Stmt, wantStop bool) bool {
func (p *parser) binaryExpr(op Token, left Stmt) (b BinaryExpr) {
b.OpPos = p.lpos
b.Op = op
+ for p.got('#') {
+ }
p.wantFollowStmt(op.String(), &b.Y, true)
b.X = left
return
|
Fix comments right after a binary operator
|
diff --git a/ez_setup.py b/ez_setup.py
index <HASH>..<HASH> 100644
--- a/ez_setup.py
+++ b/ez_setup.py
@@ -30,7 +30,7 @@ try:
except ImportError:
USER_SITE = None
-DEFAULT_VERSION = "14.4"
+DEFAULT_VERSION = "14.3.1"
DEFAULT_URL = "https://pypi.python.org/packages/source/s/setuptools/"
DEFAULT_SAVE_DIR = os.curdir
diff --git a/setuptools/version.py b/setuptools/version.py
index <HASH>..<HASH> 100644
--- a/setuptools/version.py
+++ b/setuptools/version.py
@@ -1 +1 @@
-__version__ = '14.4'
+__version__ = '14.3.1'
|
Bumped to <I> in preparation for next release.
|
diff --git a/transport/http2_client.go b/transport/http2_client.go
index <HASH>..<HASH> 100644
--- a/transport/http2_client.go
+++ b/transport/http2_client.go
@@ -252,8 +252,7 @@ func (t *http2Client) newStream(ctx context.Context, callHdr *CallHdr) *Stream {
s.windowHandler = func(n int) {
t.updateWindow(s, uint32(n))
}
- // Make a stream be able to cancel the pending operations by itself.
- s.ctx, s.cancel = context.WithCancel(ctx)
+ s.ctx = ctx
s.dec = &recvBufferReader{
ctx: s.ctx,
goAway: s.goAway,
diff --git a/transport/transport.go b/transport/transport.go
index <HASH>..<HASH> 100644
--- a/transport/transport.go
+++ b/transport/transport.go
@@ -169,7 +169,8 @@ type Stream struct {
// nil for client side Stream.
st ServerTransport
// ctx is the associated context of the stream.
- ctx context.Context
+ ctx context.Context
+ // cancel is always nil for client side Stream.
cancel context.CancelFunc
// done is closed when the final status arrives.
done chan struct{}
|
Use user context instead of creating new context for client side stream
|
diff --git a/src/Kodeine/Metable/Metable.php b/src/Kodeine/Metable/Metable.php
index <HASH>..<HASH> 100644
--- a/src/Kodeine/Metable/Metable.php
+++ b/src/Kodeine/Metable/Metable.php
@@ -13,6 +13,16 @@ trait Metable
// Static property registration sigleton for save observation and slow large set hotfix
public static $_isObserverRegistered;
public static $_columnNames;
+
+ /**
+ * whereMeta scope for easier join
+ * -------------------------
+ */
+ public function scopeWhereMeta($query, $key, $value, $alias = null)
+ {
+ $alias = (empty($alias)) ? $this->getMetaTable() : $alias;
+ return $query->join($this->getMetaTable() . ' AS ' . $alias, $this->getQualifiedKeyName(), '=', $alias . '.' . $this->getMetaKeyName())->where('key', '=', $key)->where('value', '=', $value)->select($this->getTable() . '.*');
+ }
/**
* Meta scope for easier join
|
Update Metable.php
helpful/nice to have something like this: Post::whereMeta(['revision', 'draft'])
|
diff --git a/oauth2_test.go b/oauth2_test.go
index <HASH>..<HASH> 100644
--- a/oauth2_test.go
+++ b/oauth2_test.go
@@ -20,7 +20,7 @@ import (
"testing"
"github.com/codegangsta/martini"
- "github.com/codegangsta/martini-contrib/sessions"
+ "github.com/martini-contrib/sessions"
)
func Test_LoginRedirect(t *testing.T) {
|
Import sessions from the new origin.
|
diff --git a/tests/Common/FilesTest.php b/tests/Common/FilesTest.php
index <HASH>..<HASH> 100644
--- a/tests/Common/FilesTest.php
+++ b/tests/Common/FilesTest.php
@@ -24,6 +24,7 @@ class FilesTest extends StorageApiTestCase
$uploadedFile = reset($files);
$this->assertEquals($fileId, $uploadedFile['id']);
$this->assertArrayHasKey('region', $uploadedFile);
+ $this->assertArrayNotHasKey('credentials', $uploadedFile);
}
public function testFilesListFilterByTags()
|
testing that federation token is required for credentials
|
diff --git a/externs/closure-compiler.js b/externs/closure-compiler.js
index <HASH>..<HASH> 100644
--- a/externs/closure-compiler.js
+++ b/externs/closure-compiler.js
@@ -30,3 +30,15 @@ Touch.prototype.webkitRadiusX;
/** @type {number} */
Touch.prototype.webkitRadiusY;
+
+
+
+/**
+ * @type {boolean}
+ */
+WebGLContextAttributes.prototype.preferLowPowerToHighPerformance;
+
+/**
+ * @type {boolean}
+ */
+WebGLContextAttributes.prototype.failIfMajorPerformanceCaveat;
|
Add two missing properties to extern of WebGLContextAttributes
To be removed when the closure-compiler is updated
|
diff --git a/h5p.classes.php b/h5p.classes.php
index <HASH>..<HASH> 100644
--- a/h5p.classes.php
+++ b/h5p.classes.php
@@ -1375,6 +1375,14 @@ class H5PContentValidator {
}
}
}
+ foreach ($semantics->fields as $field) {
+ if (!(isset($field->optional) && $field->optional)) {
+ // Check if field is in group.
+ if (! property_exists($group, $field->name)) {
+ $this->h5pF->setErrorMessage($this->h5pF->t('No value given for mandatory field ' . $field->name));
+ }
+ }
+ }
}
/**
|
OPPG-<I>: Validator just got a little more annoying. Gives warning if mandatory fields are missing in group
|
diff --git a/manifest.php b/manifest.php
index <HASH>..<HASH> 100755
--- a/manifest.php
+++ b/manifest.php
@@ -13,7 +13,7 @@ return array(
'label' => 'Result storage for LTI',
'description' => 'Implements the LTI basic outcome engine for LTI Result Server',
'license' => 'GPL-2.0',
- 'version' => '1.0',
+ 'version' => '2.6',
'author' => 'Open Assessment Technologies',
'requires' => array(
'taoResultServer' => '2.6',
|
Fixed taoLtiBasicOutcome version, it was not fullfilling requirement of <I>+ from ltiDeliveryProvider extension
|
diff --git a/storage/remote/storage.go b/storage/remote/storage.go
index <HASH>..<HASH> 100644
--- a/storage/remote/storage.go
+++ b/storage/remote/storage.go
@@ -15,10 +15,13 @@ package remote
import (
"context"
+ "crypto/md5"
+ "encoding/json"
"sync"
"time"
"github.com/go-kit/kit/log"
+ "github.com/go-kit/kit/log/level"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/common/model"
@@ -37,6 +40,8 @@ type Storage struct {
logger log.Logger
mtx sync.Mutex
+ configHash [16]byte
+
// For writes
walDir string
queues []*QueueManager
@@ -77,6 +82,19 @@ func (s *Storage) ApplyConfig(conf *config.Config) error {
s.mtx.Lock()
defer s.mtx.Unlock()
+ cfgBytes, err := json.Marshal(conf.RemoteWriteConfigs)
+ if err != nil {
+ return err
+ }
+
+ hash := md5.Sum(cfgBytes)
+ if hash == s.configHash {
+ level.Debug(s.logger).Log("msg", "remote write config has not changed, no need to restart QueueManagers")
+ return nil
+ }
+
+ s.configHash = hash
+
// Update write queues
newQueues := []*QueueManager{}
// TODO: we should only stop & recreate queues which have changes,
|
Don't stop, recreate, and start remote storage QueueManagers if the (#<I>)
remote write config hasn't changed at all.
|
diff --git a/main.py b/main.py
index <HASH>..<HASH> 100755
--- a/main.py
+++ b/main.py
@@ -499,10 +499,6 @@ class PowerLogParser:
entity = self._parse_entity(entity)
node = TagChangeNode(ts, entity, tag, value)
- if self.current_node.indent_level > indent_level:
- # mismatched indent levels - closing the node
- # this can happen eg. during mulligans
- self.current_node = self.current_node.parent
self.update_node(node)
self.current_node.indent_level = indent_level
return
|
Remove an indent-mismatch hack causing incorrect replays
|
diff --git a/lib/active_record/connection_adapters/sqlserver/database_statements.rb b/lib/active_record/connection_adapters/sqlserver/database_statements.rb
index <HASH>..<HASH> 100644
--- a/lib/active_record/connection_adapters/sqlserver/database_statements.rb
+++ b/lib/active_record/connection_adapters/sqlserver/database_statements.rb
@@ -256,7 +256,7 @@ module ActiveRecord
end
rows = results.inject([]) do |rows,row|
row.each_with_index do |value, i|
- if value.is_a? raw_connection.class.parent::TimeStamp
+ if value.respond_to?(:to_sqlserver_string)
row[i] = value.to_sqlserver_string
end
end
|
Class#parent inspection is dirt slow.
|
diff --git a/hotdoc/core/base_formatter.py b/hotdoc/core/base_formatter.py
index <HASH>..<HASH> 100644
--- a/hotdoc/core/base_formatter.py
+++ b/hotdoc/core/base_formatter.py
@@ -26,7 +26,7 @@ import shutil
import pygraphviz as pg
from hotdoc.utils.configurable import Configurable
from hotdoc.utils.simple_signals import Signal
-from hotdoc.utils.utils import recursive_overwrite
+from hotdoc.utils.utils import recursive_overwrite, OrderedSet
def _create_hierarchy_graph(hierarchy):
@@ -101,6 +101,8 @@ class Formatter(Configurable):
if os.path.isdir(src):
recursive_overwrite(src, dest)
+ elif os.path.isfile(src):
+ shutil.copyfile(src, dest)
def __copy_extra_files(self, assets_path):
if not os.path.exists(assets_path):
@@ -233,4 +235,4 @@ class Formatter(Configurable):
"""Banana banana
"""
Formatter.editing_server = config.get('editing_server')
- Formatter.extra_assets = config.get_paths('extra_assets')
+ Formatter.extra_assets = OrderedSet(config.get_paths('extra_assets'))
|
base_formatter: copy extra assets files as well
|
diff --git a/eZ/Bundle/EzPublishCoreBundle/Composer/ScriptHandler.php b/eZ/Bundle/EzPublishCoreBundle/Composer/ScriptHandler.php
index <HASH>..<HASH> 100644
--- a/eZ/Bundle/EzPublishCoreBundle/Composer/ScriptHandler.php
+++ b/eZ/Bundle/EzPublishCoreBundle/Composer/ScriptHandler.php
@@ -93,7 +93,7 @@ ________________/\\\\\\\\\\\\\\\____________/\\\\\\\\\\\\\____/\\\\\\___________
<fg=cyan>Welcome to eZ Platform!</fg=cyan>
<options=bold>You may now complete the eZ Platform installation with ezplatform:install command, example of use:</options=bold>
-<comment> $ php ezpublish/console ezplatform:install --env prod demo-clean</comment>
+<comment> $ php ezpublish/console ezplatform:install --env prod demo</comment>
<options=bold>After executing this, you can launch your browser* and get started.</options=bold>
|
Changed install command example to use 'demo'
demo-clean has issues right now.
|
diff --git a/dataviews/ipython/display_hooks.py b/dataviews/ipython/display_hooks.py
index <HASH>..<HASH> 100644
--- a/dataviews/ipython/display_hooks.py
+++ b/dataviews/ipython/display_hooks.py
@@ -24,7 +24,6 @@ except:
from ..dataviews import Stack, View
from ..views import Annotation, Layout, GridLayout, Grid
from ..plots import Plot, GridLayoutPlot
-from ..sheetviews import SheetLayer, SheetStack
from . import magics
from .magics import ViewMagic, ChannelMagic, OptsMagic
|
Removed unused imports in ipython/display_hooks.py
|
diff --git a/bigtable-hbase-dataflow/src/main/java/com/google/cloud/bigtable/dataflow/CloudBigtableIO.java b/bigtable-hbase-dataflow/src/main/java/com/google/cloud/bigtable/dataflow/CloudBigtableIO.java
index <HASH>..<HASH> 100644
--- a/bigtable-hbase-dataflow/src/main/java/com/google/cloud/bigtable/dataflow/CloudBigtableIO.java
+++ b/bigtable-hbase-dataflow/src/main/java/com/google/cloud/bigtable/dataflow/CloudBigtableIO.java
@@ -46,7 +46,6 @@ import org.apache.hadoop.hbase.util.Bytes;
import com.google.api.client.util.Lists;
import com.google.api.client.util.Preconditions;
-import com.google.bigtable.repackaged.com.google.common.collect.ImmutableList;
import com.google.bigtable.v1.BigtableServiceGrpc.BigtableService;
import com.google.bigtable.v1.SampleRowKeysRequest;
import com.google.bigtable.v1.SampleRowKeysResponse;
@@ -144,7 +143,7 @@ public class CloudBigtableIO {
* Configuration for a Cloud Bigtable connection, a table, and an optional scan.
*/
private final CloudBigtableScanConfiguration configuration;
- private transient ImmutableList<SampleRowKeysResponse> sampleRowKeys;
+ private transient List<SampleRowKeysResponse> sampleRowKeys;
/**
* A {@link BoundedSource} for a Cloud Bigtable {@link Table} with a start/stop key range, along
|
Removing a dependency on com.google.bigtable.repackaged.
|
diff --git a/src/main/java/org/asteriskjava/util/Lockable.java b/src/main/java/org/asteriskjava/util/Lockable.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/asteriskjava/util/Lockable.java
+++ b/src/main/java/org/asteriskjava/util/Lockable.java
@@ -9,7 +9,7 @@ import org.asteriskjava.util.Locker.LockCloser;
public class Lockable
{
- private final ReentrantLock internalLock = new ReentrantLock(true);
+ private final ReentrantLock internalLock = new ReentrantLock(false);
final private String lockName;
final AtomicReference<Thread> threadHoldingLock = new AtomicReference<>();
private final AtomicInteger totalWaitTime = new AtomicInteger();
|
switch to unfair lock to flush out locking bugs
|
diff --git a/cookiecutter/config.py b/cookiecutter/config.py
index <HASH>..<HASH> 100644
--- a/cookiecutter/config.py
+++ b/cookiecutter/config.py
@@ -76,7 +76,7 @@ def get_config(config_path):
def get_user_config(config_file=None, default_config=False):
"""Return the user config as a dict.
- If ``default_config`` is True, ignore ``config_file and return default
+ If ``default_config`` is True, ignore ``config_file`` and return default
values for the config parameters.
If a path to a ``config_file`` is given, that is different from the default
|
Fix docstring
- cookiecutter/config.py:docstring of cookiecutter.config.get_user_config:3: WARNING: Inline literal start-string without end-string.
|
diff --git a/lib/Page.js b/lib/Page.js
index <HASH>..<HASH> 100644
--- a/lib/Page.js
+++ b/lib/Page.js
@@ -395,8 +395,8 @@ class Page extends EventEmitter {
if (clipRect) {
await Promise.all([
this._client.send('Emulation.setVisibleSize', {
- width: clipRect.width / this._screenDPI,
- height: clipRect.height / this._screenDPI,
+ width: Math.ceil(clipRect.width / this._screenDPI),
+ height: Math.ceil(clipRect.height / this._screenDPI),
}),
this._client.send('Emulation.forceViewport', {
x: clipRect.x / this._screenDPI,
|
Pass integers to the Emulation.setVisibleSize
Integers are required in the Emulation.setVisibleSize. This patch
fixes the screenshot clipRect so that it never tries to pass
float values.
|
diff --git a/src/Symfony/Component/OptionsResolver/OptionsResolver.php b/src/Symfony/Component/OptionsResolver/OptionsResolver.php
index <HASH>..<HASH> 100644
--- a/src/Symfony/Component/OptionsResolver/OptionsResolver.php
+++ b/src/Symfony/Component/OptionsResolver/OptionsResolver.php
@@ -252,7 +252,7 @@ class OptionsResolver implements Options, OptionsResolverInterface
throw new AccessException('Options cannot be made required from a lazy option or normalizer.');
}
- foreach ((array) $optionNames as $key => $option) {
+ foreach ((array) $optionNames as $option) {
$this->defined[$option] = true;
$this->required[$option] = true;
}
@@ -333,7 +333,7 @@ class OptionsResolver implements Options, OptionsResolverInterface
throw new AccessException('Options cannot be defined from a lazy option or normalizer.');
}
- foreach ((array) $optionNames as $key => $option) {
+ foreach ((array) $optionNames as $option) {
$this->defined[$option] = true;
}
|
[OptionsResolver] Remove Unused Variable from Foreach Cycles
|
diff --git a/traversal/planner/PlannerEdge.java b/traversal/planner/PlannerEdge.java
index <HASH>..<HASH> 100644
--- a/traversal/planner/PlannerEdge.java
+++ b/traversal/planner/PlannerEdge.java
@@ -1238,10 +1238,10 @@ public abstract class PlannerEdge<VERTEX_FROM extends PlannerVertex<?>, VERTEX_T
void computeCost(GraphManager graphMgr) {
if (isLoop() || to.props().hasIID()) {
cost = 1;
+ return;
}
cost = 0;
-
Set<TypeVertex> roleTypeVertices = iterate(this.roleTypes()).map(graphMgr.schema()::getType).toSet();
for (TypeVertex roleType : roleTypeVertices) {
assert roleType.isRoleType() && roleType.properLabel().scope().isPresent();
@@ -1264,6 +1264,7 @@ public abstract class PlannerEdge<VERTEX_FROM extends PlannerVertex<?>, VERTEX_T
void computeCost(GraphManager graphMgr) {
if (isLoop() || to.props().hasIID()) {
cost = 1;
+ return;
}
cost = 0;
|
fix incorrect RolePlayer edge cost when IID or Loop is present
|
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb
index <HASH>..<HASH> 100644
--- a/spec/spec_helper.rb
+++ b/spec/spec_helper.rb
@@ -4,6 +4,11 @@ RSpec.configure do |config|
config.treat_symbols_as_metadata_keys_with_true_values = true
config.run_all_when_everything_filtered = true
config.filter_run :focus
-
config.order = 'random'
end
+
+module Test
+ def self.load_fixture(name)
+ File.read(File.expand_path("../html/#{name}", __FILE__))
+ end
+end
|
Add helper for loading HTML fixtures in tests
|
diff --git a/js/remote.js b/js/remote.js
index <HASH>..<HASH> 100644
--- a/js/remote.js
+++ b/js/remote.js
@@ -676,8 +676,6 @@ Transaction.prototype.offer_create = function (src, taker_pays, taker_gets, expi
this.secret = this.account_secret(src);
this.transaction.TransactionType = 'OfferCreate';
this.transaction.Account = this.account_default(src);
- this.transaction.Amount = deliver_amount.to_json();
- this.transaction.Destination = dst_account;
this.transaction.Fee = fees.offer.to_json();
this.transaction.TakerPays = taker_pays.to_json();
this.transaction.TakerGets = taker_gets.to_json();
|
JS: Fix offer_create.
|
diff --git a/examples/repl.js b/examples/repl.js
index <HASH>..<HASH> 100644
--- a/examples/repl.js
+++ b/examples/repl.js
@@ -19,4 +19,5 @@ engine(server)
.use(engine.logger('logs'))
.use(engine.stats())
.use(engine.repl(__dirname + '/repl'))
+ .use(engine.debug())
.listen();
\ No newline at end of file
diff --git a/lib/plugins/repl.js b/lib/plugins/repl.js
index <HASH>..<HASH> 100644
--- a/lib/plugins/repl.js
+++ b/lib/plugins/repl.js
@@ -80,3 +80,15 @@ exports.help = function(master, sock){
};
exports.help.description = 'Display help information';
+
+/**
+ * Spawn `n` additional workers.
+ */
+
+exports.spawn = function(master, sock, n){
+ n = n || 1;
+ sock.write('spawning ' + n + ' worker' + (n > 1 ? 's' : '') + '\n');
+ master.spawn(n);
+};
+
+exports.spawn.description = 'Spawn one or more additional workers';
\ No newline at end of file
|
Added spawn(n) REPL command
|
diff --git a/wayback-core/src/test/java/org/archive/wayback/webapp/AccessPointTest.java b/wayback-core/src/test/java/org/archive/wayback/webapp/AccessPointTest.java
index <HASH>..<HASH> 100644
--- a/wayback-core/src/test/java/org/archive/wayback/webapp/AccessPointTest.java
+++ b/wayback-core/src/test/java/org/archive/wayback/webapp/AccessPointTest.java
@@ -156,7 +156,7 @@ public class AccessPointTest extends TestCase {
// behavior returning null are commented out because EasyMock provides them by default.
httpRequest = EasyMock.createNiceMock(HttpServletRequest.class);
- httpResponse = EasyMock.createMock(HttpServletResponse.class);
+ httpResponse = EasyMock.createNiceMock(HttpServletResponse.class);
// RequestDispatcher - setup expectations, call replay() and verify() if
// method calls are expected.
requestDispatcher = EasyMock.createMock(RequestDispatcher.class);
|
FIX: Fix AccessPointTest unit test by using createNiceMock
|
diff --git a/nolds/__init__.py b/nolds/__init__.py
index <HASH>..<HASH> 100644
--- a/nolds/__init__.py
+++ b/nolds/__init__.py
@@ -1,4 +1,4 @@
from .measures import lyap_r, lyap_e, sampen, hurst_rs, corr_dim, dfa, \
- binary_n, logarithmic_n, logarithmic_r, expected_h, logmid_n
+ binary_n, logarithmic_n, logarithmic_r, expected_h, logmid_n, expected_rs
from .datasets import brown72, tent_map, logistic_map, fbm, fgn, qrandom, \
load_qrandom
diff --git a/nolds/measures.py b/nolds/measures.py
index <HASH>..<HASH> 100644
--- a/nolds/measures.py
+++ b/nolds/measures.py
@@ -836,7 +836,8 @@ def expected_rs(n):
"""
Calculates the expected (R/S)_n for white noise for a given n.
- This is used as a correction factor in the function hurst_rs.
+ This is used as a correction factor in the function hurst_rs. It uses the
+ formula of Anis-Lloyd-Peters (see [h_3]_).
Args:
n (int):
|
updates description of expected_rs and adds it to the exported functions
|
diff --git a/perceval/backends/core/redmine.py b/perceval/backends/core/redmine.py
index <HASH>..<HASH> 100644
--- a/perceval/backends/core/redmine.py
+++ b/perceval/backends/core/redmine.py
@@ -57,7 +57,7 @@ class Redmine(Backend):
:param tag: label used to mark the data
:param archive: archive to store/retrieve items
"""
- version = '0.9.1'
+ version = '0.9.2'
CATEGORIES = [CATEGORY_ISSUE]
@@ -89,7 +89,6 @@ class Redmine(Backend):
from_date = DEFAULT_DATETIME
from_date = datetime_to_utc(from_date)
-
kwargs = {'from_date': from_date}
items = super().fetch(category, **kwargs)
@@ -339,7 +338,7 @@ class RedmineClient(HttpClient):
CWATCHERS = 'watchers'
def __init__(self, base_url, api_token=None, archive=None, from_archive=False):
- super().__init__(base_url.rstrip('/'), archive, from_archive)
+ super().__init__(base_url.rstrip('/'), archive=archive, from_archive=from_archive)
self.api_token = api_token
def issues(self, from_date=DEFAULT_DATETIME,
|
[redmine] Fix initialization client
This patch fixes the initialization of 'archive' and 'from_archive'
parameters, thus allowing to correctly store data within the
archive.
|
diff --git a/system/src/Grav/Console/ConsoleTrait.php b/system/src/Grav/Console/ConsoleTrait.php
index <HASH>..<HASH> 100644
--- a/system/src/Grav/Console/ConsoleTrait.php
+++ b/system/src/Grav/Console/ConsoleTrait.php
@@ -3,6 +3,7 @@ namespace Grav\Console;
use Grav\Common\Grav;
use Grav\Common\Composer;
+use Grav\Common\GravTrait;
use Grav\Console\Cli\ClearCacheCommand;
use Symfony\Component\Console\Formatter\OutputFormatterStyle;
use Symfony\Component\Console\Input\ArrayInput;
@@ -15,6 +16,8 @@ use Symfony\Component\Console\Output\OutputInterface;
*/
trait ConsoleTrait
{
+ use GravTrait;
+
/**
* @var
*/
@@ -105,10 +108,10 @@ trait ConsoleTrait
$input = new ArrayInput($all);
return $command->run($input, $this->output);
}
-
+
/**
* Validate if the system is based on windows or not.
- *
+ *
* @return bool
*/
public function isWindows()
|
Added GravTrait back to ensure 3rd party CLI plugins don't break.
|
diff --git a/tests/reducer/change-model-test.js b/tests/reducer/change-model-test.js
index <HASH>..<HASH> 100644
--- a/tests/reducer/change-model-test.js
+++ b/tests/reducer/change-model-test.js
@@ -3,7 +3,7 @@ var actions = require('../../lib/actions')
var reducer = require('../../lib/reducer').reducer
describe('reducer: CHANGE_MODEL', function () {
- let initialState
+ var initialState
beforeEach(function () {
initialState = {
baseModel: {
@@ -33,7 +33,7 @@ describe('reducer: CHANGE_MODEL', function () {
})
describe('when model does not include references', function () {
- let newState
+ var newState
beforeEach(function () {
newState = reducer(initialState, {
type: actions.CHANGE_MODEL,
@@ -47,7 +47,7 @@ describe('reducer: CHANGE_MODEL', function () {
})
describe('when model includes complex references', function () {
- let newState
+ var newState
beforeEach(function () {
newState = reducer(initialState, {
type: actions.CHANGE_MODEL,
|
support node5, remove block scoped vars
|
diff --git a/src/saml2/pack.py b/src/saml2/pack.py
index <HASH>..<HASH> 100644
--- a/src/saml2/pack.py
+++ b/src/saml2/pack.py
@@ -66,9 +66,12 @@ def http_form_post_message(message, location, relay_state="", typ="SAMLRequest")
if not isinstance(message, basestring):
message = "%s" % (message,)
-
- response.append(FORM_SPEC % (location, typ, base64.b64encode(message),
- relay_state))
+
+ if typ == "SAMLRequest":
+ _msg = base64.b64encode(message)
+ else:
+ _msg = message
+ response.append(FORM_SPEC % (location, typ, _msg, relay_state))
response.append("""<script type="text/javascript">""")
response.append(" window.onload = function ()")
|
May not always want to b<I> encode the message
|
diff --git a/astromodels/functions/functions.py b/astromodels/functions/functions.py
index <HASH>..<HASH> 100644
--- a/astromodels/functions/functions.py
+++ b/astromodels/functions/functions.py
@@ -1079,7 +1079,10 @@ class Log_parabola(Function1D):
# dimensionless because of a division (like xx here) are not recognized as such by the power
# operator, which throws an exception: ValueError: Quantities and Units may only be raised to a scalar power
# This is a quick fix, waiting for astropy 1.2 which will fix this
- return K * xx**((alpha + beta * np.log10(xx)).to(''))
+
+ xx = xx.to('')
+
+ return K * xx**(alpha + beta * np.log10(xx))
@property
def peak_energy(self):
|
Dealing with the bug in astropy which doesn't allow to use arrays in powers (will be fixed in astropy <I>)
|
diff --git a/lib/YandexMoney/ApiRequestor.php b/lib/YandexMoney/ApiRequestor.php
index <HASH>..<HASH> 100644
--- a/lib/YandexMoney/ApiRequestor.php
+++ b/lib/YandexMoney/ApiRequestor.php
@@ -10,7 +10,7 @@ class ApiRequestor
/**
*
*/
- const USER_AGENT = 'yamolib-php';
+ const USER_AGENT = 'yandex-money-sdk-php';
/**
*
@@ -65,13 +65,11 @@ class ApiRequestor
curl_setopt($curl, CURLOPT_USERAGENT, self::USER_AGENT);
curl_setopt($curl, CURLOPT_RETURNTRANSFER, true);
- // curl_setopt($curl, CURLOPT_FORBID_REUSE, true);
curl_setopt($curl, CURLOPT_CONNECTTIMEOUT, 30);
curl_setopt($curl, CURLOPT_TIMEOUT, 80);
curl_setopt($curl, CURLOPT_POST, true);
curl_setopt($curl, CURLOPT_POSTFIELDS, $params);
curl_setopt($curl, CURLOPT_SSL_VERIFYPEER, true);
- // curl_setopt($curl, CURLOPT_SSL_VERIFYHOST, true);
curl_setopt($curl, CURLOPT_CAINFO, __DIR__ . self::CERTIFICATE_PATH);
$this->_log($this->_makeRequestLogMessage($uri, $params));
|
user agent changed to "yandex-money-sdk-php"
|
diff --git a/lib/conceptql/behaviors/provenanceable.rb b/lib/conceptql/behaviors/provenanceable.rb
index <HASH>..<HASH> 100644
--- a/lib/conceptql/behaviors/provenanceable.rb
+++ b/lib/conceptql/behaviors/provenanceable.rb
@@ -201,7 +201,7 @@ module ConceptQL
h[c[0][0]].merge!( [[c[0][1],c[1].flatten]].to_h){|key,new_v,old_v| (new_v.flatten + old_v.flatten).uniq}
}
else
- res = {}
+ res = {FILE_PROVENANCE_TYPES_VOCAB => {}, CODE_PROVENANCE_TYPES_VOCAB => {}}
end
return res
|
Fix error when using concept ids in provenance operator by themselves
|
diff --git a/src/TableRow.js b/src/TableRow.js
index <HASH>..<HASH> 100644
--- a/src/TableRow.js
+++ b/src/TableRow.js
@@ -9,9 +9,7 @@ class TableRow extends Component {
}
rowClick = e => {
- if (e.target.tagName !== 'INPUT' &&
- e.target.tagName !== 'SELECT' &&
- e.target.tagName !== 'TEXTAREA') {
+ if (e.target.tagName === 'TD') {
const rowIndex = this.props.index + 1;
const cellIndex = e.target.cellIndex;
const { selectRow, unselectableRow, isSelected, onSelectRow, onExpandRow } = this.props;
|
fix clicking on a custom selection column will trigger selection twice
|
diff --git a/src/Sylius/Bundle/CoreBundle/Application/Kernel.php b/src/Sylius/Bundle/CoreBundle/Application/Kernel.php
index <HASH>..<HASH> 100644
--- a/src/Sylius/Bundle/CoreBundle/Application/Kernel.php
+++ b/src/Sylius/Bundle/CoreBundle/Application/Kernel.php
@@ -31,17 +31,17 @@ use Webmozart\Assert\Assert;
class Kernel extends HttpKernel
{
- public const VERSION = '1.7.1';
+ public const VERSION = '1.7.2-DEV';
- public const VERSION_ID = '10701';
+ public const VERSION_ID = '10702';
public const MAJOR_VERSION = '1';
public const MINOR_VERSION = '7';
- public const RELEASE_VERSION = '1';
+ public const RELEASE_VERSION = '2';
- public const EXTRA_VERSION = '';
+ public const EXTRA_VERSION = 'DEV';
public function __construct(string $environment, bool $debug)
{
|
Change application's version to <I>-DEV
|
diff --git a/lib/protocol/tcp.js b/lib/protocol/tcp.js
index <HASH>..<HASH> 100644
--- a/lib/protocol/tcp.js
+++ b/lib/protocol/tcp.js
@@ -38,7 +38,6 @@ exports.connect = function connect(options, cb) {
keepAliveIdle = options['tcpKeepAliveIdle'] * 1000;
if (isNaN(keepAliveIdle)) keepAliveIdle = defaultKeepAliveIdle;
}
- console.log(keepAliveIdle);
socket.setKeepAlive(true, keepAliveIdle);
return socket;
};
|
Remove tcp keepalive debug code
|
diff --git a/wafer/pages/management/tests/test_load_pages.py b/wafer/pages/management/tests/test_load_pages.py
index <HASH>..<HASH> 100644
--- a/wafer/pages/management/tests/test_load_pages.py
+++ b/wafer/pages/management/tests/test_load_pages.py
@@ -8,6 +8,8 @@ from django.core.management import call_command
from django.test import TestCase
from django.utils.six import StringIO
+from wafer.pages.models import Page
+
PAGES = {
"page1.md": "\n".join([
"---",
@@ -43,3 +45,8 @@ class LoadPagesTest(TestCase):
"Loaded page page1",
"Loaded page page2",
])
+ pages = sorted(Page.objects.all(), key=lambda p: p.name)
+ self.assertEqual(pages[0].name, "Page 1")
+ self.assertEqual(pages[1].name, "Page 2")
+ self.assertEqual(pages[0].content.raw, "This is page 1.")
+ self.assertEqual(pages[1].content.raw, "This is page 2.")
|
Test that pages made it to the database.
|
diff --git a/rfc5424logging/__init__.py b/rfc5424logging/__init__.py
index <HASH>..<HASH> 100644
--- a/rfc5424logging/__init__.py
+++ b/rfc5424logging/__init__.py
@@ -1,6 +1,14 @@
-from .handler import Rfc5424SysLogHandler, NILVALUE
+from .handler import Rfc5424SysLogHandler, TlsRfc5424SysLogHandler, NILVALUE
from .adapter import Rfc5424SysLogAdapter, EMERGENCY, ALERT, NOTICE
__version__ = "1.2.1"
-__all__ = ['Rfc5424SysLogHandler', 'Rfc5424SysLogAdapter', 'EMERGENCY', 'ALERT', 'NOTICE', 'NILVALUE']
+__all__ = [
+ 'Rfc5424SysLogHandler',
+ 'Rfc5424SysLogAdapter',
+ 'TlsRfc5424SysLogHandler',
+ 'EMERGENCY',
+ 'ALERT',
+ 'NOTICE',
+ 'NILVALUE'
+]
|
loading the new class to __all__
|
diff --git a/lib/dm-core.rb b/lib/dm-core.rb
index <HASH>..<HASH> 100644
--- a/lib/dm-core.rb
+++ b/lib/dm-core.rb
@@ -26,7 +26,7 @@ require 'extlib/inflection'
begin
gem 'fastthread', '~>1.0.1'
require 'fastthread'
-rescue Gem::LoadError
+rescue LoadError
# fastthread not installed
end
|
Changed exception to check for to ancestor of Gem::LoadError
|
diff --git a/src/java/voldemort/server/gossip/Gossiper.java b/src/java/voldemort/server/gossip/Gossiper.java
index <HASH>..<HASH> 100644
--- a/src/java/voldemort/server/gossip/Gossiper.java
+++ b/src/java/voldemort/server/gossip/Gossiper.java
@@ -44,7 +44,6 @@ public class Gossiper implements Runnable {
running.set(false);
}
- @Override
public void run() {
while (running.get()) {
Node node = selectPeer();
|
Removed @Override on an interface for <I> compatibility.
|
diff --git a/src/test/java/com/github/davidmoten/rx2/internal/flowable/FlowableRepeatingTransformTest.java b/src/test/java/com/github/davidmoten/rx2/internal/flowable/FlowableRepeatingTransformTest.java
index <HASH>..<HASH> 100644
--- a/src/test/java/com/github/davidmoten/rx2/internal/flowable/FlowableRepeatingTransformTest.java
+++ b/src/test/java/com/github/davidmoten/rx2/internal/flowable/FlowableRepeatingTransformTest.java
@@ -234,7 +234,6 @@ public final class FlowableRepeatingTransformTest {
};
Flowable.just(1, 5) //
.to(Transformers.repeat(plusOne, 3, 1, tester)) //
- .doOnNext(Consumers.println()) //
.test() //
.assertValues(2, 6) //
.assertComplete();
@@ -319,7 +318,6 @@ public final class FlowableRepeatingTransformTest {
private static void check(int n, int maxChained) {
int result = Flowable.range(1, n) //
.to(Transformers.reduce(reducer, maxChained)) //
- .doOnNext(Consumers.println()) //
.single(-1) //
.blockingGet();
Assert.assertEquals(sum(n), result);
|
reduce logging from FlowableRepeatingTransformTest
|
diff --git a/gui/tools/designer.py b/gui/tools/designer.py
index <HASH>..<HASH> 100644
--- a/gui/tools/designer.py
+++ b/gui/tools/designer.py
@@ -159,12 +159,20 @@ class BasicDesigner:
wx_obj = self.current
sx, sy = self.start
x, y = wx.GetMousePosition()
- # update gui specs (this will overwrite relative dimensions):
+ # calculate the new position (this will overwrite relative dimensions):
x, y = (x + sx, y + sy)
if evt.ShiftDown(): # snap to grid:
x = x / GRID_SIZE[0] * GRID_SIZE[0]
y = y / GRID_SIZE[1] * GRID_SIZE[1]
- wx_obj.obj.pos = (wx.Point(x, y))
+ # calculate the diff to use in the rest of the selected objects:
+ ox, oy = wx_obj.obj.pos
+ dx, dy = (x - ox), (y - oy)
+ # move all selected objects:
+ for obj in self.selection:
+ x, y = obj.pos
+ x = x + dx
+ y = y + dy
+ obj.pos = (wx.Point(x, y))
def do_resize(self, evt, wx_obj, (n, w, s, e)):
"Called by SelectionTag"
|
allow moving group object using mouse (designer)
|
diff --git a/distutils/tests/test_dist.py b/distutils/tests/test_dist.py
index <HASH>..<HASH> 100644
--- a/distutils/tests/test_dist.py
+++ b/distutils/tests/test_dist.py
@@ -83,6 +83,10 @@ class DistributionTestCase(support.LoggingSilencer,
self.assertIsInstance(cmd, test_dist)
self.assertEqual(cmd.sample_option, "sometext")
+ @unittest.skipIf(
+ 'distutils' not in Distribution.parse_config_files.__module__,
+ 'Cannot test when virtualenv has monkey-patched Distribution.',
+ )
def test_venv_install_options(self):
sys.argv.append("install")
self.addCleanup(os.unlink, TESTFN)
|
Mark test_venv to be skipped when running under a virtualenv as virtualenv monkey patches distutils.
|
diff --git a/client/client_test.go b/client/client_test.go
index <HASH>..<HASH> 100644
--- a/client/client_test.go
+++ b/client/client_test.go
@@ -544,3 +544,23 @@ func TestRedirectFollowingHTTPClient(t *testing.T) {
}
}
}
+
+func TestDefaultCheckRedirect(t *testing.T) {
+ tests := []struct {
+ num int
+ err error
+ }{
+ {0, nil},
+ {5, nil},
+ {10, nil},
+ {11, ErrTooManyRedirects},
+ {29, ErrTooManyRedirects},
+ }
+
+ for i, tt := range tests {
+ err := DefaultCheckRedirect(tt.num)
+ if !reflect.DeepEqual(tt.err, err) {
+ t.Errorf("#%d: want=%#v got=%#v", i, tt.err, err)
+ }
+ }
+}
|
client: test DefaultCheckRedirect
|
diff --git a/salt/states/boto3_route53.py b/salt/states/boto3_route53.py
index <HASH>..<HASH> 100644
--- a/salt/states/boto3_route53.py
+++ b/salt/states/boto3_route53.py
@@ -650,8 +650,13 @@ def rr_present(name, HostedZoneId=None, DomainName=None, PrivateZone=False, Name
fixed_rrs += [rr]
ResourceRecords = [{'Value': rr} for rr in sorted(fixed_rrs)]
+ # https://github.com/boto/boto/pull/1216
+ # the Route53 API returns the unicode version of the '*' character
+ UnicodedName = Name
+ if '*' in Name:
+ UnicodedName = Name.replace('*',r'\052')
recordsets = __salt__['boto3_route53.get_resource_records'](HostedZoneId=HostedZoneId,
- StartRecordName=Name, StartRecordType=Type, region=region, key=key, keyid=keyid,
+ StartRecordName=UnicodedName, StartRecordType=Type, region=region, key=key, keyid=keyid,
profile=profile)
if SetIdentifier and recordsets:
|
Use the unicode version of the '*' character for route<I>
|
diff --git a/fusesoc/main.py b/fusesoc/main.py
index <HASH>..<HASH> 100644
--- a/fusesoc/main.py
+++ b/fusesoc/main.py
@@ -152,6 +152,10 @@ def init(args):
f.write("cores_root = {}\n".format(cores_root))
pr_info("FuseSoC is ready to use!")
+def list_paths(args):
+ cores_root = CoreManager().get_cores_root()
+ print("\n".join(cores_root))
+
def list_cores(args):
cores = CoreManager().get_cores()
print("\nAvailable cores:\n")
@@ -336,6 +340,9 @@ def main():
parser_core_info.add_argument('core')
parser_core_info.set_defaults(func=core_info)
+ parser_list_paths = subparsers.add_parser('list-paths', help='Displays the search order for core root paths')
+ parser_list_paths.set_defaults(func=list_paths)
+
#Simulation subparser
parser_sim = subparsers.add_parser('sim', help='Setup and run a simulation')
parser_sim.add_argument('--sim', nargs=1, help='Override the simulator settings from the system file')
|
Add command to list the core_root paths
This can help the user to find conflicts etc.
|
diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/signing.py b/datadog_checks_dev/datadog_checks/dev/tooling/signing.py
index <HASH>..<HASH> 100644
--- a/datadog_checks_dev/datadog_checks/dev/tooling/signing.py
+++ b/datadog_checks_dev/datadog_checks/dev/tooling/signing.py
@@ -1,8 +1,16 @@
# (C) Datadog, Inc. 2018
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
+# flake8: noqa
import shutil
+# NOTE: Set one minute for any GPG subprocess to timeout in in-toto. Should be
+# enough time for developers to find and enter their PIN and / or touch their
+# Yubikey. We do this before we load the rest of in-toto, so that this setting
+# takes effect.
+import in_toto.settings
+in_toto.settings.SUBPROCESS_TIMEOUT = 60
+
from in_toto import runlib
from in_toto.gpg.constants import GPG_COMMAND
|
Increase gpg timeout to give time to developers to interact with Yubikeys (#<I>)
* increase gpg timeout value for in-toto
* skip flake8 for this module
|
diff --git a/dist/index.js b/dist/index.js
index <HASH>..<HASH> 100644
--- a/dist/index.js
+++ b/dist/index.js
@@ -26432,8 +26432,7 @@ return /******/ (function(modules) { // webpackBootstrap
value: function createField(control, props) {
var _this2 = this;
- console.log('creating ' + props.model);
- if (!control || !control.props) return control;
+ if (!control || !control.props || Object.hasOwnProperty(control.props, 'modelValue')) return control;
var dispatch = props.dispatch;
var model = props.model;
diff --git a/src/components/field-component.js b/src/components/field-component.js
index <HASH>..<HASH> 100644
--- a/src/components/field-component.js
+++ b/src/components/field-component.js
@@ -39,8 +39,10 @@ function selector(state, { model }) {
class Field extends React.Component {
createField(control, props) {
- console.log(`creating ${props.model}`);
- if (!control || !control.props) return control;
+ if (!control
+ || !control.props
+ || Object.hasOwnProperty(control.props, 'modelValue')
+ ) return control;
let {
dispatch,
|
Optimizing performance for already-created controls
|
diff --git a/rollup.config.js b/rollup.config.js
index <HASH>..<HASH> 100644
--- a/rollup.config.js
+++ b/rollup.config.js
@@ -36,11 +36,16 @@ const plugins = [
main: true,
browser: true,
}),
- replace({
- 'process.env.NODE_ENV': JSON.stringify(process.env.NODE_ENV),
- }),
];
+if (format === 'umd') {
+ plugins.push(
+ replace({
+ 'process.env.NODE_ENV': JSON.stringify(process.env.NODE_ENV),
+ })
+ );
+}
+
if (production) {
plugins.push(uglify());
}
|
Updated builds so that the NODE_ENV only gets replaced in umd builds
|
diff --git a/src/java/com/threerings/presents/client/InvocationDirector.java b/src/java/com/threerings/presents/client/InvocationDirector.java
index <HASH>..<HASH> 100644
--- a/src/java/com/threerings/presents/client/InvocationDirector.java
+++ b/src/java/com/threerings/presents/client/InvocationDirector.java
@@ -1,5 +1,5 @@
//
-// $Id: InvocationDirector.java,v 1.30 2003/07/20 17:02:59 mdb Exp $
+// $Id: InvocationDirector.java,v 1.31 2003/07/25 20:51:08 mdb Exp $
package com.threerings.presents.client;
@@ -345,6 +345,7 @@ public class InvocationDirector
// reregister our receivers
_clobj.startTransaction();
try {
+ _clobj.setReceivers(new DSet());
Iterator iter = receivers.entries();
while (iter.hasNext()) {
_clobj.addToReceivers((Registration)iter.next());
|
We need also to clear the receivers when we transfer them from our auth
user object to our chosen pirate user object.
git-svn-id: svn+ssh://src.earth.threerings.net/narya/trunk@<I> <I>f4-<I>e9-<I>-aa3c-eee0fc<I>fb1
|
diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -142,6 +142,10 @@ EventEmitterProto.trigger = EventEmitterProto.emit;
* @description Super small and simple interpretation of popular event management.
*/
function EventEmitter() {
+ if (!(this instanceof EventEmitter)) {
+ return new EventEmitter();
+ }
+
this._listeners = [];
}
diff --git a/test/unit/specs/test.super-event-emitter.js b/test/unit/specs/test.super-event-emitter.js
index <HASH>..<HASH> 100644
--- a/test/unit/specs/test.super-event-emitter.js
+++ b/test/unit/specs/test.super-event-emitter.js
@@ -24,6 +24,14 @@ describe('EventEmitter', function () {
expect(spyFn).toHaveBeenCalled();
});
+ it('should create a new instance when called without `new`', function() {
+ var instance = EventEmitter();
+ var instance2 = EventEmitter();
+
+ expect(instance).not.toBe(instance2);
+ expect(instance.on).toBeDefined();
+ });
+
it('should allow mixing with existing objects', function() {
var existing = {};
EventEmitter.mixin(existing);
|
Allow `new`-less instantiation and prevent a global object leak
|
diff --git a/src/java/com/threerings/media/image/ImageUtil.java b/src/java/com/threerings/media/image/ImageUtil.java
index <HASH>..<HASH> 100644
--- a/src/java/com/threerings/media/image/ImageUtil.java
+++ b/src/java/com/threerings/media/image/ImageUtil.java
@@ -1,5 +1,5 @@
//
-// $Id: ImageUtil.java,v 1.11 2002/05/04 19:34:14 mdb Exp $
+// $Id: ImageUtil.java,v 1.12 2002/05/04 21:36:32 ray Exp $
package com.threerings.media.util;
@@ -282,7 +282,7 @@ public class ImageUtil
if (image instanceof BufferedImage) {
BufferedImage bimage = (BufferedImage)image;
int argb = bimage.getRGB(x, y);
- int alpha = argb >> 24;
+ // int alpha = argb >> 24;
// Log.info("Checking [x=" + x + ", y=" + y + ", " + alpha);
// it's only a hit if the pixel is non-transparent
|
this line should have been commented out too
git-svn-id: svn+ssh://src.earth.threerings.net/narya/trunk@<I> <I>f4-<I>e9-<I>-aa3c-eee0fc<I>fb1
|
diff --git a/lib/mongo_mapper/plugins/querying/decorator.rb b/lib/mongo_mapper/plugins/querying/decorator.rb
index <HASH>..<HASH> 100644
--- a/lib/mongo_mapper/plugins/querying/decorator.rb
+++ b/lib/mongo_mapper/plugins/querying/decorator.rb
@@ -30,19 +30,13 @@ module MongoMapper
def last(opts={})
model.load(super)
end
-
+
private
def method_missing(method, *args, &block)
- if model.respond_to?(method)
- query = model.send(method, *args, &block)
- if query.is_a?(Plucky::Query)
- merge(query)
- else
- super
- end
- else
- super
- end
+ return super unless model.respond_to?(method)
+ result = model.send(method, *args, &block)
+ return super unless result.is_a?(Plucky::Query)
+ merge(result)
end
end
end
|
Slight change in Query method missing stuff. Feel like this is easier to read than the nested ifs.
|
diff --git a/tests/Service/CacheServiceTest.php b/tests/Service/CacheServiceTest.php
index <HASH>..<HASH> 100644
--- a/tests/Service/CacheServiceTest.php
+++ b/tests/Service/CacheServiceTest.php
@@ -156,6 +156,25 @@ class CacheServiceTest extends \PHPUnit_Framework_TestCase
$this->cacheService->save($this->getMvcEvent());
}
+ public function testSaveEventHasCacheKey()
+ {
+ $response = $this->getMvcEvent()->getResponse();
+ $response->setContent('mockContent');
+
+ $this->cacheService->getEventManager()->attach(CacheEvent::EVENT_SHOULDCACHE, function () { return true; });
+ $this->cacheService->getEventManager()->attach(CacheEvent::EVENT_SAVE, function (CacheEvent $e) {
+ $this->assertNotNull($e->getCacheKey());
+ });
+
+ $this->storageMock
+ ->shouldReceive('setItem')
+ ->once()
+ ->with('/foo/bar', $response->getContent());
+
+ $this->cacheService->getOptions()->setCacheResponse(false);
+ $this->cacheService->save($this->getMvcEvent());
+ }
+
public function testResponseIsCachedWhenOneListenerReturnsTrue()
{
$this->cacheService->getEventManager()->attach(CacheEvent::EVENT_SHOULDCACHE, function () { return false; });
|
Added test to check if EVENT_SAVE has cache key set
|
diff --git a/openupgradelib/openupgrade_merge_records.py b/openupgradelib/openupgrade_merge_records.py
index <HASH>..<HASH> 100644
--- a/openupgradelib/openupgrade_merge_records.py
+++ b/openupgradelib/openupgrade_merge_records.py
@@ -419,6 +419,9 @@ def _change_generic(env, model_name, record_ids, target_record_id,
if (model._table, res_id_column) in exclude_columns:
continue
if method == 'orm':
+ if not model._fields.get(model_column) or \
+ not model._fields.get(res_id_column):
+ continue
records = model.search([
(model_column, '=', model_name),
(res_id_column, 'in', record_ids)])
@@ -442,6 +445,9 @@ def _change_generic(env, model_name, record_ids, target_record_id,
"Changed %s record(s) of model '%s'",
len(records), model_to_replace)
else:
+ if not column_exists(env.cr, model._table, res_id_column) or \
+ not column_exists(env.cr, model._table, model_column):
+ continue
format_args = {
'table': sql.Identifier(model._table),
'res_id_column': sql.Identifier(res_id_column),
|
[FIX] merge_records: assure columns exist in _change_generic
|
diff --git a/netty-reactive-streams/src/test/java/com/typesafe/netty/ChannelPublisherTest.java b/netty-reactive-streams/src/test/java/com/typesafe/netty/ChannelPublisherTest.java
index <HASH>..<HASH> 100644
--- a/netty-reactive-streams/src/test/java/com/typesafe/netty/ChannelPublisherTest.java
+++ b/netty-reactive-streams/src/test/java/com/typesafe/netty/ChannelPublisherTest.java
@@ -138,7 +138,7 @@ public class ChannelPublisherTest {
}
T take() throws Exception {
- T t = elements.poll(100, TimeUnit.MILLISECONDS);
+ T t = elements.poll(1000, TimeUnit.MILLISECONDS);
assertNotNull(t);
return t;
}
|
Try increasing subscriber probe timeout to fix CI
|
diff --git a/lib/Doctrine/DBAL/Platforms/AbstractPlatform.php b/lib/Doctrine/DBAL/Platforms/AbstractPlatform.php
index <HASH>..<HASH> 100644
--- a/lib/Doctrine/DBAL/Platforms/AbstractPlatform.php
+++ b/lib/Doctrine/DBAL/Platforms/AbstractPlatform.php
@@ -322,15 +322,15 @@ abstract class AbstractPlatform
/**
* Mark this type as to be commented in ALTER TABLE and CREATE TABLE statements.
*
- * @param Type $doctrineType
+ * @param string|Type $doctrineType
* @return void
*/
- public function markDoctrineTypeCommented(Type $doctrineType)
+ public function markDoctrineTypeCommented($doctrineType)
{
if ($this->doctrineTypeComments === null) {
$this->initializeCommentedDoctrineTypes();
}
- $this->doctrineTypeComments[] = $doctrineType->getName();
+ $this->doctrineTypeComments[] = $doctrineType instanceof Type ? $doctrineType->getName() : $doctrineType;
}
/**
|
[Platform] Allow a string to be passed as type
|
diff --git a/cmd/swarm/run_test.go b/cmd/swarm/run_test.go
index <HASH>..<HASH> 100644
--- a/cmd/swarm/run_test.go
+++ b/cmd/swarm/run_test.go
@@ -242,7 +242,7 @@ func existingTestNode(t *testing.T, dir string, bzzaccount string) *testNode {
"--bzzaccount", bzzaccount,
"--bzznetworkid", "321",
"--bzzport", httpPort,
- "--verbosity", "3",
+ "--verbosity", fmt.Sprint(*loglevel),
)
node.Cmd.InputLine(testPassphrase)
defer func() {
@@ -318,7 +318,7 @@ func newTestNode(t *testing.T, dir string) *testNode {
"--bzzaccount", account.Address.String(),
"--bzznetworkid", "321",
"--bzzport", httpPort,
- "--verbosity", "3",
+ "--verbosity", fmt.Sprint(*loglevel),
)
node.Cmd.InputLine(testPassphrase)
defer func() {
|
cmd/swarm: respect --loglevel in run_test helpers (#<I>)
When CLI tests were spanning new nodes, the log level verbosity was
hard coded as 6. So the Swarm process was always polluting the test
output with TRACE level logs.
Now `go test -v ./cmd/swarm -loglevel 0` works as expected.
|
diff --git a/iktomi/utils/html.py b/iktomi/utils/html.py
index <HASH>..<HASH> 100644
--- a/iktomi/utils/html.py
+++ b/iktomi/utils/html.py
@@ -105,13 +105,19 @@ class Cleaner(clean.Cleaner):
continue
par = None
+ def _tail_is_empty(self, el):
+ return not el.tail and el.tail.strip(u' \t\r\n\v\f\u00a0')
+
def is_element_empty(self, el):
if el.tag == 'br':
return True
if el.tag not in self.drop_empty_tags:
return False
children = el.getchildren()
- empty_children = all(map(self.is_element_empty, children))
+ empty_children = all(
+ [self.is_element_empty(child) and self._tail_is_empty(child)
+ for child in children]
+ )
text = el.text and el.text.strip(u' \t\r\n\v\f\u00a0')
return not text and empty_children
|
HtmlElement is not empty if it has tail
|
diff --git a/tests/tasks/test_plugin_based.py b/tests/tasks/test_plugin_based.py
index <HASH>..<HASH> 100644
--- a/tests/tasks/test_plugin_based.py
+++ b/tests/tasks/test_plugin_based.py
@@ -160,7 +160,7 @@ def test_ensure_workflow_data_is_saved_in_various_conditions(
# Start the task.run in a separate process and terminate it.
# This simulates the Cancel behavior by TERM signal.
- def _build_docker_image(self, *args, **kwargs):
+ def _build_docker_image(*args, **kwargs):
def _cancel_build(*args, **kwargs):
raise TaskCanceledException()
@@ -180,6 +180,7 @@ def test_ensure_workflow_data_is_saved_in_various_conditions(
time.sleep(0.3)
proc.terminate()
+ time.sleep(1)
assert context_dir.join("workflow.json").exists()
wf_data = ImageBuildWorkflowData()
|
fix test for checking existance of workflow.json
|
diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -25,7 +25,11 @@ function whyIsNodeRunning (logger) {
hook.disable()
var activeResources = [...active.values()].filter(function(r) {
- if (r.type === 'Timeout' && !r.resource.hasRef()) return false
+ if (
+ r.type === 'Timeout' &&
+ typeof r.resource.hasRef === 'function'
+ && !r.resource.hasRef()
+ ) return false
return true
})
|
Fix issue with excluding unrefed timers in node version <<I>
|
diff --git a/lib/ticketmaster/provider.rb b/lib/ticketmaster/provider.rb
index <HASH>..<HASH> 100644
--- a/lib/ticketmaster/provider.rb
+++ b/lib/ticketmaster/provider.rb
@@ -24,6 +24,12 @@ module TicketMaster::Provider
def authorize(authentication = {})
@authentication = TicketMaster::Authenticator.new(authentication)
end
+
+ # All providers must define this method.
+ # It should implement the code for validating the authentication
+ def valid?
+ raise TicketMaster::Exception.new("#{Base.name}::#{this_method} method must be implemented by the provider")
+ end
# Providers should try to define this method
#
diff --git a/spec/ticketmaster-exception_spec.rb b/spec/ticketmaster-exception_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/ticketmaster-exception_spec.rb
+++ b/spec/ticketmaster-exception_spec.rb
@@ -13,6 +13,13 @@ describe "Ticketmaster Exception Messages" do
before(:each) do
@ticketmaster = TicketMaster.new(:tester, {})
end
+
+ describe "TicketMaster::Provider::Base" do
+ it "valid? method raises correct exception" do
+ msg = "TicketMaster::Provider::Base::valid? method must be implemented by the provider"
+ lambda { @ticketmaster.valid? }.should raise_error(@exception, msg)
+ end
+ end
describe "TicketMaster::Provider::Helper" do
it "easy_finder method raises correct exception" do
|
added a new valid method to implement in each provider
|
diff --git a/gdx-ai/src/com/badlogic/gdx/ai/btree/decorator/Include.java b/gdx-ai/src/com/badlogic/gdx/ai/btree/decorator/Include.java
index <HASH>..<HASH> 100644
--- a/gdx-ai/src/com/badlogic/gdx/ai/btree/decorator/Include.java
+++ b/gdx-ai/src/com/badlogic/gdx/ai/btree/decorator/Include.java
@@ -94,12 +94,15 @@ public class Include<E> extends Decorator<E> {
Include<E> include = (Include<E>)task;
include.subtree = subtree;
include.lazy = lazy;
+ include.guard = guard;
return task;
}
private Task<E> createSubtreeRootTask () {
- return BehaviorTreeLibraryManager.getInstance().createRootTask(subtree);
+ Task<E> rootTask = BehaviorTreeLibraryManager.getInstance().createRootTask(subtree);
+ rootTask.setGuard(guard);
+ return rootTask;
}
@Override
|
fix(BehaviorTree): properly copy & clone the IncludeTask guard to the generated tree.
|
diff --git a/pkg/minikube/exit/exit.go b/pkg/minikube/exit/exit.go
index <HASH>..<HASH> 100644
--- a/pkg/minikube/exit/exit.go
+++ b/pkg/minikube/exit/exit.go
@@ -102,6 +102,6 @@ func displayError(msg string, err error) {
out.ErrT(out.Empty, "")
out.FatalT("{{.msg}}: {{.err}}", out.V{"msg": translate.T(msg), "err": err})
out.ErrT(out.Empty, "")
- out.ErrT(out.Sad, "Sorry that minikube crashed. If this was unexpected, we would love to hear from you:")
+ out.ErrT(out.Sad, "minikube is exiting due to an error. If this message is not helpful, please open an issue:")
out.ErrT(out.URL, "https://github.com/kubernetes/minikube/issues/new/choose")
}
|
Not a crash, but an error
|
diff --git a/claripy/frontends/composite_frontend.py b/claripy/frontends/composite_frontend.py
index <HASH>..<HASH> 100644
--- a/claripy/frontends/composite_frontend.py
+++ b/claripy/frontends/composite_frontend.py
@@ -43,7 +43,7 @@ class CompositeFrontend(ConstrainedFrontend):
super(CompositeFrontend, self)._ana_setstate(base_state)
def downsize(self):
- for e in self._solvers.values():
+ for e in self._solver_list:
e.downsize()
#
@@ -62,7 +62,10 @@ class CompositeFrontend(ConstrainedFrontend):
@property
def variables(self):
- return set(self._solvers.keys())
+ if len(self._solver_list) == 0:
+ return set()
+ else:
+ return set.union(*[s.variables for s in self._solver_list])
# this is really hacky, but we want to avoid having our variables messed with
@variables.setter
@@ -285,6 +288,9 @@ class CompositeFrontend(ConstrainedFrontend):
#
def _reabsorb_solver(self, s):
+ if len(s.variables) == 0 or self._solvers[next(iter(s.variables))] is s:
+ return
+
if isinstance(s, ModelCacheMixin):
done = set()
for ns in s.split():
|
small optimizations regarding the solver lists and solver reabsorbtion
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.