| diff (string, lengths 65-26.7k) | message (string, lengths 7-9.92k) |
|---|---|
diff --git a/store/src/main/java/com/buschmais/jqassistant/core/store/impl/AbstractGraphStore.java b/store/src/main/java/com/buschmais/jqassistant/core/store/impl/AbstractGraphStore.java
index <HASH>..<HASH> 100644
--- a/store/src/main/java/com/buschmais/jqassistant/core/store/impl/AbstractGraphStore.java
+++ b/store/src/main/java/com/buschmais/jqassistant/core/store/impl/AbstractGraphStore.java
@@ -27,7 +27,7 @@ import com.buschmais.xo.api.XOManagerFactory;
public abstract class AbstractGraphStore implements Store {
private static final Logger LOGGER = LoggerFactory.getLogger(AbstractGraphStore.class);
- private static final int AUTOCOMMIT_THRESHOLD = 8192;
+ private static final int AUTOCOMMIT_THRESHOLD = 32678;
private XOManagerFactory xoManagerFactory;
private XOManager xoManager;
private int created;
|
#<I> rework artifact resolution in maven plugins
|
diff --git a/lib/discordrb/webhooks/version.rb b/lib/discordrb/webhooks/version.rb
index <HASH>..<HASH> 100644
--- a/lib/discordrb/webhooks/version.rb
+++ b/lib/discordrb/webhooks/version.rb
@@ -1,7 +1,9 @@
# frozen_string_literal: true
# Webhook support for discordrb
-module Discordrb::Webhooks
- # The current version of discordrb-webhooks.
- VERSION = '0.1.0'.freeze
+module Discordrb
+ module Webhooks
+ # The current version of discordrb-webhooks.
+ VERSION = '0.1.0'.freeze
+ end
end
|
:anchor: Declare the discordrb module separately in webhooks/version
|
diff --git a/src/main/java/org/killbill/billing/client/KillBillClient.java b/src/main/java/org/killbill/billing/client/KillBillClient.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/killbill/billing/client/KillBillClient.java
+++ b/src/main/java/org/killbill/billing/client/KillBillClient.java
@@ -2,6 +2,7 @@
* Copyright 2010-2013 Ning, Inc.
* Copyright 2014 Groupon, Inc
* Copyright 2014 The Billing Project, LLC
+ * Copyright 2015 Cloudyle GmbH
*
* The Billing Project licenses this file to you under the Apache License, version 2.0
* (the "License"); you may not use this file except in compliance with the
|
Update KillBillClient.java
|
diff --git a/src/main/java/com/buschmais/jqassistant/scm/cli/task/AnalyzeTask.java b/src/main/java/com/buschmais/jqassistant/scm/cli/task/AnalyzeTask.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/buschmais/jqassistant/scm/cli/task/AnalyzeTask.java
+++ b/src/main/java/com/buschmais/jqassistant/scm/cli/task/AnalyzeTask.java
@@ -74,12 +74,9 @@ public class AnalyzeTask extends AbstractAnalyzeTask {
try {
final ReportHelper reportHelper = new ReportHelper(getLog());
final int conceptViolations = reportHelper.verifyConceptResults(severity, inMemoryReportWriter);
- if (conceptViolations > 0) {
- throw new CliRuleViolationException(conceptViolations + " concept(s) returned empty results!");
- }
final int constraintViolations = reportHelper.verifyConstraintResults(severity, inMemoryReportWriter);
- if (constraintViolations > 0) {
- throw new CliRuleViolationException(constraintViolations + " constraint(s) violated!");
+ if (conceptViolations > 0 || constraintViolations > 0) {
+ throw new CliRuleViolationException("Violations detected: " + conceptViolations + " concepts, " + constraintViolations + " constraints");
}
} finally {
store.commitTransaction();
|
#<I> verify override of rule severities
|
diff --git a/sbt-compiler-maven-plugin/src/main/java/com/google/code/sbt/compiler/plugin/SBTAddManagedSourcesMojo.java b/sbt-compiler-maven-plugin/src/main/java/com/google/code/sbt/compiler/plugin/SBTAddManagedSourcesMojo.java
index <HASH>..<HASH> 100644
--- a/sbt-compiler-maven-plugin/src/main/java/com/google/code/sbt/compiler/plugin/SBTAddManagedSourcesMojo.java
+++ b/sbt-compiler-maven-plugin/src/main/java/com/google/code/sbt/compiler/plugin/SBTAddManagedSourcesMojo.java
@@ -28,7 +28,7 @@ import org.apache.maven.project.MavenProject;
/**
* Add managed source root, if it is not already added.<br><br>
* Helper mojo.<br>
- * Adds <code>${project.build.directory}/src_managed</code> is added to project's compile source roots
+ * Adds <code>${project.build.directory}/src_managed</code> as compile source root
* even if it does not exist yet (it may be created later by source generators).
*
* @author <a href="mailto:gslowikowski@gmail.com">Grzegorz Slowikowski</a>
|
Slightly improved javadoc comment.
|
diff --git a/src/index.js b/src/index.js
index <HASH>..<HASH> 100644
--- a/src/index.js
+++ b/src/index.js
@@ -1,5 +1,5 @@
//let baseURL = "http://svcs.ebay.com/services/search/FindingService/v1";
-let makeRequest = require('./request');
+let { makeRequest, base64Encode } = require('./request');
let urlObject = require('./buildURL');
function Ebay(options) {
@@ -90,6 +90,20 @@ Ebay.prototype = {
}, (error) => {
console.log(error);
})
+ },
+
+ getItem: function (itemId) {
+ if (!itemId) throw new Error("Item Id is required");
+
+
+ },
+
+ setAccessToken: function (token) {
+ this.options.access_token = token;
+ },
+
+ getAccessToken: function () {
+
}
};
diff --git a/src/request.js b/src/request.js
index <HASH>..<HASH> 100644
--- a/src/request.js
+++ b/src/request.js
@@ -17,4 +17,10 @@ let makeRequest = function makeRequest(url) {
}
-module.exports = makeRequest;
\ No newline at end of file
+
+let base64Encode = (encodeData) => {
+ let buff = new Buffer(encodeData);
+ return buff.toString('base64');
+}
+
+module.exports = { makeRequest, base64Encode };
\ No newline at end of file
|
added base<I> encoding
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -82,7 +82,7 @@ setup( name='batman-package',
author_email = 'laura.kreidberg@gmail.com',
url = 'https://github.com/lkreidberg/batman',
packages =['batman'],
- license = ['GNU GPLv3'],
+ license = 'GNU GPLv3',
description ='Fast transit light curve modeling',
classifiers = [
'Development Status :: 5 - Production/Stable',
|
setup.py: Fix pip install error in Python <I> due to license string
|
diff --git a/lib/svtplay_dl/service/tv4play.py b/lib/svtplay_dl/service/tv4play.py
index <HASH>..<HASH> 100644
--- a/lib/svtplay_dl/service/tv4play.py
+++ b/lib/svtplay_dl/service/tv4play.py
@@ -104,7 +104,7 @@ class Tv4play(Service, OpenGraphThumbMixin):
janson2 = jansson["props"]["pageProps"]["initialApolloState"]
show = jansson["query"]["nid"]
- program = janson2[f"Program:{show}"]
+ program = janson2[f'Program:{{"nid":"{show}"}}']
episodes_panel = []
clips_panel = []
for panel in program["panels"]:
|
tv4play: fix so that finding all episodes works
|
diff --git a/mammoth/docx/__init__.py b/mammoth/docx/__init__.py
index <HASH>..<HASH> 100644
--- a/mammoth/docx/__init__.py
+++ b/mammoth/docx/__init__.py
@@ -50,14 +50,14 @@ def _read_comments(zip_file, body_readers):
def _read_document(zip_file, body_readers, notes, comments):
- file_relationships = _try_read_entry_or_default(
+ package_relationships = _try_read_entry_or_default(
zip_file,
"_rels/.rels",
read_relationships_xml_element,
default=Relationships.EMPTY,
)
- document_filename = _find_document_filename(zip_file, file_relationships)
+ document_filename = _find_document_filename(zip_file, package_relationships)
with zip_file.open(document_filename) as document_fileobj:
document_xml = office_xml.read(document_fileobj)
|
Rename file_relationships to package_relationships
|
diff --git a/lib/emailMixin.js b/lib/emailMixin.js
index <HASH>..<HASH> 100644
--- a/lib/emailMixin.js
+++ b/lib/emailMixin.js
@@ -179,8 +179,10 @@ module.exports = function(self) {
}
var finalData = {};
- var baseUrl = self.options.baseUrl || req.baseUrl || (req.protocol + '://' + req.get('Host'));
-
+ // Allow middleware to supply baseUrl; if it's not there
+ // use the sitewide option; if that's not there construct it
+ // from what we do know about the request
+ var baseUrl = req.baseUrl || self.options.baseUrl || (req.protocol + '://' + req.get('Host'));
var parsed = urls.parse(baseUrl);
var host = parsed.host;
var protocol = parsed.protocol;
@@ -218,7 +220,6 @@ module.exports = function(self) {
text: module.render(template + '.txt', finalData, req),
html: module.render(template + '.html', finalData, req)
};
- // console.log(message);
return module._mailer.sendMail(message, callback);
};
|
allow req.baseUrl to beat out the baseUrl option; we never set it, so assume it was set thoughtfully by a middleware dev and should be respected
|
diff --git a/bugzilla/bug.py b/bugzilla/bug.py
index <HASH>..<HASH> 100644
--- a/bugzilla/bug.py
+++ b/bugzilla/bug.py
@@ -93,6 +93,10 @@ class _Bug(object):
"refresh(). This will be slow, if you want to avoid "
"this, properly use query/getbug include_fields.",
self.bug_id, name)
+
+ # We pass the attribute name to getbug, since for something like
+ # 'attachments' which downloads lots of data we really want the
+ # user to opt in.
self.refresh(fields=[name])
refreshed = True
|
bug: Comment about our use of refresh(fields=X)
|
diff --git a/favorites/actions/toggleFavorites.js b/favorites/actions/toggleFavorites.js
index <HASH>..<HASH> 100644
--- a/favorites/actions/toggleFavorites.js
+++ b/favorites/actions/toggleFavorites.js
@@ -90,7 +90,7 @@ const removeFavorites = productId => (dispatch) => {
new PipelineRequest('deleteFavorites')
.setInput({ productId })
.dispatch()
- .then(res(receiveRemoveFavorites()))
+ .then(() => res(receiveRemoveFavorites()))
.catch(rej);
});
diff --git a/favorites/selectors/index.js b/favorites/selectors/index.js
index <HASH>..<HASH> 100644
--- a/favorites/selectors/index.js
+++ b/favorites/selectors/index.js
@@ -70,6 +70,9 @@ export const hasFavorites = createSelector(
count => !!count
);
+/**
+ * Returns true when the current product is on the favorites list
+ */
export const isCurrentProductOnFavoriteList = createSelector(
getCurrentProductId,
getFavoritesProductsIds,
|
CON-<I> Users can remove items from their favorite list
- js doc and bugfix
|
diff --git a/lib/gruff/base.rb b/lib/gruff/base.rb
index <HASH>..<HASH> 100644
--- a/lib/gruff/base.rb
+++ b/lib/gruff/base.rb
@@ -206,6 +206,9 @@ module Gruff
@raw_columns.freeze
@raw_rows.freeze
+ @scale = @columns / @raw_columns
+ @scale.freeze
+
@marker_count = nil
@maximum_value = @minimum_value = nil
@increment = nil
@@ -215,8 +218,6 @@ module Gruff
@title = nil
@title_font = nil
- @scale = @columns / @raw_columns
-
@font = nil
@bold_title = true
|
refactor: Freeze @scale as constant
|
diff --git a/SingularityS3Uploader/src/main/java/com/hubspot/singularity/s3uploader/config/SingularityS3UploaderConfiguration.java b/SingularityS3Uploader/src/main/java/com/hubspot/singularity/s3uploader/config/SingularityS3UploaderConfiguration.java
index <HASH>..<HASH> 100644
--- a/SingularityS3Uploader/src/main/java/com/hubspot/singularity/s3uploader/config/SingularityS3UploaderConfiguration.java
+++ b/SingularityS3Uploader/src/main/java/com/hubspot/singularity/s3uploader/config/SingularityS3UploaderConfiguration.java
@@ -44,6 +44,7 @@ public class SingularityS3UploaderConfiguration extends BaseRunnerConfiguration
private Optional<String> s3SecretKey = Optional.absent();
@Max(5368709120L)
+ @Min(0)
@JsonProperty
private long maxSingleUploadSizeBytes = 5368709120L;
@@ -51,7 +52,7 @@ public class SingularityS3UploaderConfiguration extends BaseRunnerConfiguration
@JsonProperty
private long uploadPartSize = 20971520L;
- @Min(500)
+ @Min(0)
@JsonProperty
private int retryWaitMs = 1000;
|
add min 0 for uploader fields
|
diff --git a/lib/chatwork/chatwork_error.rb b/lib/chatwork/chatwork_error.rb
index <HASH>..<HASH> 100644
--- a/lib/chatwork/chatwork_error.rb
+++ b/lib/chatwork/chatwork_error.rb
@@ -3,13 +3,15 @@ module ChatWork
class ChatWorkError < StandardError
def self.from_response(status, body)
+ # HTTP status 204 don't have body.
+ return APIError.new(status, "") if status == 204
+
hash =
begin
JSON.load(body)
rescue JSON::ParserError => e
return ChatWork::APIConnectionError.new("Response JSON is broken. #{e.message}: #{body}", e)
end
-
unless hash['errors']
return APIConnectionError.new("Invalid response #{body}")
end
diff --git a/lib/chatwork/client.rb b/lib/chatwork/client.rb
index <HASH>..<HASH> 100644
--- a/lib/chatwork/client.rb
+++ b/lib/chatwork/client.rb
@@ -18,6 +18,8 @@ module ChatWork
def handle_response(response)
case response.status
+ when 204
+ ChatWork::ChatWorkError.from_response(response.status, response.body)
when 200..299
begin
JSON.parse(response.body)
|
Add code to solve <I> error.
<I> will occur when the API has no results.
|
diff --git a/activesupport/test/testing/file_fixtures_test.rb b/activesupport/test/testing/file_fixtures_test.rb
index <HASH>..<HASH> 100644
--- a/activesupport/test/testing/file_fixtures_test.rb
+++ b/activesupport/test/testing/file_fixtures_test.rb
@@ -1,5 +1,7 @@
require 'abstract_unit'
+require 'pathname'
+
class FileFixturesTest < ActiveSupport::TestCase
self.file_fixture_path = File.expand_path("../../file_fixtures", __FILE__)
|
Pathname might not always be initialized.
Require 'pathname' explicitly
|
diff --git a/go/libkb/track.go b/go/libkb/track.go
index <HASH>..<HASH> 100644
--- a/go/libkb/track.go
+++ b/go/libkb/track.go
@@ -190,7 +190,7 @@ func (t TrackDiffNone) IsSameAsTracked() bool {
}
func (t TrackDiffNone) ToDisplayString() string {
- return "tracked"
+ return "followed"
}
func (t TrackDiffNone) ToDisplayMarkup() *Markup {
return NewMarkup(t.ToDisplayString())
|
track -> follow (#<I>)
|
diff --git a/lib/beaker-pe/install/pe_utils.rb b/lib/beaker-pe/install/pe_utils.rb
index <HASH>..<HASH> 100644
--- a/lib/beaker-pe/install/pe_utils.rb
+++ b/lib/beaker-pe/install/pe_utils.rb
@@ -223,6 +223,7 @@ module Beaker
# @option opts [String] :pe_ver Default PE version to install or upgrade to
# (Otherwise uses individual hosts pe_ver)
# @option opts [Boolean] :pe_debug (false) Should we run the installer in debug mode?
+ # @option opts [Boolean] :interactive (false) Should we run the installer in interactive mode?
# @example
# on host, "#{installer_cmd(host, opts)} -a #{host['working_dir']}/answers"
# @api private
@@ -241,7 +242,7 @@ module Beaker
else
pe_debug = host[:pe_debug] || opts[:pe_debug] ? ' -D' : ''
pe_cmd = "cd #{host['working_dir']}/#{host['dist']} && ./#{host['pe_installer']}#{pe_debug}"
- if ! version_is_less(host['pe_ver'], '2016.2.1')
+ if ! version_is_less(host['pe_ver'], '2016.2.1') && ! opts[:interactive]
# -y option sets "assume yes" mode where yes or whatever default will be assumed
pe_cmd += " -y"
end
|
(feature) Allow install methods to accept an interactive option
This PR adds logic to allow an interactive option in Beaker. Although Beaker will not be able to install successfully without `-y`, there are (and may be more) cases where the installer is tested in interactive mode.
|
diff --git a/spyder/plugins/ipythonconsole/widgets/client.py b/spyder/plugins/ipythonconsole/widgets/client.py
index <HASH>..<HASH> 100644
--- a/spyder/plugins/ipythonconsole/widgets/client.py
+++ b/spyder/plugins/ipythonconsole/widgets/client.py
@@ -750,6 +750,7 @@ class ClientWidget(QWidget, SaveHistoryMixin):
self.info_page = self.blank_page
self.set_info_page()
self.shellwidget.show()
+ self.get_control().setFocus()
def _read_stderr(self):
"""Read the stderr file of the kernel."""
|
IPython console: Set focus to the console after hiding the loading page
|
diff --git a/future/tests/test_bytes.py b/future/tests/test_bytes.py
index <HASH>..<HASH> 100644
--- a/future/tests/test_bytes.py
+++ b/future/tests/test_bytes.py
@@ -175,10 +175,10 @@ class TestBytes(unittest.TestCase):
self.assertEqual(2, bytes(b'AB:CD:E').find(b':'))
def test_rfind_not_found(self):
- self.assertEqual(-1, bytes(b'ABCDE').find(b':'))
+ self.assertEqual(-1, bytes(b'ABCDE').rfind(b':'))
def test_rfind_found(self):
- self.assertEqual(4, bytes(b'AB:CD:E').find(b':'))
+ self.assertEqual(5, bytes(b'AB:CD:E').rfind(b':'))
def test_bytes_join_bytes(self):
b = bytes(b' * ')
|
Fix rfind tests in test_bytes
|
diff --git a/gogs.go b/gogs.go
index <HASH>..<HASH> 100644
--- a/gogs.go
+++ b/gogs.go
@@ -14,7 +14,7 @@ import (
)
func Version() string {
- return "0.8.1"
+ return "0.8.2"
}
// Client represents a Gogs API client.
diff --git a/issue.go b/issue.go
index <HASH>..<HASH> 100644
--- a/issue.go
+++ b/issue.go
@@ -53,6 +53,7 @@ type CreateIssueOption struct {
Assignee string `json:"assignee"`
Milestone int64 `json:"milestone"`
Labels []int64 `json:"labels"`
+ Closed bool `json:"closed"`
}
func (c *Client) CreateIssue(owner, repo string, opt CreateIssueOption) (*Issue, error) {
|
issue: add field to indicate state when creating
|
diff --git a/library/SimplePie.php b/library/SimplePie.php
index <HASH>..<HASH> 100755
--- a/library/SimplePie.php
+++ b/library/SimplePie.php
@@ -1294,6 +1294,7 @@ class SimplePie
// Check absolute bare minimum requirements.
if (!extension_loaded('xml') || !extension_loaded('pcre'))
{
+ $this->error = 'XML or PCRE extensions not loaded!';
return false;
}
// Then check the xml extension is sane (i.e., libxml 2.7.x issue on PHP < 5.2.9 and libxml 2.7.0 to 2.7.2 on any version) if we don't have xmlreader.
|
Error message when XML or PCRE extensions missing
The XML extension is not loaded by default on PHP 7. It is useful to have
the reason for the failure in the error message instead of returning an
empty error message.
<URL>
|
diff --git a/src/Core/Router.php b/src/Core/Router.php
index <HASH>..<HASH> 100644
--- a/src/Core/Router.php
+++ b/src/Core/Router.php
@@ -212,6 +212,9 @@ class Router
switch ($method) {
// POST method
case HttpMethods::POST:
+ $postData = array_merge($_GET, $_POST);
+ return $postData ?: [];
+ break;
// PUT method
case HttpMethods::PUT:
// PATH method
|
fix form/multipart post data in engine
|
diff --git a/src/Kernel/Application.php b/src/Kernel/Application.php
index <HASH>..<HASH> 100755
--- a/src/Kernel/Application.php
+++ b/src/Kernel/Application.php
@@ -4,6 +4,7 @@ namespace Encore\Kernel;
use Encore\Container\Container;
use Symfony\Component\Debug\Debug;
+use Encore\Container\ServiceProvider;
use Encore\Config\ServiceProvider as ConfigServiceProvider;
class Application extends Container
@@ -132,7 +133,9 @@ class Application extends Container
*/
protected function registerProvider(ServiceProvider $provider, $force = false)
{
- parent::registerProvider();
+ parent::registerProvider($provider, $force);
+
+ if ( ! in_array($provider, $this->registered)) return;
if ($this->booted and method_exists($provider, 'booted')) {
$provider->boot();
|
Fixes to registerProvider method
|
diff --git a/src/DependencyInjection/Configuration.php b/src/DependencyInjection/Configuration.php
index <HASH>..<HASH> 100644
--- a/src/DependencyInjection/Configuration.php
+++ b/src/DependencyInjection/Configuration.php
@@ -41,8 +41,13 @@ class Configuration implements ConfigurationInterface
*/
public function getConfigTreeBuilder(): TreeBuilder
{
- $treeBuilder = new TreeBuilder('contentful', 'array', $this->builder);
- $root = $treeBuilder->getRootNode();
+ if (\method_exists(TreeBuilder::class, 'getRootNode')) {
+ $treeBuilder = new TreeBuilder('contentful', 'array', $this->builder);
+ $root = $treeBuilder->getRootNode();
+ } else {
+ $treeBuilder = new TreeBuilder();
+ $root = $treeBuilder->root('contentful', 'array', $this->builder);
+ }
$root
->addDefaultsIfNotSet()
|
Use method_exists to provide compatibility with very old versions of Symfony.
|
diff --git a/rejected/consumer.py b/rejected/consumer.py
index <HASH>..<HASH> 100644
--- a/rejected/consumer.py
+++ b/rejected/consumer.py
@@ -735,10 +735,12 @@ class PublishingConsumer(Consumer):
# Publish the message
self.logger.debug('Publishing message to %s:%s', exchange, routing_key)
- self._channel.basic_publish(exchange=exchange,
- routing_key=routing_key,
- properties=msg_props,
- body=body)
+ with self.statsd_track_duration('publish.{}.{}'.format(exchange,
+ routing_key)):
+ self._channel.basic_publish(exchange=exchange,
+ routing_key=routing_key,
+ properties=msg_props,
+ body=body)
def reply(self, response_body, properties,
auto_id=True,
|
Track publishing time by exchange and routing key
|
diff --git a/src/config/bindings.php b/src/config/bindings.php
index <HASH>..<HASH> 100644
--- a/src/config/bindings.php
+++ b/src/config/bindings.php
@@ -5,7 +5,7 @@ return array(
* Register the processor binding.
* Default: 'Lablog\Lablog\Processor\MarkdownProcessor'
*/
- 'Lablog\Lablog\Processor\ProcessorInterface' => 'Lablog\Parsedown\MarkdownProcessor',
+ 'Lablog\Lablog\Processor\ProcessorInterface' => 'Lablog\Markdown\MarkdownProcessor',
/**
* Register the page type binding.
|
Corrected spelling mistake, replaced parsedown with markdown.
|
diff --git a/src/components/networked.js b/src/components/networked.js
index <HASH>..<HASH> 100644
--- a/src/components/networked.js
+++ b/src/components/networked.js
@@ -281,7 +281,9 @@ AFRAME.registerComponent('networked', {
return;
}
- if (this.data.owner !== entityData.owner && this.data.lastOwnerTime < entityData.lastOwnerTime) {
+ if (this.data.owner !== entityData.owner &&
+ (this.data.lastOwnerTime < entityData.lastOwnerTime ||
+ (this.data.lastOwnerTime === entityData.lastOwnerTime && this.data.owner < entityData.owner))) {
// TODO: File issue for partial set attribute.
// this.el.setAttribute("networked", { owner: entityData.owner });
|
add check in case both lastOwnerTimes are the same
|
diff --git a/test/unit/FieldType/Slug/Generator/EntityPrePersistGeneratorTest.php b/test/unit/FieldType/Slug/Generator/EntityPrePersistGeneratorTest.php
index <HASH>..<HASH> 100644
--- a/test/unit/FieldType/Slug/Generator/EntityPrePersistGeneratorTest.php
+++ b/test/unit/FieldType/Slug/Generator/EntityPrePersistGeneratorTest.php
@@ -48,7 +48,8 @@ class EntityPrePersistGeneratorTest extends TestCase
$this->assertInstanceOf(Template::class, $generatedTemplate);
$this->assertFalse((string)$generatedTemplate === '');
$this->assertEquals(
- '$this->niets = Tardigrades\Helper\StringConverter::toSlug($this->getSnail() . \'-\' . $this->getSexy());
+ '// phpcs:ignore Generic.Files.LineLength -- easier than to write a method that will fix line ending and indentation...
+$this->niets = Tardigrades\Helper\StringConverter::toSlug($this->getSnail() . \'-\' . $this->getSexy());
',
(string) $generatedTemplate);
}
|
added phpcs comment to test
|
diff --git a/pyoko/db/adapter/db_riak.py b/pyoko/db/adapter/db_riak.py
index <HASH>..<HASH> 100644
--- a/pyoko/db/adapter/db_riak.py
+++ b/pyoko/db/adapter/db_riak.py
@@ -62,7 +62,6 @@ class BlockSave(object):
key_list = list(set(Adapter.block_saved_keys))
indexed_obj_count = self.mdl.objects.filter(key__in=key_list)
while Adapter.block_saved_keys and indexed_obj_count.count() < len(key_list):
- print("save %s" % Adapter.block_saved_keys)
time.sleep(.4)
Adapter.COLLECT_SAVES = False
@@ -154,7 +153,8 @@ class Adapter(BaseAdapter):
# t1 = time.time()
obj = self.bucket.get(doc['_yz_rk'])
if not obj.exists:
- raise ObjectDoesNotExist("Possibly a Riak <-> Solr sync delay issue!")
+ raise ObjectDoesNotExist("We got %s from Solr for %s bucket but cannot find it in the Riak" % (
+ doc['_yz_rk'], self._model_class))
yield obj.data, obj.key
if settings.DEBUG:
sys.PYOKO_STAT_COUNTER['read'] += 1
|
removed leftover print statement
added a more explicit error message for the 'key found in Solr but not in Riak' exception.
rref #<I> ref GH-<I>
|
diff --git a/each.js b/each.js
index <HASH>..<HASH> 100644
--- a/each.js
+++ b/each.js
@@ -12,6 +12,7 @@ module.exports = function(items, ctx, fn) {
}
} else {
for (var key in items) {
+ if (!items.hasOwnProperty(key)) { continue }
fn.call(ctx, items[key], key)
}
}
|
Don't loop over inherited properties in each
|
diff --git a/rAppid.js b/rAppid.js
index <HASH>..<HASH> 100644
--- a/rAppid.js
+++ b/rAppid.js
@@ -196,7 +196,7 @@
if (err) {
callback(err);
} else {
- applicationContext.createApplicationInstance(document, function (err, stage, application) {
+ applicationContext.createApplicationInstance(window, function (err, stage, application) {
if (err) {
callback(err);
} else {
@@ -357,10 +357,20 @@
this.$config = config;
};
- ApplicationContext.prototype.createApplicationInstance = function (document, callback) {
+ ApplicationContext.prototype.createApplicationInstance = function (window, callback) {
+
+ var document;
+
// create instance
var applicationFactory = this.$applicationFactory;
+ if (window.document) {
+ document = window.document;
+ } else {
+ document = window;
+ window = null;
+ }
+
// TODO: add node support for window
var stage = new Stage(this.$requirejsContext, this, document, window);
|
create application context with window instead of document
|
diff --git a/EventListener/ConfigSubscriber.php b/EventListener/ConfigSubscriber.php
index <HASH>..<HASH> 100644
--- a/EventListener/ConfigSubscriber.php
+++ b/EventListener/ConfigSubscriber.php
@@ -149,7 +149,7 @@ class ConfigSubscriber extends CommonSubscriber
// $this->extendedFields = $this->leadModel->getExtendedEntities(['keys' => 'alias']);
// @todo - check permissions before including Secure.
- $fields = $this->leadModel->getEntities(
+ $fields = $this->fieldModel->getEntities(
[
[
'column' => 'f.isPublished',
|
[ENG-<I>] fix model instance from lead to field
|
diff --git a/src/main/java/graphql/introspection/Introspection.java b/src/main/java/graphql/introspection/Introspection.java
index <HASH>..<HASH> 100644
--- a/src/main/java/graphql/introspection/Introspection.java
+++ b/src/main/java/graphql/introspection/Introspection.java
@@ -408,6 +408,7 @@ public class Introspection {
.description("An enum describing valid locations where a directive can be placed")
.value("QUERY", DirectiveLocation.QUERY, "Indicates the directive is valid on queries.")
.value("MUTATION", DirectiveLocation.MUTATION, "Indicates the directive is valid on mutations.")
+ .value("SUBSCRIPTION", DirectiveLocation.SUBSCRIPTION, "Indicates the directive is valid on subscriptions.")
.value("FIELD", DirectiveLocation.FIELD, "Indicates the directive is valid on fields.")
.value("FRAGMENT_DEFINITION", DirectiveLocation.FRAGMENT_DEFINITION, "Indicates the directive is valid on fragment definitions.")
.value("FRAGMENT_SPREAD", DirectiveLocation.FRAGMENT_SPREAD, "Indicates the directive is valid on fragment spreads.")
|
Add SUBSCRIPTION to __DirectiveLocation enum
|
diff --git a/master/buildbot/test/unit/test_db_migrate_versions_018_add_sourcestampset.py b/master/buildbot/test/unit/test_db_migrate_versions_018_add_sourcestampset.py
index <HASH>..<HASH> 100644
--- a/master/buildbot/test/unit/test_db_migrate_versions_018_add_sourcestampset.py
+++ b/master/buildbot/test/unit/test_db_migrate_versions_018_add_sourcestampset.py
@@ -44,8 +44,8 @@ class Migration(migration.MigrateTestMixin, unittest.TestCase):
sa.Column('results', sa.SmallInteger),
)
self.buildsets.create(bind=conn)
- sa.Index('buildsets_complete', self.buildsets.c.complete)
- sa.Index('buildsets_submitted_at', self.buildsets.c.submitted_at)
+ sa.Index('buildsets_complete', self.buildsets.c.complete).create()
+ sa.Index('buildsets_submitted_at', self.buildsets.c.submitted_at).create()
self.patches = sa.Table('patches', metadata,
sa.Column('id', sa.Integer, primary_key=True),
|
actually create the indexes on buildsets in test setup
|
diff --git a/core/src/main/java/com/orientechnologies/orient/core/type/tree/OMVRBTreeRID.java b/core/src/main/java/com/orientechnologies/orient/core/type/tree/OMVRBTreeRID.java
index <HASH>..<HASH> 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/type/tree/OMVRBTreeRID.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/type/tree/OMVRBTreeRID.java
@@ -301,8 +301,10 @@ public class OMVRBTreeRID extends OMVRBTreePersistent<OIdentifiable, OIdentifiab
@Override
public int commitChanges() {
- if (!((OMVRBTreeRIDProvider) getProvider()).isEmbeddedStreaming())
+ if (!((OMVRBTreeRIDProvider) getProvider()).isEmbeddedStreaming()){
+ saveAllNewEntries();
return super.commitChanges();
+ }
return 0;
}
|
Fixed bug in OMVRBTreeRID: sometimes add was lazy and, if the db was closed, entries might not be saved to disk
|
diff --git a/discover/client.go b/discover/client.go
index <HASH>..<HASH> 100644
--- a/discover/client.go
+++ b/discover/client.go
@@ -205,7 +205,7 @@ func (s *ServiceSet) Unwatch(ch chan *ServiceUpdate) {
}
func (s *ServiceSet) Wait() (*ServiceUpdate, error) {
- updateCh := make(chan *ServiceUpdate, 1)
+ updateCh := make(chan *ServiceUpdate, 1024) // buffer because of Watch bringCurrent race bug
s.Watch(updateCh, true)
defer s.Unwatch(updateCh)
select {
diff --git a/discover/client_test.go b/discover/client_test.go
index <HASH>..<HASH> 100644
--- a/discover/client_test.go
+++ b/discover/client_test.go
@@ -145,7 +145,12 @@ func TestClient(t *testing.T) {
t.Fatal("Registering service failed", err)
}
for i := 0; i < 5; i++ {
- update := <-updates
+ var update *ServiceUpdate
+ select {
+ case update = <-updates:
+ case <-time.After(3 * time.Second):
+ t.Fatal("Timeout exceeded")
+ }
if update.Online != true {
t.Fatal("Service update of unexected status: ", update, i)
}
|
discoverd: ran into race regarding bringCurrent. temp workaround until handled as separate issue
|
diff --git a/tests/test_mixture_smoothing.py b/tests/test_mixture_smoothing.py
index <HASH>..<HASH> 100644
--- a/tests/test_mixture_smoothing.py
+++ b/tests/test_mixture_smoothing.py
@@ -23,3 +23,5 @@ class MS_Tester(unittest.TestCase):
0.0911574, 0.0911574]), array([1, 1, 1, 1])))
+if __name__=="__main__":
+ unittest.main()
|
adding if __name__=='__main__' to tests missing it
|
diff --git a/lib/markdown/highlightLines.js b/lib/markdown/highlightLines.js
index <HASH>..<HASH> 100644
--- a/lib/markdown/highlightLines.js
+++ b/lib/markdown/highlightLines.js
@@ -39,7 +39,7 @@ module.exports = md => {
})
if (inRange) {
return {
- code: `<span class="highlighted-line">${split}</span>`,
+ code: `<span class="highlighted-line">${split || '\n'}</span>`,
highlighted: true
}
}
|
fix: highlight line issue for empty lines
|
diff --git a/trezorlib/client.py b/trezorlib/client.py
index <HASH>..<HASH> 100644
--- a/trezorlib/client.py
+++ b/trezorlib/client.py
@@ -384,6 +384,11 @@ class ProtocolMixin(object):
return []
n = n.split('/')
+
+ # m/a/b/c => a/b/c
+ if n[0] == 'm':
+ n = n[1:]
+
path = []
for x in n:
prime = False
|
trezorctl: also accept m/a/b/c as get_address path
|
diff --git a/src/Form/EventListener/GenerateAddressFieldsSubscriber.php b/src/Form/EventListener/GenerateAddressFieldsSubscriber.php
index <HASH>..<HASH> 100644
--- a/src/Form/EventListener/GenerateAddressFieldsSubscriber.php
+++ b/src/Form/EventListener/GenerateAddressFieldsSubscriber.php
@@ -175,6 +175,7 @@ class GenerateAddressFieldsSubscriber implements EventSubscriberInterface
AddressFormatInterface::ADMINISTRATIVE_AREA_TYPE_STATE => 'State',
AddressFormatInterface::POSTAL_CODE_TYPE_POSTAL => 'Postal Code',
AddressFormatInterface::POSTAL_CODE_TYPE_ZIP => 'ZIP code',
+ AddressFormatInterface::POSTAL_CODE_TYPE_PIN => 'PIN code',
);
// Determine the correct administrative area label.
diff --git a/src/Model/AddressFormatInterface.php b/src/Model/AddressFormatInterface.php
index <HASH>..<HASH> 100644
--- a/src/Model/AddressFormatInterface.php
+++ b/src/Model/AddressFormatInterface.php
@@ -37,6 +37,7 @@ interface AddressFormatInterface
// Postal code types.
const POSTAL_CODE_TYPE_POSTAL = 'postal';
const POSTAL_CODE_TYPE_ZIP = 'zip';
+ const POSTAL_CODE_TYPE_PIN = 'pin';
/**
* Gets the two-letter country code.
|
Add the PIN postal code type (used by India).
|
diff --git a/BeforeQueryTrait.php b/BeforeQueryTrait.php
index <HASH>..<HASH> 100644
--- a/BeforeQueryTrait.php
+++ b/BeforeQueryTrait.php
@@ -7,6 +7,9 @@ trait BeforeQueryTrait
public static function find()
{
+ /**
+ * @var $obj ActiveRecord
+ */
$obj = new static;
$class = new \ReflectionClass($obj);
$condition = [];
@@ -15,6 +18,9 @@ trait BeforeQueryTrait
$condition = array_merge($condition, $property->getValue($obj));
}
}
- return (new \sibds\behaviors\TrashQuery($obj))->findRemoved()->andFilterWhere($condition);
+ if($obj->hasAttribute($obj->removedAttribute))
+ return (new \sibds\behaviors\TrashQuery($obj))->findRemoved()->andFilterWhere($condition);
+ else
+ return parent::find()->andFilterWhere($condition);
}
}
|
Update BeforeQueryTrait.php
Fix error with models that do not support "soft delete"
|
diff --git a/client/createReducer.js b/client/createReducer.js
index <HASH>..<HASH> 100644
--- a/client/createReducer.js
+++ b/client/createReducer.js
@@ -10,6 +10,9 @@ from '~routes/admin/authenticated/sidebar/community-settings/recipient/page'
// Reapop
import { reducer as notificationsReducer } from 'reapop'
+// Apollo
+import { client } from './store'
+
// Application
import auth from '~client/account/redux/reducers'
import wait from '~client/components/await/redux/reducers'
@@ -36,6 +39,7 @@ export default function createReducer (asyncReducers) {
recurringForm,
communityRecipientForm
}),
+ apollo: client.reducer(),
notifications: notificationsReducer(),
auth,
wait,
diff --git a/client/store.js b/client/store.js
index <HASH>..<HASH> 100644
--- a/client/store.js
+++ b/client/store.js
@@ -53,6 +53,8 @@ export function configureStore (initialState, thunkExtraArgument) {
})
)
+ middlewares.push(client.middleware())
+
let store = createStore(
createReducer(),
initialState,
|
chore(graphql): add reducer mode on apollo client
|
diff --git a/lib/jss/api_object/self_servable.rb b/lib/jss/api_object/self_servable.rb
index <HASH>..<HASH> 100644
--- a/lib/jss/api_object/self_servable.rb
+++ b/lib/jss/api_object/self_servable.rb
@@ -177,10 +177,12 @@ module JSS
notifications_supported: :ssvc_and_nctr,
notification_reminders: true
},
- JSS::MacApplication => { # TODO: add the correct values when Jamf fixes this bug
- in_self_service_data_path: nil, # [:general, :distribution_method],
- in_self_service: nil, # MAKE_AVAILABLE,
- not_in_self_service: nil, # AUTO_INSTALL_OR_PROMPT,
+ JSS::MacApplication => {
+ # in_self_service_data_path was finally implemnted in JamfPro 10.9
+ # Jamf Product Issue [PI-003773]
+ in_self_service_data_path: [:general, :deployment_type],
+ in_self_service: MAKE_AVAILABLE,
+ not_in_self_service: AUTO_INSTALL_OR_PROMPT,
targets: [:macos],
payload: :app,
can_display_in_categories: true,
|
jamf fixed the selfservice data for mac apps as of Jamf Pro <I>
|
diff --git a/jsonpb/jsonpb.go b/jsonpb/jsonpb.go
index <HASH>..<HASH> 100644
--- a/jsonpb/jsonpb.go
+++ b/jsonpb/jsonpb.go
@@ -1116,6 +1116,8 @@ func (s mapKeys) Swap(i, j int) { s[i], s[j] = s[j], s[i] }
func (s mapKeys) Less(i, j int) bool {
if k := s[i].Kind(); k == s[j].Kind() {
switch k {
+ case reflect.String:
+ return s[i].String() < s[j].String()
case reflect.Int32, reflect.Int64:
return s[i].Int() < s[j].Int()
case reflect.Uint32, reflect.Uint64:
|
jsonpb: avoid copying string-valued map-keys (#<I>)
|
diff --git a/connection.go b/connection.go
index <HASH>..<HASH> 100644
--- a/connection.go
+++ b/connection.go
@@ -534,7 +534,7 @@ func (cn *connection) updatePiecePriority(piece int) {
default:
panic(tpp)
}
- prio += piece
+ prio += piece / 2
cn.pieceRequestOrder.Set(piece, prio)
cn.updateRequests()
}
|
Reduce the impact of preferring earlier pieces
I think urgent pieces at the end of a torrent were getting fairly starved.
|
diff --git a/src/main/java/nl/jqno/equalsverifier/AbstractDelegationChecker.java b/src/main/java/nl/jqno/equalsverifier/AbstractDelegationChecker.java
index <HASH>..<HASH> 100644
--- a/src/main/java/nl/jqno/equalsverifier/AbstractDelegationChecker.java
+++ b/src/main/java/nl/jqno/equalsverifier/AbstractDelegationChecker.java
@@ -15,6 +15,7 @@
*/
package nl.jqno.equalsverifier;
+import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
import nl.jqno.equalsverifier.internal.*;
import java.lang.reflect.Field;
@@ -146,6 +147,7 @@ class AbstractDelegationChecker<T> implements Checker {
}
}
+ @SuppressFBWarnings(value = "DE_MIGHT_IGNORE", justification = "These exceptions will re-occur and be handled later.")
private <S> void checkAbstractMethods(Class<?> instanceClass, S instance, S copy, boolean prefabPossible) {
try {
instance.equals(copy);
|
FindBugs: might ignore exception
|
diff --git a/logfmt.go b/logfmt.go
index <HASH>..<HASH> 100644
--- a/logfmt.go
+++ b/logfmt.go
@@ -83,17 +83,8 @@ func writeBytesKey(buf *bytes.Buffer, b []byte) {
buf.Write(bytesNull)
return
}
- index := bytes.IndexFunc(b, invalidKey)
- if index < 0 {
- buf.Write(b)
- return
- }
- if index > 0 {
- buf.Write(b[0:index])
- b = b[index:]
- }
for {
- index = bytes.IndexFunc(b, invalidKey)
+ index := bytes.IndexFunc(b, invalidKey)
if index < 0 {
break
}
|
Remove unnecessary code in writeBytesKey
|
diff --git a/lib/cisco_node_utils/snmpuser.rb b/lib/cisco_node_utils/snmpuser.rb
index <HASH>..<HASH> 100644
--- a/lib/cisco_node_utils/snmpuser.rb
+++ b/lib/cisco_node_utils/snmpuser.rb
@@ -241,7 +241,7 @@ module Cisco
# Retrieve password hashes
hashed_pw = SnmpUser.auth_password('dummy_user', @engine_id)
if hashed_pw.nil?
- fail "SNMP dummy user #{dummy_user} #{@engine_id} was configured " \
+ fail "SNMP dummy user 'dummy_user' #{@engine_id} was configured " \
"but password is missing?\n" \
+ @@node.get(command: 'show run snmp all')
end
@@ -289,7 +289,7 @@ module Cisco
dummyau = SnmpUser.auth_password('dummy_user', @engine_id)
hashed_pw = SnmpUser.priv_password('dummy_user', @engine_id)
if hashed_pw.nil?
- fail "SNMP dummy user #{dummy_user} #{@engine_id} was configured " \
+ fail "SNMP dummy user 'dummy_user' #{@engine_id} was configured " \
"but password is missing?\n" \
+ @@node.get(command: 'show run snmp all')
end
|
snmpuser: dummy_user var does not exist
|
diff --git a/src/ConfigTrait.php b/src/ConfigTrait.php
index <HASH>..<HASH> 100644
--- a/src/ConfigTrait.php
+++ b/src/ConfigTrait.php
@@ -41,6 +41,7 @@ trait ConfigTrait
try {
$keys = func_get_args();
array_shift($keys);
+ \Assert\thatAll($keys)->string()->notEmpty();
$config = new Config($config->getKey($keys));
} catch (Exception $exception) {
throw new RuntimeException(
|
Assert that all keys are non-empty strings.
|
diff --git a/TYPO3.Flow/Classes/Monitor/ChangeDetectionStrategy/F3_FLOW3_Monitor_ChangeDetectionStrategy_ModificationTime.php b/TYPO3.Flow/Classes/Monitor/ChangeDetectionStrategy/F3_FLOW3_Monitor_ChangeDetectionStrategy_ModificationTime.php
index <HASH>..<HASH> 100644
--- a/TYPO3.Flow/Classes/Monitor/ChangeDetectionStrategy/F3_FLOW3_Monitor_ChangeDetectionStrategy_ModificationTime.php
+++ b/TYPO3.Flow/Classes/Monitor/ChangeDetectionStrategy/F3_FLOW3_Monitor_ChangeDetectionStrategy_ModificationTime.php
@@ -90,6 +90,8 @@ class ModificationTime implements \F3\FLOW3\Monitor\ChangeDetectionStrategyInter
return self::STATUS_CHANGED;
}
} else {
+ unset($this->filesAndModificationTimes[$pathAndFilename]);
+ $this->modificationTimesChanged = TRUE;
return self::STATUS_DELETED;
}
} else {
|
FLOW3:
* small fix to "modification time" change detection strategy
* fixes php lint bus error along the way - see r<I> for a previous case of that
Original-Commit-Hash: 0fe<I>e3fdf<I>f4db4c<I>d2b8a<I>fecdb1
|
diff --git a/src/nls/ru/strings.js b/src/nls/ru/strings.js
index <HASH>..<HASH> 100644
--- a/src/nls/ru/strings.js
+++ b/src/nls/ru/strings.js
@@ -257,7 +257,7 @@ define({
"BASEURL_ERROR_SEARCH_DISALLOWED" : "Основной URL не может содержать такие параметры поиска как \"{0}\".",
"BASEURL_ERROR_HASH_DISALLOWED" : "Основной URL не может содержать такие хеши как \"{0}\".",
"BASEURL_ERROR_INVALID_CHAR" : "Специальные символы как '{0}' должны быть %-экранированы.",
- "BASEURL_ERROR_UNKOWN_ERROR" : "Неизвестная ошибка при парсинге основного URL",
+ "BASEURL_ERROR_UNKNOWN_ERROR" : "Неизвестная ошибка при парсинге основного URL",
// extensions/default/DebugCommands
|
UNKOWN -> UNKNOWN
|
diff --git a/tests/unit/states/grains_test.py b/tests/unit/states/grains_test.py
index <HASH>..<HASH> 100644
--- a/tests/unit/states/grains_test.py
+++ b/tests/unit/states/grains_test.py
@@ -157,7 +157,7 @@ class GrainsTestCase(TestCase):
with patch.dict(grains.__opts__, {'test': True}):
with patch.dict(grains.__grains__, {self.name: [self.value]}):
- self.assertDictEqual(grains.list_absent(self.name, self.value),
+ self.assertDictEqual(grains.list_absent(self.name, [self.value]),
ret1)
self.assertDictEqual(grains.list_absent(self.name, self.value), ret2)
|
unit test update
grain value looks for list in assertion
|
diff --git a/lib/behat/behat_base.php b/lib/behat/behat_base.php
index <HASH>..<HASH> 100644
--- a/lib/behat/behat_base.php
+++ b/lib/behat/behat_base.php
@@ -597,6 +597,11 @@ class behat_base extends Behat\MinkExtension\Context\RawMinkContext {
* @throws ExpectationException
*/
protected function resize_window($windowsize) {
+ // Non JS don't support resize window.
+ if (!$this->running_javascript()) {
+ return;
+ }
+
switch ($windowsize) {
case "small":
$width = 640;
@@ -611,7 +616,7 @@ class behat_base extends Behat\MinkExtension\Context\RawMinkContext {
$height = 1600;
break;
default:
- preg_match('/^(small|medium|large|\d+x\d+)$/', $windowsize, $matches);
+ preg_match('/^(\d+x\d+)$/', $windowsize, $matches);
if (empty($matches) || (count($matches) != 2)) {
throw new ExpectationException("Invalid screen size, can't resize", $this->getSession());
}
|
MDL-<I> behat: Resize window only possible with javascript
|
diff --git a/watchtower/wtclient/session_queue.go b/watchtower/wtclient/session_queue.go
index <HASH>..<HASH> 100644
--- a/watchtower/wtclient/session_queue.go
+++ b/watchtower/wtclient/session_queue.go
@@ -316,8 +316,8 @@ func (q *sessionQueue) drainBackups() {
// before attempting to dequeue any pending updates.
stateUpdate, isPending, backupID, err := q.nextStateUpdate()
if err != nil {
- log.Errorf("SessionQueue(%s) unable to get next state "+
- "update: %v", err)
+ log.Errorf("SessionQueue(%v) unable to get next state "+
+ "update: %v", q.ID(), err)
return
}
@@ -557,7 +557,7 @@ func (q *sessionQueue) sendStateUpdate(conn wtserver.Peer,
// TODO(conner): borked watchtower
err = fmt.Errorf("unable to ack seqnum=%d: %v",
stateUpdate.SeqNum, err)
- log.Errorf("SessionQueue(%s) failed to ack update: %v", err)
+ log.Errorf("SessionQueue(%v) failed to ack update: %v", q.ID(), err)
return err
case err == wtdb.ErrLastAppliedReversion:
|
watchtower: fix linter errors
|
diff --git a/lib/MwbExporter/Formatter/Doctrine2/Yaml/Model/Table.php b/lib/MwbExporter/Formatter/Doctrine2/Yaml/Model/Table.php
index <HASH>..<HASH> 100644
--- a/lib/MwbExporter/Formatter/Doctrine2/Yaml/Model/Table.php
+++ b/lib/MwbExporter/Formatter/Doctrine2/Yaml/Model/Table.php
@@ -281,11 +281,12 @@ class Table extends BaseTable
return $this;
}
+ /**
+ * (non-PHPdoc)
+ * @see \MwbExporter\Model\Base::getVars()
+ */
protected function getVars()
{
- $vars = parent::getVars();
- $vars['%entity%'] = str_replace('\\', '.', $this->getModelNameAsFQCN());
-
- return $vars;
+ return array_merge(parent::getVars(), array('%entity%' => str_replace('\\', '.', $this->getModelNameAsFQCN())));
}
}
|
Merge model variables with those of its parent.
|
diff --git a/test.php b/test.php
index <HASH>..<HASH> 100644
--- a/test.php
+++ b/test.php
@@ -5,10 +5,9 @@ echo '<pre>';
set_time_limit(300);
-require realpath(dirname(__FILE__)) . '/GameQ/Autoloader.php';
+require realpath(dirname(__FILE__)) . '/src/GameQ/Autoloader.php';
-
-$gq = new GameQ();
+$gq = new \GameQ\GameQ();
/*$gq->addServer(array(
'id' => 1,
'type' => 'source',
@@ -19,7 +18,7 @@ $gq = new GameQ();
$gq->addServer(array(
//'id' => 2,
'type' => 'source',
- 'host' => '64.74.97.72:27017'
+ 'host' => '192.223.26.191:27015'
));
|
Updated test file while building as example for others.
|
diff --git a/tools/loggraphdiff/loggraphdiff.go b/tools/loggraphdiff/loggraphdiff.go
index <HASH>..<HASH> 100644
--- a/tools/loggraphdiff/loggraphdiff.go
+++ b/tools/loggraphdiff/loggraphdiff.go
@@ -49,11 +49,11 @@ func main() {
old, err := readGraph(os.Args[1])
if err != nil {
- log.Fatal("failed to read %s: %s", os.Args[1], err)
+ log.Fatalf("failed to read %s: %s", os.Args[1], err)
}
new, err := readGraph(os.Args[2])
if err != nil {
- log.Fatal("failed to read %s: %s", os.Args[1], err)
+ log.Fatalf("failed to read %s: %s", os.Args[1], err)
}
var nodes []string
@@ -84,7 +84,7 @@ func main() {
})
fmt.Println("digraph G {")
- fmt.Println(" rankdir = \"BT\";\n")
+ fmt.Print(" rankdir = \"BT\";\n\n")
for _, n := range nodes {
var attrs string
_, inOld := old.nodes[n]
|
tools/loggraphdiff: Fix vet warnings
|
diff --git a/scripts/github/merge_dev_to_feature.py b/scripts/github/merge_dev_to_feature.py
index <HASH>..<HASH> 100644
--- a/scripts/github/merge_dev_to_feature.py
+++ b/scripts/github/merge_dev_to_feature.py
@@ -7,6 +7,7 @@ REPO_NAME=os.environ.get('REPOSITORY')
MASTER_NAME=os.environ.get('MASTER','dev')
USERNAME=os.environ.get('USERNAME')
PASSWORD=os.environ.get('PASSWORD')
+BUILD_COMMIT=os.environ.get('BUILD_COMMIT', None)
g = Github(USERNAME,PASSWORD)
@@ -37,7 +38,10 @@ for branch in repo.get_branches():
# Get a comparison of master vs branch. compare.ahead_by means master is head of the branch.
# This orientation is necessary so the compare.files list lists files changed in master but not
# in the branch.
- compare = repo.compare(branch.commit.sha, master.commit.sha)
+ if BUILD_COMMIT:
+ compare = repo.compare(branch.commit.sha, BUILD_COMMIT)
+ else:
+ compare = repo.compare(branch.commit.sha, master.commit.sha)
if not compare.files:
print 'Skipping branch %s: branch has no files different than %s' % (branch.name, master.name)
continue
|
Added support for passing a commit id from master branch for merge
|
diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -18,7 +18,7 @@ function attachVirtuals(schema) {
var virtuals = [];
var keys = Object.keys(schema.virtuals);
for (var i = 0; i < keys.length; ++i) {
- if (!schema.virtuals[keys[i]].ref) {
+ if (!schema.virtuals[keys[i]].ref && (!schema.virtuals[keys[i]].options || !schema.virtuals[keys[i]].options.ref)) {
virtuals.push(keys[i]);
}
}
|
Fix for where 'ref' is in options
|
diff --git a/automl/synth.py b/automl/synth.py
index <HASH>..<HASH> 100644
--- a/automl/synth.py
+++ b/automl/synth.py
@@ -26,7 +26,7 @@ versions = ["v1beta1"]
# Generate automl GAPIC layer
# ----------------------------------------------------------------------------
for version in versions:
- library = gapic.py_library("automl", version)
+ library = gapic.py_library("automl", version, include_protos=True)
s.move(library / f"google/cloud/automl_{version}")
s.move(library / f"tests/unit/gapic/{version}")
s.move(library / f"docs/gapic/{version}")
diff --git a/datacatalog/synth.py b/datacatalog/synth.py
index <HASH>..<HASH> 100644
--- a/datacatalog/synth.py
+++ b/datacatalog/synth.py
@@ -29,6 +29,7 @@ library = gapic.py_library(
version,
config_path='/google/cloud/datacatalog/artman_datacatalog_v1beta1.yaml',
artman_output_name='datacatalog-v1beta1',
+ include_protos=True,
)
s.move(
|
Include protos in synth. (#<I>)
|
diff --git a/functions.php b/functions.php
index <HASH>..<HASH> 100644
--- a/functions.php
+++ b/functions.php
@@ -1024,7 +1024,7 @@ function pods_shortcode ( $tags, $content = null ) {
if ( !empty( $pod ) ) {
$tags[ 'name' ] = get_post_type();
- $tags[ 'id' ] = get_the_ID();
+ $id = $tags[ 'id' ] = get_the_ID();
}
}
@@ -1048,14 +1048,16 @@ function pods_shortcode ( $tags, $content = null ) {
return '<p>Please provide either a template or field name</p>';
}
- // id > slug (if both exist)
- $id = empty( $tags[ 'slug' ] ) ? null : $tags[ 'slug' ];
+ if ( !isset( $id ) ) {
+ // id > slug (if both exist)
+ $id = empty( $tags[ 'slug' ] ) ? null : $tags[ 'slug' ];
- if ( !empty ( $tags[ 'id' ] ) ) {
- $id = $tags[ 'id' ];
+ if ( !empty ( $tags[ 'id' ] ) ) {
+ $id = $tags[ 'id' ];
- if ( is_numeric( $id ) )
- $id = absint( $id );
+ if ( is_numeric( $id ) )
+ $id = absint( $id );
+ }
}
if ( !isset( $pod ) )
|
Minor tweaks to shortcode slug/id handling
|
diff --git a/lib/dynflow/rails/configuration.rb b/lib/dynflow/rails/configuration.rb
index <HASH>..<HASH> 100644
--- a/lib/dynflow/rails/configuration.rb
+++ b/lib/dynflow/rails/configuration.rb
@@ -168,7 +168,7 @@ module Dynflow
if remote?
false
else
- if defined?(::Sidekiq) && !Sidekiq.options[:dynflow_world].nil?
+ if defined?(::Sidekiq) && Sidekiq.options[:dynflow_executor]
::Dynflow::Executors::Sidekiq::Core
else
::Dynflow::Executors::Parallel::Core
|
Properly detect which executor core to use
We used to check if `Sidekiq.options[:dynflow_executor]` is not nil, but in
Rails this was being set after the world was initialized. That led to the Parallel
core being used instead of the Sidekiq one.
|
diff --git a/kafka/codec.py b/kafka/codec.py
index <HASH>..<HASH> 100644
--- a/kafka/codec.py
+++ b/kafka/codec.py
@@ -2,6 +2,7 @@ import gzip
from io import BytesIO
import struct
+import six
from six.moves import xrange
_XERIAL_V1_HEADER = (-126, b'S', b'N', b'A', b'P', b'P', b'Y', 0, 1, 1)
@@ -100,10 +101,15 @@ def snappy_encode(payload, xerial_compatible=True, xerial_blocksize=32*1024):
out.write(struct.pack('!' + fmt, dat))
# Chunk through buffers to avoid creating intermediate slice copies
- for chunk in (buffer(payload, i, xerial_blocksize)
+ if six.PY2:
+ chunker = lambda payload, i, size: buffer(payload, i, size)
+ else:
+ chunker = lambda payload, i, size: memoryview(payload)[i:size+i].tobytes()
+
+ for chunk in (chunker(payload, i, xerial_blocksize)
for i in xrange(0, len(payload), xerial_blocksize)):
- block = snappy.compress(chunk)
+ block = snappy.compress(chunk) # this wont accept a raw memoryview...?
block_size = len(block)
out.write(struct.pack('!i', block_size))
out.write(block)
|
Python3 does not support buffer -- use memoryview in snappy_decode
|
diff --git a/lib/extras/liquid_view.rb b/lib/extras/liquid_view.rb
index <HASH>..<HASH> 100644
--- a/lib/extras/liquid_view.rb
+++ b/lib/extras/liquid_view.rb
@@ -11,16 +11,25 @@ class LiquidView
end
- def render(template, local_assigns)
+ def render(template, local_assigns_for_rails_less_than_2_1_0 = nil)
@action_view.controller.headers["Content-Type"] ||= 'text/html; charset=utf-8'
assigns = @action_view.assigns.dup
+ # template is a Template object in Rails >=2.1.0, a source string previously.
+ if template.respond_to? :source
+ source = template.source
+ local_assigns = template.locals
+ else
+ source = template
+ local_assigns = local_assigns_for_rails_less_than_2_1_0
+ end
+
if content_for_layout = @action_view.instance_variable_get("@content_for_layout")
assigns['content_for_layout'] = content_for_layout
end
assigns.merge!(local_assigns)
- liquid = Liquid::Template.parse(template)
+ liquid = Liquid::Template.parse(source)
liquid.render(assigns, :filters => [@action_view.controller.master_helper_module], :registers => {:action_view => @action_view, :controller => @action_view.controller})
end
|
Enable Rails <I>.x compatibility by allowing the render method to accept an ActionView::Template object.
This seems to complete the earlier 'ugly hack' for Rails <I>.x compatibility
|
diff --git a/test/packages.test.js b/test/packages.test.js
index <HASH>..<HASH> 100644
--- a/test/packages.test.js
+++ b/test/packages.test.js
@@ -10,6 +10,7 @@ import {
DropzonesPackage,
EmphasisPackage,
FilePackage,
+ FindAndReplacePackage,
GridPackage,
GutterPackage,
HeadingPackage,
@@ -53,6 +54,7 @@ test('import all packages', (t) => {
DropzonesPackage,
EmphasisPackage,
FilePackage,
+ FindAndReplacePackage,
GridPackage,
GutterPackage,
HeadingPackage,
|
Test find and replace package importing.
|
diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -244,12 +244,12 @@ function runMongoMigrate(direction, migrationEnd) {
function performMigration(direction, migrateTo) {
var db = require('./lib/db');
db.getConnection(dbConfig || require(cwd + path.sep + configFileName)[dbProperty], function (err, db) {
- var migrationCollection = db.migrationCollection,
- dbConnection = db.connection;
if (err) {
console.error('Error connecting to database');
process.exit(1);
}
+ var migrationCollection = db.migrationCollection,
+ dbConnection = db.connection;
migrationCollection.find({}).sort({num: -1}).limit(1).toArray(function (err, migrationsRun) {
if (err) {
|
don't try to use db connection if error is thrown
Currently, if there are issues connecting to mongo,
db.migrationCollection is still called. If db is undefined, this will
raise an exception. Moving the error conditional up, so that an error is
logged instead.
|
diff --git a/src/inner-slider.js b/src/inner-slider.js
index <HASH>..<HASH> 100644
--- a/src/inner-slider.js
+++ b/src/inner-slider.js
@@ -401,6 +401,7 @@ export class InnerSlider extends React.Component {
changeSlide = (options, dontAnimate = false) => {
const spec = { ...this.props, ...this.state };
let targetSlide = changeSlide(spec, options);
+ if (targetSlide === this.state.targetSlide) return;
if (targetSlide !== 0 && !targetSlide) return;
if (dontAnimate === true) {
this.slideHandler(targetSlide, dontAnimate);
|
avoid slide change if it is already on the same slide
|
diff --git a/src/modules/Parallax.js b/src/modules/Parallax.js
index <HASH>..<HASH> 100644
--- a/src/modules/Parallax.js
+++ b/src/modules/Parallax.js
@@ -84,10 +84,10 @@ class Parallax extends Component {
return true;
}
- componentDidUpdate(nextProps) {
- const { parent, bgImage, bgImageSrcSet, bgImageSizes } = nextProps;
+ componentDidUpdate() {
+ const { parent, bgImage, bgImageSrcSet, bgImageSizes } = this.props;
const { bgImage: stateBgImage } = this.state;
- this.splitChildren = getSplitChildren(nextProps);
+ this.splitChildren = getSplitChildren(this.props);
if (parent && this.parent !== parent) {
this.parent = parent;
this.removeListeners();
|
fix: reference current this.props in cDU
|
diff --git a/middleman-core/lib/middleman-core/renderers/coffee_script.rb b/middleman-core/lib/middleman-core/renderers/coffee_script.rb
index <HASH>..<HASH> 100644
--- a/middleman-core/lib/middleman-core/renderers/coffee_script.rb
+++ b/middleman-core/lib/middleman-core/renderers/coffee_script.rb
@@ -31,7 +31,9 @@ module Middleman
def evaluate(context, locals, &block)
begin
super
- rescue ::ExecJS::RuntimeError=> e
+ rescue ::ExecJS::RuntimeError => e
+ e.to_s
+ rescue => e
e.to_s
end
end
|
Catch JRuby/Coffee exception correctly
|
diff --git a/modules/cms/controllers/Index.php b/modules/cms/controllers/Index.php
index <HASH>..<HASH> 100644
--- a/modules/cms/controllers/Index.php
+++ b/modules/cms/controllers/Index.php
@@ -162,15 +162,15 @@ class Index extends Controller
/*
* Extensibility
*/
- Event::fire('cms.template.save', [$this, $type]);
- $this->fireEvent('cms.template.save', [$type]);
+ Event::fire('cms.template.save', [$this, $template, $type]);
+ $this->fireEvent('cms.template.save', [$template, $type]);
Flash::success(Lang::get('cms::lang.template.saved'));
$result = [
- 'templatePath' => $template->fileName,
+ 'templatePath' => $template->fileName,
'templateMtime' => $template->mtime,
- 'tabTitle' => $this->getTabTitle($type, $template)
+ 'tabTitle' => $this->getTabTitle($type, $template)
];
if ($type == 'page') {
|
Fix cms.template.save event, should always pass the primary object as the first/second parameter.
This may be a breaking change for some, sorry about that.
|
diff --git a/packages/net/csp/transports.js b/packages/net/csp/transports.js
index <HASH>..<HASH> 100644
--- a/packages/net/csp/transports.js
+++ b/packages/net/csp/transports.js
@@ -206,11 +206,8 @@ transports.xhr = Class(baseTransport, function(supr) {
} else if('onreadystatechange' in xhr) {
xhr.onreadystatechange = bind(this, '_onReadyStateChange', rType, cb, eb);
}
- if(data) {
- xhr.send(data);
- } else {
- xhr.send();
- }
+
+ setTimeout(bind(xhr, 'send', data), 0);
};
});
|
this line was accidentally deleted in 3b<I>b7beb1d9ce<I> (mcarter) - fixes spinning loading indicators in WebKit
|
diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -147,7 +147,7 @@ function serveManifest(paths, opts, callback) {
watchers.push(watcher);
checkReady();
},
- catchupDelay: 500
+ catchupDelay: ('catchupDelay' in opts) ? opts['catchupDelay'] : 500
});
});
}
|
Allow opts to override the catchupDelay value
|
diff --git a/lib/auger/request.rb b/lib/auger/request.rb
index <HASH>..<HASH> 100644
--- a/lib/auger/request.rb
+++ b/lib/auger/request.rb
@@ -1,7 +1,7 @@
module Auger
class Request
- attr_accessor :tests, :before_tests_proc, :response, :arg
+ attr_accessor :tests, :before_tests_proc, :arg
def self.load(arg, &block)
request = new(arg)
|
no need for @response in request
|
diff --git a/ccmlib/common.py b/ccmlib/common.py
index <HASH>..<HASH> 100644
--- a/ccmlib/common.py
+++ b/ccmlib/common.py
@@ -527,7 +527,7 @@ def get_version_from_build(install_dir=None, node_path=None):
def get_dse_version(install_dir):
for root, dirs, files in os.walk(install_dir):
for file in files:
- match = re.search('^dse-([0-9.]+)(?:-SNAPSHOT)?\.jar', file)
+ match = re.search('^dse(?:-core)-([0-9.]+)(?:-SNAPSHOT)?\.jar', file)
if match:
return match.group(1)
return None
|
added support to find version using new dse jar name format
|
diff --git a/text_formatter.go b/text_formatter.go
index <HASH>..<HASH> 100644
--- a/text_formatter.go
+++ b/text_formatter.go
@@ -46,15 +46,22 @@ type TextFormatter struct {
// Enable logging the full timestamp when a TTY is attached instead of just
// the time passed since beginning of execution.
FullTimestamp bool
+
+ // The fields are sorted by default for a consistent output. For applications
+ // that log extremely frequently and don't use the JSON formatter this may not
+ // be desired.
+ DisableSorting bool
}
func (f *TextFormatter) Format(entry *Entry) ([]byte, error) {
-
var keys []string = make([]string, 0, len(entry.Data))
for k := range entry.Data {
keys = append(keys, k)
}
- sort.Strings(keys)
+
+ if !f.DisableSorting {
+ sort.Strings(keys)
+ }
b := &bytes.Buffer{}
diff --git a/text_formatter_test.go b/text_formatter_test.go
index <HASH>..<HASH> 100644
--- a/text_formatter_test.go
+++ b/text_formatter_test.go
@@ -32,3 +32,6 @@ func TestQuoting(t *testing.T) {
checkQuoting(false, errors.New("invalid"))
checkQuoting(true, errors.New("invalid argument"))
}
+
+// TODO add tests for sorting etc., this requires a parser for the text
+// formatter output.
|
text_formatter: add field to disable sorting
|
diff --git a/pkg/cmd/cli/cli.go b/pkg/cmd/cli/cli.go
index <HASH>..<HASH> 100644
--- a/pkg/cmd/cli/cli.go
+++ b/pkg/cmd/cli/cli.go
@@ -36,9 +36,8 @@ run new-app:
$ %[1]s new-app openshift/ruby-20-centos7~https://github.com/openshift/ruby-hello-world.git
This will create an application based on the Docker image 'openshift/ruby-20-centos7' that builds
-the source code at 'github.com/openshift/ruby-hello-world.git'. To start the build, run
-
- $ %[1]s start-build ruby-hello-world --follow
+the source code at 'github.com/openshift/ruby-hello-world.git'. A build will start automatically and
+a deployment will start as soon as the build finishes.
Once your application is deployed, use the status, get, and describe commands to see more about
the created components:
|
Fix docs of oc
|
diff --git a/zechframework.php b/zechframework.php
index <HASH>..<HASH> 100755
--- a/zechframework.php
+++ b/zechframework.php
@@ -1,9 +1,6 @@
<?php
require __DIR__ . DIRECTORY_SEPARATOR . "vendor" . DIRECTORY_SEPARATOR . "autoload.php";
-error_reporting(E_ALL);
-ini_set('display_errors', 1);
-
// Define constants for file paths, url, etc.
define('DS', DIRECTORY_SEPARATOR);
define('ROOT_PATH', __DIR__);
|
For now I removed the PHP ini settings; it's best to leave the server configuration on the server.
|
diff --git a/src/Request.php b/src/Request.php
index <HASH>..<HASH> 100644
--- a/src/Request.php
+++ b/src/Request.php
@@ -209,12 +209,11 @@ final class Request
/**
* Get all GET params.
- * @param bool $setNoneNull
* @return array
*/
- final public function getParams(bool $setNoneNull = false): array
+ final public function getParams(): array
{
- return $this->params->get->toArray($setNoneNull);
+ return $this->params->get->toArray();
}
/**
@@ -230,12 +229,11 @@ final class Request
/**
* Get all POST params.
- * @param bool $setNoneNull
* @return array
*/
- final public function postParams(bool $setNoneNull = false): array
+ final public function postParams(): array
{
- return $this->params->post->toArray($setNoneNull);
+ return $this->params->post->toArray();
}
/**
@@ -251,12 +249,11 @@ final class Request
/**
* Get all COOKIE params.
- * @param bool $setNoneNull
* @return array
*/
- final public function cookieParams(bool $setNoneNull = false): array
+ final public function cookieParams(): array
{
- return $this->params->cookie->toArray($setNoneNull);
+ return $this->params->cookie->toArray();
}
/**
|
Remove $setNoneNull options.
|
diff --git a/go/engine/gpg_import_key.go b/go/engine/gpg_import_key.go
index <HASH>..<HASH> 100644
--- a/go/engine/gpg_import_key.go
+++ b/go/engine/gpg_import_key.go
@@ -149,7 +149,7 @@ func (e *GPGImportKeyEngine) Run(ctx *Context) (err error) {
break
}
}
- if duplicate {
+ if duplicate && !e.arg.OnlyImport {
// This key's already been posted to the server.
res, err := ctx.GPGUI.ConfirmDuplicateKeyChosen(context.TODO(), 0)
if err != nil {
|
Fix `--only-import` of existing key in PGP select
Address one aspect of CORE-<I>
|
diff --git a/plugins/org.eclipse.xtext/src/org/eclipse/xtext/documentation/impl/MultiLineJavaDocTypeReferenceProvider.java b/plugins/org.eclipse.xtext/src/org/eclipse/xtext/documentation/impl/MultiLineJavaDocTypeReferenceProvider.java
index <HASH>..<HASH> 100644
--- a/plugins/org.eclipse.xtext/src/org/eclipse/xtext/documentation/impl/MultiLineJavaDocTypeReferenceProvider.java
+++ b/plugins/org.eclipse.xtext/src/org/eclipse/xtext/documentation/impl/MultiLineJavaDocTypeReferenceProvider.java
@@ -26,7 +26,7 @@ public class MultiLineJavaDocTypeReferenceProvider implements IJavaDocTypeRefere
public List<ReplaceRegion> computeTypeRefRegions(INode node) {
List<ReplaceRegion> regions = Lists.newArrayList();
Iterable<ILeafNode> leafNodes = node.getLeafNodes();
- computeRegions(regions, leafNodes, "@link ", "}", "#");
+ computeRegions(regions, leafNodes, "@link ", " ", "#");
computeRegions(regions, leafNodes, "@see ", " " , "#");
return regions;
}
|
[Xbase] Fix problem in compiling JavaDoc
|
diff --git a/api.php b/api.php
index <HASH>..<HASH> 100644
--- a/api.php
+++ b/api.php
@@ -1616,7 +1616,7 @@ class PHP_CRUD_API {
echo '{"swagger":"2.0",';
echo '"info":{';
echo '"title":"'.$database.'",';
- echo '"description":"API generated with [PHP_CRUD_API](https://github.com/mevdschee/php-crud-api)",';
+ echo '"description":"API generated with [PHP-CRUD-API](https://github.com/mevdschee/php-crud-api)",';
echo '"version":"1.0.0"';
echo '},';
echo '"host":"'.$_SERVER['HTTP_HOST'].'",';
|
Update project name in swagger output
|
diff --git a/sllurp/llrp_proto.py b/sllurp/llrp_proto.py
index <HASH>..<HASH> 100644
--- a/sllurp/llrp_proto.py
+++ b/sllurp/llrp_proto.py
@@ -27,6 +27,8 @@ import logging
import struct
from collections import defaultdict
from binascii import hexlify
+from six import iteritems
+
from .util import BIT, BITMASK, func, reverse_dict
from . import llrp_decoder
from .llrp_errors import LLRPError
@@ -3367,7 +3369,7 @@ class LLRPMessageDict(dict):
# Reverse dictionary for Message_struct types
Message_Type2Name = {}
-for msgname, msgstruct in Message_struct.iteritems():
+for msgname, msgstruct in iteritems(Message_struct):
try:
ty = msgstruct['type']
except KeyError:
|
use iteritems from six
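A minimal sketch (not part of the sllurp commit) of why the helper matters: six.iteritems dispatches to dict.iteritems on Python 2 and dict.items on Python 3, so the same loop body runs under both interpreters.

    from six import iteritems

    # Stand-in for the real Message_struct; the key and contents are made up.
    message_struct = {'GET_READER_CAPABILITIES': {'type': 1}}

    for msgname, msgstruct in iteritems(message_struct):
        # dict.iteritems() on Python 2, dict.items() on Python 3
        print(msgname, msgstruct.get('type'))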
|
diff --git a/lottie/src/main/java/com/airbnb/lottie/LottieAnimationView.java b/lottie/src/main/java/com/airbnb/lottie/LottieAnimationView.java
index <HASH>..<HASH> 100644
--- a/lottie/src/main/java/com/airbnb/lottie/LottieAnimationView.java
+++ b/lottie/src/main/java/com/airbnb/lottie/LottieAnimationView.java
@@ -136,7 +136,7 @@ import java.util.Map;
}
}
if (ta.getBoolean(R.styleable.LottieAnimationView_lottie_autoPlay, false)) {
- lottieDrawable.playAnimation();
+ wasAnimatingWhenDetached = true;
autoPlay = true;
}
|
Prevent autoPlay from playing an animation before it is attached to a window
Fixes #<I>
|
diff --git a/devices.js b/devices.js
index <HASH>..<HASH> 100755
--- a/devices.js
+++ b/devices.js
@@ -1193,10 +1193,10 @@ const devices = [
extend: hue.light_onoff_brightness_colortemp_colorxy,
},
{
- zigbeeModel: ['LCA002'],
+ zigbeeModel: ['LCA001', 'LCA002'],
model: '9290022166',
vendor: 'Philips',
- description: 'Hue white and color ambiance E26',
+ description: 'Hue white and color ambiance E26/E27',
extend: hue.light_onoff_brightness_colortemp_colorxy,
},
{
|
Added LCA<I> Philips Hue White and Ambiance E<I> (#<I>)
* Added LCA<I> Philips Hue White and Ambiance E<I>
* Added a space between LCA<I> and LCA<I>
|
diff --git a/rest_framework_extensions/compat.py b/rest_framework_extensions/compat.py
index <HASH>..<HASH> 100644
--- a/rest_framework_extensions/compat.py
+++ b/rest_framework_extensions/compat.py
@@ -211,7 +211,10 @@ else:
# handle different QuerySet representations
def queryset_to_value_list(queryset):
- assert isinstance(queryset, str)
+ if six.PY3:
+ assert isinstance(queryset, str)
+ else:
+ assert isinstance(queryset, basestring)
# django 1.10 introduces syntax "<QuerySet [(#1), (#2), ...]>"
# we extract only the list of tuples from the string
|
added python <I> compatibility for string/unicode comparison
|
diff --git a/bcbio/pipeline/run_info.py b/bcbio/pipeline/run_info.py
index <HASH>..<HASH> 100644
--- a/bcbio/pipeline/run_info.py
+++ b/bcbio/pipeline/run_info.py
@@ -799,6 +799,8 @@ def _add_algorithm_defaults(algorithm):
Converts allowed multiple inputs into lists if specified as a single item.
Converts required single items into string if specified as a list
"""
+ if not algorithm:
+ algorithm = {}
defaults = {"archive": [],
"tools_off": [],
"tools_on": [],
|
Fix error caused by an empty algorithm in the config YAML.
Closes #<I>.
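For context, a rough sketch (assumed failure mode, not bcbio code) of what an empty algorithm: section does: YAML loads it as None, so later dict lookups fail unless it is replaced with an empty dict first.

    import yaml

    config = yaml.safe_load("algorithm:\n")   # empty mapping value loads as None
    algorithm = config["algorithm"]

    if not algorithm:
        algorithm = {}                        # same guard as the fix above

    algorithm.setdefault("tools_off", [])     # safe now; would raise on None
    print(algorithm)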
|
diff --git a/js/jquery.mapael.js b/js/jquery.mapael.js
index <HASH>..<HASH> 100644
--- a/js/jquery.mapael.js
+++ b/js/jquery.mapael.js
@@ -317,12 +317,12 @@
options.text.attrs["text-anchor"] = textPosition.textAnchor;
elem.textElem = paper.text(textPosition.x, textPosition.y, options.text.content).attr(options.text.attrs);
$.fn.mapael.setHoverOptions(elem.textElem, options.text.attrs, options.text.attrsHover);
- $.fn.mapael.setHover(paper, elem.mapElem, elem.textElem);
options.eventHandlers && $.fn.mapael.setEventHandlers(id, options, elem.mapElem, elem.textElem);
+ $.fn.mapael.setHover(paper, elem.mapElem, elem.textElem);
$(elem.textElem.node).attr("data-id", id);
} else {
- $.fn.mapael.setHover(paper, elem.mapElem);
options.eventHandlers && $.fn.mapael.setEventHandlers(id, options, elem.mapElem);
+ $.fn.mapael.setHover(paper, elem.mapElem);
}
// Init the tooltip
|
Bind event handlers before binding the mouseover and mouseout events
|
diff --git a/languagetool-dev/src/test/java/org/languagetool/dev/eval/LanguageDetectionEval.java b/languagetool-dev/src/test/java/org/languagetool/dev/eval/LanguageDetectionEval.java
index <HASH>..<HASH> 100644
--- a/languagetool-dev/src/test/java/org/languagetool/dev/eval/LanguageDetectionEval.java
+++ b/languagetool-dev/src/test/java/org/languagetool/dev/eval/LanguageDetectionEval.java
@@ -46,6 +46,7 @@ class LanguageDetectionEval {
//languageIdentifier.enableFasttext(new File("/path/to/fasttext/binary"), new File("/path/to/fasttext/model"));
// Daniel's paths:
//languageIdentifier.enableFasttext(new File("/home/languagetool/fasttext/fasttext"), new File("/home/languagetool/fasttext/lid.176.bin"));
+ //languageIdentifier.enableNgrams(new File("/home/languagetool/model_ml50_new.zip"));
}
private float evaluate(Language language) throws IOException {
|
add ngram path (commented out)
|
diff --git a/src/main/java/com/github/jinahya/bit/io/BitIoConstraints.java b/src/main/java/com/github/jinahya/bit/io/BitIoConstraints.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/github/jinahya/bit/io/BitIoConstraints.java
+++ b/src/main/java/com/github/jinahya/bit/io/BitIoConstraints.java
@@ -98,7 +98,7 @@ final class BitIoConstraints {
static {
MAX_SIZES[0] = (int) pow(2, MIN_EXPONENT);
for (int i = 1; i < MAX_SIZES.length; i++) {
- MAX_SIZES[i] = MAX_SIZES[i - 1] << 2;
+ MAX_SIZES[i] = MAX_SIZES[i - 1] << 1;
}
}
@@ -142,7 +142,7 @@ final class BitIoConstraints {
}
static int requireValidSizeChar(final int size) {
- return requireValidSize(true, SIZE_EXPONENT_CHAR, size);
+ return requireValidSize(false, SIZE_EXPONENT_CHAR, size);
}
// -----------------------------------------------------------------------------------------------------------------
|
Fix wrong size constraint for char
|
diff --git a/lib/luban/cli/version.rb b/lib/luban/cli/version.rb
index <HASH>..<HASH> 100644
--- a/lib/luban/cli/version.rb
+++ b/lib/luban/cli/version.rb
@@ -1,5 +1,5 @@
module Luban
module CLI
- VERSION = "0.4.4"
+ VERSION = "0.4.5"
end
end
|
bump up version to <I>
|
diff --git a/segmentstore/server/src/main/java/io/pravega/segmentstore/server/reading/StreamSegmentReadIndex.java b/segmentstore/server/src/main/java/io/pravega/segmentstore/server/reading/StreamSegmentReadIndex.java
index <HASH>..<HASH> 100644
--- a/segmentstore/server/src/main/java/io/pravega/segmentstore/server/reading/StreamSegmentReadIndex.java
+++ b/segmentstore/server/src/main/java/io/pravega/segmentstore/server/reading/StreamSegmentReadIndex.java
@@ -1172,7 +1172,7 @@ class StreamSegmentReadIndex implements CacheManager.Client, AutoCloseable {
insert(offset, data);
}
} catch (Exception ex) {
- log.error("{}: Unable to process Storage Read callback. Offset={}, Result=[{}].", this.traceObjectId, offset, result);
+ log.error("{}: Unable to process Storage Read callback. Offset={}, Result=[{}].", this.traceObjectId, offset, result, ex);
}
};
|
Issue <I>: (SegmentStore) Logging the exception in StreamSegmentReadIndex.queueStorageRead (#<I>)
Fixed the error logging in StreamSegmentReadIndex.queueStorageRead to log the exception, not just the message.
|
diff --git a/findspark.py b/findspark.py
index <HASH>..<HASH> 100644
--- a/findspark.py
+++ b/findspark.py
@@ -137,7 +137,10 @@ def init(spark_home=None, python_path=None, edit_rc=False, edit_profile=False):
# add pyspark to sys.path
spark_python = os.path.join(spark_home, 'python')
- py4j = glob(os.path.join(spark_python, 'lib', 'py4j-*.zip'))[0]
+ try:
+ py4j = glob(os.path.join(spark_python, 'lib', 'py4j-*.zip'))[0]
+ except IndexError:
+ raise Exception("Unable to find py4j, your SPARK_HOME may not be configured correctly")
sys.path[:0] = [spark_python, py4j]
if edit_rc:
|
Added descriptive error when py4j can't be found
Ran into this issue while attempting to use the library and got an "IndexError"; this change makes it much clearer that something is probably wrong with the user's Spark config, since py4j cannot be found (at least, this is what happened in my case; open to a more general rephrasing).
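A rough sketch of the pattern the change applies (the SPARK_HOME path below is an assumption): catch the IndexError that an empty glob() result produces and re-raise with a message pointing at the likely misconfiguration.

    import os
    from glob import glob

    spark_python = os.path.join("/opt/spark", "python")   # assumed SPARK_HOME/python
    try:
        py4j = glob(os.path.join(spark_python, "lib", "py4j-*.zip"))[0]
    except IndexError:
        raise Exception(
            "Unable to find py4j, your SPARK_HOME may not be configured correctly")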
|
diff --git a/osbs/conf.py b/osbs/conf.py
index <HASH>..<HASH> 100644
--- a/osbs/conf.py
+++ b/osbs/conf.py
@@ -615,7 +615,7 @@ class Configuration(object):
continue
platform = section.split("platform:")[1]
platform_descriptor = {}
- logger.warning("user configuration platforms in section %s is ignored in ",
+ logger.warning("user configuration platforms in section %s is ignored in "
"arrangement %s and later",
section, REACTOR_CONFIG_ARRANGEMENT_VERSION)
logger.warning("it has been deprecated in favor of the value in the reactor_config_map")
|
conf: fix errant comma in deprecation warnings
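The bug is subtle enough to be worth spelling out (illustrative snippet, not the osbs code): with the errant comma the second literal becomes an extra positional argument to logger.warning, so the lone %s in the first literal no longer matches the supplied values; without the comma, Python concatenates the adjacent literals into one format string with two %s placeholders.

    import logging

    logging.basicConfig(level=logging.WARNING)
    logger = logging.getLogger(__name__)
    section, version = "platform:x86_64", 6   # made-up values

    # Errant comma: one %s in the format string, three arguments; logging
    # reports a formatting error instead of emitting the intended message.
    logger.warning("platforms in section %s is ignored in ",
                   "arrangement %s and later", section, version)

    # Fixed: adjacent literals concatenate into one format string with two %s.
    logger.warning("platforms in section %s is ignored in "
                   "arrangement %s and later", section, version)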
|
diff --git a/tests/integ/test_training_compiler.py b/tests/integ/test_training_compiler.py
index <HASH>..<HASH> 100644
--- a/tests/integ/test_training_compiler.py
+++ b/tests/integ/test_training_compiler.py
@@ -32,6 +32,10 @@ def gpu_instance_type(request):
integ.test_region() not in integ.TRAINING_COMPILER_SUPPORTED_REGIONS,
reason="SageMaker Training Compiler is not supported in this region",
)
+@pytest.mark.skipif(
+ integ.test_region() in integ.TRAINING_NO_P3_REGIONS,
+ reason="no ml.p3 instances in this region",
+)
def test_huggingface_pytorch(
sagemaker_session,
gpu_instance_type,
@@ -78,6 +82,10 @@ def test_huggingface_pytorch(
integ.test_region() not in integ.TRAINING_COMPILER_SUPPORTED_REGIONS,
reason="SageMaker Training Compiler is not supported in this region",
)
+@pytest.mark.skipif(
+ integ.test_region() in integ.TRAINING_NO_P3_REGIONS,
+ reason="no ml.p3 instances in this region",
+)
def test_huggingface_tensorflow(
sagemaker_session,
gpu_instance_type,
|
fix: integs for training compiler in non-PDX regions (#<I>)
|
diff --git a/includes/functions.php b/includes/functions.php
index <HASH>..<HASH> 100644
--- a/includes/functions.php
+++ b/includes/functions.php
@@ -1483,6 +1483,13 @@ function yourls_salt( $string ) {
* 'var', 'value', $url
* If $url omitted, uses $_SERVER['REQUEST_URI']
*
+ * The result of this function call is a URL : it should be escaped before being printed as HTML
+ *
+ * @since 1.5
+ * @param string|array $param1 Either newkey or an associative_array.
+ * @param string $param2 Either newvalue or oldquery or URI.
+ * @param string $param3 Optional. Old query or URI.
+ * @return string New URL query string.
*/
function yourls_add_query_arg() {
$ret = '';
@@ -1564,6 +1571,12 @@ function yourls_urlencode_deep( $value ) {
/**
* Remove arg from query. Opposite of yourls_add_query_arg. Stolen from WP.
*
+ * The result of this function call is a URL : it should be escaped before being printed as HTML
+ *
+ * @since 1.5
+ * @param string|array $key Query key or keys to remove.
+ * @param bool|string $query Optional. When false uses the $_SERVER value. Default false.
+ * @return string New URL query string.
*/
function yourls_remove_query_arg( $key, $query = false ) {
if ( is_array( $key ) ) { // removing multiple keys
|
For hackers: add comment about preventing XSS
|
diff --git a/changelogs/changelogs.py b/changelogs/changelogs.py
index <HASH>..<HASH> 100644
--- a/changelogs/changelogs.py
+++ b/changelogs/changelogs.py
@@ -21,7 +21,7 @@ def _load_custom_functions(vendor, name):
:return: dict, functions
"""
functions = {}
- filename = "{}.py".format(name)
+ filename = "{}.py".format(name.lower())
path = os.path.join(
os.path.dirname(os.path.realpath(__file__)), # current working dir
"custom", # /dir/parser
|
Make custom functions case insensitive.
Some packages prefer a different casing in their name. For example
`SQLAlchemy` instead of `sqlalchemy`.
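A small sketch of the normalization (the paths and helper below are hypothetical): lower-casing the package name before building the filename makes SQLAlchemy and sqlalchemy resolve to the same custom-functions module.

    import os

    def custom_functions_path(name, base_dir="custom"):
        filename = "{}.py".format(name.lower())   # SQLAlchemy -> sqlalchemy.py
        return os.path.join(base_dir, filename)

    assert custom_functions_path("SQLAlchemy") == custom_functions_path("sqlalchemy")
    print(custom_functions_path("SQLAlchemy"))    # e.g. custom/sqlalchemy.py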
|
diff --git a/src/test/java/org/jooq/lambda/SeqTest.java b/src/test/java/org/jooq/lambda/SeqTest.java
index <HASH>..<HASH> 100644
--- a/src/test/java/org/jooq/lambda/SeqTest.java
+++ b/src/test/java/org/jooq/lambda/SeqTest.java
@@ -272,10 +272,15 @@ public class SeqTest {
Supplier<Tuple2<Seq<Integer>, Seq<Integer>>> reset = () -> Seq.of(1, 2, 3, 4, 5).duplicate();
Tuple2<Seq<Integer>, Seq<Integer>> duplicate;
+ // Consume v1 first
duplicate = reset.get().map((s1, s2) -> tuple(s1.limit(2), s2.skip(2)));
-
assertEquals(asList(1, 2), duplicate.v1.toList());
assertEquals(asList(3, 4, 5), duplicate.v2.toList());
+
+ // Consume v2 first
+ duplicate = reset.get().map((s1, s2) -> tuple(s1.limit(2), s2.skip(2)));
+ assertEquals(asList(3, 4, 5), duplicate.v2.toList());
+ assertEquals(asList(1, 2), duplicate.v1.toList());
}
@Test
|
[#<I>] Added failing integration test
|
diff --git a/lib/ember/version.rb b/lib/ember/version.rb
index <HASH>..<HASH> 100644
--- a/lib/ember/version.rb
+++ b/lib/ember/version.rb
@@ -6,8 +6,12 @@ module Ember
# we might want to unify this with the ember version string,
# but consistency?
def rubygems_version_string
- major, rc = VERSION.sub('-','.').scan(/(\d+\.\d+\.\d+\.rc)\.(\d+)/).first
+ if VERSION =~ /rc/
+ major, rc = VERSION.sub('-','.').scan(/(\d+\.\d+\.\d+\.rc)\.(\d+)/).first
- "#{major}#{rc}"
+ "#{major}#{rc}"
+ else
+ VERSION
+ end
end
end
|
Fix ember-source to handle non-RCs
|
diff --git a/km3pipe/io/aanet.py b/km3pipe/io/aanet.py
index <HASH>..<HASH> 100644
--- a/km3pipe/io/aanet.py
+++ b/km3pipe/io/aanet.py
@@ -103,6 +103,7 @@ class AanetPump(Pump):
'MCHits': HitSeries.from_aanet(event.mc_hits, event.id),
'MCTracks': TrackSeries.from_aanet(event.mc_trks,
event.id),
+ 'Tracks': TrackSeries.from_aanet(event.trks, event.id),
'filename': filename,
'Header': self.header,
'EventInfo': EventInfo(
@@ -121,10 +122,13 @@ class AanetPump(Pump):
w2,
w3,
),
- }
- recos = read_mini_dst(event, event.id)
- for recname, reco in recos.items():
- blob[recname] = reco
+ }
+ try:
+ recos = read_mini_dst(event, event.id)
+ for recname, reco in recos.items():
+ blob[recname] = reco
+ except IndexError:
+ pass
yield blob
del event_file
|
try reading aanet non-mc tracks
|