diff stringlengths 65 26.7k | message stringlengths 7 9.92k |
|---|---|
diff --git a/tasks/defs.js b/tasks/defs.js
index <HASH>..<HASH> 100644
--- a/tasks/defs.js
+++ b/tasks/defs.js
@@ -18,7 +18,8 @@ module.exports = function (grunt) {
'Static scope analysis and transpilation of ES6 block scoped const and let variables, to ES3.',
function () {
- var validRun = true,
+ var filesNum = 0,
+ validRun = true,
// Merge task-specific and/or target-specific options with these defaults.
options = this.options();
@@ -54,6 +55,9 @@ module.exports = function (grunt) {
});
function runDefs(srcPath, destPath, defsOptions) {
+ grunt.log.log('Generating"' + destPath + '" from "' + srcPath + '"...');
+ filesNum++;
+
var defsOutput = defs(grunt.file.read(srcPath), defsOptions);
// Write the destination file.
@@ -70,11 +74,12 @@ module.exports = function (grunt) {
// Write defs output to the target file.
grunt.file.write(destPath, defsOutput.src);
- // Print a success message.
- grunt.log.ok('File "' + destPath + '" generated.');
return true;
}
+ if (validRun) {
+ grunt.log.ok(filesNum + ' files successfully generated.');
+ }
return validRun;
}); | log currently generated file (and its source) |
diff --git a/chi.go b/chi.go
index <HASH>..<HASH> 100644
--- a/chi.go
+++ b/chi.go
@@ -77,6 +77,10 @@ type Router interface {
// NotFound defines a handler to respond whenever a route could
// not be found.
NotFound(h http.HandlerFunc)
+
+ // MethodNotAllowed defines a handler to respond whenever a method is
+ // not allowed.
+ MethodNotAllowed(h http.HandlerFunc)
}
// Routes interface adds two methods for router traversal, which is also | Fix missing MethodNotAllowed in Router interface |
diff --git a/lib/codesake/dawn/engine.rb b/lib/codesake/dawn/engine.rb
index <HASH>..<HASH> 100644
--- a/lib/codesake/dawn/engine.rb
+++ b/lib/codesake/dawn/engine.rb
@@ -217,15 +217,20 @@ module Codesake
@vulnerabilities
end
- def is_vulnerable_to?(name)
+ def find_vulnerability_by_name(name)
apply(name) unless is_applied?(name)
-
@vulnerabilities.each do |v|
- return true if v[:name] == name
+ return v if v[:name] == name
end
- false
+ nil
+ end
+
+ def is_vulnerable_to?(name)
+ return (find_vulnerability_by_name(name) != nil)
end
+
+
def has_reflected_xss?
(@reflected_xss.count != 0)
end | Added a find_vulnerability_by_name to retrieve a vulnerability and
changed is_vulnerable_to? to use it |
diff --git a/agent/consul/server_overview.go b/agent/consul/server_overview.go
index <HASH>..<HASH> 100644
--- a/agent/consul/server_overview.go
+++ b/agent/consul/server_overview.go
@@ -170,13 +170,13 @@ func getCatalogOverview(catalog *structs.CatalogContents) *structs.CatalogSummar
summarySort := func(slice []structs.HealthSummary) func(int, int) bool {
return func(i, j int) bool {
- if slice[i].Name < slice[j].Name {
+ if slice[i].PartitionOrEmpty() < slice[j].PartitionOrEmpty() {
return true
}
if slice[i].NamespaceOrEmpty() < slice[j].NamespaceOrEmpty() {
return true
}
- return slice[i].PartitionOrEmpty() < slice[j].PartitionOrEmpty()
+ return slice[i].Name < slice[j].Name
}
}
sort.Slice(summary.Nodes, summarySort(summary.Nodes)) | Sort by partition/ns/servicename instead of the reverse |
diff --git a/proctor-store-svn/src/main/java/com/indeed/proctor/store/SvnProctor.java b/proctor-store-svn/src/main/java/com/indeed/proctor/store/SvnProctor.java
index <HASH>..<HASH> 100644
--- a/proctor-store-svn/src/main/java/com/indeed/proctor/store/SvnProctor.java
+++ b/proctor-store-svn/src/main/java/com/indeed/proctor/store/SvnProctor.java
@@ -276,6 +276,6 @@ public class SvnProctor extends FileBasedProctorStore {
@Override
public String getName() {
- return "SvnProctor";
+ return SvnProctor.class.getName();
}
} | PROC-<I>: use class name instead of a static string. |
diff --git a/lib/sidekiq-spy/app.rb b/lib/sidekiq-spy/app.rb
index <HASH>..<HASH> 100644
--- a/lib/sidekiq-spy/app.rb
+++ b/lib/sidekiq-spy/app.rb
@@ -50,6 +50,7 @@ module SidekiqSpy
def configure_sidekiq
Sidekiq.configure_client do |sidekiq_config|
+ sidekiq_config.logger = nil
sidekiq_config.redis = {
:url => config.url,
:namespace => config.namespace, | Disable Sidekiq logger.
Completely disable the Sidekiq logger; we have nowhere to display the log
messages, anyway! ;) |
diff --git a/core/server/src/main/java/alluxio/master/file/meta/Inode.java b/core/server/src/main/java/alluxio/master/file/meta/Inode.java
index <HASH>..<HASH> 100644
--- a/core/server/src/main/java/alluxio/master/file/meta/Inode.java
+++ b/core/server/src/main/java/alluxio/master/file/meta/Inode.java
@@ -471,7 +471,7 @@ public abstract class Inode<T> implements JournalEntryRepresentable {
protected Objects.ToStringHelper toStringHelper() {
return Objects.toStringHelper(this).add("id", mId).add("name", mName).add("parentId", mParentId)
.add("creationTimeMs", mCreationTimeMs).add("pinned", mPinned).add("deleted", mDeleted)
- .add("ttl", mTtl).add("mTtlAction", mTtlAction)
+ .add("ttl", mTtl).add("TtlAction", mTtlAction)
.add("directory", mDirectory).add("persistenceState", mPersistenceState)
.add("lastModificationTimeMs", mLastModificationTimeMs).add("owner", mOwner)
.add("group", mGroup).add("permission", mMode); | Rename mTtlAction to TtlAction |
diff --git a/lxd/instance/drivers/load.go b/lxd/instance/drivers/load.go
index <HASH>..<HASH> 100644
--- a/lxd/instance/drivers/load.go
+++ b/lxd/instance/drivers/load.go
@@ -66,7 +66,8 @@ func validDevices(state *state.State, cluster *db.Cluster, instanceType instance
}
// Check each device individually using the device package.
- for name, config := range devices {
+ // Use instConf.localDevices so that the cloned config is passed into the driver, so it cannot modify it.
+ for name, config := range instConf.localDevices {
err := device.Validate(instConf, state, name, config)
if err != nil {
return errors.Wrapf(err, "Device validation failed %q", name) | lxd/instance/drivers/load: Pass copy of device config to device.Validate
Ensures a copy of the instance's device config is passed to device.Validate to ensure devices cannot modify the source config map.
This way any internal modifications a device may make to its config map are not reflected outside of the device.
This mirrors the existing usage of device.New(). |
diff --git a/linguist/apps.py b/linguist/apps.py
index <HASH>..<HASH> 100644
--- a/linguist/apps.py
+++ b/linguist/apps.py
@@ -8,4 +8,3 @@ class LinguistConfig(AppConfig):
def ready(self):
super(LinguistConfig, self).ready()
- self.module.autodiscover() | Remove autodiscover from linguist/apps. |
diff --git a/core/src/test/java/com/google/errorprone/bugpatterns/AutoValueFinalMethodsTest.java b/core/src/test/java/com/google/errorprone/bugpatterns/AutoValueFinalMethodsTest.java
index <HASH>..<HASH> 100644
--- a/core/src/test/java/com/google/errorprone/bugpatterns/AutoValueFinalMethodsTest.java
+++ b/core/src/test/java/com/google/errorprone/bugpatterns/AutoValueFinalMethodsTest.java
@@ -163,4 +163,29 @@ public class AutoValueFinalMethodsTest {
"}")
.doTest();
}
+
+ @Test
+ public void testDiagnosticStringWithMultipleMethodMatches() {
+ compilationHelper
+ .addSourceLines(
+ "out/Test.java",
+ "import com.google.auto.value.AutoValue;",
+ "import com.google.auto.value.extension.memoized.Memoized;",
+ "@AutoValue",
+ "abstract class Test {",
+ " static Test create() {",
+ " return null;",
+ " }",
+ " @Override",
+ " // BUG: Diagnostic contains: Make equals, hashCode final in AutoValue classes",
+ " public boolean equals(Object obj) {",
+ " return true;",
+ " }",
+ " @Override",
+ " public int hashCode() {",
+ " return 1;",
+ " }",
+ "}")
+ .doTest();
+ }
} | Add test for Diagnostic String with multiple method matches for AutoValueFinalMethods check
RELNOTES: N/A
-------------
Created by MOE: <URL> |
diff --git a/tests/FiniteStateMachine/VerifyLogTest.php b/tests/FiniteStateMachine/VerifyLogTest.php
index <HASH>..<HASH> 100644
--- a/tests/FiniteStateMachine/VerifyLogTest.php
+++ b/tests/FiniteStateMachine/VerifyLogTest.php
@@ -263,23 +263,6 @@ class Fsm_VerifyLogTest extends FsmTestCase
$this->assertExceptionMessage($stateSet, $log, 'index', $logRecordIndex);
}
- protected function _provideLogsWithSpecificValues($key, $values)
- {
- $argumentSets = array();
- $templateArgumentSets = $this->provideValidLogs();
- foreach ($values as $value) {
- $templateArgumentSetIndex = rand(0, sizeof($templateArgumentSets) - 1);
- $argumentSet = $templateArgumentSets[$templateArgumentSetIndex];
- $log = &$argumentSet['log'];
- $logIndex = rand(0, sizeof($log) - 1);
- $log[$logIndex][$key] = $value;
- unset($log);
- $argumentSet['logRecordIndex'] = $logIndex;
- $argumentSets[] = $argumentSet;
- }
- return $argumentSets;
- }
-
public function provideValidLogs()
{
$stateSet = $this->_getBillingStateSet(); | #<I>: Fsm_VerifyLogTest::_provideLogsWithSpecificValues() has been eliminated |
diff --git a/activerecord/lib/active_record/attribute_methods/before_type_cast.rb b/activerecord/lib/active_record/attribute_methods/before_type_cast.rb
index <HASH>..<HASH> 100644
--- a/activerecord/lib/active_record/attribute_methods/before_type_cast.rb
+++ b/activerecord/lib/active_record/attribute_methods/before_type_cast.rb
@@ -13,7 +13,7 @@ module ActiveRecord
# Returns a hash of attributes before typecasting and deserialization.
def attributes_before_type_cast
- Hash[attribute_names.map { |name| [name, read_attribute_before_type_cast(name)] }]
+ @attributes
end
private | attributes_before_type_cast are just the value of @attributes |
diff --git a/lib/cloudstack-nagios/commands/system_vm.rb b/lib/cloudstack-nagios/commands/system_vm.rb
index <HASH>..<HASH> 100644
--- a/lib/cloudstack-nagios/commands/system_vm.rb
+++ b/lib/cloudstack-nagios/commands/system_vm.rb
@@ -180,6 +180,30 @@ class SystemVm < CloudstackNagios::Base
end
end
+ desc "uptime", "return uptime"
+ def uptime
+ begin
+ host = systemvm_host
+ uptime_sec = 0
+ on host do |h|
+ uptime_sec = capture('cat /proc/uptime').split[0].to_f
+ end
+
+ if uptime_sec < options[:critical]
+ code = 2
+ elsif uptime_sec < options[:warning]
+ code = 1
+ else
+ code = 0
+ end
+
+ puts "UPTIME #{RETURN_CODES[code]} #{uptime_sec}s | uptime=#{uptime_sec}"
+ exit code
+ rescue => e
+ exit_with_failure(e)
+ end
+ end
+
desc "active_ftp", "make sure conntrack_ftp and nf_nat_ftp modules are loaded"
def active_ftp
begin | add feature check uptime (#6) |
diff --git a/spec/functional/mongoid/criterion/inclusion_spec.rb b/spec/functional/mongoid/criterion/inclusion_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/functional/mongoid/criterion/inclusion_spec.rb
+++ b/spec/functional/mongoid/criterion/inclusion_spec.rb
@@ -26,6 +26,36 @@ describe Mongoid::Criterion::Inclusion do
describe "#any_in" do
+ context "when querying on foreign keys" do
+
+ context "when not using object ids" do
+
+ before(:all) do
+ Person.identity :type => String
+ end
+
+ after(:all) do
+ Person.identity :type => BSON::ObjectId
+ end
+
+ let!(:person) do
+ Person.create(:ssn => "123-11-1111")
+ end
+
+ let!(:account) do
+ person.create_account(:name => "test")
+ end
+
+ let(:from_db) do
+ Account.any_in(:person_id => [ person.id ])
+ end
+
+ it "returns the correct results" do
+ from_db.should eq([ account ])
+ end
+ end
+ end
+
context "when chaining after a where" do
let!(:person) do | Include provided spec with #<I> to show behaviour is correct. Closes #<I>. |
diff --git a/src/uncss.js b/src/uncss.js
index <HASH>..<HASH> 100644
--- a/src/uncss.js
+++ b/src/uncss.js
@@ -215,7 +215,7 @@ function init(files, options, callback) {
options
);
- process(options).then(([css, report]) => callback(null, css, report), callback);
+ return process(options).then(([css, report]) => callback(null, css, report), callback);
}
function processAsPostCss(options, pages) { | Return the promises to the caller (#<I>) |
diff --git a/azkaban-common/src/main/java/azkaban/project/ProjectManager.java b/azkaban-common/src/main/java/azkaban/project/ProjectManager.java
index <HASH>..<HASH> 100644
--- a/azkaban-common/src/main/java/azkaban/project/ProjectManager.java
+++ b/azkaban-common/src/main/java/azkaban/project/ProjectManager.java
@@ -80,6 +80,7 @@ public class ProjectManager {
// initialize itself.
Props prop = new Props(props);
prop.put(ValidatorConfigs.PROJECT_ARCHIVE_FILE_PATH, "initialize");
+ new XmlValidatorManager(prop);
loadAllProjects();
} | Validate the configurations of the validator plugins when the Azkaban web server starts. |
diff --git a/src/org/zaproxy/zap/extension/api/API.java b/src/org/zaproxy/zap/extension/api/API.java
index <HASH>..<HASH> 100644
--- a/src/org/zaproxy/zap/extension/api/API.java
+++ b/src/org/zaproxy/zap/extension/api/API.java
@@ -303,7 +303,7 @@ public class API {
List<String> mandatoryParams = other.getMandatoryParamNames();
if (mandatoryParams != null) {
for (String param : mandatoryParams) {
- if (params.getString(param) == null || params.getString(param).length() == 0) {
+ if (!params.has(param) || params.getString(param).length() == 0) {
throw new ApiException(ApiException.Type.MISSING_PARAMETER, param);
}
} | Issue <I> - JSONException while calling an API "other" without the required parameter(s)
Changed to check if the parameter exists (instead of getting it). |
diff --git a/js/ascendex.js b/js/ascendex.js
index <HASH>..<HASH> 100644
--- a/js/ascendex.js
+++ b/js/ascendex.js
@@ -2295,7 +2295,6 @@ module.exports = class ascendex extends Exchange {
'notionalCap': this.safeNumber (bracket, 'positionNotionalUpperBound'),
'maintenanceMarginRatio': this.parseNumber (maintenanceMarginRatio),
'maxLeverage': this.parseNumber (Precise.stringDiv ('1', maintenanceMarginRatio)),
- 'maintenanceAmount': undefined,
'info': bracket,
});
} | removed maintenanceAmount from fetchLeverageTiers |
diff --git a/example/test-coverage.js b/example/test-coverage.js
index <HASH>..<HASH> 100755
--- a/example/test-coverage.js
+++ b/example/test-coverage.js
@@ -20,5 +20,9 @@ exec(sprintf('NODE_PATH=lib-cov %s/bin/whiskey --tests %s/example/test-success-w
process.exit(5);
}
+ if (err && err.code !== 0) {
+ process.exit(5);
+ }
+
process.exit(0);
}); | Exit with non-zero if Whiskey exists with non-zero. |
diff --git a/tests/instancemethods_test.py b/tests/instancemethods_test.py
index <HASH>..<HASH> 100644
--- a/tests/instancemethods_test.py
+++ b/tests/instancemethods_test.py
@@ -259,6 +259,22 @@ class TestEnsureStubsAreUsed:
verifyStubbedInvocationsAreUsed(dog)
+ @pytest.mark.xfail(reason='Not implemented.')
+ def testPassIfVerifiedZeroInteractions(self):
+ dog = mock()
+ when(dog).waggle(1).thenReturn('Sure')
+ verifyZeroInteractions(dog)
+
+ verifyStubbedInvocationsAreUsed(dog)
+
+ @pytest.mark.xfail(reason='Not implemented.')
+ def testPassIfVerifiedNoMoreInteractions(self):
+ dog = mock()
+ when(dog).waggle(1).thenReturn('Sure')
+ verifyNoMoreInteractions(dog)
+
+ verifyStubbedInvocationsAreUsed(dog)
+
def testWildacardCallSignatureOnStub(self):
dog = mock()
when(dog).waggle(Ellipsis).thenReturn('Sure') | Stash two xfail tests around `verifyStubbedInvocationsAreUsed` |
diff --git a/dagobah/core/core.py b/dagobah/core/core.py
index <HASH>..<HASH> 100644
--- a/dagobah/core/core.py
+++ b/dagobah/core/core.py
@@ -116,6 +116,9 @@ class Dagobah(object):
for to_node in to_nodes:
job.add_dependency(from_node, to_node)
+ if job_json.get('notes', None):
+ job.update_job_notes(job_json['notes'])
+
def commit(self, cascade=False):
@@ -418,7 +421,7 @@ class Job(DAG):
self.parent.commit(cascade=True)
- def update_job_notes(self, job_name, notes):
+ def update_job_notes(self, notes):
if not self.state.allow_edit_job:
raise DagobahError('job cannot be edited in its current state')
diff --git a/dagobah/daemon/api.py b/dagobah/daemon/api.py
index <HASH>..<HASH> 100644
--- a/dagobah/daemon/api.py
+++ b/dagobah/daemon/api.py
@@ -368,7 +368,7 @@ def update_job_notes():
abort(400)
job = dagobah.get_job(args['job_name'])
- job.update_job_notes(args['job_name'], args['notes'])
+ job.update_job_notes(args['notes'])
@app.route('/api/edit_task', methods=['POST']) | Fixing dagobahd restart error where notes were not re-loaded from the DB |
diff --git a/bin/tag.js b/bin/tag.js
index <HASH>..<HASH> 100644
--- a/bin/tag.js
+++ b/bin/tag.js
@@ -24,7 +24,14 @@ module.exports = function (callback) {
}
exec('git push --tags', function (err) {
- return callback(err);
+ if(err) {
+ return callback(err);
+ }
+
+ exec('npm publish', function (err) {
+ return callback(err);
+ });
+
});
});
}; | npm publish after successful git tag |
diff --git a/datanommer.models/tests/test_model.py b/datanommer.models/tests/test_model.py
index <HASH>..<HASH> 100644
--- a/datanommer.models/tests/test_model.py
+++ b/datanommer.models/tests/test_model.py
@@ -168,17 +168,22 @@ class TestModels(unittest.TestCase):
def test_add_empty(self):
datanommer.models.add(dict())
- @raises(KeyError)
def test_add_missing_i(self):
msg = copy.deepcopy(scm_message)
del msg['i']
datanommer.models.add(msg)
+ dbmsg = datanommer.models.Message.query.first()
+ self.assertEqual(dbmsg.i, 0)
- @raises(KeyError)
def test_add_missing_timestamp(self):
msg = copy.deepcopy(scm_message)
del msg['timestamp']
datanommer.models.add(msg)
+ dbmsg = datanommer.models.Message.query.first()
+ timediff = datetime.datetime.now() - dbmsg.timestamp
+ # 10 seconds between adding the message and checking
+ # the timestamp should be more than enough.
+ self.assertTrue(timediff < datetime.timedelta(seconds=10))
def test_add_many_and_count_statements(self):
statements = [] | update tests to account for more flexible message formats
Since we now supply defaults for some fedmsg-specific fields, tests that
were previously expected to fail are now succeeding. Update the tests
to be consistent with the way we now process messages. |
diff --git a/pymc/distributions.py b/pymc/distributions.py
index <HASH>..<HASH> 100755
--- a/pymc/distributions.py
+++ b/pymc/distributions.py
@@ -202,11 +202,11 @@ def new_dist_class(*new_class_args):
pv = [np.shape(value(v)) for v in parents.values()]
biggest_parent = np.argmax([np.prod(v) for v in pv])
parents_shape = pv[biggest_parent]
-
+
# Scalar parents can support any shape.
if np.prod(parents_shape) <= 1:
parents_shape = None
-
+
else:
parents_shape = None | Re-rendered metastability and mixing figures as pdfs
git-svn-id: <URL> |
diff --git a/salt/grains/core.py b/salt/grains/core.py
index <HASH>..<HASH> 100644
--- a/salt/grains/core.py
+++ b/salt/grains/core.py
@@ -1332,7 +1332,14 @@ def os_data():
if os.path.exists('/proc/1/cmdline'):
with salt.utils.fopen('/proc/1/cmdline') as fhr:
init_cmdline = fhr.read().replace('\x00', ' ').split()
- init_bin = salt.utils.which(init_cmdline[0])
+ try:
+ init_bin = salt.utils.which(init_cmdline[0])
+ except IndexError:
+ # Emtpy init_cmdline
+ init_bin = None
+ log.warning(
+ "Unable to fetch data from /proc/1/cmdline"
+ )
if init_bin is not None and init_bin.endswith('bin/init'):
supported_inits = (six.b('upstart'), six.b('sysvinit'), six.b('systemd'))
edge_len = max(len(x) for x in supported_inits) - 1 | Catch a possible error, especially trigered in unit tests |
diff --git a/SingularityService/src/main/java/com/hubspot/singularity/scheduler/SingularityScheduler.java b/SingularityService/src/main/java/com/hubspot/singularity/scheduler/SingularityScheduler.java
index <HASH>..<HASH> 100644
--- a/SingularityService/src/main/java/com/hubspot/singularity/scheduler/SingularityScheduler.java
+++ b/SingularityService/src/main/java/com/hubspot/singularity/scheduler/SingularityScheduler.java
@@ -471,7 +471,7 @@ public class SingularityScheduler {
stateCache.getActiveTaskIds().remove(taskId);
}
- if (task.isPresent() && task.get().getTaskRequest().getRequest().isLoadBalanced()) {
+ if (task.isPresent() && task.get().getTaskRequest().getRequest().isLoadBalanced() || !task.isPresent()) {
taskManager.createLBCleanupTask(taskId);
} | still add to lb cleanup queue in the case where task is not found |
diff --git a/lib/gcli/types/date.js b/lib/gcli/types/date.js
index <HASH>..<HASH> 100644
--- a/lib/gcli/types/date.js
+++ b/lib/gcli/types/date.js
@@ -111,9 +111,7 @@ DateType.prototype.decrement = function(value) {
};
DateType.prototype.increment = function(value) {
- console.log('increment!');
if (!this._isValidDate(value)) {
- console.log('invalid anyway');
return this._getDefault();
}
var newValue = new Date(value); | date-<I>: Remove two console.log used for debugging |
diff --git a/test/integration/imagechange_buildtrigger_test.go b/test/integration/imagechange_buildtrigger_test.go
index <HASH>..<HASH> 100644
--- a/test/integration/imagechange_buildtrigger_test.go
+++ b/test/integration/imagechange_buildtrigger_test.go
@@ -1,6 +1,8 @@
package integration
import (
+ "testing"
+
authorizationapi "github.com/openshift/origin/pkg/authorization/api"
buildapi "github.com/openshift/origin/pkg/build/api"
"github.com/openshift/origin/pkg/client"
@@ -12,7 +14,6 @@ import (
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
watchapi "k8s.io/apimachinery/pkg/watch"
kapi "k8s.io/kubernetes/pkg/api"
- "testing"
)
const (
@@ -310,7 +311,7 @@ func runTest(t *testing.T, testname string, projectAdminClient *client.Client, i
}
event = <-buildWatch.ResultChan()
if e, a := watchapi.Modified, event.Type; e != a {
- t.Fatalf("expected watch event type %s, got %s", e, a)
+ t.Fatalf("expected watch event type %s, got %s: %#v", e, a, event.Object)
}
newBuild = event.Object.(*buildapi.Build)
// Make sure the resolution of the build's docker image pushspec didn't mutate the persisted API object | Add logging to imagechange build trigger |
diff --git a/lxd/project/permissions.go b/lxd/project/permissions.go
index <HASH>..<HASH> 100644
--- a/lxd/project/permissions.go
+++ b/lxd/project/permissions.go
@@ -134,6 +134,37 @@ func checkRestrictionsOnVolatileConfig(project *api.Project, instanceType instan
return nil
}
+// AllowVolumeCreation returns an error if any project-specific limit or
+// restriction is violated when creating a new custom volume in a project.
+func AllowVolumeCreation(tx *db.ClusterTx, projectName string, req api.StorageVolumesPost) error {
+ info, err := fetchProject(tx, projectName, true)
+ if err != nil {
+ return err
+ }
+
+ if info == nil {
+ return nil
+ }
+
+ // If "limits.disk" is not set, there's nothing to do.
+ if info.Project.Config["limits.disk"] == "" {
+ return nil
+ }
+
+ // Add the volume being created.
+ info.Volumes = append(info.Volumes, db.StorageVolumeArgs{
+ Name: req.Name,
+ Config: req.Config,
+ })
+
+ err = checkRestrictionsAndAggregateLimits(tx, info)
+ if err != nil {
+ return err
+ }
+
+ return nil
+}
+
// Check that we would not violate the project limits or restrictions if we
// were to commit the given instances and profiles.
func checkRestrictionsAndAggregateLimits(tx *db.ClusterTx, info *projectInfo) error { | lxd/project: Add AllowVolumeCreation() to check limits upon volume creation |
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -115,11 +115,7 @@ setup(
"external = bob.bio.spear.config.annotator.external:annotator", # external VAD
],
"bob.bio.preprocessor": [
- "cqcc20p = bob.bio.spear.config.extractor.cqcc20:cqcc20", # Empty preprocessor for CQCC features
- "energy-2gauss = bob.bio.spear.config.preprocessor.energy_2gauss:preprocessor", # two Gauss energy
- "energy-thr = bob.bio.spear.config.preprocessor.energy_thr:preprocessor", # thresholded energy
- "mod-4hz = bob.bio.spear.config.preprocessor.mod_4hz:preprocessor", # mod_4hz
- "external = bob.bio.spear.config.preprocessor.external:preprocessor", # external VAD
+ "cqcc20p = bob.bio.spear.config.extractor.cqcc20:cqcc20", # Empty preprocessor for CQCC features
],
"bob.bio.extractor": [
"cqcc20e = bob.bio.spear.config.extractor.cqcc20:cqcc20", # Extractor (reads Matlab files) for CQCC features | Remove annotators from preprocessor resources |
diff --git a/src/Providers/TelegramServiceProvider.php b/src/Providers/TelegramServiceProvider.php
index <HASH>..<HASH> 100644
--- a/src/Providers/TelegramServiceProvider.php
+++ b/src/Providers/TelegramServiceProvider.php
@@ -32,7 +32,7 @@ class TelegramServiceProvider extends ServiceProvider
$this->mergeConfigFrom(__DIR__.'/../../stubs/telegram.php', 'botman.telegram');
$this->commands([
- TelegramRegisterCommand::class
+ TelegramRegisterCommand::class,
]);
}
} | Apply fixes from StyleCI (#<I>) |
diff --git a/ella/newman/media/js/generic.suggest.js b/ella/newman/media/js/generic.suggest.js
index <HASH>..<HASH> 100644
--- a/ella/newman/media/js/generic.suggest.js
+++ b/ella/newman/media/js/generic.suggest.js
@@ -348,7 +348,7 @@ GenericSuggestLib = {};
var sug_url = $input.attr('rel');
// The rel attribute is a relative address.
// If we're using the content-by-hash library, we want it to be relative to what's in the hash.
- if (window.adr && $.isFunction(adr)) sug_url = adr(sug_url, { just_get: 1 });
+ if (window.adr && $.isFunction(adr)) sug_url = adr(sug_url, { just_get: 'address' });
if (offset == null || offset < 0)
offset = 0; | Reflected the change in adr() to generic.suggest. |
diff --git a/src/Argument/Argument.php b/src/Argument/Argument.php
index <HASH>..<HASH> 100644
--- a/src/Argument/Argument.php
+++ b/src/Argument/Argument.php
@@ -354,6 +354,14 @@ class Argument
}
/**
+ * @deprecated use values() instead.
+ */
+ public function valueArray()
+ {
+ return $this->values();
+ }
+
+ /**
* Set an argument's value based on its command line entry.
*
* Argument values are type cast based on the value of $castTo. | Avoid breaking BC in version <I> |
diff --git a/src/Application.php b/src/Application.php
index <HASH>..<HASH> 100644
--- a/src/Application.php
+++ b/src/Application.php
@@ -30,7 +30,7 @@ class Application extends BaseApplication
/**
* @var string
*/
- const VERSION = '1.0.0-beta4';
+ const VERSION = '1.0.0-beta5';
/**
* @var string | [console] Tag <I>-beta5 version. (#<I>) |
diff --git a/python/thunder/rdds/data.py b/python/thunder/rdds/data.py
index <HASH>..<HASH> 100644
--- a/python/thunder/rdds/data.py
+++ b/python/thunder/rdds/data.py
@@ -127,6 +127,7 @@ class Data(object):
This calls the Spark cache() method on the underlying RDD.
"""
self.rdd.cache()
+ return self
def filterOnKeys(self, func):
""" Filter records by applying a function to keys """ | Cache needs to return self for proper chaining |
diff --git a/src/Illuminate/Database/Eloquent/Relations/MorphTo.php b/src/Illuminate/Database/Eloquent/Relations/MorphTo.php
index <HASH>..<HASH> 100644
--- a/src/Illuminate/Database/Eloquent/Relations/MorphTo.php
+++ b/src/Illuminate/Database/Eloquent/Relations/MorphTo.php
@@ -159,12 +159,14 @@ class MorphTo extends BelongsTo {
$key = $instance->getKeyName();
- if ($this->withTrashed && $instance->newQuery()->getMacro('withTrashed') !== null)
+ $query = $instance->newQuery();
+
+ if ($this->withTrashed && $query->getMacro('withTrashed') !== null)
{
- $instance = $instance->withTrashed();
+ $query = $query->withTrashed();
}
- return $instance->whereIn($key, $this->gatherKeysByType($type)->all())->get();
+ return $query->whereIn($key, $this->gatherKeysByType($type)->all())->get();
}
/** | [<I>] Always operate with query builder to prevent possible bugs |
diff --git a/grimoire_elk/elastic_items.py b/grimoire_elk/elastic_items.py
index <HASH>..<HASH> 100644
--- a/grimoire_elk/elastic_items.py
+++ b/grimoire_elk/elastic_items.py
@@ -81,11 +81,6 @@ class ElasticItems():
from .utils import get_connector_name
return get_connector_name(type(self))
- def get_connector_backend_name(self):
- """ Find the name for the current connector """
- from .utils import get_connector_backend_name
- return get_connector_backend_name(type(self))
-
# Items generator
def fetch(self, _filter=None):
""" Fetch the items from raw or enriched index. An optional _filter
@@ -142,14 +137,6 @@ class ElasticItems():
filters = self.get_repository_filter_raw(term=True)
filters = json.dumps(filters)
- # Filter also using the backend_name to let a raw index with items
- # from different backends (arthur common raw index)
- filters += '''
- , {"term":
- { "backend_name":"%s" }
- }
- ''' % (self.get_connector_backend_name())
-
if self.filter_raw:
filters += '''
, {"term": | [arthur] Remove filtering using the backend name
The common arthur ES index with all raw items is not used anymore.
GrimoireELK will read all items from redis and distribute them
in different raw indexes. |
diff --git a/src/java/org/apache/cassandra/service/AbstractCassandraDaemon.java b/src/java/org/apache/cassandra/service/AbstractCassandraDaemon.java
index <HASH>..<HASH> 100644
--- a/src/java/org/apache/cassandra/service/AbstractCassandraDaemon.java
+++ b/src/java/org/apache/cassandra/service/AbstractCassandraDaemon.java
@@ -93,7 +93,11 @@ public abstract class AbstractCassandraDaemon implements CassandraDaemon
}
});
- // check the system table for mismatched partitioner.
+ // check the system table to keep user from shooting self in foot by changing partitioner, cluster name, etc.
+ // we do a one-off scrub of the system table first; we can't load the list of the rest of the tables,
+ // until system table is opened.
+ for (CFMetaData cfm : DatabaseDescriptor.getTableMetaData(Table.SYSTEM_TABLE).values())
+ ColumnFamilyStore.scrubDataDirectories(Table.SYSTEM_TABLE, cfm.cfName);
try
{
SystemTable.checkHealth();
@@ -115,7 +119,7 @@ public abstract class AbstractCassandraDaemon implements CassandraDaemon
System.exit(100);
}
- // clean up debris.
+ // clean up debris in the rest of the tables
for (String table : DatabaseDescriptor.getTables())
{
for (CFMetaData cfm : DatabaseDescriptor.getTableMetaData(table).values()) | scrub System keyspace before opening it
patch by jbellis; reviewed by gdusbabek for CASSANDRA-<I>
git-svn-id: <URL> |
diff --git a/gogdb_test.go b/gogdb_test.go
index <HASH>..<HASH> 100644
--- a/gogdb_test.go
+++ b/gogdb_test.go
@@ -100,13 +100,32 @@ err2 cerr2
ResetIOs()
prbgtest()
So(ErrString(), ShouldEqual,
- ` [prbgtest:111] (func.010:101)
+ ` [prbgtest:126] (func.010:101)
prbgtest content
`)
})
+
+ Convey("Test pdbg print with custom instance", func() {
+ apdbg := NewPdbg(SetBuffers)
+ apdbg.Pdbgf("test2")
+ So(apdbg.ErrString(), ShouldEqual,
+ `[func.011:110]
+ test2
+`)
+ apdbg.ResetIOs()
+ prbgtestCustom(apdbg)
+ So(apdbg.ErrString(), ShouldEqual,
+ ` [prbgtestCustom:130] (func.011:116)
+ prbgtest content2
+`)
+ })
})
}
func prbgtest() {
Pdbgf("prbgtest content")
}
+
+func prbgtestCustom(pdbg *Pdbg) {
+ pdbg.Pdbgf("prbgtest content2")
+} | Test pdbg print with custom instance |
diff --git a/lib/Predis.php b/lib/Predis.php
index <HASH>..<HASH> 100644
--- a/lib/Predis.php
+++ b/lib/Predis.php
@@ -448,9 +448,11 @@ abstract class Command implements ICommand {
$key = $this->_arguments[0];
$start = strpos($key, '{');
- $end = strpos($key, '}');
- if ($start !== false && $end !== false) {
- $key = substr($key, ++$start, $end - $start);
+ if ($start !== false) {
+ $end = strpos($key, '}');
+ if ($end !== false) {
+ $key = substr($key, ++$start, $end - $start);
+ }
}
$this->_hash = $distributor->generateKey($key); | Optimize when a pair of curly brackets for key tagging is missing. |
diff --git a/environs/cloudinit/cloudinit.go b/environs/cloudinit/cloudinit.go
index <HASH>..<HASH> 100644
--- a/environs/cloudinit/cloudinit.go
+++ b/environs/cloudinit/cloudinit.go
@@ -400,6 +400,8 @@ func (cfg *MachineConfig) dataFile(name string) string {
return path.Join(cfg.DataDir, name)
}
+// TargetRelease returns a string suitable for use with apt-get --target-release
+// based on the machines series.
func (cfg *MachineConfig) TargetRelease() string {
targetRelease := ""
if cfg.Tools.Version.Series == "precise" { | cloudinit: documented TargetRelease |
diff --git a/AI/toolbox/Toolbox.py b/AI/toolbox/Toolbox.py
index <HASH>..<HASH> 100644
--- a/AI/toolbox/Toolbox.py
+++ b/AI/toolbox/Toolbox.py
@@ -74,9 +74,21 @@ class Toolbox():
txtImport = 'import ' + tool['file']
#exec txtImport
- mod = map(__import__, [tool['file']])
+ # mod = map(__import__, [tool['file']])
+ mod = __import__( tool['file'])
+ #mod = __import__( os.path.basename(tool['file']).split('.')[0])
+
print(tool['function'])
+ txtFunction = os.path.basename(tool['file']).split('.')[0] + '.' + tool['function']
+ print(txtFunction)
+
+ #exec txtFunction
+ func = getattr(mod, tool['function'])
+ func()
+
+
+
#import importlib
#importlib.import_module(tool['file'])
#importlib.import_module('solve_knapsack') | working on running external function dynamically - still buggy |
diff --git a/spanpropagator.go b/spanpropagator.go
index <HASH>..<HASH> 100644
--- a/spanpropagator.go
+++ b/spanpropagator.go
@@ -46,7 +46,7 @@ func (s *tracerImpl) PropagateSpanAsBinary(
sc := sp.(*spanImpl).raw.StandardContext
var err error
var sampledByte byte = 0
- if !sc.Sampled {
+ if sc.Sampled {
sampledByte = 1
} | Fix the fix in #<I>
namely setting Sampled on the child whenever
it was not set on the parent.
Caught by the test from #<I>. Much can be won
by a simple one-liner setup on Travis/CircleCI,
pre-stabilization or not. The current situation
is frankly a little insane.
Happy to set that up, just need the corresponding
rights on the repository. |
diff --git a/redisco/containers.py b/redisco/containers.py
index <HASH>..<HASH> 100644
--- a/redisco/containers.py
+++ b/redisco/containers.py
@@ -166,9 +166,6 @@ class Set(Container):
self.db.sdiffstore(self.key, [self.key, other.key])
return self
- def __repr__(self):
- return u"<redisco.containers.Set(key=%s)>" % self.key
-
def all(self):
return self.db.smembers(self.key)
members = property(all)
@@ -346,7 +343,7 @@ class TypedList(object):
if self._redisco_model:
return filter(lambda o: o is not None, [self.klass.objects.get_by_id(v) for v in values])
else:
- return [self.klass(value, *self._klass_args, **self._klass_kwargs) for v in values]
+ return [self.klass(v, *self._klass_args, **self._klass_kwargs) for v in values]
def all(self):
"""Returns all items in the list."""
@@ -382,7 +379,7 @@ class TypedList(object):
yield self[i]
def __repr__(self):
- return repr(self.typecast_iter(self.members))
+ return repr(self.typecast_iter(self.list))
class SortedSet(Container): | Fixes pylint errors
E:<I>:Set.__repr__: method already defined line <I>
E:<I>:TypedList.typecast_iter: Undefined variable 'value'
E:<I>:TypedList.__repr__: Instance of 'TypedList' has no 'members' member |
diff --git a/girder/events.py b/girder/events.py
index <HASH>..<HASH> 100644
--- a/girder/events.py
+++ b/girder/events.py
@@ -321,6 +321,8 @@ daemon = ForegroundEventsDaemon()
def setupDaemon():
global daemon
+ daemon.stop()
+
if config.getConfig()['server'].get('disable_event_daemon', False):
daemon = ForegroundEventsDaemon()
else: | Stop daemon before reassigning it |
diff --git a/piazza_api/__init__.py b/piazza_api/__init__.py
index <HASH>..<HASH> 100644
--- a/piazza_api/__init__.py
+++ b/piazza_api/__init__.py
@@ -1,3 +1,3 @@
from piazza_api.piazza import Piazza
-__version__ = "0.3.0"
+__version__ = "0.4.0"
diff --git a/piazza_api/network.py b/piazza_api/network.py
index <HASH>..<HASH> 100644
--- a/piazza_api/network.py
+++ b/piazza_api/network.py
@@ -1,3 +1,5 @@
+from collections import namedtuple
+
from .rpc import PiazzaRPC
@@ -52,6 +54,19 @@ class Network(object):
self._rpc = PiazzaRPC(network_id=self._nid)
self._rpc.cookies = cookies
+ ff = namedtuple('FeedFilters', ['unread', 'following', 'folder'])
+ self._feed_filters = ff(UnreadFilter, FollowingFilter, FolderFilter)
+
+ @property
+ def feed_filters(self):
+ """namedtuple instance containing FeedFilter classes for easy access
+
+ :rtype: namedtuple
+ :returns: namedtuple with unread, following, and folder attributes
+ mapping to filters
+ """
+ return self._feed_filters
+
#########
# Posts #
######### | feat(user): Add feed_filters property to Network for easy access |
diff --git a/pyshtools/classes.py b/pyshtools/classes.py
index <HASH>..<HASH> 100644
--- a/pyshtools/classes.py
+++ b/pyshtools/classes.py
@@ -31,10 +31,10 @@ from _SHTOOLS import *
#=========== COEFFICIENT CLASSES ===============================================
#===============================================================================
-
class SHCoeffs(object):
"""
+ EXPERIMENTAL:
Spherical Harmonics Coefficient class. Coefficients can be initialized
using one of the constructor methods:
@@ -301,6 +301,7 @@ class SHComplexCoefficients(SHCoeffs):
class SHGrid(object):
"""
+ EXPERIMENTAL:
Spherical Grid Class that can deal with spatial data on the sphere that is
defined on different grids. Can be constructed from:
@@ -440,6 +441,7 @@ class GLQGrid(SHGrid):
#==== SPHERICAL HARMONICS WINDOW FUNCTION CLASS ====
class SHWindow(object):
"""
+ EXPERIMENTAL:
This class contains collections of spherical harmonics windows that
provide spectral estimates about a specific region
""" | updated classes doc with EXPERIMENTAL |
diff --git a/backends/repeater.js b/backends/repeater.js
index <HASH>..<HASH> 100644
--- a/backends/repeater.js
+++ b/backends/repeater.js
@@ -4,7 +4,7 @@ var util = require('util'),
function RepeaterBackend(startupTime, config, emitter){
var self = this;
this.config = config.repeater || [];
- this.sock = dgram.createSocket('udp6');
+ this.sock = dgram.createSocket('udp4');
// attach
emitter.on('packet', function(packet, rinfo) { self.process(packet, rinfo); }); | making the repeater backend ipv4 only again
some people reported problems with it, so we have to look into a better
configurable solution for this. |
diff --git a/model/qti/Service.php b/model/qti/Service.php
index <HASH>..<HASH> 100755
--- a/model/qti/Service.php
+++ b/model/qti/Service.php
@@ -288,7 +288,7 @@ class Service extends ConfigurableService
return $oldItemContentPropertyValues;
} catch (Exception $e) {
- common_Logger::e('Item content backup failed: ' . $e->getMessage());
+ $this->logError('Item content backup failed: ' . $e->getMessage());
throw new common_Exception("QTI Item backup failed. Item uri - " . $item->getUri());
}
}
@@ -306,7 +306,7 @@ class Service extends ConfigurableService
$item->editPropertyValueByLg($itemContentProperty, $itemContentPropertyValue, $language);
}
} catch (Exception $e) {
- common_Logger::e('Rollback item error: ' . $e->getMessage());
+ $this->logError('Rollback item error: ' . $e->getMessage());
throw new common_Exception(sprintf('Cannot rollback item. Item uri - %s :: Backup folders - %s ', $item->getUri(), json_encode($backUpNames)));
}
} | Use logger trait instead of static logger methods. |
diff --git a/lib/config.js b/lib/config.js
index <HASH>..<HASH> 100644
--- a/lib/config.js
+++ b/lib/config.js
@@ -508,6 +508,17 @@ exports.extend = function(newConf) {
variableProperties.forEach(function(name) {
config[name] = newConf[name] || pkgConf[name];
});
+ var extra = newConf.extra;
+ if (extra && typeof extra === 'string') {
+ extra = readFileText(extra);
+ try {
+ extra = extra && JSON.parse(extra);
+ if (extra && typeof extra === 'object') {
+ config.pluginsDataMap = extend({}, config.pluginsDataMap, extra);
+ }
+ } catch (e) {}
+ extra = null;
+ }
var customHandler = newConf.customHandler || newConf.customHandle;
if (typeof customHandler === 'function') {
config.customHandler = customHandler; | refactor: read json from extra |
diff --git a/i18n-module.php b/i18n-module.php
index <HASH>..<HASH> 100644
--- a/i18n-module.php
+++ b/i18n-module.php
@@ -252,7 +252,7 @@ class yoast_i18n {
if ( $body ) {
$body = json_decode( $body );
- if ( empty( $body->success ) ) {
+ if ( empty( $body->success ) || empty( $body->translation_sets ) ) {
return null;
}
foreach ( $body->translation_sets as $set ) { | Tightened check for valid data in remote API response. |
diff --git a/query.go b/query.go
index <HASH>..<HASH> 100644
--- a/query.go
+++ b/query.go
@@ -3,8 +3,8 @@ package notifications
import (
"encoding/json"
- context "github.com/jbenet/go-ipfs/Godeps/_workspace/src/golang.org/x/net/context"
- peer "github.com/jbenet/go-ipfs/p2p/peer"
+ context "github.com/ipfs/go-ipfs/Godeps/_workspace/src/golang.org/x/net/context"
+ peer "github.com/ipfs/go-ipfs/p2p/peer"
)
const RoutingQueryKey = "RoutingQueryEvent" | Reorged imports from jbenet/go-ipfs to ipfs/go-ipfs
- Modified Godeps/Godeps.json by hand
- [TEST] Updated welcome docs hash to sharness
- [TEST] Updated contact doc
- [TEST] disabled breaking test (t<I>-repo refs local) |
diff --git a/abaaso.js b/abaaso.js
index <HASH>..<HASH> 100644
--- a/abaaso.js
+++ b/abaaso.js
@@ -1453,8 +1453,8 @@ var abaaso = function(){
break;
default:
value = new String(value);
- pattern = (pattern[args[i]]) ? pattern[args[i]] : args[i];
- if (!pattern.test(value)) {
+ var p = (pattern[args[i]]) ? pattern[args[i]] : args[i];
+ if (!p.test(value)) {
invalid.push(i);
exception = true;
} | Fixed an override mistake in validate.test() |
diff --git a/great_expectations/dataset/pandas_dataset.py b/great_expectations/dataset/pandas_dataset.py
index <HASH>..<HASH> 100644
--- a/great_expectations/dataset/pandas_dataset.py
+++ b/great_expectations/dataset/pandas_dataset.py
@@ -883,7 +883,10 @@ class PandasDataset(MetaPandasDataset, pd.DataFrame):
if max_value:
max_value = parse(max_value)
- temp_column = column.map(parse)
+ try:
+ temp_column = column.map(parse)
+ except TypeError as e:
+ temp_column = column
else:
temp_column = column | Catch TypeError if column is already parsed |
diff --git a/features/support/paths.rb b/features/support/paths.rb
index <HASH>..<HASH> 100644
--- a/features/support/paths.rb
+++ b/features/support/paths.rb
@@ -54,8 +54,6 @@ module NavigationHelpers
admin_configuration_path(:format => format)
when /extensions/i
admin_extensions_path(:format => format)
- when /export/i
- export_path(:format => format)
else
raise "Can't find mapping from \"#{page_name}\" to a path.\n" +
"Now, go and add a mapping in #{__FILE__}" | move cuke export_path to export extension |
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -37,8 +37,9 @@ def clean_build(dist):
if os.path.isdir('src/feat'):
feat_dist = setup(**feat_args)
- clean_build(feat_dist)
if os.path.isdir('src/flt'):
- os.mkdir('build')
+ if os.path.isdir('src/feat'):
+ clean_build(feat_dist)
+ os.mkdir('build')
setup(**flt_args) | Only clean the build directory when creating all packages |
diff --git a/worker/worker.go b/worker/worker.go
index <HASH>..<HASH> 100644
--- a/worker/worker.go
+++ b/worker/worker.go
@@ -61,7 +61,7 @@ const (
// check will be anywhere in the range [0, monitor interval]; this is
// randomized so that workers that start at the same time will not
// contest the same locks.
- defaultMonitorInterval = 120 * time.Second
+ defaultMonitorInterval = 15 * time.Second
)
// Returns the name of the working queue based on the worker's processing | worker: lower defaultMonitorInterval to <I>sec |
diff --git a/openquake/calculators/scenario_damage.py b/openquake/calculators/scenario_damage.py
index <HASH>..<HASH> 100644
--- a/openquake/calculators/scenario_damage.py
+++ b/openquake/calculators/scenario_damage.py
@@ -88,7 +88,6 @@ def scenario_damage(riskinputs, param, monitor):
for ri in riskinputs:
# here instead F32 floats are ok
acc = [] # (aid, eid, lid, ds...)
- ri.hazard_getter.init()
for out in ri.gen_outputs(crmodel, monitor):
r = out.rlzi
ne = num_events[r] # total number of events | Removed unused line [skip CI] |
diff --git a/lib/searchkick/index.rb b/lib/searchkick/index.rb
index <HASH>..<HASH> 100644
--- a/lib/searchkick/index.rb
+++ b/lib/searchkick/index.rb
@@ -177,7 +177,6 @@ module Searchkick
client.search(
index: name,
body: {
- fields: [],
query: {match_all: {}},
size: 0
} | Fixed total_doc method for ES 5 |
diff --git a/provider/lxd/credentials.go b/provider/lxd/credentials.go
index <HASH>..<HASH> 100644
--- a/provider/lxd/credentials.go
+++ b/provider/lxd/credentials.go
@@ -410,10 +410,11 @@ func (p environProviderCredentials) finalizeRemoteCredential(
}); err != nil {
return nil, errors.Trace(err)
}
+ fmt.Fprintln(output, "Uploaded certificate to LXD server.")
+ } else {
+ fmt.Fprintln(output, "Reusing certificate from LXD server.")
}
- fmt.Fprintln(output, "Uploaded certificate to LXD server.")
-
lxdServer, _, err := server.GetServer()
if err != nil {
return nil, errors.Trace(err) | Be clear about output message
We should be clear about if the cert is uploaded or not. This will
help us debug the output in the future. |
diff --git a/usr/share/lib/img_proof/tests/SLES/test_sles_motd.py b/usr/share/lib/img_proof/tests/SLES/test_sles_motd.py
index <HASH>..<HASH> 100644
--- a/usr/share/lib/img_proof/tests/SLES/test_sles_motd.py
+++ b/usr/share/lib/img_proof/tests/SLES/test_sles_motd.py
@@ -2,6 +2,8 @@ def test_sles_motd(host, get_release_value):
motd = host.file('/etc/motd')
version = get_release_value('VERSION')
assert version
+ assert motd.exists
+ assert motd.is_file
assert motd.contains(
'SUSE Linux Enterprise Server {0}'.format(
version.replace('-', ' ') | Check if motd file exists and is a file
Before checking the file contents. To make an error state more
clear. |
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -22,7 +22,7 @@ setup(
author='Chris Kiehl',
author_email='audionautic@gmail.com',
package_data={
- '': ['*.txt', '*.png', '*.jpg', '*.json']
+ '': ['*.txt', '*.png', '*.jpg', '*.json', '*.ico']
},
packages=find_packages(),
url='http://pypi.python.org/pypi/Gooey/', | Fixing error of 'icon.ico' not being present in filesystem when running mockapplication |
diff --git a/pandas/core/frame.py b/pandas/core/frame.py
index <HASH>..<HASH> 100644
--- a/pandas/core/frame.py
+++ b/pandas/core/frame.py
@@ -1074,7 +1074,7 @@ class DataFrame(NDFrame):
indexer = self.columns.get_indexer(key)
mask = indexer == -1
if mask.any():
- raise Exception("No column(s) named: %s" % str(key[mask]))
+ raise KeyError("No column(s) named: %s" % str(key[mask]))
return self.reindex(columns=key)
def _slice(self, slobj, axis=0):
@@ -1215,6 +1215,13 @@ class DataFrame(NDFrame):
"""
return NDFrame.pop(self, item)
+ def get(self, column, default=None):
+ try:
+ return self[column]
+ except KeyError:
+ return default
+
+
# to support old APIs
@property
def _series(self): | (1) add a get() method like dicts have, (2) throw KeyError, not Exception, when a column isn't found |
diff --git a/scripts/release_helper/go.py b/scripts/release_helper/go.py
index <HASH>..<HASH> 100644
--- a/scripts/release_helper/go.py
+++ b/scripts/release_helper/go.py
@@ -6,7 +6,7 @@ import os
_GO_OWNER = {'ArcturusZhang'}
# 'github assignee': 'token'
-_ASSIGNEE_TOKEN_GO = {'ArcturusZhang': os.getenv('PYTHON_ZED_TOKEN')}
+_ASSIGNEE_TOKEN_GO = {'ArcturusZhang': os.getenv('GO_DAPENGZHANG_TOKEN')}
class IssueProcessGo(IssueProcess): | update for Go (#<I>) |
diff --git a/lib/Layout.js b/lib/Layout.js
index <HASH>..<HASH> 100644
--- a/lib/Layout.js
+++ b/lib/Layout.js
@@ -482,7 +482,7 @@ function Sequence (elt_layout, count, property) {
/** The number of elements in the sequence. */
this.count = count;
- Object.freeze();
+ Object.freeze(this);
}
Sequence.prototype = Object.create(Layout.prototype);
Sequence.prototype.constructor = Sequence;
@@ -796,7 +796,7 @@ function VariantLayout (union,
* VariantLayout#union|union}. */
this.layout = layout;
- Object.freeze();
+ Object.freeze(this);
}
VariantLayout.prototype = Object.create(Layout.prototype);
VariantLayout.prototype.constructor = VariantLayout; | Layout: fix mis-application of Object.freeze
This was freezing the constructor which was not intended and is not
permitted in ES5. |
diff --git a/lib/phusion_passenger/platform_info/cxx_portability.rb b/lib/phusion_passenger/platform_info/cxx_portability.rb
index <HASH>..<HASH> 100644
--- a/lib/phusion_passenger/platform_info/cxx_portability.rb
+++ b/lib/phusion_passenger/platform_info/cxx_portability.rb
@@ -165,6 +165,8 @@ private
# http://groups.google.com/group/phusion-passenger/t/6b904a962ee28e5c
# http://groups.google.com/group/phusion-passenger/browse_thread/thread/aad4bd9d8d200561
flags << '-DBOOST_SP_USE_PTHREADS'
+ elsif os_name == "linux"
+ flags << '-lrt'
end
flags << '-DHAS_ALLOCA_H' if has_alloca_h? | Link to librt on Linux because Boost now uses clock_gettime() |
diff --git a/src/UnresolvedValueException.php b/src/UnresolvedValueException.php
index <HASH>..<HASH> 100644
--- a/src/UnresolvedValueException.php
+++ b/src/UnresolvedValueException.php
@@ -44,28 +44,13 @@ class UnresolvedValueException extends \RuntimeException
*/
private static function getParameterName(\ReflectionParameter $parameter) : string
{
- return self::getTypeHint($parameter) . '$' . $parameter->getName();
- }
+ $parameterType = '';
- /**
- * Helper class for getParameterName().
- *
- * @param \ReflectionParameter $parameter
- *
- * @return string
- */
- private static function getTypeHint(\ReflectionParameter $parameter) : string
- {
- if ($parameter->isArray()) {
- return 'array ';
+ if (null !== $type = $parameter->getType()) {
+ $parameterType = (string) $type . ' ';
}
- $class = $parameter->getClass();
- if ($class) {
- return $class->getName() . ' ';
- }
-
- return '';
+ return $parameterType . '$' . $parameter->getName();
}
/** | Use ReflectionParameter::getType() for exception messages
This correctly reports the scalar types as well. |
diff --git a/src/main/java/com/lookfirst/wepay/WePayApi.java b/src/main/java/com/lookfirst/wepay/WePayApi.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/lookfirst/wepay/WePayApi.java
+++ b/src/main/java/com/lookfirst/wepay/WePayApi.java
@@ -114,7 +114,18 @@ public class WePayApi {
@Override
public InputStream getData(String uri, String postJson, String token) throws IOException {
HttpURLConnection conn = getConnection(uri, postJson, token);
- return conn.getInputStream();
+ int responseCode = conn.getResponseCode();
+ if (responseCode >= 200 && responseCode < 300) {
+ // everything's cool
+ return conn.getInputStream();
+ } else if (responseCode >= 400 && responseCode < 600) {
+ // something's wrong - get the error stream instead
+ return conn.getErrorStream();
+ } else {
+ // this will throw an IOException for all other HTTP codes but Java doesn't know that
+ // so make it think you're returning something
+ return conn.getInputStream();
+ }
}
} | Added more robust response handling - return the inputStream for 2xx responses, errorStream for 4xx-5xx responses, and throw IOException for everything else |
diff --git a/annis-gui/src/main/java/annis/gui/admin/controller/UserController.java b/annis-gui/src/main/java/annis/gui/admin/controller/UserController.java
index <HASH>..<HASH> 100644
--- a/annis-gui/src/main/java/annis/gui/admin/controller/UserController.java
+++ b/annis-gui/src/main/java/annis/gui/admin/controller/UserController.java
@@ -79,6 +79,7 @@ public class UserController
public void passwordChanged(String userName, String newPassword)
{
model.setPassword(userName, newPassword);
+ view.setUserList(model.getUsers());
}
@Override | set the user list after the password was changed to update the list model (fixes #<I>) |
diff --git a/lib/rspec-puppet/support.rb b/lib/rspec-puppet/support.rb
index <HASH>..<HASH> 100644
--- a/lib/rspec-puppet/support.rb
+++ b/lib/rspec-puppet/support.rb
@@ -9,7 +9,18 @@ module RSpec::Puppet
end
def environment
- 'rp_env'
+ # unfreeze PUPPETVERSION because of https://github.com/bundler/bundler/issues/3187
+ ver = Gem::Version.new("#{Puppet::PUPPETVERSION}")
+ # Since applying a fix for PUP-5522 (puppet 3.8.5 and 4.3.2) puppet symbolizes environment names
+ # internally. The catalog cache needs to assume that the facts and other args do not change between
+ # runs, so we have to mirror this here. Puppet versions before the change require a string as environment
+ # name, or they fail with "Unsupported data type: 'Symbol' on node xyz"
+ # See https://github.com/rodjek/rspec-puppet/pull/354 and PUP-5743 for discussion of this
+ if (Gem::Version.new('3.8.5') <= ver && ver < Gem::Version.new('4.0.0')) || Gem::Version.new('4.3.2') <= ver
+ :rp_env
+ else
+ 'rp_env'
+ end
end
def load_catalogue(type) | (PUP-<I>) adapt to new environment name semantics
Since applying a fix for PUP-<I> (puppet <I> and <I>) puppet symbolizes environment names
internally. The catalog cache needs to assume that the facts and other args do not change between
runs, so we have to mirror this here. Puppet versions before the change require a string as environment
name, or they fail with "Unsupported data type: 'Symbol' on node xyz"
See <URL> |
diff --git a/holoviews/core/util.py b/holoviews/core/util.py
index <HASH>..<HASH> 100644
--- a/holoviews/core/util.py
+++ b/holoviews/core/util.py
@@ -787,7 +787,7 @@ def stream_parameters(streams, no_duplicates=True, exclude=['name']):
If no_duplicates is enabled, a KeyError will be raised if there are
parameter name clashes across the streams.
"""
- param_groups = [s.params().keys() for s in streams]
+ param_groups = [s.contents.keys() for s in streams]
names = [name for group in param_groups for name in group]
if no_duplicates: | stream_parameters utility looks at contents not params |
diff --git a/core/objects.go b/core/objects.go
index <HASH>..<HASH> 100644
--- a/core/objects.go
+++ b/core/objects.go
@@ -26,7 +26,7 @@ const (
)
const (
- ChallengeTypeSimpleHTTPS = "simpleHTTPS"
+ ChallengeTypeSimpleHTTPS = "simpleHttps"
ChallengeTypeDVSNI = "dvsni"
ChallengeTypeDNS = "dns"
ChallengeTypeRecoveryToken = "recoveryToken" | Fix non-compliance issue stemming from PR #<I>.
Caught by @kuba, thanks! |
diff --git a/trustar/report_client.py b/trustar/report_client.py
index <HASH>..<HASH> 100644
--- a/trustar/report_client.py
+++ b/trustar/report_client.py
@@ -63,8 +63,8 @@ class ReportClient(object):
found by adjusting the ``from_time`` and ``to_time`` parameters.
Note: This endpoint will only return reports from a time window of maximum size of 2 weeks. If you give a
- time window larger than 2 weeks, it will pull reports starting at 2 weeks before the “to” date, through the
- “to” date.
+ time window larger than 2 weeks, it will pull reports starting at 2 weeks before the "to" date, through the
+ "to" date.
:param boolean is_enclave: restrict reports to specific distribution type (optional - by default all accessible
reports are returned). | fix ascii in docstring character |
diff --git a/salt/modules/git.py b/salt/modules/git.py
index <HASH>..<HASH> 100644
--- a/salt/modules/git.py
+++ b/salt/modules/git.py
@@ -273,10 +273,10 @@ def _git_run(command, cwd=None, user=None, password=None, identity=None,
if not salt.utils.is_windows() and 'GIT_SSH' in env:
os.remove(env['GIT_SSH'])
- # Cleanup the temporary identify file
+ # Cleanup the temporary identity file
if tmp_identity_file and os.path.exists(tmp_identity_file):
- log.debug('Removing identify file {0}'.format(tmp_identity_file))
- #__salt__['file.remove'](tmp_identity_file)
+ log.debug('Removing identity file {0}'.format(tmp_identity_file))
+ __salt__['file.remove'](tmp_identity_file)
# If the command was successful, no need to try additional IDs
if result['retcode'] == 0: | Uncomment the line that removes the temporary identity file. |
diff --git a/activerecord/lib/active_record/connection_handling.rb b/activerecord/lib/active_record/connection_handling.rb
index <HASH>..<HASH> 100644
--- a/activerecord/lib/active_record/connection_handling.rb
+++ b/activerecord/lib/active_record/connection_handling.rb
@@ -131,7 +131,7 @@ module ActiveRecord
def remove_connection(name = nil)
name ||= @connection_specification_name if defined?(@connection_specification_name)
- # if removing a connection that have a pool, we reset the
+ # if removing a connection that has a pool, we reset the
# connection_specification_name so it will use the parent
# pool.
if connection_handler.retrieve_connection_pool(name) | [ci skip] fix typo in ActiveRecord::ConnectionHandling |
diff --git a/Jakefile.js b/Jakefile.js
index <HASH>..<HASH> 100644
--- a/Jakefile.js
+++ b/Jakefile.js
@@ -7,8 +7,8 @@
var exec = require('child_process').exec,
commands = {
- test: './node_modules/.bin/mocha --ui tdd --reporter spec --colors ./test/complexityReport.js',
- lint: './node_modules/.bin/jshint ./src --config config/jshint.json',
+ test: 'node node_modules/mocha/bin/mocha --ui tdd --reporter spec --colors ./test/complexityReport.js',
+ lint: 'node node_modules/jshint/bin/jshint src --config config/jshint.json',
prepare: 'npm install'
}; | improved Windows support for 'jake test' |
diff --git a/nephele/nephele-server/src/main/java/eu/stratosphere/nephele/checkpointing/CheckpointDecisionCoordinator.java b/nephele/nephele-server/src/main/java/eu/stratosphere/nephele/checkpointing/CheckpointDecisionCoordinator.java
index <HASH>..<HASH> 100644
--- a/nephele/nephele-server/src/main/java/eu/stratosphere/nephele/checkpointing/CheckpointDecisionCoordinator.java
+++ b/nephele/nephele-server/src/main/java/eu/stratosphere/nephele/checkpointing/CheckpointDecisionCoordinator.java
@@ -102,7 +102,7 @@ public final class CheckpointDecisionCoordinator {
final List<CheckpointDecision> checkpointDecisionList = new SerializableArrayList<CheckpointDecision>();
synchronized (graph) {
- checkpointDecisionList.add(new CheckpointDecision(vertex.getID(), true));
+ checkpointDecisionList.add(new CheckpointDecision(vertex.getID(), false)); // @CHECKPOINT DECISION
checkpointDecisions.put(vertex.getAllocatedResource().getInstance(), checkpointDecisionList);
} | Disabled Checkpointing by default. |
diff --git a/js/gateio.js b/js/gateio.js
index <HASH>..<HASH> 100755
--- a/js/gateio.js
+++ b/js/gateio.js
@@ -2858,6 +2858,7 @@ module.exports = class gateio extends Exchange {
let filled = Precise.stringSub (amount, remaining);
let cost = this.safeNumber (order, 'filled_total');
let rawStatus = undefined;
+ let average = undefined;
if (put) {
remaining = amount;
filled = '0';
@@ -2868,6 +2869,7 @@ module.exports = class gateio extends Exchange {
type = isMarketOrder ? 'market' : 'limit';
side = Precise.stringGt (amount, '0') ? 'buy' : 'sell';
rawStatus = this.safeString (order, 'finish_as', 'open');
+ average = this.safeNumber(order, 'fill_price');
} else {
rawStatus = this.safeString (order, 'status');
}
@@ -2913,7 +2915,7 @@ module.exports = class gateio extends Exchange {
'side': side,
'price': this.parseNumber (price),
'stopPrice': this.safeNumber (trigger, 'price'),
- 'average': this.safeNumber (order, 'price'),
+ 'average': average,
'amount': this.parseNumber (Precise.stringAbs (amount)),
'cost': cost,
'filled': this.parseNumber (filled), | Set average price conditinally based on endpoint |
diff --git a/src/lib/exceptionist.js b/src/lib/exceptionist.js
index <HASH>..<HASH> 100644
--- a/src/lib/exceptionist.js
+++ b/src/lib/exceptionist.js
@@ -98,7 +98,7 @@ module.exports = {
},
stacktrace: stacktrace,
user: options.context.user || null,
- timestamp: (new Date).getTime(),
+ timestamp: parseInt((new Date).getTime() / 1000, 10),
level: null,
logger: null,
machine: null | Set correct Epoch timestamp in API payload |
diff --git a/pythonforandroid/recipes/kivy/__init__.py b/pythonforandroid/recipes/kivy/__init__.py
index <HASH>..<HASH> 100644
--- a/pythonforandroid/recipes/kivy/__init__.py
+++ b/pythonforandroid/recipes/kivy/__init__.py
@@ -7,7 +7,8 @@ import glob
class KivyRecipe(CythonRecipe):
# version = 'stable'
- version = 'master'
+ # version = 'master'
+ # version = '1.9.1'
url = 'https://github.com/kivy/kivy/archive/{version}.zip'
name = 'kivy' | Changed Kivy recipe to use <I>
This is necessary for now, as the Android build has been broken by the opengl changes. |
diff --git a/gns3server/compute/docker/docker_vm.py b/gns3server/compute/docker/docker_vm.py
index <HASH>..<HASH> 100644
--- a/gns3server/compute/docker/docker_vm.py
+++ b/gns3server/compute/docker/docker_vm.py
@@ -290,6 +290,7 @@ class DockerVM(BaseNode):
os.makedirs(os.path.join(path, "if-down.d"), exist_ok=True)
os.makedirs(os.path.join(path, "if-pre-up.d"), exist_ok=True)
os.makedirs(os.path.join(path, "if-post-down.d"), exist_ok=True)
+ os.makedirs(os.path.join(path, "interfaces.d"), exist_ok=True)
if not os.path.exists(os.path.join(path, "interfaces")):
with open(os.path.join(path, "interfaces"), "w+") as f: | Create `/etc/network/interfaces.d` in Docker container. Fixes #<I> |
diff --git a/subproviders/subprovider.js b/subproviders/subprovider.js
index <HASH>..<HASH> 100644
--- a/subproviders/subprovider.js
+++ b/subproviders/subprovider.js
@@ -11,12 +11,11 @@ function SubProvider() {
SubProvider.prototype.setEngine = function(engine) {
const self = this
- if (self._ranSetEngine) return
+ if (self.engine) return
self.engine = engine
engine.on('block', function(block) {
self.currentBlock = block
})
- self._ranSetEngine = true
}
SubProvider.prototype.handleRequest = function(payload, next, end) { | check self.engine instead of adding a new instance variable |
diff --git a/tensorflow_probability/python/bijectors/ordered.py b/tensorflow_probability/python/bijectors/ordered.py
index <HASH>..<HASH> 100644
--- a/tensorflow_probability/python/bijectors/ordered.py
+++ b/tensorflow_probability/python/bijectors/ordered.py
@@ -22,6 +22,7 @@ import tensorflow.compat.v2 as tf
from tensorflow_probability.python.bijectors import bijector
from tensorflow_probability.python.internal import assert_util
+from tensorflow.python.util import deprecation # pylint: disable=g-direct-tensorflow-import
__all__ = [
@@ -29,10 +30,13 @@ __all__ = [
]
+@deprecation.deprecated(
+ '2020-10-09',
+ '`Ordered` bijector is deprecated; please use '
+ '`tfb.Invert(tfb.Ascending())` instead.',
+ warn_once=True)
class Ordered(bijector.Bijector):
- """Deprecated. Use bijectors.Invert(bijectors.Ascending()) instead.
-
- Maps a vector of increasing elements to an unconstrained vector.
+ """Maps a vector of increasing elements to an unconstrained vector.
Both the domain and the codomain of the mapping is [-inf, inf], however,
the input of the forward mapping must be strictly increasing. | tensorflow style deprecation for Ordered |
diff --git a/leonardo/module/media/models/foldermodels.py b/leonardo/module/media/models/foldermodels.py
index <HASH>..<HASH> 100644
--- a/leonardo/module/media/models/foldermodels.py
+++ b/leonardo/module/media/models/foldermodels.py
@@ -4,6 +4,7 @@ from __future__ import unicode_literals
from django.contrib.auth import models as auth_models
from django.core import urlresolvers
from django.core.exceptions import ValidationError
+from django.template.defaultfilters import slugify
from django.db import models
from django.db.models import Q
from django.utils.http import urlquote
@@ -159,7 +160,7 @@ class Folder(models.Model, mixins.IconsMixin):
@property
def pretty_logical_path(self):
- return "/%s" % "/".join([f.name for f in self.logical_path + [self]])
+ return "/%s" % "/".join([slugify(f.name) for f in self.logical_path + [self]])
@property
def quoted_logical_path(self): | Slugify media folder logical path |
diff --git a/build/webpack.conf.js b/build/webpack.conf.js
index <HASH>..<HASH> 100644
--- a/build/webpack.conf.js
+++ b/build/webpack.conf.js
@@ -45,7 +45,7 @@ const config = {
{
test: /\.vue$/,
loader: 'vue-loader',
- include: path.resolve(__dirname,"../example")
+ exclude: /node_modules/,
}
]
}, | change include/exclude to include docs folder. |
diff --git a/openquake/calculators/event_based.py b/openquake/calculators/event_based.py
index <HASH>..<HASH> 100644
--- a/openquake/calculators/event_based.py
+++ b/openquake/calculators/event_based.py
@@ -45,11 +45,7 @@ TWO32 = 2 ** 32
def weight(src):
# heuristic weight
- try:
- rate = sum(rate for mag, rate in src.get_annual_occurrence_rates())
- except AttributeError:
- rate = 1
- return src.num_ruptures * src.ndists * rate * 1000
+ return src.num_ruptures * src.ndists
def get_events(ebruptures): | Changed the source weight for event based calculations [skip hazardlib][demos]
Former-commit-id: baeef3ade1aeb<I>b7ca<I>a0b<I>f<I>cfd<I> |
diff --git a/js/bitfinex.js b/js/bitfinex.js
index <HASH>..<HASH> 100644
--- a/js/bitfinex.js
+++ b/js/bitfinex.js
@@ -16,17 +16,9 @@ module.exports = class bitfinex extends Exchange {
'countries': 'VG',
'version': 'v1',
'rateLimit': 1500,
- 'hasCORS': false,
- // old metainfo interface
- 'hasFetchOrder': true,
- 'hasFetchTickers': true,
- 'hasDeposit': true,
- 'hasWithdraw': true,
- 'hasFetchOHLCV': true,
- 'hasFetchOpenOrders': true,
- 'hasFetchClosedOrders': true,
// new metainfo interface
'has': {
+ 'CORS': false,
'fetchOHLCV': true,
'fetchTickers': true,
'fetchOrder': true, | removed obsolete metainfo interface in bitfinex.js |
diff --git a/spice_api/helpers.py b/spice_api/helpers.py
index <HASH>..<HASH> 100644
--- a/spice_api/helpers.py
+++ b/spice_api/helpers.py
@@ -33,7 +33,7 @@ import requests
def get_query_url(medium, query):
query = query.strip()
- terms = query.replace(' ', '+')
+ terms = query.replace("+", "_").replace(' ', '+')
if medium == tokens.Medium.ANIME:
return constants.ANIME_QUERY_BASE + terms
elif medium == tokens.Medium.MANGA: | Error Querying when "+" found in title
The query would return an empty list when there is a "+" in the query. With a little experimentation, I found that if you replace it with a _ it works.
Ex
<URL> |
diff --git a/sacn/receiving/receiver_thread.py b/sacn/receiving/receiver_thread.py
index <HASH>..<HASH> 100644
--- a/sacn/receiving/receiver_thread.py
+++ b/sacn/receiving/receiver_thread.py
@@ -50,7 +50,6 @@ class receiverThread(threading.Thread):
tmp_packet = DataPacket.make_data_packet(raw_data)
except: # try to make a DataPacket. If it fails just go over it
continue
- self.logger.debug(f'Received sACN packet:\n{tmp_packet}')
self.check_for_stream_terminated_and_refresh_timestamp(tmp_packet)
self.refresh_priorities(tmp_packet)
@@ -146,7 +145,6 @@ class receiverThread(threading.Thread):
if packet.universe not in self.previousData.keys() or \
self.previousData[packet.universe] is None or \
self.previousData[packet.universe] != packet.dmxData:
- self.logger.debug('')
# set previous data and inherit callbacks
self.previousData[packet.universe] = packet.dmxData
            for callback in self.callbacks[packet.universe]: | Removed debug actions from receiver_thread.py |
diff --git a/lib/codemirror.js b/lib/codemirror.js
index <HASH>..<HASH> 100644
--- a/lib/codemirror.js
+++ b/lib/codemirror.js
@@ -4034,7 +4034,7 @@ window.CodeMirror = (function() {
return builder.pre;
}
- var tokenSpecialChars = /[\t\u0000-\u0019\u200b\u2028\u2029\uFEFF]/g;
+ var tokenSpecialChars = /[\t\u0000-\u0019\u00ad\u200b\u2028\u2029\uFEFF]/g;
function buildToken(builder, text, style, startStyle, endStyle) {
if (!text) return;
if (!tokenSpecialChars.test(text)) { | Add the soft-hyphen character to the list of non-printing chars
Closes #<I> |
diff --git a/output/html/block_editor.php b/output/html/block_editor.php
index <HASH>..<HASH> 100644
--- a/output/html/block_editor.php
+++ b/output/html/block_editor.php
@@ -113,6 +113,7 @@ class QM_Output_Html_Block_Editor extends QM_Output_Html {
$media_blocks = array(
'core/audio' => 'id',
+ 'core/cover' => 'id',
'core/cover-image' => 'id',
'core/file' => 'id',
'core/image' => 'id', | The `cover-image` block type is now called `cover`. |
diff --git a/src/Providers/Html.php b/src/Providers/Html.php
index <HASH>..<HASH> 100644
--- a/src/Providers/Html.php
+++ b/src/Providers/Html.php
@@ -110,6 +110,14 @@ class Html extends Provider implements ProviderInterface
/**
* {@inheritdoc}
*/
+ public function getAuthorName()
+ {
+ return $this->bag->get('author') ?: $this->bag->get('contributors');
+ }
+
+ /**
+ * {@inheritdoc}
+ */
public function getProviderIconsUrls()
{
return (array) $this->bag->get('icons') ?: []; | use author and contributors meta tag to get the authorName |
diff --git a/core/src/main/java/com/google/errorprone/bugpatterns/threadsafety/WellKnownMutability.java b/core/src/main/java/com/google/errorprone/bugpatterns/threadsafety/WellKnownMutability.java
index <HASH>..<HASH> 100644
--- a/core/src/main/java/com/google/errorprone/bugpatterns/threadsafety/WellKnownMutability.java
+++ b/core/src/main/java/com/google/errorprone/bugpatterns/threadsafety/WellKnownMutability.java
@@ -104,6 +104,7 @@ final class WellKnownMutability {
.add("com.google.protobuf.ByteString")
.add("com.google.protobuf.Descriptors$Descriptor")
.add("com.google.protobuf.Descriptors$EnumDescriptor")
+ .add("com.google.protobuf.Descriptors$EnumValueDescriptor")
.add("com.google.protobuf.Descriptors$FieldDescriptor")
.add("com.google.protobuf.Descriptors$FileDescriptor")
.add("com.google.protobuf.Descriptors$ServiceDescriptor") | Marking EnumValueDescriptor as immutable in WellKnownMutability.
RELNOTES: N/A
-------------
Created by MOE: <URL> |
diff --git a/examples/cordova/app.js b/examples/cordova/app.js
index <HASH>..<HASH> 100644
--- a/examples/cordova/app.js
+++ b/examples/cordova/app.js
@@ -1,4 +1,4 @@
-const {Button, Page, NavigationView, ScrollView, ui} = require('tabris');
+const {Button, Page, NavigationView, ScrollView, device, ui} = require('tabris');
const ToastPage = require('./ToastPage');
const SharingPage = require('./SharingPage');
const MotionPage = require('./MotionPage');
@@ -19,16 +19,21 @@ let contentContainer = new ScrollView({
left: 0, top: 0, right: 0, bottom: 0
}).appendTo(mainPage);
-[
- SharingPage,
- ToastPage,
- MotionPage,
- NetworkPage,
- CameraPage,
- BarcodeScannerPage,
- MediaPage,
- ActionSheetPage
-].forEach(Page => {
+(
+ device.platform === 'windows' ? [
+ MotionPage,
+ NetworkPage
+ ] : [
+ SharingPage,
+ ToastPage,
+ MotionPage,
+ NetworkPage,
+ CameraPage,
+ BarcodeScannerPage,
+ MediaPage,
+ ActionSheetPage
+ ]
+).forEach(Page => {
let page = new Page();
addPageSelector(page);
}); | Reduce cordova demo on windows
It can later be investigated how to improve windows support for cordova
plug-ins, but for now include only the two that actually work.
Change-Id: I<I>ab<I>d<I>d5ed<I>e<I>d7e2f<I>a1d8bbb |
diff --git a/py/selenium/webdriver/firefox/options.py b/py/selenium/webdriver/firefox/options.py
index <HASH>..<HASH> 100644
--- a/py/selenium/webdriver/firefox/options.py
+++ b/py/selenium/webdriver/firefox/options.py
@@ -153,7 +153,7 @@ class Options(ArgOptions):
if len(self._preferences) > 0:
opts["prefs"] = self._preferences
if self._proxy is not None:
- self._proxy.add_to_capabilities(opts)
+ self._proxy.add_to_capabilities(caps)
if self._profile is not None:
opts["profile"] = self._profile.encoded
if len(self._arguments) > 0: | [py] Pass capabilities rather than options to the proxy object (#<I>)
The method add_to_capabilities from the Proxy class takes capabilities
(self._caps) as argument instead of "opts" |
diff --git a/lib/rolify/version.rb b/lib/rolify/version.rb
index <HASH>..<HASH> 100644
--- a/lib/rolify/version.rb
+++ b/lib/rolify/version.rb
@@ -1,3 +1,3 @@
module Rolify
- VERSION = "3.3.0.rc1"
+ VERSION = "3.3.0.rc2"
end | releasing <I> RC2
[ci skip] |
diff --git a/lib/searchkick/index_options.rb b/lib/searchkick/index_options.rb
index <HASH>..<HASH> 100644
--- a/lib/searchkick/index_options.rb
+++ b/lib/searchkick/index_options.rb
@@ -13,9 +13,6 @@ module Searchkick
index_type = index_type.call if index_type.respond_to?(:call)
end
- ngram_type = below70 ? "nGram" : "ngram"
- edge_ngram_type = below70 ? "edgeNGram" : "edge_ngram"
-
custom_mapping = options[:mappings] || {}
if below70 && custom_mapping.keys.map(&:to_sym).include?(:properties)
# add type
@@ -129,12 +126,12 @@ module Searchkick
max_shingle_size: 5
},
searchkick_edge_ngram: {
- type: edge_ngram_type,
+ type: "edge_ngram",
min_gram: 1,
max_gram: 50
},
searchkick_ngram: {
- type: ngram_type,
+ type: "ngram",
min_gram: 1,
max_gram: 50
}, | New ngram names work in <I> as well |
diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -116,14 +116,6 @@ RTCDataConnection.prototype._createConnection = function() {
this.peerConnection.addEventListener('icecandidate', function handleICECandidate(event) {
var candidate = event.candidate;
if (candidate) {
- // firefox can't JSON.stringify mozRTCIceCandidate objects...
- if (global.mozRTCPeerConnection) {
- candidate = {
- sdpMLineIndex: candidate.sdpMLineIndex,
- sdpMid: candidate.sdpMid,
- candidate: candidate.candidate
- };
- }
dataConnection.emit('candidate', candidate);
}
}); | remove stuff about firefox not being able to json-stringify candidates |
diff --git a/sync/task/broker/broker.go b/sync/task/broker/broker.go
index <HASH>..<HASH> 100644
--- a/sync/task/broker/broker.go
+++ b/sync/task/broker/broker.go
@@ -99,14 +99,22 @@ func (t *Task) Run(c task.Command) error {
// subscribe for the pool size
for i := 0; i < t.Options.Pool; i++ {
- // subscribe to work
- subWork, err := t.Broker.Subscribe(topic, workFn, broker.Queue(fmt.Sprintf("work.%d", i)))
+ err := func() error {
+ // subscribe to work
+ subWork, err := t.Broker.Subscribe(topic, workFn, broker.Queue(fmt.Sprintf("work.%d", i)))
+ if err != nil {
+ return err
+ }
+
+ // unsubscribe on completion
+ defer subWork.Unsubscribe()
+
+ return nil
+ }()
+
if err != nil {
return err
}
-
- // unsubscribe on completion
- defer subWork.Unsubscribe()
}
// subscribe to all status messages | prevent resource leak (#<I>) |
diff --git a/src/org/mozilla/javascript/Token.java b/src/org/mozilla/javascript/Token.java
index <HASH>..<HASH> 100644
--- a/src/org/mozilla/javascript/Token.java
+++ b/src/org/mozilla/javascript/Token.java
@@ -418,6 +418,7 @@ public class Token
case COMMENT: return "COMMENT";
case GENEXPR: return "GENEXPR";
case METHOD: return "METHOD";
+ case ARROW: return "ARROW";
}
// Token without name | Add name for Token.ARROW |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.