| diff (string, 65–26.7k chars) | message (string, 7–9.92k chars) |
|---|---|
diff --git a/state/settings.go b/state/settings.go
index <HASH>..<HASH> 100644
--- a/state/settings.go
+++ b/state/settings.go
@@ -243,7 +243,7 @@ func (c *Settings) Read() error {
// readSettingsDoc reads the settings with the given
// key. It returns the settings and the current rxnRevno.
func readSettingsDoc(st *State, key string) (map[string]interface{}, int64, error) {
- settings, closer := st.getCollection(settingsC)
+ settings, closer := st.getRawCollection(settingsC)
defer closer()
config := map[string]interface{}{}
|
state: use a raw collection when loading settings
This is required to allow loading of settings before the env UUID
migration for settings. Without this change upgrades to <I> fail.
|
diff --git a/src/generator/custom/CustomGenerator.js b/src/generator/custom/CustomGenerator.js
index <HASH>..<HASH> 100644
--- a/src/generator/custom/CustomGenerator.js
+++ b/src/generator/custom/CustomGenerator.js
@@ -49,7 +49,7 @@ module.exports = function(PluginResolver) {
// TODO: this should be in the HtmlWriterFile, but i dont want to create
// one every time
var ECT = require("ect");
- this.renderer = ECT({ root : this.settings.templateDir });
+ this.renderer = ECT({ root : generatorSettings.templateDir });
};
// Extend from the base generator
|
fix(generator): Fix reference in customGenerator
|
diff --git a/test/flatten.js b/test/flatten.js
index <HASH>..<HASH> 100644
--- a/test/flatten.js
+++ b/test/flatten.js
@@ -53,4 +53,20 @@ test('flatten stream of streams', function (t) {
})
+test.only('flatten stream of broken streams', function (t) {
+ var _err = new Error('I am broken');
+ pull(
+ pull.values([
+ pull.Source(function read(abort, cb) {
+ cb(_err)
+ })
+ ]),
+ pull.flatten(),
+ pull.onEnd(function (err) {
+ t.equal(err, _err)
+ t.end()
+ })
+ )
+
+})
|
Add failing test for flattening stream of broken streams
|
diff --git a/pmml-model/src/main/java/org/dmg/pmml/Cell.java b/pmml-model/src/main/java/org/dmg/pmml/Cell.java
index <HASH>..<HASH> 100644
--- a/pmml-model/src/main/java/org/dmg/pmml/Cell.java
+++ b/pmml-model/src/main/java/org/dmg/pmml/Cell.java
@@ -20,7 +20,7 @@ import org.jpmml.model.annotations.Property;
)
@XmlTransient
abstract
-public class Cell extends PMMLObject implements HasValue<Cell> {
+public class Cell extends PMMLObject {
@XmlValue
@XmlValueExtension
@@ -37,12 +37,10 @@ public class Cell extends PMMLObject implements HasValue<Cell> {
abstract
public QName getName();
- @Override
public String getValue(){
return this.value;
}
- @Override
public Cell setValue(@Property("value") String value){
this.value = value;
|
Removed Cell from HasValue class hierarchy
|
diff --git a/audioread/gstdec.py b/audioread/gstdec.py
index <HASH>..<HASH> 100644
--- a/audioread/gstdec.py
+++ b/audioread/gstdec.py
@@ -228,7 +228,7 @@ class GstAudioFile(object):
self.ready_sem.acquire()
if self.read_exc:
# An error occurred before the stream became ready.
- self.close()
+ self.close(True)
raise self.read_exc
self.running = True
@@ -324,8 +324,8 @@ class GstAudioFile(object):
return self
# Cleanup.
- def close(self):
- if self.running:
+ def close(self, force=False):
+ if self.running or force:
self.running = False
# Stop reading the file.
|
clean up gst properly when an error occurs
|
diff --git a/anyconfig/globals.py b/anyconfig/globals.py
index <HASH>..<HASH> 100644
--- a/anyconfig/globals.py
+++ b/anyconfig/globals.py
@@ -2,6 +2,8 @@
# Copyright (C) 2013 Satoru SATOH <ssato @ redhat.com>
# License: MIT
#
+"""anyconfig globals.
+"""
import logging
@@ -11,22 +13,22 @@ VERSION = "0.0.3.10"
_LOGGING_FORMAT = "%(asctime)s %(name)s: [%(levelname)s] %(message)s"
-def getLogger(name="anyconfig", format=_LOGGING_FORMAT,
- level=logging.WARNING, **kwargs):
+def get_logger(name="anyconfig", log_format=_LOGGING_FORMAT,
+ level=logging.WARNING):
"""
Initialize custom logger.
"""
- logging.basicConfig(level=level, format=format)
+ logging.basicConfig(level=level, format=log_format)
logger = logging.getLogger(name)
handler = logging.StreamHandler()
handler.setLevel(level)
- handler.setFormatter(logging.Formatter(format))
- logger.addHandler()
+ handler.setFormatter(logging.Formatter(log_format))
+ logger.addHandler(handler)
return logger
-LOGGER = getLogger()
+LOGGER = get_logger()
# vim:sw=4:ts=4:et:
|
fix some pylint errors and warnings
|
diff --git a/dvc/version.py b/dvc/version.py
index <HASH>..<HASH> 100644
--- a/dvc/version.py
+++ b/dvc/version.py
@@ -7,7 +7,7 @@ import os
import subprocess
-_BASE_VERSION = "0.35.7"
+_BASE_VERSION = "0.40.0"
def _generate_version(base_version):
|
dvc: bump to <I>
|
diff --git a/src/org/jgroups/blocks/RequestCorrelator.java b/src/org/jgroups/blocks/RequestCorrelator.java
index <HASH>..<HASH> 100644
--- a/src/org/jgroups/blocks/RequestCorrelator.java
+++ b/src/org/jgroups/blocks/RequestCorrelator.java
@@ -1,4 +1,4 @@
-// $Id: RequestCorrelator.java,v 1.58 2010/01/18 14:32:37 belaban Exp $
+// $Id: RequestCorrelator.java,v 1.59 2010/01/27 09:21:34 belaban Exp $
package org.jgroups.blocks;
@@ -607,6 +607,9 @@ public class RequestCorrelator {
rsp=req.makeReply();
rsp.setFlag(Message.OOB);
rsp.setFlag(Message.DONT_BUNDLE);
+ if(req.isFlagSet(Message.NO_FC))
+ rsp.setFlag(Message.NO_FC);
+
if(rsp_buf instanceof Buffer)
rsp.setBuffer((Buffer)rsp_buf);
else if (rsp_buf instanceof byte[])
|
if NO_FC was set in the request, it is also set in the response (<URL>)
|
diff --git a/digitalocean/Image.py b/digitalocean/Image.py
index <HASH>..<HASH> 100644
--- a/digitalocean/Image.py
+++ b/digitalocean/Image.py
@@ -41,3 +41,6 @@ class Image(BaseAPI):
type="PUT",
params={"name": new_name}
)
+
+ def __str__(self):
+ return "%s %s %s" % (self.id, self.name, self.distribution)
\ No newline at end of file
|
The Image object should return the ID, the name, and the distribution when used as a string.
|
diff --git a/redish/proxy.py b/redish/proxy.py
index <HASH>..<HASH> 100644
--- a/redish/proxy.py
+++ b/redish/proxy.py
@@ -33,15 +33,18 @@ class Proxy(Redis):
return TYPE_MAP[typ](key, self)
def __setitem__(self, key, value):
- if isinstance(value, (int, basestring):
-
+ if isinstance(value, (int, basestring)):
+ self.set(key, value)
+ return
+ pline = self.pipeline()
if self.exists(key):
- self.delete(key)
+ pline = pline.delete(key)
if isinstance(value, list):
for item in value:
- self.lpush(key, item)
+ pline = pline.rpush(key, item)
elif isinstance(value, set):
for item in value:
- self.sadd(key, item)
+ pline = pline.sadd(key, item)
elif isinstance(value, dict):
- self.hmset(key, value)
+ pline = pline.hmset(key, value)
+ pline.execute()
|
Finish the check for ints and strings, and implement __setitem__ as a pipeline.
|
diff --git a/extensions/roc-package-webpack-dev/src/actions/build.js b/extensions/roc-package-webpack-dev/src/actions/build.js
index <HASH>..<HASH> 100644
--- a/extensions/roc-package-webpack-dev/src/actions/build.js
+++ b/extensions/roc-package-webpack-dev/src/actions/build.js
@@ -108,7 +108,7 @@ export default ({ context: { verbose, config: { settings } } }) => (targets) =>
const validTargets = targets.filter((target) => webpackTargets.some((webpackTarget) => webpackTarget === target));
if (validTargets.length === 0) {
- return Promise.resolve();
+ return () => Promise.resolve();
}
log.small.log(`Starting the builder using "${settings.build.mode}" as the mode.\n`);
|
Fixed a problem when no targets were valid
|
diff --git a/rabbithole.go b/rabbithole.go
index <HASH>..<HASH> 100644
--- a/rabbithole.go
+++ b/rabbithole.go
@@ -206,11 +206,11 @@ type ConnectionInfo struct {
ClientProperties Properties `json:"client_properties"`
- RecvOct int `json:"recv_oct"`
- SendOct int `json:"send_oct"`
- RecvCount int `json:"recv_cnt"`
- SendCount int `json:"send_cnt"`
- SendPendi int `json:"send_pend"`
+ RecvOct uint64 `json:"recv_oct"`
+ SendOct uint64 `json:"send_oct"`
+ RecvCount uint64 `json:"recv_cnt"`
+ SendCount uint64 `json:"send_cnt"`
+ SendPendi uint64 `json:"send_pend"`
RecvOctDetails RateDetails `json:"recv_oct_details"`
SendOctDetails RateDetails `json:"send_oct_details"`
}
|
Use uint<I> for values that can be really large
|
diff --git a/lib/devices/gateway/sensor_ht.js b/lib/devices/gateway/sensor_ht.js
index <HASH>..<HASH> 100644
--- a/lib/devices/gateway/sensor_ht.js
+++ b/lib/devices/gateway/sensor_ht.js
@@ -3,8 +3,9 @@
const SubDevice = require('./subdevice');
const { Temperature, Humidity } = require('../capabilities/sensor');
+const Voltage = require('./voltage');
-module.exports = class SensorHT extends SubDevice.with(Temperature, Humidity) {
+module.exports = class SensorHT extends SubDevice.with(Temperature, Humidity, Voltage) {
constructor(parent, info) {
super(parent, info);
|
Add battery state to Temperature Sensor
These work the same as the body sensors, so we can simply import
the existing capability.
|
diff --git a/lib/poolparty/helpers/provisioners/slave.rb b/lib/poolparty/helpers/provisioners/slave.rb
index <HASH>..<HASH> 100644
--- a/lib/poolparty/helpers/provisioners/slave.rb
+++ b/lib/poolparty/helpers/provisioners/slave.rb
@@ -33,6 +33,7 @@ module PoolParty
def run_once_and_clean
<<-EOS
+rm -rf /etc/puppet/ssl
. /etc/profile && /usr/sbin/puppetd --onetime --no-daemonize --logdest syslog --server master #{unix_hide_string}
rm -rf /etc/puppet/ssl
EOS
|
Updated removal of the certs before prerun on the slaves
|
diff --git a/src/statements/returnStatement.js b/src/statements/returnStatement.js
index <HASH>..<HASH> 100644
--- a/src/statements/returnStatement.js
+++ b/src/statements/returnStatement.js
@@ -2,7 +2,7 @@ import traverser from "../traverser";
// http://esprima.readthedocs.io/en/latest/syntax-tree-format.html#return-statement
export const ReturnStatement = ({ argument }) => {
- const value = traverser(argument) || "nil";
+ const value = traverser(argument);
- return `return ${value}`;
-};
\ No newline at end of file
+ return value ? `return ${value}` : "do return end";
+};
|
Add support for return with no argument
Fixes #8
|
diff --git a/IDBStore.js b/IDBStore.js
index <HASH>..<HASH> 100644
--- a/IDBStore.js
+++ b/IDBStore.js
@@ -17,7 +17,6 @@
var IDBStore;
var defaults = {
- dbName: 'IDB',
storeName: 'Store',
dbVersion: 1,
keyPath: 'id',
@@ -37,6 +36,8 @@
mixin(this, defaults);
mixin(this, kwArgs);
+ this.dbName = 'IDBWrapper-' + this.storeName;
+
onStoreReady && (this.onStoreReady = onStoreReady);
this.idb = window.indexedDB || window.webkitIndexedDB || window.mozIndexedDB;
|
Don't allow setting of dbName
This removes the ability to have multiple stores in one db.
|
diff --git a/wafer/static/js/edit_schedule.js b/wafer/static/js/edit_schedule.js
index <HASH>..<HASH> 100644
--- a/wafer/static/js/edit_schedule.js
+++ b/wafer/static/js/edit_schedule.js
@@ -127,7 +127,7 @@
e.target.classList.remove('success');
e.target.classList.remove('info');
- var typeClass = scheduleItemType === 'talk' ? 'success' : 'info';
+ var typeClass = scheduleItemType === 'talk' ? 'table-success' : 'table-info';
e.target.classList.add(typeClass);
var ajaxData = {
@@ -170,8 +170,8 @@
scheduleItemCell.removeAttribute('id');
scheduleItemCell.classList.remove('draggable');
- scheduleItemCell.classList.remove('info');
- scheduleItemCell.classList.remove('success');
+ scheduleItemCell.classList.remove('table-info');
+ scheduleItemCell.classList.remove('table-success');
scheduleItemCell.removeAttribute('data-scheduleitem-id');
scheduleItemCell.removeAttribute('data-talk-id');
scheduleItemCell.removeAttribute('data-page-id');
|
schedule editor: fix coloring newly scheduled/deleted items
This fixes a problem where:
- newly-allocated items (i.e. ones that were just dropped into the
schedule) don't get colored like the others, but only after one
refreshes the page.
- newly deleted items are removed, but the cells where they were are still
colored like filled cells.
|
diff --git a/niworkflows/viz/utils.py b/niworkflows/viz/utils.py
index <HASH>..<HASH> 100644
--- a/niworkflows/viz/utils.py
+++ b/niworkflows/viz/utils.py
@@ -367,7 +367,7 @@ def plot_registration(
display.add_contours(white, colors="b", **kwargs)
display.add_contours(pial, colors="r", **kwargs)
elif contour is not None:
- display.add_contours(contour, colors="b", levels=[0.5], linewidths=0.5)
+ display.add_contours(contour, colors="r", levels=[0.5], linewidths=0.5)
svg = extract_svg(display, compress=compress)
display.close()
|
FIX: Improve mask contour visibility
|
diff --git a/src/Engine.php b/src/Engine.php
index <HASH>..<HASH> 100644
--- a/src/Engine.php
+++ b/src/Engine.php
@@ -197,6 +197,9 @@ class Engine implements EngineInterface, TemplateAware, FinderAware
*/
public function render($template, array $data = [])
{
+ if (is_file($template)) {
+ return $this->renderTemplate($template, $data);
+ }
$path = $this->find($template);
if ($path) {
return $this->doRender($path, $data);
|
Allow Engine::render() to accept template full path
See #<I>
|
diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -576,6 +576,8 @@ async function activate() {
// TODO: Show the list of commits that would be added. Should be disabled by default.
// git log master..develop --oneline
+ if ( !fs.existsSync( '.gitignore' ) )
+ logger.warn( '.gitignore file missing, it is suggested to create it before committing unwanted files.' );
if ( !fs.existsSync( 'CHANGELOG.md' ) )
logger.warn( 'CHANGELOG.md file missing, it is suggested to create it before a public release.' );
if ( !fs.existsSync( 'README.md' ) )
|
Added a warning when the .gitignore file is missing from the repository
|
diff --git a/pywb/static/wombat.js b/pywb/static/wombat.js
index <HASH>..<HASH> 100644
--- a/pywb/static/wombat.js
+++ b/pywb/static/wombat.js
@@ -1039,7 +1039,10 @@ var wombat_internal = function($wbwindow) {
return n1 + rewrite_url(n2) + n3;
}
- return value.replace(STYLE_REGEX, style_replacer);
+
+ value = value.replace(STYLE_REGEX, style_replacer);
+ value = value.replace('WB_wombat_', '');
+ return value;
}
//============================================
|
rewrite: ensure WB_wombat_ is removed from style strings
|
diff --git a/pkg/build/builder/util.go b/pkg/build/builder/util.go
index <HASH>..<HASH> 100644
--- a/pkg/build/builder/util.go
+++ b/pkg/build/builder/util.go
@@ -10,15 +10,19 @@ import (
stiapi "github.com/openshift/source-to-image/pkg/api"
)
+var (
+ // procCGroupPattern is a regular expression that parses the entries in /proc/self/cgroup
+ procCGroupPattern = regexp.MustCompile(`\d+:([a-z_,]+):/.*/(docker-|)([a-z0-9]+).*`)
+)
+
// readNetClsCGroup parses /proc/self/cgroup in order to determine the container id that can be used
// the network namespace that this process is running on.
func readNetClsCGroup(reader io.Reader) string {
cgroups := make(map[string]string)
- re := regexp.MustCompile(`\d+:([a-z_,]+):/.*/(docker-|)([a-z0-9]+).*`)
scanner := bufio.NewScanner(reader)
for scanner.Scan() {
- if match := re.FindStringSubmatch(scanner.Text()); match != nil {
+ if match := procCGroupPattern.FindStringSubmatch(scanner.Text()); match != nil {
list := strings.Split(match[1], ",")
containerId := match[3]
if len(list) > 0 {
|
Move the cgroup regex to package level.
Address code review comment: coding convention calls for regex
patterns to be defined as package level vars.
|
diff --git a/WellCommerceNewsBundle.php b/WellCommerceNewsBundle.php
index <HASH>..<HASH> 100644
--- a/WellCommerceNewsBundle.php
+++ b/WellCommerceNewsBundle.php
@@ -12,7 +12,9 @@
namespace WellCommerce\Bundle\NewsBundle;
+use Symfony\Component\DependencyInjection\ContainerBuilder;
use Symfony\Component\HttpKernel\Bundle\Bundle;
+use WellCommerce\Bundle\NewsBundle\DependencyInjection\Compiler;
/**
* Class WellCommerceNewsBundle
@@ -21,5 +23,10 @@ use Symfony\Component\HttpKernel\Bundle\Bundle;
*/
class WellCommerceNewsBundle extends Bundle
{
-
+ public function build(ContainerBuilder $container)
+ {
+ parent::build($container);
+ $container->addCompilerPass(new Compiler\AutoRegisterServicesPass());
+ $container->addCompilerPass(new Compiler\MappingCompilerPass());
+ }
}
|
Added compiler passes to all bundles
(cherry picked from commit <I>ebd<I>a7aa0b9c6be3c<I>f<I>a7ce<I>bc<I>f)
|
diff --git a/src/DB/Entity/LazyLoading/Implementation.php b/src/DB/Entity/LazyLoading/Implementation.php
index <HASH>..<HASH> 100644
--- a/src/DB/Entity/LazyLoading/Implementation.php
+++ b/src/DB/Entity/LazyLoading/Implementation.php
@@ -25,22 +25,21 @@ trait Implementation
*/
public static function lazyload($values)
{
+ $class = get_called_class();
+
if (is_scalar($values)) {
- if (!$this instanceof Identifiable) {
- $class = get_called_class();
+ if (!is_a($class, Identifiable::class)) {
throw new Exception("Unable to lazy load a scalar value for $class: Identity property not defined");
}
$prop = static::getIdProperty();
if (is_array($prop)) {
- $class = get_called_class();
throw new Exception("Unable to lazy load a scalar value for $class: Class has a complex identity");
}
$values = [$prop => $values];
}
- $class = get_called_class();
$reflection = new \ReflectionClass($class);
$entity = $reflection->newInstanceWithoutConstructor();
|
LazyLoading implementation: don't use $this in a static method
|
diff --git a/src/dolo/misc/yamlfile.py b/src/dolo/misc/yamlfile.py
index <HASH>..<HASH> 100644
--- a/src/dolo/misc/yamlfile.py
+++ b/src/dolo/misc/yamlfile.py
@@ -22,7 +22,8 @@ Imports the content of a modfile into the current interpreter scope
# check
if 'controls' in declarations:
variables_groups = OrderedDict()
- for vtype in ['states','controls','expectations','auxiliary']:
+ known_types = ['states','controls','expectations','auxiliary','auxiliary_2']
+ for vtype in known_types:
if vtype in declarations:
variables_groups[vtype] = [Variable(vn,0) for vn in declarations[vtype]]
variables_ordering = sum(variables_groups.values(),[])
|
yamlfile recognizes auxiliary_2 as a new category (temporary?)
|
diff --git a/javalite-templator/src/main/java/org/javalite/templator/MergeToken.java b/javalite-templator/src/main/java/org/javalite/templator/MergeToken.java
index <HASH>..<HASH> 100644
--- a/javalite-templator/src/main/java/org/javalite/templator/MergeToken.java
+++ b/javalite-templator/src/main/java/org/javalite/templator/MergeToken.java
@@ -49,8 +49,6 @@ class MergeToken extends TemplateToken {
String[] parts = Util.split(mergeSpec, '.');
this.objectName = parts[0];
this.propertyName = parts[1];
- } else {
- throw new ParseException("Failed to parse: " + mergeSpec);
}
}
|
#<I> Implement a built-in function mechanism for MergeTag - fixed broken tests
|
diff --git a/plugins/provisioners/shell/provisioner.rb b/plugins/provisioners/shell/provisioner.rb
index <HASH>..<HASH> 100644
--- a/plugins/provisioners/shell/provisioner.rb
+++ b/plugins/provisioners/shell/provisioner.rb
@@ -96,12 +96,10 @@ module VagrantPlugins
exec_path.gsub!('/', '\\')
exec_path = "c:#{exec_path}" if exec_path.start_with?("\\")
- command = <<-EOH
- $old = Get-ExecutionPolicy;
- Set-ExecutionPolicy Unrestricted -force;
- #{exec_path}#{args};
- Set-ExecutionPolicy $old -force
- EOH
+ # For PowerShell scripts bypass the execution policy
+ command = "#{exec_path}#{args}"
+ command = "powershell -executionpolicy bypass -file #{command}" if
+ File.extname(exec_path).downcase == '.ps1'
if config.path
@machine.ui.detail(I18n.t("vagrant.provisioners.shell.running",
@@ -112,7 +110,7 @@ module VagrantPlugins
end
# Execute it with sudo
- comm.sudo(command) do |type, data|
+ comm.sudo(command, elevated: config.privileged) do |type, data|
handle_comm(type, data)
end
end
|
Default the WinRM shell provisioner to privileged (elevated)
|
diff --git a/public/lib/main.js b/public/lib/main.js
index <HASH>..<HASH> 100644
--- a/public/lib/main.js
+++ b/public/lib/main.js
@@ -1,12 +1,18 @@
(function() {
"use strict";
+ function requestPermission() {
+ (new Notify('NodeBB')).requestPermission();
+ }
+
jQuery('document').ready(function() {
require(['notify'], function(Notify) {
var logo = $('.forum-logo').attr('src');
+ requestPermission();
+
jQuery('#notif_dropdown').on('click', function() {
- (new Notify('NodeBB')).requestPermission();
+ requestPermission();
});
socket.on('event:new_notification', function(data) {
|
request permission on page load as well (will not work in Chrome)
|
diff --git a/params/version.go b/params/version.go
index <HASH>..<HASH> 100644
--- a/params/version.go
+++ b/params/version.go
@@ -21,10 +21,10 @@ import (
)
const (
- VersionMajor = 1 // Major version component of the current release
- VersionMinor = 9 // Minor version component of the current release
- VersionPatch = 11 // Patch version component of the current release
- VersionMeta = "unstable" // Version metadata to append to the version string
+ VersionMajor = 1 // Major version component of the current release
+ VersionMinor = 9 // Minor version component of the current release
+ VersionPatch = 11 // Patch version component of the current release
+ VersionMeta = "stable" // Version metadata to append to the version string
)
// Version holds the textual version string.
|
params: release Geth <I> stable
|
diff --git a/lib/rollbar/railtie.rb b/lib/rollbar/railtie.rb
index <HASH>..<HASH> 100644
--- a/lib/rollbar/railtie.rb
+++ b/lib/rollbar/railtie.rb
@@ -29,7 +29,7 @@ module Rollbar
config.after_initialize do
Rollbar.preconfigure do |config|
- config.logger ||= ::Rails.logger
+ config.default_logger = proc { ::Rails.logger }
config.environment ||= ::Rails.env
config.root ||= ::Rails.root
config.framework = "Rails: #{::Rails::VERSION::STRING}"
|
Set Rails.logger as Rollbar.configuration.default_logger.
|
diff --git a/cmd/mirror-main.go b/cmd/mirror-main.go
index <HASH>..<HASH> 100644
--- a/cmd/mirror-main.go
+++ b/cmd/mirror-main.go
@@ -510,11 +510,13 @@ func (mj *mirrorJob) monitorMirrorStatus(cancel context.CancelFunc) (errDuringMi
errDuringMirror = true
}
- if mj.opts.activeActive {
+ // Do not quit mirroring if we are in --watch or --active-active mode
+ if !mj.opts.activeActive && !mj.opts.isWatch {
cancel()
cancelInProgress = true
- continue
}
+
+ continue
}
if sURLs.SourceContent != nil {
|
mirror: Do not exit upon errors when --watch is passed (#<I>)
Currently, when mirror is not able to copy an object (e.g. a corrupted
one) and --watch is specified, mirror restarts mirroring only to stop
again at the same problematic object.
|
diff --git a/src/Console/CreateCommand.php b/src/Console/CreateCommand.php
index <HASH>..<HASH> 100644
--- a/src/Console/CreateCommand.php
+++ b/src/Console/CreateCommand.php
@@ -103,6 +103,7 @@ class CreateCommand extends Command
protected function installParts(SkeletonCreator $creator)
{
$parts = [
+ new Parts\Base\Part(),
new Parts\PhpUnit\Part(),
new Parts\TravisCI\Part(),
];
|
Actually run base part when creating new package.
|
diff --git a/resource/resource.go b/resource/resource.go
index <HASH>..<HASH> 100644
--- a/resource/resource.go
+++ b/resource/resource.go
@@ -17,6 +17,7 @@ var originRevisionTypes = map[OriginKind]RevisionType{
// Resource defines a single resource within Juju state.
type Resource struct {
+ // Spec is the backing resource spec.
Spec Spec
// Origin identifies the where the resource came from.
|
Add a missing doc comment.
|
diff --git a/src/org/joml/Intersectiond.java b/src/org/joml/Intersectiond.java
index <HASH>..<HASH> 100644
--- a/src/org/joml/Intersectiond.java
+++ b/src/org/joml/Intersectiond.java
@@ -2662,7 +2662,7 @@ public class Intersectiond {
* @return <code>true</code> iff both circles intersect; <code>false</code> otherwise
*/
public static boolean intersectCircleCircle(Vector2d centerA, double radiusSquaredA, Vector2d centerB, double radiusSquaredB, Vector3d intersectionCenterAndHL) {
- return intersectCircleCircle(centerA.x, centerA.y, radiusSquaredB, centerB.x, centerB.y, radiusSquaredB, intersectionCenterAndHL);
+ return intersectCircleCircle(centerA.x, centerA.y, radiusSquaredA, centerB.x, centerB.y, radiusSquaredB, intersectionCenterAndHL);
}
/**
|
intersectCircleCircle forwarding the wrong param.
intersectCircleCircle function was forwarding the radiusSquaredB parameter instead of radiusSquaredA.
|
diff --git a/dashi/__init__.py b/dashi/__init__.py
index <HASH>..<HASH> 100644
--- a/dashi/__init__.py
+++ b/dashi/__init__.py
@@ -18,7 +18,7 @@ from .exceptions import DashiError, BadRequestError, NotFoundError, \
UnknownOperationError, WriteConflictError
from .util import Countdown, RetryBackoff
-__version__ = '0.2.7'
+__version__ = '0.3.0'
log = logging.getLogger(__name__)
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -2,9 +2,10 @@
import os
import codecs
-from dashi import __version__
-VERSION = __version__
+# STOP! you MUST also update this in dashi/__init__.py
+VERSION = "0.3.0" # SEE ABOVE LINE BEFORE EDITING THIS
+# HEY! did you see the above two lines?
if os.path.exists("README.rst"):
long_description = codecs.open('README.rst', "r", "utf-8").read()
|
Update versions and fix setup.py bug
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100755
--- a/setup.py
+++ b/setup.py
@@ -38,7 +38,7 @@ def get_install_requirements(path):
setup(name='confluent-kafka',
- version='1.0.1rc1',
+ version='1.0.1',
description='Confluent\'s Python client for Apache Kafka',
author='Confluent Inc',
author_email='support@confluent.io',
|
Version <I> (#<I>)
|
diff --git a/examples/glyphs/airports_map.py b/examples/glyphs/airports_map.py
index <HASH>..<HASH> 100644
--- a/examples/glyphs/airports_map.py
+++ b/examples/glyphs/airports_map.py
@@ -40,6 +40,7 @@ with urllib.request.urlopen(airports_service) as response:
x_range = Range1d(start=df['geometry.x'].min() - 10000, end=df['geometry.x'].max() + 10000)
y_range = Range1d(start=df['geometry.y'].min() - 10000, end=df['geometry.y'].max() + 10000)
+ # create plot and add tools
hover_tool = HoverTool(tooltips=[("Name", "@name"), ("Elevation", "@elevation (m)")])
p = Plot(x_range=x_range, y_range=y_range, plot_height=800, plot_width=800, title=title)
p.add_tools(ResizeTool(), WheelZoomTool(), PanTool(), BoxZoomTool(), hover_tool)
|
added additional comment and [ci enable examples]
|
diff --git a/cmd/peco/peco.go b/cmd/peco/peco.go
index <HASH>..<HASH> 100644
--- a/cmd/peco/peco.go
+++ b/cmd/peco/peco.go
@@ -81,27 +81,31 @@ func main() {
err = ctx.ReadConfig(opts.Rcfile)
if err != nil {
fmt.Fprintln(os.Stderr, err)
- os.Exit(1)
+ ctx.ExitStatus = 1
+ return
}
}
if err = ctx.ReadBuffer(in); err != nil {
// Nothing to process, bail out
fmt.Fprintln(os.Stderr, "You must supply something to work with via filename or stdin")
- os.Exit(1)
+ ctx.ExitStatus = 1
+ return
}
err = peco.TtyReady()
if err != nil {
fmt.Fprintln(os.Stderr, err)
- os.Exit(1)
+ ctx.ExitStatus = 1
+ return
}
defer peco.TtyTerm()
err = termbox.Init()
if err != nil {
fmt.Fprintln(os.Stderr, err)
- os.Exit(1)
+ ctx.ExitStatus = 1
+ return
}
defer termbox.Close()
|
Avoid using os.Exit()
os.Exit() effectively cancels all our defers. If we have resources
being released there, this is a serious problem.
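A rough analogy in Python, purely for illustration since the project itself is Go: a hard process exit skips registered cleanup, while the normal exit path lets it run.

```python
import atexit
import os
import sys

atexit.register(lambda: print("cleanup ran"))  # stands in for a Go defer

if "--hard-exit" in sys.argv:
    os._exit(1)  # terminates the process immediately; the cleanup above never runs
sys.exit(1)      # raises SystemExit, so the registered cleanup still runs
```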
|
diff --git a/src/webpack-config.js b/src/webpack-config.js
index <HASH>..<HASH> 100644
--- a/src/webpack-config.js
+++ b/src/webpack-config.js
@@ -118,7 +118,7 @@ module.exports = function webpackConfig ({
config.plugins.push(new GlobEntriesPlugin())
// Add webextension polyfill
- if (['chrome', 'opera'].includes(vendor)) {
+ if (['chrome', 'opera', 'edge'].includes(vendor)) {
config.plugins.push(
new webpack.ProvidePlugin({
browser: require.resolve('webextension-polyfill')
|
Apply webextension polyfill for edge platform
Since Microsoft Edge version <I>, it uses Chromium-based components.
This means that Edge also needs the webextension polyfill now.
fixes #<I>
|
diff --git a/library/Garp/Cache/Manager.php b/library/Garp/Cache/Manager.php
index <HASH>..<HASH> 100755
--- a/library/Garp/Cache/Manager.php
+++ b/library/Garp/Cache/Manager.php
@@ -74,8 +74,16 @@ class Garp_Cache_Manager {
} else {
foreach ($modelNames as $modelName) {
$model = new $modelName();
- $cache = new Garp_Cache_Store_Versioned($model->getName().'_version');
- $cache->incrementVersion();
+ self::_incrementMemcacheVersion($model);
+ if ($model->getObserver('Translatable')) {
+ // Make sure cache is cleared for all languages.
+ $locales = Garp_I18n::getAllPossibleLocales();
+ foreach ($locales as $locale) {
+ $modelFactory = new Garp_I18n_ModelFactory($locale);
+ $i18nModel = $modelFactory->getModel($model);
+ self::_incrementMemcacheVersion($i18nModel);
+ }
+ }
}
}
}
@@ -201,4 +209,14 @@ class Garp_Cache_Manager {
}
return $tags;
}
+
+ /**
+ * Increment the version to invalidate a given model's cache.
+ * @param Garp_Model_Db $model
+ * @return Void
+ */
+ protected static function _incrementMemcacheVersion(Garp_Model_Db $model) {
+ $cache = new Garp_Cache_Store_Versioned($model->getName().'_version');
+ $cache->incrementVersion();
+ }
}
|
Tweaks that clear internationalized cache.
|
diff --git a/includes/common.php b/includes/common.php
index <HASH>..<HASH> 100644
--- a/includes/common.php
+++ b/includes/common.php
@@ -559,10 +559,9 @@ set_exception_handler(function (?Throwable $exception) use ($_oldExceptionHandle
echo $exception->getMessage(), "\n";
echo $exception->getTraceAsString(), "\n";
} else if (DEBUG < DEBUG_INFO || !class_exists('wulaphp\io\Response')) {
- status_header(503);
print_exception($exception);
} else {
- Response::respond(503, $exception->getMessage());
+ Response::respond(500, $exception->getMessage());
}
} catch (Throwable $te) {
print_exception($te);
|
change <I> to <I>
|
diff --git a/src/feat/agencies/bootstrap.py b/src/feat/agencies/bootstrap.py
index <HASH>..<HASH> 100755
--- a/src/feat/agencies/bootstrap.py
+++ b/src/feat/agencies/bootstrap.py
@@ -212,7 +212,7 @@ class _Bootstrap(object):
def __exit__(self, type, value, traceback):
if type is not None:
- raise type, value, traceback
+ raise type(value), None, traceback
if self.opts.agency_daemonize:
tmp = tempfile.mktemp(suffix="feat.temp.log")
log.info("run", "Logging will temporarily be done to: %s", tmp)
|
Fixes PEP8 warning.
|
diff --git a/src/edeposit/amqp/ltp/info_composer.py b/src/edeposit/amqp/ltp/info_composer.py
index <HASH>..<HASH> 100755
--- a/src/edeposit/amqp/ltp/info_composer.py
+++ b/src/edeposit/amqp/ltp/info_composer.py
@@ -164,11 +164,15 @@ def compose_info(root_dir, files, hash_fn, aleph_record):
with open(hash_fn) as f:
hash_file_md5 = hashlib.md5(f.read()).hexdigest()
+ schema_location = "http://www.ndk.cz/standardy-digitalizace/info11.xsd"
document = {
"info": {
+ "@xmlns:xsi": "http://www.w3.org/2001/XMLSchema-instance",
+ "@xsi:noNamespaceSchemaLocation": schema_location,
"created": time.strftime("%Y-%m-%dT%H:%M:%S", time.gmtime()),
"metadataversion": "1.0",
"packageid": _path_to_id(root_dir),
+ "mainmets": "",
# not used in SIP
# "mainmets": _get_localized_fn(metadata_fn, root_dir),
|
Added schema location and blank <mainmets> as requested. Fixed #<I>.
|
diff --git a/src/Module/ImportSchemeCollectionProvider.php b/src/Module/ImportSchemeCollectionProvider.php
index <HASH>..<HASH> 100644
--- a/src/Module/ImportSchemeCollectionProvider.php
+++ b/src/Module/ImportSchemeCollectionProvider.php
@@ -6,6 +6,7 @@
*/
namespace BEAR\Resource\Module;
+use BEAR\Package\AppInjector;
use BEAR\Resource\Annotation\AppName;
use BEAR\Resource\Annotation\ImportAppConfig;
use BEAR\Resource\AppAdapter;
@@ -54,7 +55,9 @@ class ImportSchemeCollectionProvider implements ProviderInterface
{
$schemeCollection = (new SchemeCollectionProvider($this->appName, $this->injector))->get();
foreach ($this->importAppConfig as $importApp) {
- $adapter = new AppAdapter($this->injector, $importApp->appName);
+ /* @var \BEAR\Resource\ImportApp */
+ $injector = class_exists(AppInjector::class) ? new AppInjector($importApp->appName, $importApp->context) : $this->injector;
+ $adapter = new AppAdapter($injector, $importApp->appName);
$schemeCollection
->scheme('page')->host($importApp->host)->toAdapter($adapter)
->scheme('app')->host($importApp->host)->toAdapter($adapter);
|
Quick dirty hack to fix the import injector.
The injector should be isolated in each namespace.
|
diff --git a/gridtk/manager.py b/gridtk/manager.py
index <HASH>..<HASH> 100644
--- a/gridtk/manager.py
+++ b/gridtk/manager.py
@@ -41,7 +41,7 @@ class JobManager:
if hasattr(self, 'session'):
raise RuntimeError('Dead lock detected. Please do not try to lock the session when it is already locked!')
- if sqlalchemy_version < (0,7,8):
+ if sqlalchemy_version < [0,7,8]:
# for old sqlalchemy versions, in some cases it is required to re-generate the enging for each session
self._engine = sqlalchemy.create_engine("sqlite:///"+self._database)
self._session_maker = sqlalchemy.orm.sessionmaker(bind=self._engine)
|
Added a fix for Python 3, which cannot compare tuples with lists.
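A minimal illustration of the underlying Python 3 behaviour (the version values here are made up, not taken from gridtk): ordering comparisons between a list and a tuple raise TypeError, so both sides of the comparison must use the same sequence type.

```python
sqlalchemy_version = [0, 7, 9]          # assume the installed version is exposed as a list

print(sqlalchemy_version < [0, 7, 8])   # list vs list: fine, prints False

try:
    sqlalchemy_version < (0, 7, 8)      # list vs tuple: allowed in Python 2, not in Python 3
except TypeError as exc:
    print("Python 3 refuses the comparison:", exc)
```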
|
diff --git a/salt/returners/pgjsonb.py b/salt/returners/pgjsonb.py
index <HASH>..<HASH> 100644
--- a/salt/returners/pgjsonb.py
+++ b/salt/returners/pgjsonb.py
@@ -128,6 +128,8 @@ from __future__ import absolute_import
from contextlib import contextmanager
import sys
import json
+import time
+import datetime
import logging
# Import salt libs
@@ -254,7 +256,7 @@ def returner(ret):
def event_return(events):
'''
- Return event to mysql server
+ Return event to Pg server
Requires that configuration be enabled via 'event_return'
option in master config.
@@ -263,8 +265,8 @@ def event_return(events):
for event in events:
tag = event.get('tag', '')
data = event.get('data', '')
- sql = '''INSERT INTO salt_events (tag, data, master_id)
- VALUES (%s, %s, %s)'''
+ sql = '''INSERT INTO salt_events (tag, data, master_id, alter_time)
+ VALUES (%s, %s, %s, time.localtime())
cur.execute(sql, (tag, psycopg2.extras.Json(data), __opts__['id']))
|
Fill alter_time field in salt_events with current time with timezone.
|
diff --git a/tests/test_transcript.py b/tests/test_transcript.py
index <HASH>..<HASH> 100644
--- a/tests/test_transcript.py
+++ b/tests/test_transcript.py
@@ -54,15 +54,15 @@ class CmdLineApp(Cmd):
"""Repeats what you tell me to."""
arg = ''.join(arg)
if opts.piglatin:
- arg = '%s%say' % (arg[1:].rstrip(), arg[0])
+ arg = '%s%say' % (arg[1:], arg[0])
if opts.shout:
arg = arg.upper()
repetitions = opts.repeat or 1
for i in range(min(repetitions, self.maxrepeats)):
- self.stdout.write(arg)
- self.stdout.write('\n')
- # self.stdout.write is better than "print", because Cmd can be
- # initialized with a non-standard output destination
+ self.poutput(arg)
+ # recommend using the poutput function instead of
+ # self.stdout.write or "print", because Cmd allows the user
+ # to redirect output
do_say = do_speak # now "say" is a synonym for "speak"
do_orate = do_speak # another synonym, but this one takes multi-line input
|
Updates to CmdLineApp()
|
diff --git a/stanza/tests/constituency/test_vietnamese.py b/stanza/tests/constituency/test_vietnamese.py
index <HASH>..<HASH> 100644
--- a/stanza/tests/constituency/test_vietnamese.py
+++ b/stanza/tests/constituency/test_vietnamese.py
@@ -58,7 +58,7 @@ def test_vi_embedding():
with tempfile.TemporaryDirectory() as tempdir:
emb_filename = os.path.join(tempdir, "emb.txt")
pt_filename = os.path.join(tempdir, "emb.pt")
- with open(emb_filename, "w") as fout:
+ with open(emb_filename, "w", encoding="utf-8") as fout:
fout.write(VI_EMBEDDING)
pt = pretrain.Pretrain(vec_filename=emb_filename, save_to_file=False)
|
Set encoding in the unit test to make it run in Windows
|
diff --git a/app/view/Gruntfile.js b/app/view/Gruntfile.js
index <HASH>..<HASH> 100644
--- a/app/view/Gruntfile.js
+++ b/app/view/Gruntfile.js
@@ -167,6 +167,21 @@ module.exports = function(grunt) {
]
}]
},
+ locale_datepicker: {
+ options: {
+ preserveComments: 'some'
+ },
+ files: [{
+ expand: true,
+ ext: '.min.js',
+ cwd: 'lib/datepicker',
+ src: '*.js',
+ dest: 'js/locale/datepicker',
+ rename: function(destBase, destPath) {
+ return destBase + '/' + destPath.replace('datepicker-', '').replace('-', '_');
+ }
+ }]
+ },
bootstrap: {
files: {
'lib/bootstrap-sass.generated/bootstrap.min.js': [
|
Make grunt generate uglified datepicker locale files and rename them to ease usage
|
diff --git a/lib/config.js b/lib/config.js
index <HASH>..<HASH> 100644
--- a/lib/config.js
+++ b/lib/config.js
@@ -195,6 +195,10 @@ function config (opts) {
})
transport.on('error', err => {
+ agent.logger.error('APM Server transport error:', err.stack)
+ })
+
+ transport.on('request-error', err => {
const haveAccepted = Number.isFinite(err.accepted)
const haveErrors = Array.isArray(err.errors)
let msg
|
fix: log APM Server API errors correctly (#<I>)
APM Server API errors are emitted by the HTTP client as 'request-error'
events.
In case of more serious errors, the 'error' event is still used, but
indicates that the client could not recover from the error.
|
diff --git a/languagetool-language-modules/de/src/main/java/org/languagetool/rules/de/GermanSpellerRule.java b/languagetool-language-modules/de/src/main/java/org/languagetool/rules/de/GermanSpellerRule.java
index <HASH>..<HASH> 100644
--- a/languagetool-language-modules/de/src/main/java/org/languagetool/rules/de/GermanSpellerRule.java
+++ b/languagetool-language-modules/de/src/main/java/org/languagetool/rules/de/GermanSpellerRule.java
@@ -326,6 +326,12 @@ public class GermanSpellerRule extends CompoundAwareHunspellRule {
return Collections.singletonList("Worüber");
} else if (word.equals("par")) {
return Collections.singletonList("paar");
+ } else if (word.equals("vllt")) {
+ return Collections.singletonList("vielleicht");
+ } else if (word.equals("iwie")) {
+ return Collections.singletonList("irgendwie");
+ } else if (word.equals("sry")) {
+ return Collections.singletonList("sorry");
} else if (word.equals("Zynik")) {
return Collections.singletonList("Zynismus");
} else if (word.matches("[aA]wa")) {
|
[de] suggestions for colloquial abbreviations
|
diff --git a/packages/resume/index.js b/packages/resume/index.js
index <HASH>..<HASH> 100644
--- a/packages/resume/index.js
+++ b/packages/resume/index.js
@@ -1,8 +1,5 @@
require("../../babel.register.js");
-const path = require("path");
-process.env.NODE_CONFIG_DIR = path.join(__dirname, "../../config");
-
const render = require("./lib/renderHtml").default;
module.exports = {
|
chore(resume): Remove unnecessary reference to `NODE_CONFIG_DIR`.
To actually kick a patch release of `cea4edfabd<I>e1c<I>ac<I>c0e<I>b6b8add<I>c7`. Should actually find a way of doing this in #<I> without having to do this. Not the first or second or third time I've had to find something to "fix" to push a release out...
|
diff --git a/lib/memcached.js b/lib/memcached.js
index <HASH>..<HASH> 100644
--- a/lib/memcached.js
+++ b/lib/memcached.js
@@ -596,8 +596,10 @@ Client.config = {
}
};
- memcached.delegateCallback = function(master, err, data, cb){
+ memcached.delegateCallback = function(){
this.activeQueries--;
+ var master = arguments[0];
+ var cb = arguments[arguments.length-1];
var args = Array.prototype.slice.call(arguments, 1, arguments.length-1);
cb.apply(master, args);
};
diff --git a/test/common.js b/test/common.js
index <HASH>..<HASH> 100644
--- a/test/common.js
+++ b/test/common.js
@@ -14,7 +14,7 @@
* @type {Object}
* @api public
*/
-var testMemcachedHost = process.env.MEMCACHED__HOST || '10.211.55.5';
+var testMemcachedHost = '127.0.0.1';
exports.servers = {
single: testMemcachedHost + ':11211'
|
Fixed a bug in multi caused by the new queue limit. Modified mocha tests to point locally by default.
|
diff --git a/src/Oci8/Connectors/OracleConnector.php b/src/Oci8/Connectors/OracleConnector.php
index <HASH>..<HASH> 100644
--- a/src/Oci8/Connectors/OracleConnector.php
+++ b/src/Oci8/Connectors/OracleConnector.php
@@ -183,8 +183,11 @@ class OracleConnector extends Connector implements ConnectorInterface
$address .= '(ADDRESS = (PROTOCOL = ' . $config['protocol'] . ')(HOST = ' . trim($host[$i]) . ')(PORT = ' . $config['port'] . '))';
}
+ // backwards compatibility for users dont have this field in their php config
+ $loadBalance = $config['load_balance'] ?? 'yes';
+
// create a tns with multiple address connection
- $config['tns'] = "(DESCRIPTION = {$address} (LOAD_BALANCE = {$config['load_balance']}) (FAILOVER = on) (CONNECT_DATA = (SERVER = DEDICATED) ({$config['service']})))";
+ $config['tns'] = "(DESCRIPTION = {$address} (LOAD_BALANCE = {$loadBalance}) (FAILOVER = on) (CONNECT_DATA = (SERVER = DEDICATED) ({$config['service']})))";
}
return $config;
|
add backwards compatibility for load balance
|
diff --git a/core/src/main/java/tachyon/TachyonURI.java b/core/src/main/java/tachyon/TachyonURI.java
index <HASH>..<HASH> 100644
--- a/core/src/main/java/tachyon/TachyonURI.java
+++ b/core/src/main/java/tachyon/TachyonURI.java
@@ -102,7 +102,7 @@ public class TachyonURI implements Comparable<TachyonURI> {
// Add a slash to parent's path so resolution is compatible with URI's
URI parentUri = parent.mUri;
String parentPath = parentUri.getPath();
- if (!parentPath.equals("")) {
+ if ((!parentPath.equals("")) && (!parentPath.equals("/"))) {
parentPath += SEPARATOR;
}
try {
|
This commit fixes the unit test that failed in the last commit.
|
diff --git a/src/app/phpbob/analyze/PhpFileBuilder.php b/src/app/phpbob/analyze/PhpFileBuilder.php
index <HASH>..<HASH> 100644
--- a/src/app/phpbob/analyze/PhpFileBuilder.php
+++ b/src/app/phpbob/analyze/PhpFileBuilder.php
@@ -131,7 +131,7 @@ class PhpFileBuilder {
switch (strtolower($codePart)) {
case Phpbob::KEYWORD_EXTENDS:
$inExtendsClause = true;
- continue;
+ break;
}
}
@@ -464,4 +464,4 @@ class PhpFileBuilder {
private function createPrependingCode(PhpStatement $phpStatement) {
return implode(PHP_EOL, $phpStatement->getPrependingCommentLines());
}
-}
\ No newline at end of file
+}
|
continue in a switch causes an error in PHP <I>; replaced with break
|
diff --git a/tests.py b/tests.py
index <HASH>..<HASH> 100644
--- a/tests.py
+++ b/tests.py
@@ -35,6 +35,21 @@ class MarylandLookupTestCase(unittest.TestCase):
self.assertNotEqual(us.states.lookup('Virginia'), us.states.MD)
+class LookupTestCase(unittest.TestCase):
+
+ def test_abbr_lookup(self):
+ for state in us.STATES:
+ self.assertEqual(us.states.lookup(state.abbr), state)
+
+ def test_fips_lookup(self):
+ for state in us.STATES:
+ self.assertEqual(us.states.lookup(state.fips), state)
+
+ def test_name_lookup(self):
+ for state in us.STATES:
+ self.assertEqual(us.states.lookup(state.name), state)
+
+
class MappingTestCase(unittest.TestCase):
def test_mapping(self):
|
add additional tests for lookups by name, abbr, and fips
|
diff --git a/docstamp/inkscape.py b/docstamp/inkscape.py
index <HASH>..<HASH> 100644
--- a/docstamp/inkscape.py
+++ b/docstamp/inkscape.py
@@ -80,9 +80,12 @@ def inkscape_export(input_file, output_file, export_flag="-A", dpi=90):
if not '=' in export_flag:
export_flag += ' '
- arg_strings = "{}{} --export-dpi={} {}".format(export_flag, output_file, dpi, input_file)
+ arg_strings = []
+ arg_strings += ['{}{}'.format(export_flag, output_file)]
+ arg_strings += ['--export-dpi={}'.format(dpi)]
+ arg_strings += [input_file]
- return call_inkscape(arg_strings.split())
+ return call_inkscape(arg_strings)
def svg2pdf(svg_file_path, pdf_file_path, dpi=150):
|
inkscape.py: build args for cmd call as a list
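A small hedged sketch of why an explicit argument list tends to be safer than splitting a formatted string (the file names below are made up): a naive .split() breaks any path that contains spaces.

```python
output_file = "my report.pdf"   # hypothetical path containing a space
input_file = "my report.svg"

formatted = "-A{} --export-dpi={} {}".format(output_file, 90, input_file)
print(formatted.split())        # the paths get split into separate, broken arguments

args = ["-A" + output_file, "--export-dpi=90", input_file]
print(args)                     # each argument stays intact
```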
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -4,7 +4,7 @@ from setuptools import setup, find_packages, Command
install_requires = [
- 'Django==1.4.2',
+ 'Django==1.4.3',
'South==0.7.6',
'Pillow==1.7.7',
'celery==3.0.12',
@@ -15,10 +15,10 @@ install_requires = [
'django-uuidfield==0.4.0',
'django-storages==1.1.5',
'django-configurations==0.1',
- 'docutils==0.8.1',
- 'eventlet==0.9.16',
- 'gunicorn==0.14.6',
- 'netaddr==0.7.6',
+ 'docutils==0.10',
+ 'eventlet==0.10.0',
+ 'gunicorn==0.17.1',
+ 'netaddr==0.7.10',
'requests==1.0.4',
]
|
Upgraded a few more dependencies.
|
diff --git a/closure/goog/ui/datepicker.js b/closure/goog/ui/datepicker.js
index <HASH>..<HASH> 100644
--- a/closure/goog/ui/datepicker.js
+++ b/closure/goog/ui/datepicker.js
@@ -1099,7 +1099,7 @@ goog.ui.DatePicker.prototype.createButton_ = function(parentNode, label,
// Since this is a button, the default action is to submit a form if the
// node is added inside a form. Prevent this.
e.preventDefault();
- method.call(this);
+ method.call(this, e);
});
return el;
|
Fix for JS error in goog.ui.DatePicker
R=pupius
DELTA=1 (0 added, 0 deleted, 1 changed)
Revision created by MOE tool push_codebase.
MOE_MIGRATION=<I>
git-svn-id: <URL>
|
diff --git a/gspreadsheet/gspreadsheet.py b/gspreadsheet/gspreadsheet.py
index <HASH>..<HASH> 100644
--- a/gspreadsheet/gspreadsheet.py
+++ b/gspreadsheet/gspreadsheet.py
@@ -188,6 +188,7 @@ class GSpreadsheet(object):
pass
self.feed = self.get_feed()
+ self.fieldnames = self.feed.entry[0].custom.keys()
def get_client(self):
"""Get the google data client."""
diff --git a/gspreadsheet/tests.py b/gspreadsheet/tests.py
index <HASH>..<HASH> 100644
--- a/gspreadsheet/tests.py
+++ b/gspreadsheet/tests.py
@@ -47,6 +47,11 @@ class Basics(TestCase):
# continue in the same test to avoid making a new connection :(
+ # test_fieldnames_exist_and_are_accurate(self):
+ # assertListEqual requires python>=2.7
+ self.assertListEqual(sorted(sheet.fieldnames),
+ sorted(['name', 'widgets', 'date', 'price']))
+
# test_can_mark_row_as_readonly(self):
sheet.readonly = True
with self.assertRaises(ReadOnlyException):
|
add fieldnames property that stores fields used in the sheet
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -23,7 +23,10 @@ setup(
author_email='mixxorz@gmail.com',
maintainer='Mitchel Cabuloy',
maintainer_email='mixxorz@gmail.com',
- install_requires=open('requirements.txt').read().split(),
+ install_requires=[
+ 'behave',
+ 'Django>=1.4'
+ ],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
|
Reverted install_requires section of setup.py to a regular list
|
diff --git a/Arabic/Numbers.php b/Arabic/Numbers.php
index <HASH>..<HASH> 100755
--- a/Arabic/Numbers.php
+++ b/Arabic/Numbers.php
@@ -430,7 +430,7 @@ class I18N_Arabic_Numbers
}
}
- if ($segment[$key] != '') {
+ if (isset($segment[$key]) && $segment[$key] != '') {
$segment[$key] = trim($segment[$key]);
}
}
|
Check if the index is set to resolve #<I> Undefined index notice
|
diff --git a/lib/endpoints/class-wp-rest-users-controller.php b/lib/endpoints/class-wp-rest-users-controller.php
index <HASH>..<HASH> 100755
--- a/lib/endpoints/class-wp-rest-users-controller.php
+++ b/lib/endpoints/class-wp-rest-users-controller.php
@@ -501,7 +501,7 @@ class WP_REST_Users_Controller extends WP_REST_Controller {
'email' => $user->user_email,
'url' => $user->user_url,
'description' => $user->description,
- 'link' => get_author_posts_url( $user->ID ),
+ 'link' => get_author_posts_url( $user->ID, $user->user_nicename ),
'nickname' => $user->nickname,
'slug' => $user->user_nicename,
'registered_date' => date( 'c', strtotime( $user->user_registered ) ),
|
Avoid unnecessary SQL query by passing `$user_nicename`
If no `$user_nicename` is supplied, `get_author_posts_url()` calls `get_userdata()` to get the `$user_nicename`. Because we already have it, we don't need to incur an unnecessary database hit.
|
diff --git a/jax/random.py b/jax/random.py
index <HASH>..<HASH> 100644
--- a/jax/random.py
+++ b/jax/random.py
@@ -759,7 +759,8 @@ def gumbel(key, shape=(), dtype=onp.float64):
@partial(jit, static_argnums=(1, 2))
def _gumbel(key, shape, dtype):
_check_shape("gumbel", shape)
- return -np.log(-np.log(uniform(key, shape, dtype)))
+ return -np.log(-np.log(
+ uniform(key, shape, dtype, minval=onp.finfo(dtype).eps, maxval=1.)))
def laplace(key, shape=(), dtype=onp.float64):
@@ -781,7 +782,8 @@ def laplace(key, shape=(), dtype=onp.float64):
@partial(jit, static_argnums=(1, 2))
def _laplace(key, shape, dtype):
_check_shape("laplace", shape)
- u = uniform(key, shape, dtype, minval=-1., maxval=1.)
+ u = uniform(
+ key, shape, dtype, minval=-1. + np.finfo(dtype).epsneg, maxval=1.)
return lax.mul(lax.sign(u), lax.log1p(lax.neg(lax.abs(u))))
|
Avoid generating non-finite values from gumbel and laplace
In the case of gumbel, we take the log(-log(x)), as such we would not want to let x be 0 or 1 as we would get a non-finite number.
In the case of laplace, we take the log1p(-abs(x)), as such we would not want to let x be -1 or 1 as we would get a non-finite number.
This was found by inspection, I have no evidence that this happens in practice.
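A rough numeric illustration of the edge case using plain NumPy (values chosen by hand, not taken from the change): when the uniform draw can be exactly 0, the Gumbel transform produces a non-finite result, while clamping the lower bound to the dtype's eps keeps it finite.

```python
import numpy as np

dtype = np.float32
eps = np.finfo(dtype).eps

u_bad = dtype(0.0)               # uniform() on [0, 1) can return exactly 0
u_ok = dtype(eps)                # clamped lower bound, as in the fix

print(-np.log(-np.log(u_bad)))   # -inf: log(0) blows up
print(-np.log(-np.log(u_ok)))    # a finite value
```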
|
diff --git a/Entity/RealTimeCampaignRepository.php b/Entity/RealTimeCampaignRepository.php
index <HASH>..<HASH> 100644
--- a/Entity/RealTimeCampaignRepository.php
+++ b/Entity/RealTimeCampaignRepository.php
@@ -10,9 +10,7 @@ use Mautic\CampaignBundle\Executioner\ContactFinder\Limiter\ContactLimiter;
class RealTimeCampaignRepository extends CampaignRepository
{
-
/**
- *
* @param EntityManager $em
* @param ClassMetadata $class
*/
@@ -22,7 +20,7 @@ class RealTimeCampaignRepository extends CampaignRepository
$class = new ClassMetadata(Campaign::class);
}
parent::__construct($em, $class);
- }
+ }
/**
* Get pending contact IDs for a campaign through ContactLimiter, skipping
@@ -49,8 +47,10 @@ class RealTimeCampaignRepository extends CampaignRepository
if ($limiter->hasCampaignLimit() && $limiter->getCampaignLimitRemaining() < $limiter->getBatchLimit()) {
$pulled = [];
for ($i = $limiter->getCampaignLimitRemaining();
- $i >= ($limiter->getCampaignLimitRemaining() - $limiter->getBatchLimit()); $i--) {
- $pulled[] = $contacts[$i];
+ $i > ($limiter->getCampaignLimitRemaining() - $limiter->getBatchLimit()); --$i) {
+ if (isset($contacts[$i])) {
+ $pulled[] = $contacts[$i];
+ }
}
$pulled = $contacts;
}
|
[ENG-<I>] Fixed RealTimeCampaignRepository when there are limits set on ContactLimiter
|
diff --git a/phpsec/phpsec.crypt.php b/phpsec/phpsec.crypt.php
index <HASH>..<HASH> 100644
--- a/phpsec/phpsec.crypt.php
+++ b/phpsec/phpsec.crypt.php
@@ -13,7 +13,7 @@
* Provides methods for encrypting data.
*/
class phpsecCrypt {
- const ALGO = MCRYPT_BLOWFISH;
+ const ALGO = MCRYPT_RIJNDAEL_256;
const ALGO_MODE = MCRYPT_MODE_CBC;
const HASH_TYPE = 'sha256';
@@ -41,7 +41,7 @@ class phpsecCrypt {
}
$td = mcrypt_module_open(self::ALGO, '', self::ALGO_MODE, '');
-
+print_r(mcrypt_list_algorithms());
/* Create IV. */
$iv = phpsecRand::bytes(mcrypt_enc_get_iv_size($td));
|
Changed default encryption algorithm to RIJNDAEL-<I>.
|
diff --git a/connection_test.go b/connection_test.go
index <HASH>..<HASH> 100644
--- a/connection_test.go
+++ b/connection_test.go
@@ -9,6 +9,7 @@ package amqp
import (
"net"
+ "net/url"
"sync"
"testing"
)
@@ -75,3 +76,14 @@ func TestConcurrentClose(t *testing.T) {
}
wg.Wait()
}
+
+func TestConnectionWithInvalidURIFails(t *testing.T) {
+ _, err := DialConfig(":,%not-a-valid-URI((", Config{})
+ if err == nil {
+ t.Fatalf("connection with invalid URI is expected to fail")
+ }
+
+ if _, urlerr := err.(*url.Error); !urlerr {
+ t.Fatalf("expected a url.Error, got %+v", err)
+ }
+}
\ No newline at end of file
diff --git a/uri.go b/uri.go
index <HASH>..<HASH> 100644
--- a/uri.go
+++ b/uri.go
@@ -54,6 +54,11 @@ type URI struct {
func ParseURI(uri string) (URI, error) {
builder := defaultURI
+ _, err := url.ParseRequestURI(uri)
+ if err != nil {
+ return builder, err
+ }
+
u, err := url.Parse(uri)
if err != nil {
return builder, err
|
Validate URI before attempting to use it
|
diff --git a/src/main/java/net/dv8tion/jda/audio/AudioWebSocket.java b/src/main/java/net/dv8tion/jda/audio/AudioWebSocket.java
index <HASH>..<HASH> 100644
--- a/src/main/java/net/dv8tion/jda/audio/AudioWebSocket.java
+++ b/src/main/java/net/dv8tion/jda/audio/AudioWebSocket.java
@@ -51,7 +51,7 @@ public class AudioWebSocket extends WebSocketAdapter
private boolean connected = false;
private boolean ready = false;
private Thread keepAliveThread;
- public static WebSocket socket;
+ public WebSocket socket;
private String endpoint;
private String wssEndpoint;
|
made socket no longer static. This is a leftover from when I was first implementing audio. Thank you Kantenkugel the bug-hunter.
|
diff --git a/ng-background.js b/ng-background.js
index <HASH>..<HASH> 100644
--- a/ng-background.js
+++ b/ng-background.js
@@ -8,13 +8,15 @@ module.exports = angular.module('ngBackground', [])
element.css({
'background-image': 'url(' + img +')',
'background-size': attrs.ngBackgroundSize || 'cover',
- 'background-position': attrs.ngBackgroundPosition || 'center'
+ 'background-position': attrs.ngBackgroundPosition || 'center',
+ 'background-repeat': attrs.ngBackgroundRepeat || 'no-repeat'
});
} else {
element.css({
'background-image': '',
'background-size': '',
- 'background-position': ''
+ 'background-position': '',
+ 'background-repeat': ''
});
}
};
|
add ng-background-repeat
|
diff --git a/seefor.go b/seefor.go
index <HASH>..<HASH> 100644
--- a/seefor.go
+++ b/seefor.go
@@ -128,10 +128,15 @@ func WrapBeforeHandler(handler http.Handler) Before {
}
// UseTimer set timer for meaturing endpoint performance.
-// If timer is nil then a new timer will be created.
+// If timer is nil and no timer exists
+// then a new timer will be created
+// else existing timer will be returned.
// You can serve statistics internal using Timer as handler
func (c4 *Seefor) UseTimer(timer *Timer) *Timer {
if timer == nil {
+ if c4.timer != nil {
+ return c4.timer
+ }
timer = NewTimer()
}
c4.timer = timer
diff --git a/seefor_test.go b/seefor_test.go
index <HASH>..<HASH> 100644
--- a/seefor_test.go
+++ b/seefor_test.go
@@ -188,6 +188,9 @@ func TestSeeforTimer(t *testing.T) {
})
timer := router.UseTimer(nil)
+ timer2 := router.UseTimer(nil)
+ assert.Exactly(t, timer, timer2)
+ assert.True(t, assert.ObjectsAreEqual(timer, timer2))
ts := httptest.NewServer(router)
defer ts.Close()
|
Check first if there is a timer and return it
|
diff --git a/spec/infinispan_cluster_spec.js b/spec/infinispan_cluster_spec.js
index <HASH>..<HASH> 100644
--- a/spec/infinispan_cluster_spec.js
+++ b/spec/infinispan_cluster_spec.js
@@ -32,13 +32,17 @@ describe('Infinispan cluster client', function() {
.catch(t.failed(done)).finally(done);
});
- it('can iterate over entries in a cluster, one entry at the time',
- tests.iterateEntries('cluster', 1, client)
- );
+ if (process.env.protocol == null || process.env.protocol >= '2.5') {
- it('can iterate over entries in a cluster, more than one entry at the time',
- tests.iterateEntries('cluster', 3, client)
- );
+ it('can iterate over entries in a cluster, one entry at the time',
+ tests.iterateEntries('cluster', 1, client)
+ );
+
+ it('can iterate over entries in a cluster, more than one entry at the time',
+ tests.iterateEntries('cluster', 3, client)
+ );
+
+ }
it('can remove listener in cluster', function(done) { client
.then(t.assert(t.clear()))
|
HRJS-<I> Iteration tests should only run with protocol <I> or above
|
diff --git a/NavigationReactNative/src/android/app/src/main/java/com/navigation/reactnative/CoordinatorLayoutView.java b/NavigationReactNative/src/android/app/src/main/java/com/navigation/reactnative/CoordinatorLayoutView.java
index <HASH>..<HASH> 100644
--- a/NavigationReactNative/src/android/app/src/main/java/com/navigation/reactnative/CoordinatorLayoutView.java
+++ b/NavigationReactNative/src/android/app/src/main/java/com/navigation/reactnative/CoordinatorLayoutView.java
@@ -3,6 +3,7 @@ package com.navigation.reactnative;
import android.content.Context;
import android.os.Build;
import android.view.MotionEvent;
+import android.view.View;
import android.view.ViewConfiguration;
import android.widget.ScrollView;
@@ -47,7 +48,8 @@ public class CoordinatorLayoutView extends CoordinatorLayout {
return (ScrollView) getChildAt(i);
if (getChildAt(i) instanceof TabBarView) {
TabBarView tabBarView = ((TabBarView) getChildAt(i));
- return (ScrollView) tabBarView.getTabAt(tabBarView.getCurrentItem()).getChildAt(0);
+ View tabContent = tabBarView.getTabAt(tabBarView.getCurrentItem()).getChildAt(0);
+ return tabContent instanceof ScrollView ? (ScrollView) tabContent : null;
}
}
return null;
|
Prevented error if tab content view isn't a scrollview
|
diff --git a/growler/mw/__init__.py b/growler/mw/__init__.py
index <HASH>..<HASH> 100644
--- a/growler/mw/__init__.py
+++ b/growler/mw/__init__.py
@@ -8,6 +8,10 @@ namespace_packages keyword in their setup.py's setup() function.
"""
import sys
-from growler.ext import GrowlerExtensionImporter
+import growler.ext
-sys.modules[__name__] = GrowlerExtensionImporter(__name__)
+importer = growler.ext.__class__()
+importer.__path__ = 'growler.mw'
+importer.__mods__ = {}
+
+sys.modules[__name__] = importer
diff --git a/tests/test_growler_ext.py b/tests/test_growler_ext.py
index <HASH>..<HASH> 100644
--- a/tests/test_growler_ext.py
+++ b/tests/test_growler_ext.py
@@ -5,6 +5,7 @@
import sys
import pytest
from unittest import mock
+import growler.mw
@pytest.fixture
|
growler.mw: Fixed old implementation, now makes its own extension importer object as module replacement.
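The fix leans on Python's ability to replace a package's entry in sys.modules with an arbitrary object whose attribute access drives lazy loading. A minimal sketch of that mechanism with invented names; it does not reproduce Growler's actual importer:

```python
import sys
import types

class ExtensionImporter(types.ModuleType):
    # Stand-in for the importer object: attribute access is where a real
    # implementation would locate and load the named extension module.
    def __getattr__(self, name):
        return "<would import extension %r here>" % name

sys.modules["fake_mw_namespace"] = ExtensionImporter("fake_mw_namespace")

import fake_mw_namespace                  # resolved straight from sys.modules
print(fake_mw_namespace.auth_middleware)  # routed through __getattr__
```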
|
diff --git a/ipp-v3-java-devkit/src/main/java/com/intuit/ipp/services/ReportService.java b/ipp-v3-java-devkit/src/main/java/com/intuit/ipp/services/ReportService.java
index <HASH>..<HASH> 100755
--- a/ipp-v3-java-devkit/src/main/java/com/intuit/ipp/services/ReportService.java
+++ b/ipp-v3-java-devkit/src/main/java/com/intuit/ipp/services/ReportService.java
@@ -263,6 +263,8 @@ public class ReportService {
private String subcol_pct_exp = null;
+ private String showrows = null;
+
/**
@@ -1209,5 +1211,12 @@ public class ReportService {
public void setSubcol_pct_exp(String subcol_pct_exp) {
this.subcol_pct_exp = subcol_pct_exp;
}
-
+
+ public String getShowrows() {
+ return showrows;
+ }
+
+ public void setShowrows(String showrows) {
+ this.showrows = showrows;
+ }
}
|
add showrows in the report service
|
diff --git a/src/joint.dia.paper.js b/src/joint.dia.paper.js
index <HASH>..<HASH> 100644
--- a/src/joint.dia.paper.js
+++ b/src/joint.dia.paper.js
@@ -344,8 +344,10 @@ joint.dia.Paper = Backbone.View.extend({
if (this.sourceView) {
this.sourceView.pointerup(evt, localPoint.x, localPoint.y);
- delete this.sourceView;
-
+
+ //"delete sourceView" occasionally throws an error in chrome (illegal access exception)
+ this.sourceView = null;
+
} else {
this.trigger('blank:pointerup', evt, localPoint.x, localPoint.y);
|
fix chrome's illegal access exception on pointerup in joint.dia.Paper
|
diff --git a/test/normal-test.js b/test/normal-test.js
index <HASH>..<HASH> 100644
--- a/test/normal-test.js
+++ b/test/normal-test.js
@@ -10,14 +10,9 @@ function parse(src) {
try {
return toml.parse(src);
} catch (err) {
- var o = {};
- Object.defineProperty(o, 'ERROR', {
- get: function() {
- return 'Line ' + err.line + ', column ' + err.column + ': ' + err;
- },
- enumerable: true
- });
- return o;
+ return {
+ ERROR: 'Line ' + err.line + ', column ' + err.column + ': ' + err
+ };
}
}
|
Remove dirty hacks in test scripts
|
diff --git a/hypercorn/logging.py b/hypercorn/logging.py
index <HASH>..<HASH> 100644
--- a/hypercorn/logging.py
+++ b/hypercorn/logging.py
@@ -13,6 +13,7 @@ class AccessLogger:
self.logger = target
elif target is not None:
self.logger = logging.getLogger("hypercorn.access")
+ self.logger.propagate = False
self.logger.handlers = []
if target == "-":
self.logger.addHandler(logging.StreamHandler(sys.stdout))
|
Don't propagate access logs
If logging has been set up within an application, it is used here for the
access logs instead of the format specified in the hypercorn config
file. Not propagating the logger fixes this.
This follows the same pattern as gunicorn, as seen here:
<URL>
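A small sketch of the behaviour the patch relies on: once an application configures the root logger, a named child logger also forwards its records there unless propagation is switched off. The log line and handler setup are illustrative, not Hypercorn's real code path:

```python
import logging
import sys

logging.basicConfig(level=logging.INFO)        # application-level logging setup

access = logging.getLogger("hypercorn.access")
access.propagate = False                       # keep records out of the root handler
access.handlers = []
access.addHandler(logging.StreamHandler(sys.stdout))

access.info('127.0.0.1 - - "GET / HTTP/1.1" 200')  # emitted once, in the access format only
```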
|
diff --git a/core/DataAccess/ArchiveWriter.php b/core/DataAccess/ArchiveWriter.php
index <HASH>..<HASH> 100644
--- a/core/DataAccess/ArchiveWriter.php
+++ b/core/DataAccess/ArchiveWriter.php
@@ -239,7 +239,7 @@ class ArchiveWriter
return true;
}
- protected function flushSpools()
+ public function flushSpools()
{
$this->flushSpool('numeric');
$this->flushSpool('blob');
|
Makes flushSpools publicly available (#<I>)
|
diff --git a/media/boom-assets/js/boom.assets.js b/media/boom-assets/js/boom.assets.js
index <HASH>..<HASH> 100755
--- a/media/boom-assets/js/boom.assets.js
+++ b/media/boom-assets/js/boom.assets.js
@@ -361,7 +361,7 @@ $.widget( 'boom.browser_asset', $.boom.browser,
$('#b-assets-view-thumbs div').removeClass('selected');
})
.on( 'click', '#b-button-multiaction-tag', function(){
- $.boom.dialog.open({
+ var dialog = $.boom.dialog.open({
url: '/cms/tags/asset/list/' + self.selected.join( '-' ),
title: 'Asset tags',
width: 440,
@@ -370,7 +370,17 @@ $.widget( 'boom.browser_asset', $.boom.browser,
type: 'asset',
id: self.selected.join( '-' )
});
- }
+ },
+ buttons: [
+ {
+ text: 'Close',
+ class : 'b-button',
+ icons: {primary : 'b-button-icon b-button-icon-accept'},
+ click: function(event) {
+ $.boom.dialog.destroy(dialog);
+ }
+ }
+ ]
});
});
|
Removed cancel button from asset multi-tagger
|
diff --git a/bin/templates/scripts/cordova/lib/build.js b/bin/templates/scripts/cordova/lib/build.js
index <HASH>..<HASH> 100644
--- a/bin/templates/scripts/cordova/lib/build.js
+++ b/bin/templates/scripts/cordova/lib/build.js
@@ -89,8 +89,15 @@ module.exports.run = function (buildOpts) {
events.emit('log','\tConfiguration: ' + configuration);
events.emit('log','\tPlatform: ' + (buildOpts.device ? 'device' : 'emulator'));
- var xcodebuildArgs = getXcodeBuildArgs(projectName, projectPath, configuration, buildOpts.device);
- return spawn('xcodebuild', xcodebuildArgs, projectPath);
+ var buildOutputDir = path.join(projectPath, 'build', 'device');
+
+ // remove the build/device folder before building
+ return spawn('rm', [ '-rf', buildOutputDir ], projectPath)
+ .then(function() {
+ var xcodebuildArgs = getXcodeBuildArgs(projectName, projectPath, configuration, buildOpts.device);
+ return spawn('xcodebuild', xcodebuildArgs, projectPath);
+ });
+
}).then(function () {
if (!buildOpts.device || buildOpts.noSign) {
return;
|
CB-<I> - Remove build/device folder before building
|
diff --git a/buildozer/targets/android.py b/buildozer/targets/android.py
index <HASH>..<HASH> 100644
--- a/buildozer/targets/android.py
+++ b/buildozer/targets/android.py
@@ -521,13 +521,13 @@ class TargetAndroid(Target):
try:
with open(join(self.pa_dir, "setup.py")) as fd:
setup = fd.read()
- deps = re.findall("install_reqs = (\[[^\]]*\])", setup, re.DOTALL | re.MULTILINE)[1]
+ deps = re.findall("^install_reqs = (\[[^\]]*\])", setup, re.DOTALL | re.MULTILINE)[0]
deps = ast.literal_eval(deps)
except Exception:
deps = []
pip_deps = []
for dep in deps:
- pip_deps.append('"{}"'.format(dep))
+ pip_deps.append("'{}'".format(dep))
# in virtualenv or conda env
options = "--user"
|
Updates p4a deps parsing
Now deals with conditional statements introduced in:
<URL>
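The key change is anchoring the pattern at the start of a line and taking the first match, so indented assignments inside conditional blocks are skipped. A standalone sketch of that parsing step; the setup.py snippet is invented:

```python
import re
import ast

# Simplified stand-in for a python-for-android setup.py with a conditional branch.
setup = """
if some_condition:
    install_reqs = ['only-sometimes']
install_reqs = ['pyjnius', 'six']
"""

# Anchoring at the start of a line (with re.MULTILINE) skips the indented,
# conditional assignment and keeps only the module-level one.
deps = ast.literal_eval(
    re.findall(r"^install_reqs = (\[[^\]]*\])", setup, re.DOTALL | re.MULTILINE)[0]
)
print(deps)  # ['pyjnius', 'six']
```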
|
diff --git a/lib/step.js b/lib/step.js
index <HASH>..<HASH> 100755
--- a/lib/step.js
+++ b/lib/step.js
@@ -64,25 +64,24 @@ function Step() {
next.parallel = function () {
var i = counter;
counter++;
+
function check() {
- counter--;
if (counter === 0) {
// When they're all done, call the callback
next.apply(null, results);
}
}
+ process.nextTick(check); // Ensures that check is called at least once
+
return function () {
+ counter--;
// Compress the error from any result to the first argument
if (arguments[0]) {
results[0] = arguments[0];
}
// Send the other results as arguments
results[i + 1] = arguments[1];
- if (lock) {
- process.nextTick(check);
- return
- }
- check();
+ if (!lock) { check(); }
};
};
|
Made next.parallel more consistent with next.group
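The patch reworks a completion counter: each parallel() call increments it, each returned callback decrements it, and the continuation fires once it reaches zero. A stripped-down Python sketch of that pattern, leaving out the nextTick scheduling and error compression (names are made up):

```python
class Step:
    def __init__(self, on_done):
        self.counter = 0
        self.on_done = on_done

    def parallel(self):
        self.counter += 1

        def done(result=None):
            # Decrement only when the callback actually fires, mirroring the fix.
            self.counter -= 1
            if self.counter == 0:
                self.on_done()

        return done

step = Step(on_done=lambda: print("all parallel callbacks finished"))
finish_a, finish_b = step.parallel(), step.parallel()
finish_a()
finish_b()   # the counter reaches zero here, so the continuation runs exactly once
```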
|
diff --git a/activerecord/test/cases/attributes_test.rb b/activerecord/test/cases/attributes_test.rb
index <HASH>..<HASH> 100644
--- a/activerecord/test/cases/attributes_test.rb
+++ b/activerecord/test/cases/attributes_test.rb
@@ -241,7 +241,7 @@ module ActiveRecord
test "attributes not backed by database columns are always initialized" do
OverloadedType.create!
- model = OverloadedType.first
+ model = OverloadedType.last
assert_nil model.non_existent_decimal
model.non_existent_decimal = "123"
@@ -253,7 +253,7 @@ module ActiveRecord
attribute :non_existent_decimal, :decimal, default: 123
end
child.create!
- model = child.first
+ model = child.last
assert_equal 123, model.non_existent_decimal
end
@@ -264,7 +264,7 @@ module ActiveRecord
attribute :foo, :string, default: "lol"
end
child.create!
- model = child.first
+ model = child.last
assert_equal "lol", model.foo
|
Should find last created record
Tables in tests are not always empty so `klass.first` does not always
find last created record.
Fixes #<I>.
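The reasoning is purely about ordering: taking the first row silently assumes the table starts empty. A toy plain-Python stand-in for the failure mode (no ActiveRecord involved):

```python
# Plain-Python stand-in for the test-isolation issue described above.
table = ["leftover row from another test"]   # shared test tables may not be empty

def create(record):
    table.append(record)
    return record

created = create("overloaded type")
assert table[-1] == created   # `last` reliably finds the record just created
assert table[0] != created    # `first` can return a pre-existing row instead
```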
|
diff --git a/lxd/device/nic_bridged.go b/lxd/device/nic_bridged.go
index <HASH>..<HASH> 100644
--- a/lxd/device/nic_bridged.go
+++ b/lxd/device/nic_bridged.go
@@ -28,6 +28,7 @@ import (
"github.com/lxc/lxd/lxd/revert"
"github.com/lxc/lxd/lxd/util"
"github.com/lxc/lxd/shared"
+ "github.com/lxc/lxd/shared/api"
log "github.com/lxc/lxd/shared/log15"
"github.com/lxc/lxd/shared/logger"
)
@@ -83,6 +84,10 @@ func (d *nicBridged) validateConfig(instConf instance.ConfigReader) error {
return errors.Wrapf(err, "Error loading network config for %q", d.config["network"])
}
+ if n.Status() == api.NetworkStatusPending {
+ return fmt.Errorf("Specified network is not fully created")
+ }
+
if n.Type() != "bridge" {
return fmt.Errorf("Specified network must be of type bridge")
}
|
lxc/device/nic/bridged: Only allow using non-Pending networks
|
diff --git a/django_q/models.py b/django_q/models.py
index <HASH>..<HASH> 100644
--- a/django_q/models.py
+++ b/django_q/models.py
@@ -141,6 +141,7 @@ class Schedule(models.Model):
return self.func
success.boolean = True
+ last_run.allow_tags = True
class Meta:
app_label = 'django_q'
|
fixed regression of task link in schedule admin
|
diff --git a/access_group.go b/access_group.go
index <HASH>..<HASH> 100644
--- a/access_group.go
+++ b/access_group.go
@@ -101,7 +101,7 @@ type AccessGroupCertificateCommonName struct {
type AccessGroupGSuite struct {
Gsuite struct {
Email string `json:"email"`
- IdentityProviderID string `json:"identity_provider_id"`
+ ConnectionID string `json:"connection_id"`
} `json:"gsuite"`
}
@@ -125,7 +125,7 @@ type AccessGroupAzure struct {
type AccessGroupOkta struct {
Okta struct {
Name string `json:"name"`
- IdentityProviderID string `json:"identity_provider_id"`
+ ConnectionID string `json:"connection_id"`
} `json:"okta"`
}
|
Update the Okta and Gsuite access group structs to match the 'connection_id' format returned by the API (#<I>)
identity_provider_id is not the value returned within the access group object, at least for Okta and GSuite
This is related to <URL>
|
diff --git a/packages/insomnia-app/app/models/settings.js b/packages/insomnia-app/app/models/settings.js
index <HASH>..<HASH> 100644
--- a/packages/insomnia-app/app/models/settings.js
+++ b/packages/insomnia-app/app/models/settings.js
@@ -79,7 +79,7 @@ export function init(): BaseSettings {
updateAutomatically: true,
disableUpdateNotification: false,
environmentHighlightColorStyle: 'sidebar-indicator',
- autocompleteDelay: 700,
+ autocompleteDelay: 1200,
fontMonospace: null,
fontInterface: null,
fontSize: 13,
|
Change default autocomplete delay from <I>ms to <I>ms
|
diff --git a/client/state/notification-settings/actions.js b/client/state/notification-settings/actions.js
index <HASH>..<HASH> 100644
--- a/client/state/notification-settings/actions.js
+++ b/client/state/notification-settings/actions.js
@@ -56,10 +56,9 @@ export const fetchSettings = () => ( dispatch ) => {
data,
} )
)
- .catch( ( error ) =>
+ .catch( () =>
dispatch( {
type: NOTIFICATION_SETTINGS_FETCH_FAILED,
- error,
} )
);
};
@@ -103,16 +102,13 @@ export const saveSettings = ( source, settings, applyToAll = false ) => ( dispat
dispatch( showSaveSuccessNotice() );
dispatch( {
type: NOTIFICATION_SETTINGS_SAVE_COMPLETE,
- error: undefined,
data,
} );
} )
- .catch( ( error ) => {
+ .catch( () => {
dispatch( showSaveErrorNotice() );
dispatch( {
type: NOTIFICATION_SETTINGS_SAVE_FAILED,
- error,
- data: undefined,
} );
} );
};
|
State: Cleanup notification settings action creators (#<I>)
|
diff --git a/cmd/puppeth/puppeth.go b/cmd/puppeth/puppeth.go
index <HASH>..<HASH> 100644
--- a/cmd/puppeth/puppeth.go
+++ b/cmd/puppeth/puppeth.go
@@ -20,6 +20,7 @@ package main
import (
"math/rand"
"os"
+ "strings"
"time"
"github.com/ethereum/go-ethereum/log"
@@ -34,7 +35,7 @@ func main() {
app.Flags = []cli.Flag{
cli.StringFlag{
Name: "network",
- Usage: "name of the network to administer",
+ Usage: "name of the network to administer (no spaces or hyphens, please)",
},
cli.IntFlag{
Name: "loglevel",
@@ -47,6 +48,10 @@ func main() {
log.Root().SetHandler(log.LvlFilterHandler(log.Lvl(c.Int("loglevel")), log.StreamHandler(os.Stdout, log.TerminalFormat(true))))
rand.Seed(time.Now().UnixNano())
+ network := c.String("network")
+ if strings.Contains(network, " ") || strings.Contains(network, "-") {
+ log.Crit("No spaces or hyphens allowed in network name")
+ }
// Start the wizard and relinquish control
makeWizard(c.String("network")).run()
return nil
|
cmd/puppeth: add constraints to network name (#<I>)
* cmd/puppeth: add constraints to network name
* cmd/puppeth: update usage of network arg
* cmd/puppeth: avoid package dependency on utils
|
diff --git a/GPy/models/GP_regression.py b/GPy/models/GP_regression.py
index <HASH>..<HASH> 100644
--- a/GPy/models/GP_regression.py
+++ b/GPy/models/GP_regression.py
@@ -103,7 +103,7 @@ class GP_regression(model):
return dL_dK
def log_likelihood_gradients(self):
- return (self.kern.dK_dtheta(self.X,slices1=self.Xslices)*self.dL_dK()[:,:,None]).sum(0).sum(0)
+ return self.kern.dK_dtheta(self.X,partial=self.dL_dK())
def predict(self,Xnew, slices=None):
"""
|
made GP_regression work with partial-passed gradients
|
diff --git a/lib/github_changelog_generator/options.rb b/lib/github_changelog_generator/options.rb
index <HASH>..<HASH> 100644
--- a/lib/github_changelog_generator/options.rb
+++ b/lib/github_changelog_generator/options.rb
@@ -105,7 +105,11 @@ module GitHubChangelogGenerator
def print_options
return unless self[:verbose]
Helper.log.info "Using these options:"
- pp(censored_values)
+ # For ruby 2.5.0+
+ censored_values.each do |key, value|
+ print(key.inspect, "=>", value.inspect)
+ puts ""
+ end
puts ""
end
|
fix #<I>; Hang on pretty print of options in <I>-rc (#<I>)
* fix #<I>; Hang on pretty print of options in <I>-rc
* [formatting] Use #inspect
|
diff --git a/models/paper2/get_pc_sites.py b/models/paper2/get_pc_sites.py
index <HASH>..<HASH> 100644
--- a/models/paper2/get_pc_sites.py
+++ b/models/paper2/get_pc_sites.py
@@ -35,12 +35,19 @@ for db in dbs:
if not proteins:
continue
for protein in proteins:
+ name = bpc.BiopaxProcessor._get_element_name(protein)
+ db_refs = bpc.BiopaxProcessor._get_db_refs(protein)
+ agent = Agent(name, mods=[mc], db_refs=db_refs)
reactions = protein.getParticipantOf().toArray()
+ if not reactions:
+ upstream = protein.getMemberPhysicalEntityOf().toArray()
+ for u in upstream:
+ reactions += u.getParticipantOf().toArray()
for reaction in reactions:
- for contr in reaction.getControlledOf().toArray():
- name = bpc.BiopaxProcessor._get_element_name(protein)
- db_refs = bpc.BiopaxProcessor._get_db_refs(protein)
- agent = Agent(name, mods=[mc], db_refs=db_refs)
+ controls = reaction.getControlledOf().toArray()
+ if not controls:
+ agents.append(agent)
+ for contr in controls:
agents.append(agent)
with open('pc_%s_modified_agents.pkl' % db, 'wb') as fh:
|
Collect modified agents by reactions/controls
|
diff --git a/src/ProxyManager/Exception/ExceptionInterface.php b/src/ProxyManager/Exception/ExceptionInterface.php
index <HASH>..<HASH> 100644
--- a/src/ProxyManager/Exception/ExceptionInterface.php
+++ b/src/ProxyManager/Exception/ExceptionInterface.php
@@ -4,9 +4,11 @@ declare(strict_types=1);
namespace ProxyManager\Exception;
+use Throwable;
+
/**
* Base exception class for the proxy manager
*/
-interface ExceptionInterface
+interface ExceptionInterface extends Throwable
{
}
|
`ExceptionInterface` must be `Throwable` in order to be caught
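The point of the change is that a marker type used in catch clauses must itself belong to the language's throwable hierarchy. A Python analogue of the same idea, with invented class names:

```python
# Python analogue: a shared "package exception" marker is only catchable because
# it sits inside the language's real exception hierarchy.
class PackageError(Exception):
    """Base type every exception of a hypothetical package derives from."""

class ProxyDirectoryNotWritable(PackageError):
    pass

try:
    raise ProxyDirectoryNotWritable("target directory is read-only")
except PackageError as exc:       # works because PackageError extends Exception
    print("caught:", exc)
```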
|
diff --git a/src/main/java/io/github/bonigarcia/wdm/Downloader.java b/src/main/java/io/github/bonigarcia/wdm/Downloader.java
index <HASH>..<HASH> 100644
--- a/src/main/java/io/github/bonigarcia/wdm/Downloader.java
+++ b/src/main/java/io/github/bonigarcia/wdm/Downloader.java
@@ -48,7 +48,7 @@ public class Downloader {
File binary = null;
// Check if binary exists
- boolean download = !targetFile.getParentFile().exists()
+ boolean download = !targetFile.exists()
|| WdmConfig.getBoolean("wdm.override");
if (!download) {
|
Check for target file existence
The parent directory could already exist
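A minimal sketch of why the old condition was too weak, using a throwaway temporary directory and an invented file name:

```python
import os
import tempfile

cache_dir = tempfile.mkdtemp()                       # the parent directory exists...
target = os.path.join(cache_dir, "driver-binary")    # ...but the binary itself does not

download_needed = not os.path.exists(target)         # decide based on the file, not the folder
assert download_needed
assert os.path.exists(os.path.dirname(target))       # the old check would have skipped the download
```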
|
diff --git a/eZ/Publish/Core/Persistence/Legacy/Content/FieldValue/Converter/Float.php b/eZ/Publish/Core/Persistence/Legacy/Content/FieldValue/Converter/Float.php
index <HASH>..<HASH> 100644
--- a/eZ/Publish/Core/Persistence/Legacy/Content/FieldValue/Converter/Float.php
+++ b/eZ/Publish/Core/Persistence/Legacy/Content/FieldValue/Converter/Float.php
@@ -100,16 +100,12 @@ class Float implements Converter
{
if ( !empty( $storageDef->dataFloat1 ) )
{
- $fieldDef->fieldTypeConstraints->validators = array(
- self::FLOAT_VALIDATOR_IDENTIFIER => array( 'minFloatValue' => $storageDef->dataFloat1 )
- );
+ $fieldDef->fieldTypeConstraints->validators[self::FLOAT_VALIDATOR_IDENTIFIER]['minFloatValue'] = $storageDef->dataFloat1;
}
if ( !empty( $storageDef->dataFloat2 ) )
{
- $fieldDef->fieldTypeConstraints->validators = array(
- self::FLOAT_VALIDATOR_IDENTIFIER => array( 'maxFloatValue' => $storageDef->dataFloat2 )
- );
+ $fieldDef->fieldTypeConstraints->validators[self::FLOAT_VALIDATOR_IDENTIFIER]['maxFloatValue'] = $storageDef->dataFloat2;
}
}
|
Make it possible to have a float min and max value
|
diff --git a/lib/scsocket.js b/lib/scsocket.js
index <HASH>..<HASH> 100644
--- a/lib/scsocket.js
+++ b/lib/scsocket.js
@@ -377,6 +377,7 @@ SCSocket.prototype._suspendSubscriptions = function () {
} else {
newState = channel.UNSUBSCRIBED;
}
+
this._triggerChannelUnsubscribe(channel, newState);
}
};
@@ -593,6 +594,8 @@ SCSocket.prototype._triggerChannelUnsubscribe = function (channel, newState) {
} else {
channel.state = channel.UNSUBSCRIBED;
}
+ this._cancelPendingSubscribeCallback(channel);
+
if (oldState == channel.SUBSCRIBED) {
channel.emit('unsubscribe', channelName);
SCEmitter.prototype.emit.call(this, 'unsubscribe', channelName);
|
Bug fix - Channel would get stuck in pending state if the connection failed while the subscription was in the middle of being processed
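A hedged sketch of the fix with made-up names: forcing a channel back to unsubscribed also clears any in-flight subscribe callback, so the channel cannot stay stuck in a pending state:

```python
class Channel:
    def __init__(self):
        self.state = "UNSUBSCRIBED"
        self.pending_subscribe = None

def trigger_unsubscribe(channel):
    channel.pending_subscribe = None     # cancel the pending subscribe callback
    channel.state = "UNSUBSCRIBED"

chan = Channel()
chan.state = "PENDING"
chan.pending_subscribe = lambda err=None: None
trigger_unsubscribe(chan)                # e.g. the connection dropped mid-subscribe
assert chan.state == "UNSUBSCRIBED" and chan.pending_subscribe is None
```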
|
diff --git a/spec/show_models_spec.rb b/spec/show_models_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/show_models_spec.rb
+++ b/spec/show_models_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
describe "show-models" do
it "should print a list of models" do
- output = mock_pry('show-models', 'exit-all')
+ output = mock_pry('Pry.color = false;', 'show-models', 'exit-all')
ar_models = <<MODELS
Beer
|
ShowModelsSpec: fix failing test
For some strange reason it is showing wrong results because `Pry.color`
at the moment of the `show-models` command invocation is set to true.
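The underlying issue is global state leaking between tests. A hedged Python sketch of the same pattern, with an invented COLOR flag standing in for Pry.color:

```python
# Sketch of the global-state pitfall: a test that asserts on formatted output
# has to pin the global flag before invoking the command under test.
COLOR = True

def render(text):
    return "\x1b[32m%s\x1b[0m" % text if COLOR else text

def test_render_plain():
    global COLOR
    COLOR = False                      # equivalent of sending 'Pry.color = false;'
    assert render("Beer") == "Beer"

test_render_plain()
```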
|
diff --git a/icekit/images_api/views.py b/icekit/images_api/views.py
index <HASH>..<HASH> 100644
--- a/icekit/images_api/views.py
+++ b/icekit/images_api/views.py
@@ -10,7 +10,7 @@ from . import serializers
Image = apps.get_model('icekit_plugins_image.Image')
-class ImageViewSet(viewsets.ReadOnlyModelViewSet):
+class ImageViewSet(viewsets.ModelViewSet):
"""
Read only viewset for image objects.
"""
@@ -18,8 +18,9 @@ class ImageViewSet(viewsets.ReadOnlyModelViewSet):
serializer_class = serializers.ImageSerializer
# NOTE: `get_queryset` method is used instead of this `queryset` class
# attribute to return results, but we still need this defined here so
- # the API router can auto-generate the right endpoint URL.
- queryset = Image.objects.all()
+ # the API router can auto-generate the right endpoint URL and apply
+ # `DjangoModelPermissions`.
+ queryset = Image.objects.none()
def get_queryset(self):
return Image.objects.all()
|
#<I> Enable writing to the images API now that auth and perms are OK
|
diff --git a/src/java/org/apache/cassandra/streaming/StreamOutManager.java b/src/java/org/apache/cassandra/streaming/StreamOutManager.java
index <HASH>..<HASH> 100644
--- a/src/java/org/apache/cassandra/streaming/StreamOutManager.java
+++ b/src/java/org/apache/cassandra/streaming/StreamOutManager.java
@@ -171,24 +171,4 @@ public class StreamOutManager
{
return Collections.unmodifiableList(files);
}
-
- public class StreamFile extends File
- {
- private long ptr = 0;
- public StreamFile(String path)
- {
- super(path);
- ptr = 0;
- }
-
- private void update(long ptr)
- {
- this.ptr = ptr;
- }
-
- public long getPtr()
- {
- return ptr;
- }
- }
}
|
nix StreamFile. Patch by gdusbabek, reviewed by stuhood. CASSANDRA-<I>
git-svn-id: <URL>
|