| diff (string, lengths 65–26.7k) | message (string, lengths 7–9.92k) |
|---|---|
diff --git a/lib/right_agent/serialize/serializable.rb b/lib/right_agent/serialize/serializable.rb
index <HASH>..<HASH> 100644
--- a/lib/right_agent/serialize/serializable.rb
+++ b/lib/right_agent/serialize/serializable.rb
@@ -26,7 +26,18 @@ module RightScale
# MessagePack and JSON serializable types that are sent to and from agents
module Serializable
+ @check_active_support = true
+
def self.included(base)
+ if @check_active_support
+ if require_succeeds?("active_support") && (v = Gem.loaded_specs['activesupport'].version.to_s) != "2.3.5"
+ raise Exception.new("Some versions of the activesupport gem modify json in ways that are incompatible with this " +
+ "RightScale::Serializable module. Version #{v} used here is not allowed, use 2.3.5 instead.")
+ else
+ @check_active_support = false
+ end
+ end
+
base.extend ClassMethods
base.send(:include, InstanceMethods)
end
|
acu<I> - Ensure that an incompatible version of active_support is not loaded
|
diff --git a/aegea/build_ami.py b/aegea/build_ami.py
index <HASH>..<HASH> 100644
--- a/aegea/build_ami.py
+++ b/aegea/build_ami.py
@@ -53,6 +53,9 @@ def build_ami(args):
raise Exception("cloud-init encountered errors")
sys.stderr.write(GREEN("OK") + "\n")
description = "Built by {} for {}".format(__name__, ARN.get_iam_username())
+ for existing_ami in resources.ec2.images.filter(Owners=["self"], Filters=[{"Name": "name", "Values": [args.name]}]):
+ logger.info("Deleting existing image {}".format(existing_ami))
+ existing_ami.deregister()
image = instance.create_image(Name=args.name, Description=description, BlockDeviceMappings=get_bdm())
tags = dict(tag.split("=", 1) for tag in args.tags)
base_ami = resources.ec2.Image(args.ami)
|
Remove existing AMI by the same name
|
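The pattern behind this patch (deregister any self-owned AMI that already carries the target name before creating a new image, since AMI names must be unique per account) can be sketched standalone with boto3. A minimal sketch, assuming boto3 and valid AWS credentials; the function and variable names are illustrative, not aegea's:

```python
import boto3

def replace_ami_by_name(instance_id, name, description=""):
    """Deregister self-owned AMIs named `name`, then create a fresh image."""
    ec2 = boto3.resource("ec2")
    # AMI names must be unique per account, so remove any existing image first.
    for existing in ec2.images.filter(
            Owners=["self"],
            Filters=[{"Name": "name", "Values": [name]}]):
        print("Deregistering existing image:", existing.id)
        existing.deregister()
    return ec2.Instance(instance_id).create_image(Name=name,
                                                  Description=description)
```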
diff --git a/commands/command_filter_process.go b/commands/command_filter_process.go
index <HASH>..<HASH> 100644
--- a/commands/command_filter_process.go
+++ b/commands/command_filter_process.go
@@ -207,7 +207,7 @@ func filterCommand(cmd *cobra.Command, args []string) {
if len(malformed) > 0 {
fmt.Fprintln(os.Stderr, tr.Tr.GetN(
- "Encountered %d file that should have been pointers, but wasn't:",
+ "Encountered %d file that should have been a pointer, but wasn't:",
"Encountered %d files that should have been pointers, but weren't:",
len(malformed),
len(malformed),
|
commands/command_filter_process.go: fix message
Now that we have quantity-sensitive text messages for
translation purposes, we can correct one message to use
the singular form.
h/t bk<I> on PR review.
|
diff --git a/jbpm-designer-client/src/main/resources/org/jbpm/designer/public/js/Plugins/saveplugin.js b/jbpm-designer-client/src/main/resources/org/jbpm/designer/public/js/Plugins/saveplugin.js
index <HASH>..<HASH> 100644
--- a/jbpm-designer-client/src/main/resources/org/jbpm/designer/public/js/Plugins/saveplugin.js
+++ b/jbpm-designer-client/src/main/resources/org/jbpm/designer/public/js/Plugins/saveplugin.js
@@ -369,7 +369,7 @@ ORYX.Plugins.SavePlugin = Clazz.extend({
if(ORYX.CONFIG.STORESVGONSAVE && ORYX.CONFIG.STORESVGONSAVE == "true") {
// svg save
- var formattedSvgDOM = DataManager.serialize(ORYX.EDITOR.getCanvas().getSVGRepresentation(false));
+ var formattedSvgDOM = DataManager.serialize(ORYX.EDITOR.getCanvas().getSVGRepresentation(true));
var rawSvgDOM = DataManager.serialize(ORYX.EDITOR.getCanvas().getRootNode().cloneNode(true));
var processJSON = ORYX.EDITOR.getSerializedJSON();
var processId = jsonPath(processJSON.evalJSON(), "$.properties.id");
|
JBPM-<I> - Non escaped characters (&, <, >) in CDATA section in BPMN2 result in invalid SVG file being generated (#<I>)
|
diff --git a/packages/theme-data/src/__stories__/stories.js b/packages/theme-data/src/__stories__/stories.js
index <HASH>..<HASH> 100644
--- a/packages/theme-data/src/__stories__/stories.js
+++ b/packages/theme-data/src/__stories__/stories.js
@@ -74,6 +74,11 @@ export default [
readme: undefined
},
{
+ description: "Component - Accordion",
+ schema: filterMatchByKey(baseTheme.unresolvedRoles, /^accordion./),
+ readme: undefined
+ },
+ {
description: "Component - Avatar",
schema: filterMatchByKey(baseTheme.unresolvedRoles, /^avatar./),
readme: undefined
|
docs: update theme-data stories for Accordion
|
diff --git a/bcbio/variation/genotype.py b/bcbio/variation/genotype.py
index <HASH>..<HASH> 100644
--- a/bcbio/variation/genotype.py
+++ b/bcbio/variation/genotype.py
@@ -45,6 +45,8 @@ def _shared_gatk_call_prep(align_bams, ref_file, config, dbsnp, region, out_file
params = ["-R", ref_file,
"--standard_min_confidence_threshold_for_calling", confidence,
"--standard_min_confidence_threshold_for_emitting", confidence,
+ "--downsample_to_coverage", "250",
+ "--downsampling_type", "BY_SAMPLE",
]
for a in annotation.get_gatk_annotations(config):
params += ["--annotation", a]
|
Provide standard downsampling during GATK variant calling to match <I> defaults on older versions: <I>bp per sample downsampling
|
diff --git a/src/ThrottleServiceProvider.php b/src/ThrottleServiceProvider.php
index <HASH>..<HASH> 100644
--- a/src/ThrottleServiceProvider.php
+++ b/src/ThrottleServiceProvider.php
@@ -46,7 +46,7 @@ class ThrottleServiceProvider extends ServiceProvider
*/
protected function setupConfig()
{
- $source = realpath(__DIR__.'/../config/throttle.php');
+ $source = realpath($raw = __DIR__.'/../config/throttle.php') ?: $raw;
if ($this->app instanceof LaravelApplication && $this->app->runningInConsole()) {
$this->publishes([$source => config_path('throttle.php')]);
|
Fixed config in phars
|
diff --git a/src/encoding/json/jsonschema/schemaDocument.go b/src/encoding/json/jsonschema/schemaDocument.go
index <HASH>..<HASH> 100644
--- a/src/encoding/json/jsonschema/schemaDocument.go
+++ b/src/encoding/json/jsonschema/schemaDocument.go
@@ -192,6 +192,7 @@ func (d *JsonSchemaDocument) parseSchema(documentNode interface{}, currentSchema
return errors.New(fmt.Sprintf(ERROR_MESSAGE_X_MUST_BE_OF_TYPE_Y, KEY_ITEMS, STRING_OBJECT))
}
newSchema := &jsonSchema{parent: currentSchema, property: k}
+ newSchema.ref = currentSchema.ref
currentSchema.SetItemsChild(newSchema)
err := d.parseSchema(m[k], newSchema)
if err != nil {
|
bugfix : schema inherited from items had nil reference
|
diff --git a/telethon/_client/telegrambaseclient.py b/telethon/_client/telegrambaseclient.py
index <HASH>..<HASH> 100644
--- a/telethon/_client/telegrambaseclient.py
+++ b/telethon/_client/telegrambaseclient.py
@@ -146,6 +146,7 @@ def init(
# Cache session data for convenient access
self._session_state = None
self._all_dcs = None
+ self._state_cache = statecache.StateCache(None, self._log)
self._entity_cache = entitycache.EntityCache()
self.api_id = int(api_id)
|
Init update state cache to empty in init
|
diff --git a/poetry/installation/pip_installer.py b/poetry/installation/pip_installer.py
index <HASH>..<HASH> 100644
--- a/poetry/installation/pip_installer.py
+++ b/poetry/installation/pip_installer.py
@@ -229,7 +229,7 @@ class PipInstaller(BaseInstaller):
args.append(req)
- return self.run_pip(*args)
+ return self.run(*args)
if package.develop:
args.append("-e")
|
pip installer: fix incorrect method call
Resolves: #<I>
|
diff --git a/qiskit/visualization/utils.py b/qiskit/visualization/utils.py
index <HASH>..<HASH> 100644
--- a/qiskit/visualization/utils.py
+++ b/qiskit/visualization/utils.py
@@ -25,6 +25,7 @@ from qiskit.circuit import (
Gate,
Instruction,
Measure,
+ ControlFlowOp,
)
from qiskit.circuit.library import PauliEvolutionGate
from qiskit.circuit.tools import pi_check
@@ -129,6 +130,9 @@ def get_param_str(op, drawer, ndigits=3):
if not hasattr(op, "params") or any(isinstance(param, np.ndarray) for param in op.params):
return ""
+ if isinstance(op, ControlFlowOp):
+ return ""
+
if isinstance(op, Delay):
param_list = [f"{op.params[0]}[{op.unit}]"]
else:
|
Omit QuantumCircuit as a parameter when drawing ControlFlowOps. (#<I>)
Most ControlFlowOps have at least one param of type QuantumCircuit
which qiskit.visualization.utils.get_param_str will attempt to
stringify and display as an instruction parameter. This largely
doesn't work, since the circuit's string will contain newlines, so
instead draw ControlFlowOps with only their name.
|
diff --git a/test/index_test.rb b/test/index_test.rb
index <HASH>..<HASH> 100644
--- a/test/index_test.rb
+++ b/test/index_test.rb
@@ -28,6 +28,11 @@ class IndexTest < Minitest::Test
assert !old_index.exists?
end
+ def test_total_docs
+ store_names ["Product A"]
+ assert_equal 1, Product.searchkick_index.total_docs
+ end
+
def test_mapping
store_names ["Dollar Tree"], Store
assert_equal [], Store.search(query: {match: {name: "dollar"}}).map(&:name)
|
Added test for total_docs
|
diff --git a/packages/es-dev-server/src/config.js b/packages/es-dev-server/src/config.js
index <HASH>..<HASH> 100644
--- a/packages/es-dev-server/src/config.js
+++ b/packages/es-dev-server/src/config.js
@@ -184,7 +184,7 @@ export function createConfig(config) {
let openPath;
if (typeof open === 'string' && open !== '') {
// user-provided open path
- openPath = path.normalize(open);
+ openPath = open;
} else if (appIndex) {
// if an appIndex was provided, use it's directory as open path
openPath = `${basePath || ''}${appIndexDir}/`;
|
fix(es-dev-server): don't normalize file path on a browser path
|
diff --git a/lib/ib/project.rb b/lib/ib/project.rb
index <HASH>..<HASH> 100644
--- a/lib/ib/project.rb
+++ b/lib/ib/project.rb
@@ -109,6 +109,27 @@ class IB::Project
DEFAULT_FRAMEWORKS.each do |framework|
target.add_system_framework framework
end
+
+ extra_frameworks.each do |framework|
+ add_extra_framework framework
+ end
+ end
+
+ def extra_frameworks
+ extra_frameworks = Motion::Project::App.config.vendor_projects
+ extra_frameworks = extra_frameworks.select { |vp| vp.opts[:ib] }
+ end
+
+ def add_extra_framework(framework)
+ framework_name = framework.path.split('/').last
+ framework_group = project.new_group(framework_name)
+ framework_group.path = File.join(project_path, framework.path)
+ framework_target = project.new_target(:framework, framework_name, platform)
+ Dir.glob("#{framework.path}/**/*.{h,m}") do |file|
+ file_ref = framework_group.new_file File.join(project_path, file)
+ framework_target.add_file_references([file_ref])
+ puts "path: #{framework.path}, file: #{file}"
+ end
end
def ib_project_path
|
add ability to pass along custom frameworks
Example in your project Rakefile:
app.vendor_project('frameworks/DesignableKit', :static, { ib: true })
passing `ib: true` in the opts for vendor_project will include the directory's
.h/.m files in a custom framework for you.
|
diff --git a/qunit/qunit.js b/qunit/qunit.js
index <HASH>..<HASH> 100644
--- a/qunit/qunit.js
+++ b/qunit/qunit.js
@@ -953,16 +953,14 @@ QUnit.jsDump = (function() {
type = "date";
} else if (QUnit.is("Function", obj)) {
type = "function";
- } else if (QUnit.is("Array", obj)) {
- type = "array";
- } else if (QUnit.is("Window", obj) || QUnit.is("global", obj)) {
+ } else if (obj.setInterval && obj.document && !obj.nodeType) {
type = "window";
} else if (obj.nodeType === 9) {
type = "document";
- } else if (QUnit.is("HTMLCollection", obj) || QUnit.is("NodeList", obj)) {
- type = "nodelist";
} else if (obj.nodeType) {
type = "node";
+ } else if (typeof obj === "object" && typeof obj.length === "number" && obj.length >= 0) {
+ type = "array";
} else {
type = typeof obj;
}
|
More changes to the detection of types in jsDump's typeOf.
|
diff --git a/src/Administration/Resources/app/administration/src/module/sw-cms/service/cms.service.js b/src/Administration/Resources/app/administration/src/module/sw-cms/service/cms.service.js
index <HASH>..<HASH> 100644
--- a/src/Administration/Resources/app/administration/src/module/sw-cms/service/cms.service.js
+++ b/src/Administration/Resources/app/administration/src/module/sw-cms/service/cms.service.js
@@ -93,7 +93,7 @@ function getEntityData(element, configKey) {
if (Array.isArray(configValue)) {
const entityIds = [];
- if (configValue[0].mediaId) {
+ if (configValue.length && configValue[0].mediaId) {
configValue.forEach((val) => {
entityIds.push(val.mediaId);
});
|
NEXT-<I> - Added additional check before adding mediaId
Fixes #<I>
|
diff --git a/elasticsearch-transport/test/unit/transport_manticore_test.rb b/elasticsearch-transport/test/unit/transport_manticore_test.rb
index <HASH>..<HASH> 100644
--- a/elasticsearch-transport/test/unit/transport_manticore_test.rb
+++ b/elasticsearch-transport/test/unit/transport_manticore_test.rb
@@ -103,8 +103,13 @@ else
should "allow to set options for Manticore" do
options = { :headers => {"User-Agent" => "myapp-0.0" }}
transport = Manticore.new :hosts => [ { :host => 'foobar', :port => 1234 } ], :options => options
- transport.connections.first.connection.expects(:get).
- with('http://foobar:1234//', options).returns(stub_everything)
+ transport.connections.first.connection
+ .expects(:get)
+ .with do |host, options|
+ assert_equal 'myapp-0.0', options[:headers]['User-Agent']
+ true
+ end
+ .returns(stub_everything)
transport.perform_request 'GET', '/', {}
end
|
[CLIENT] Fixed a failing Manticore unit test
|
diff --git a/app/AppKernel.php b/app/AppKernel.php
index <HASH>..<HASH> 100644
--- a/app/AppKernel.php
+++ b/app/AppKernel.php
@@ -17,12 +17,12 @@ class AppKernel extends Kernel
new Symfony\Bundle\AsseticBundle\AsseticBundle(),
new Sensio\Bundle\FrameworkExtraBundle\SensioFrameworkExtraBundle(),
new JMS\SecurityExtraBundle\JMSSecurityExtraBundle(),
+ new Acme\DemoBundle\AcmeDemoBundle(),
);
if (in_array($this->getEnvironment(), array('dev', 'test'))) {
$bundles[] = new Symfony\Bundle\WebProfilerBundle\WebProfilerBundle();
$bundles[] = new Symfony\Bundle\WebConfiguratorBundle\SymfonyWebConfiguratorBundle();
- $bundles[] = new Acme\DemoBundle\AcmeDemoBundle();
}
return $bundles;
|
activated the AcmeDemoBundle for all environments to ease things
|
diff --git a/db/archives.go b/db/archives.go
index <HASH>..<HASH> 100644
--- a/db/archives.go
+++ b/db/archives.go
@@ -228,10 +228,7 @@ func (db *DB) PurgeArchive(id uuid.UUID) error {
return fmt.Errorf("Invalid attempt to purge a 'valid' archive detected")
}
- err = db.Exec(`UPDATE archives SET purge_reason =
- (SELECT status FROM archives WHERE uuid = ?)
- WHERE uuid = ?
- `, id.String(), id.String())
+ err = db.Exec(`UPDATE archives SET purge_reason = status WHERE uuid = ?`, id.String())
if err != nil {
return err
}
|
Simplify purge_reason update SQL
It should now work on PostgreSQL and MySQL (as well as SQLite tests)
|
diff --git a/src/services/printutils.js b/src/services/printutils.js
index <HASH>..<HASH> 100644
--- a/src/services/printutils.js
+++ b/src/services/printutils.js
@@ -62,18 +62,20 @@ ngeo.PrintUtils.prototype.createPrintMaskPostcompose = function(getSize,
var center = [viewportWidth / 2, viewportHeight / 2];
var size = getSize();
+ var height = size[1] * ol.has.DEVICE_PIXEL_RATIO;
+ var width = size[0] * ol.has.DEVICE_PIXEL_RATIO;
var scale = getScale(frameState);
var ppi = ngeo.PrintUtils.DOTS_PER_INCH_;
var ipm = ngeo.PrintUtils.INCHES_PER_METER_;
var extentHalfWidth =
- (((size[0] / ppi) / ipm) * scale / resolution) / 2;
+ (((width / ppi) / ipm) * scale / resolution) / 2;
self.extentHalfHorizontalDistance_ =
(((size[0] / ppi) / ipm) * scale) / 2;
var extentHalfHeight =
- (((size[1] / ppi) / ipm) * scale / resolution) / 2;
+ (((height / ppi) / ipm) * scale / resolution) / 2;
self.extentHalfVerticalDistance_ =
(((size[1] / ppi) / ipm) * scale) / 2;
|
Take device pixel ratio into account when displaying the print mask (#<I>)
|
diff --git a/src/Palladium/Mapper/Identity.php b/src/Palladium/Mapper/Identity.php
index <HASH>..<HASH> 100644
--- a/src/Palladium/Mapper/Identity.php
+++ b/src/Palladium/Mapper/Identity.php
@@ -46,8 +46,8 @@ class Identity extends DataMapper
/**
- * @param Entity\Identity $entity
- */
+ * @param Entity\Identity $entity
+ */
public function fetch(Entity\Identity $entity)
{
diff --git a/src/Palladium/Mapper/OneTimeIdentity.php b/src/Palladium/Mapper/OneTimeIdentity.php
index <HASH>..<HASH> 100644
--- a/src/Palladium/Mapper/OneTimeIdentity.php
+++ b/src/Palladium/Mapper/OneTimeIdentity.php
@@ -8,7 +8,6 @@ namespace Palladium\Mapper;
use Palladium\Component\DataMapper;
use Palladium\Entity as Entity;
-use PDOStatement;
class OneTimeIdentity extends DataMapper
{
|
Minor: fixed minor inconsistencies
|
diff --git a/oauthlib/oauth2/rfc6749/endpoints/authorization.py b/oauthlib/oauth2/rfc6749/endpoints/authorization.py
index <HASH>..<HASH> 100644
--- a/oauthlib/oauth2/rfc6749/endpoints/authorization.py
+++ b/oauthlib/oauth2/rfc6749/endpoints/authorization.py
@@ -7,7 +7,7 @@ This module is an implementation of various logic needed
for consuming and providing OAuth 2.0 RFC6749.
"""
from __future__ import absolute_import, unicode_literals
-
+import sys
import logging
from oauthlib.common import Request
@@ -66,7 +66,8 @@ class AuthorizationEndpoint(BaseEndpoint):
BaseEndpoint.__init__(self)
self._response_types = {}
# response_types are sorted internally so ordered comparison is faster/easier later
- for k, v in response_types.iteritems():
+
+ for k, v in response_types.iteritems() if sys.version_info[0] == 2 else iter(response_types.items()):
self._response_types[",".join(sorted(k.split()))] = v
self._default_response_type = default_response_type
|
Iterate correctly over response_types dict without using six
|
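The fix avoids a `six` dependency by branching on `sys.version_info` to choose between `iteritems()` (Python 2) and `items()` (Python 3). The same idiom in isolation, a sketch with generic names rather than oauthlib's:

```python
import sys

def iter_items(d):
    """Yield (key, value) pairs on both Python 2 and Python 3 without six."""
    if sys.version_info[0] == 2:
        return d.iteritems()   # Python 2: lazy iterator, avoids building a list
    return iter(d.items())     # Python 3: items() is already a lightweight view

response_types = {"code token": "hybrid", "code": "auth_code"}
normalized = {",".join(sorted(k.split())): v
              for k, v in iter_items(response_types)}
print(normalized)  # {'code,token': 'hybrid', 'code': 'auth_code'}
```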
diff --git a/src/labels/label_line.js b/src/labels/label_line.js
index <HASH>..<HASH> 100644
--- a/src/labels/label_line.js
+++ b/src/labels/label_line.js
@@ -252,24 +252,4 @@ export default class LabelLine extends Label {
this.obb = new OBB(p[0], p[1], -this.angle[0], width, height);
this.aabb = this.obb.getExtent();
}
-
- // Try to move the label into the tile bounds
- // Returns true if label was moved into tile, false if it couldn't be moved
- moveIntoTile () {
- let in_tile = false;
- let fits_to_segment = false;
-
- while (!in_tile) {
- let segment = this.getNextFittingSegment();
- if (segment) {
- this.update();
- in_tile = this.inTileBounds();
- }
- else {
- return false;
- }
- }
-
- return true;
- }
}
|
Remove moveIntoTile method for LabelLine
|
diff --git a/lib/guard/rspec/runner.rb b/lib/guard/rspec/runner.rb
index <HASH>..<HASH> 100644
--- a/lib/guard/rspec/runner.rb
+++ b/lib/guard/rspec/runner.rb
@@ -106,6 +106,7 @@ module Guard
def zeus_guard_env_file
unless @zeus_guard_env_file
@zeus_guard_env_file = Tempfile.new(['zeus_guard_env','.rb'])
+ @zeus_guard_env_file.puts '# encoding: UTF-8'
@zeus_guard_env_file.puts '# Extra settings for Guard when using Zeus'
@zeus_guard_env_file.puts "ENV['GUARD_NOTIFICATIONS']=#{ENV['GUARD_NOTIFICATIONS'].inspect}" if ENV['GUARD_NOTIFICATIONS']
@zeus_guard_env_file.puts "ENV['GUARD_NOTIFY']=#{ENV['GUARD_NOTIFY'].inspect}" if ENV['GUARD_NOTIFY']
|
allow utf-8 characters in guard environment with zeus, fixes #<I>
|
diff --git a/seqcluster/libs/tool.py b/seqcluster/libs/tool.py
index <HASH>..<HASH> 100644
--- a/seqcluster/libs/tool.py
+++ b/seqcluster/libs/tool.py
@@ -289,7 +289,7 @@ def _add_complete_cluster(idx, clus1):
locilen_sorted = sorted(clus1.locilen.iteritems(), key=operator.itemgetter(1), reverse=True)
maxidl = locilen_sorted[0][0]
c = cluster(idx)
- c.add_id_member(clus1.loci2seq[maxidl], maxidl)
+ c.add_id_member(clus1.idmembers, maxidl)
c.id = idx
c.toomany = len(locilen_sorted)
return c
@@ -527,7 +527,7 @@ def _solve_conflict(list_c, s2p, n_cluster):
return list_c
-def _split_cluster(c , pairs, n):
+def _split_cluster(c, pairs, n):
"""split cluster by exclussion"""
old = c[p[0]]
new = c[p[1]]
|
for big clusters, add everything, but keep one location
|
diff --git a/src/main/java/com/sun/glass/ui/monocle/MonocleRobot.java b/src/main/java/com/sun/glass/ui/monocle/MonocleRobot.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/sun/glass/ui/monocle/MonocleRobot.java
+++ b/src/main/java/com/sun/glass/ui/monocle/MonocleRobot.java
@@ -214,7 +214,7 @@ class MonocleRobot extends GlassRobot {
int colStop = Math.min(x + width, scrWidth);
for (int row = y; row < rowStop; row++) {
for (int col = x; col < colStop; col++) {
- data[row * scrWidth + col] = buffer.get(row * scrWidth + col);
+ data[(row - y) * (colStop - x) + (col - x)] = buffer.get(row * scrWidth + col);
}
}
}
|
Add upstream partial screen capture fix (JDK-<I>).
|
diff --git a/internal/provider/resource_integer.go b/internal/provider/resource_integer.go
index <HASH>..<HASH> 100644
--- a/internal/provider/resource_integer.go
+++ b/internal/provider/resource_integer.go
@@ -21,7 +21,7 @@ func resourceInteger() *schema.Resource {
"old and new resources exist concurrently.",
CreateContext: CreateInteger,
ReadContext: schema.NoopContext,
- Delete: schema.RemoveFromState,
+ DeleteContext: DeleteInteger,
Importer: &schema.ResourceImporter{
State: ImportInteger,
},
@@ -93,6 +93,11 @@ func CreateInteger(_ context.Context, d *schema.ResourceData, meta interface{})
return nil
}
+func DeleteInteger(_ context.Context, d *schema.ResourceData, _ interface{}) diag.Diagnostics {
+ d.SetId("")
+ return nil
+}
+
func ImportInteger(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) {
parts := strings.Split(d.Id(), ",")
if len(parts) != 3 && len(parts) != 4 {
|
Replace usage of Delete field with DeleteContext in resource_integer
|
diff --git a/plugins/crusher.js b/plugins/crusher.js
index <HASH>..<HASH> 100644
--- a/plugins/crusher.js
+++ b/plugins/crusher.js
@@ -86,9 +86,9 @@ module.exports = Plugin.extend({
if (this.analyse) return this.analyser(function analyser(err, results) {
if (err) return self.emit('error', err);
- self.logger.info('The fastest engine: %s', results.fastest.engines);
- self.logger.info('The smallest content: %s', results.filesize.engines);
- self.logger.info('The best compressed: %s', results.bandwidth.engines);
+ self.logger.info('The fastest engine: '+ results.fastest.engines);
+ self.logger.info('The smallest content: '+ results.filesize.engines);
+ self.logger.info('The best compressed: '+ results.bandwidth.engines);
self.emit('data');
});
|
[minor] The %s doesn't work, yo
|
diff --git a/nion/instrumentation/test/CameraControl_test.py b/nion/instrumentation/test/CameraControl_test.py
index <HASH>..<HASH> 100644
--- a/nion/instrumentation/test/CameraControl_test.py
+++ b/nion/instrumentation/test/CameraControl_test.py
@@ -38,6 +38,7 @@ class TestCameraControlClass(unittest.TestCase):
def tearDown(self):
HardwareSource.HardwareSourceManager()._close_hardware_sources()
+ HardwareSource.HardwareSourceManager()._close_instruments()
def _acquire_one(self, document_controller, hardware_source):
hardware_source.start_playing()
diff --git a/nion/instrumentation/test/ScanControl_test.py b/nion/instrumentation/test/ScanControl_test.py
index <HASH>..<HASH> 100644
--- a/nion/instrumentation/test/ScanControl_test.py
+++ b/nion/instrumentation/test/ScanControl_test.py
@@ -38,6 +38,7 @@ class TestScanControlClass(unittest.TestCase):
def tearDown(self):
HardwareSource.HardwareSourceManager()._close_hardware_sources()
+ HardwareSource.HardwareSourceManager()._close_instruments()
def _acquire_one(self, document_controller, hardware_source):
hardware_source.start_playing(3.0)
|
Ensure proper instrument shutdowns (was causing failing tests).
|
diff --git a/cli.py b/cli.py
index <HASH>..<HASH> 100755
--- a/cli.py
+++ b/cli.py
@@ -153,6 +153,7 @@ def _generate_reload(generator, input, output):
print('watch:', path)
observer.schedule(event_handler, path, recursive=True)
for entry in input:
+ entry = entry.dirname().expand().abspath()
print('watch:', entry)
observer.schedule(event_handler, entry, recursive=True)
path = Path(__file__).parent / 'qface'
|
Fixed issue with watcher when a file name was passed as src
|
diff --git a/lib/taxonomite/entity.rb b/lib/taxonomite/entity.rb
index <HASH>..<HASH> 100644
--- a/lib/taxonomite/entity.rb
+++ b/lib/taxonomite/entity.rb
@@ -23,7 +23,7 @@ module Taxonomite
protected
# classes overload to create the appropriate taxonomy_node
# def create_taxonomy_node
- # Taxonomite::Taxon.new(name: self.name)
+ # Taxonomite::Node.new(name: self.name)
# end
private
|
fix comment (get rid of Taxon)
|
diff --git a/core/broker/uplink.go b/core/broker/uplink.go
index <HASH>..<HASH> 100644
--- a/core/broker/uplink.go
+++ b/core/broker/uplink.go
@@ -158,6 +158,10 @@ func (b *broker) HandleUplink(uplink *pb.UplinkMessage) (err error) {
// Select best DownlinkOption
if len(downlinkOptions) > 0 {
downlinkMessage = &pb.DownlinkMessage{
+ DevEui: device.DevEui,
+ AppEui: device.AppEui,
+ AppId: device.AppId,
+ DevId: device.DevId,
DownlinkOption: selectBestDownlink(downlinkOptions),
}
}
|
Pass valid DownlinkMessage from Broker to NS
|
diff --git a/src/gitgraph.js b/src/gitgraph.js
index <HASH>..<HASH> 100644
--- a/src/gitgraph.js
+++ b/src/gitgraph.js
@@ -469,7 +469,7 @@
// Add height of detail div (normal vertical mode only)
if (commit.detail !== null) {
commit.detail.style.display = "block";
- this.parent.commitOffsetY -= commit.detail.clientHeight;
+ this.parent.commitOffsetY -= commit.detail.clientHeight - 40;
}
// Auto-render
|
Reduce margin bottom of detail (canvas)
|
diff --git a/tests/test_publish_heroku.py b/tests/test_publish_heroku.py
index <HASH>..<HASH> 100644
--- a/tests/test_publish_heroku.py
+++ b/tests/test_publish_heroku.py
@@ -24,7 +24,7 @@ def test_publish_heroku_installs_plugin(mock_call, mock_check_output, mock_which
with runner.isolated_filesystem():
open("t.db", "w").write("data")
result = runner.invoke(cli.cli, ["publish", "heroku", "t.db"], input="y\n")
- assert -1 == result.exit_code
+ assert 0 != result.exit_code
mock_check_output.assert_has_calls(
[mock.call(["heroku", "plugins"]), mock.call(["heroku", "apps:list", "--json"])]
)
|
Fix for test failure with Click <I>
|
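The looser assertion matters because the exit code Click's test runner reports for a failed command has changed across Click releases (older runners reported -1 for unhandled failures, newer ones report 1), so the test only requires a non-zero code. A self-contained sketch of that assertion style with a toy command, not datasette's actual CLI:

```python
import click
from click.testing import CliRunner

@click.command()
def publish():
    """Toy command that always fails."""
    raise click.ClickException("publish failed")

result = CliRunner().invoke(publish, [])
# Do not pin a specific failure code such as -1; it differs between Click versions.
assert result.exit_code != 0
print("exit code:", result.exit_code)
```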
diff --git a/src/Application.php b/src/Application.php
index <HASH>..<HASH> 100644
--- a/src/Application.php
+++ b/src/Application.php
@@ -16,6 +16,7 @@ use Nyholm\Psr7\Factory\Psr17Factory;
use Nyholm\Psr7\Response as PsrResponse;
use Psr\Http\Message\ResponseInterface;
use Psr\Http\Message\ServerRequestInterface;
+use Exception;
use RuntimeException;
use Symfony\Bridge\PsrHttpMessage\Factory\PsrHttpFactory;
use Symfony\Component\HttpFoundation\Request as SymfonyRequest;
|
Fix missing import
Instead of the expected error `'Unable to resolve PSR request. Please install symfony/psr-http-message-bridge and nyholm/psr7.'`, this error is shown: `Class 'Laravel\\Lumen\\Exception' not found in src/Application.php:<I>`
|
diff --git a/src/mappers/har.js b/src/mappers/har.js
index <HASH>..<HASH> 100644
--- a/src/mappers/har.js
+++ b/src/mappers/har.js
@@ -190,6 +190,6 @@ function getOptionalDuration (start, end) {
return -1;
}
- return parseInt(end) - parseInt(start);
+ return end - start;
}
|
Remove unnecessary calls to parseInt.
|
diff --git a/config/ember-try.js b/config/ember-try.js
index <HASH>..<HASH> 100644
--- a/config/ember-try.js
+++ b/config/ember-try.js
@@ -15,7 +15,7 @@ module.exports = function() {
name: 'ember-lts-2.12',
env: {
EMBER_OPTIONAL_FEATURES: JSON.stringify({
- 'jquery-integration': true
+ 'jquery-integration': false
})
},
npm: {
@@ -29,12 +29,11 @@ module.exports = function() {
name: 'ember-lts-2.16',
env: {
EMBER_OPTIONAL_FEATURES: JSON.stringify({
- 'jquery-integration': true
+ 'jquery-integration': false
})
},
npm: {
devDependencies: {
- '@ember/jquery': '^0.5.2',
'ember-source': '~2.16.0'
}
}
@@ -43,12 +42,11 @@ module.exports = function() {
name: 'ember-lts-2.18',
env: {
EMBER_OPTIONAL_FEATURES: JSON.stringify({
- 'jquery-integration': true
+ 'jquery-integration': false
})
},
npm: {
devDependencies: {
- '@ember/jquery': '^0.5.2',
'ember-source': '~2.18.0'
}
}
|
also disable for the ember try sections
|
diff --git a/src/Schema/Type/TypeReference.php b/src/Schema/Type/TypeReference.php
index <HASH>..<HASH> 100644
--- a/src/Schema/Type/TypeReference.php
+++ b/src/Schema/Type/TypeReference.php
@@ -80,8 +80,6 @@ class TypeReference
if (empty($path)) {
return false;
}
- // Remove the named type
- array_pop($path);
return in_array($nodeKind, $path);
}
|
BUGFIX: hasWrapper() unnecessarily popping the path
|
diff --git a/src/Traits/HasRole.php b/src/Traits/HasRole.php
index <HASH>..<HASH> 100644
--- a/src/Traits/HasRole.php
+++ b/src/Traits/HasRole.php
@@ -135,7 +135,9 @@ trait HasRole
}
/**
- * @param $query
+ * Query scope for user having the given roles.
+ *
+ * @param \Illuminate\Database\Eloquent\Builder $query
* @param array $roles
* @return mixed
*/
@@ -152,7 +154,7 @@ trait HasRole
/**
* Revokes the given role from the user using slug.
*
- * @param $slug
+ * @param string $slug
* @return bool
*/
public function revokeRoleBySlug($slug)
@@ -165,7 +167,7 @@ trait HasRole
/**
* Revokes the given role from the user.
*
- * @param $role
+ * @param mixed $role
* @return bool
*/
public function revokeRole($role = "")
|
Update doc blocks and bump <I>
|
diff --git a/sitetree/admin.py b/sitetree/admin.py
index <HASH>..<HASH> 100644
--- a/sitetree/admin.py
+++ b/sitetree/admin.py
@@ -1,6 +1,9 @@
from django.conf import settings as django_settings
from django import VERSION as django_version
-from django.urls import get_urlconf, get_resolver
+try:
+ from django.urls import get_urlconf, get_resolver
+except ImportError:
+ from django.conf.urls import get_urlconf, get_resolver
from django.utils.translation import ugettext_lazy as _
from django.utils import six
from django.http import HttpResponseRedirect
|
added backward compatibility
this adds compatibility with Django versions prior to <I>
|
diff --git a/initializers/initRedis.js b/initializers/initRedis.js
index <HASH>..<HASH> 100644
--- a/initializers/initRedis.js
+++ b/initializers/initRedis.js
@@ -194,9 +194,8 @@ var initPingAndCheck = function(api, next){
// start timers
api.redis.ping(api, function(){
- api.redis.checkForDroppedPeers(api, function(){
- next();
- });
+ api.redis.lostPeerTimer = setTimeout(api.redis.checkForDroppedPeers, api.redis.lostPeerCheckTime, api);
+ next();
});
}
|
delay first peer check until tasks have been loaded
|
diff --git a/lib/db.js b/lib/db.js
index <HASH>..<HASH> 100644
--- a/lib/db.js
+++ b/lib/db.js
@@ -22,7 +22,7 @@ exports.index = function (collection) {
// Ensures there's a reasonable index for the poling dequeue
// Status is first b/c querying by status = queued should be very selective
- collection.ensureIndex({ status: 1, queue: 1, priority: 1, _id: 1, delay: 1 }, function (err) {
+ collection.ensureIndex({ status: 1, queue: 1, priority: -1, _id: 1, delay: 1 }, function (err) {
if (err) console.error(err);
});
};
|
Switch sort direction of index in order to improve performance
|
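The motivation, presumably, is that a poll filtering on `status == "queued"` and sorting by priority descending can be served straight from an index whose `priority` component is also descending. The same compound index expressed with PyMongo as an illustration, assuming a local MongoDB; the database and collection names are made up:

```python
from pymongo import MongoClient, ASCENDING, DESCENDING

jobs = MongoClient("mongodb://localhost:27017")["worker"]["jobs"]

# Mirrors the ensureIndex call above: status first (most selective for the
# queued-job poll), priority descending to match the dequeue sort order.
jobs.create_index([
    ("status", ASCENDING),
    ("queue", ASCENDING),
    ("priority", DESCENDING),
    ("_id", ASCENDING),
    ("delay", ASCENDING),
])
```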
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -20,7 +20,6 @@ setuptools.setup(
keywords="example boilerplate",
packages=setuptools.find_packages(exclude=["tests", "tests.*"]),
install_requires=[],
- test_suite="tests",
entry_points={
"console_scripts": [
"boilerplate_script = boilerplate.script:main"
diff --git a/tests/test_example.py b/tests/test_example.py
index <HASH>..<HASH> 100644
--- a/tests/test_example.py
+++ b/tests/test_example.py
@@ -1,7 +1,11 @@
-import unittest
+import pytest
-class TestExample(unittest.TestCase):
+class TestExample(object):
def test_example(self):
- self.assertTrue(True)
+ assert True, "This should not fail"
+
+ @pytest.mark.xfail
+ def test_failing(self):
+ assert False, "This will always fail"
diff --git a/tox.ini b/tox.ini
index <HASH>..<HASH> 100644
--- a/tox.ini
+++ b/tox.ini
@@ -2,7 +2,8 @@
envlist = py27,py3
[testenv]
deps = coverage
+ pytest
commands = coverage erase
- coverage run setup.py test
+ coverage run -m py.test
coverage report
coverage html
|
tests: switch to py.test
|
diff --git a/Test/FunctionalTestCase.php b/Test/FunctionalTestCase.php
index <HASH>..<HASH> 100755
--- a/Test/FunctionalTestCase.php
+++ b/Test/FunctionalTestCase.php
@@ -11,9 +11,11 @@ class FunctionalTestCase extends WebTestCase
$env = 'test';
$app = require __DIR__.'/../bootstrap.php';
- $app["swiftmailer.transport"] = new \Swift_Transport_NullTransport($app['swiftmailer.transport.eventdispatcher']);
- $app['mailer.logger'] = new MessageLogger();
- $app['mailer']->registerPlugin($app['mailer.logger']);
+ if (isset($app['swiftmailer.transport.eventdispatcher'])) {
+ $app["swiftmailer.transport"] = new \Swift_Transport_NullTransport($app['swiftmailer.transport.eventdispatcher']);
+ $app['mailer.logger'] = new MessageLogger();
+ $app['mailer']->registerPlugin($app['mailer.logger']);
+ }
$app['debug'] = true;
$app['exception_handler']->disable();
|
fix tests when swiftmailer is not used
|
diff --git a/select2.js b/select2.js
index <HASH>..<HASH> 100755
--- a/select2.js
+++ b/select2.js
@@ -203,7 +203,7 @@
function measureTextWidth(e) {
if (!sizer){
- var style = e.currentStyle || window.getComputedStyle(e, null);
+ var style = e[0].currentStyle || window.getComputedStyle(e[0], null);
sizer = $("<div></div>").css({
position: "absolute",
left: "-1000px",
|
style detection for search field sizer corrected
|
diff --git a/test/replication-test.js b/test/replication-test.js
index <HASH>..<HASH> 100644
--- a/test/replication-test.js
+++ b/test/replication-test.js
@@ -51,7 +51,7 @@ test('simple read from replicator (no ops)', function (t) {
i++
})
.on('end', function () {
- t.equal(i, 3434)
+ t.equal(i, 3130)
})
})
@@ -93,7 +93,7 @@ test('simple read from replicated index (no ops)', function (t) {
i++
})
.on('end', function () {
- t.equal(i, 3434)
+ t.equal(i, 3130)
})
})
@@ -138,7 +138,7 @@ test('validate gzip replication', function (t) {
i++
})
.on('end', function () {
- t.equal(i, 3434)
+ t.equal(i, 3130)
})
})
|
test(replication): correct number of keys
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -4,7 +4,7 @@ from setuptools import find_packages
import os
-_version = '0.5.4'
+_version = '0.6'
_packages = find_packages(exclude=["*.tests", "*.tests.*", "tests.*", "tests"])
_short_description = "pylint-django is a Pylint plugin to aid Pylint in recognising and understanding" \
|
Moving development version back to <I> after merge from master
|
diff --git a/client.go b/client.go
index <HASH>..<HASH> 100644
--- a/client.go
+++ b/client.go
@@ -969,7 +969,7 @@ func (c *Client) do(r *Request) (resp *Response, err error) {
if r.Headers == nil {
header = make(http.Header)
} else {
- header = r.Headers.Clone()
+ header = r.Headers
}
contentLength := int64(len(r.body))
|
avoid concurrent map iteration and map write (fix #<I>)
|
diff --git a/mruby.go b/mruby.go
index <HASH>..<HASH> 100644
--- a/mruby.go
+++ b/mruby.go
@@ -90,5 +90,9 @@ func (m *Mrb) LoadString(code string) (*Value, error) {
// Close a Mrb, this must be called to properly free resources, and
// should only be called once.
func (m *Mrb) Close() {
+ // Delete all the methods from the state
+ delete(stateMethodTable, m.state)
+
+ // Close the state
C.mrb_close(m.state)
}
|
Clean up the method table on state close
|
diff --git a/sphinxcontrib/spelling/filters.py b/sphinxcontrib/spelling/filters.py
index <HASH>..<HASH> 100644
--- a/sphinxcontrib/spelling/filters.py
+++ b/sphinxcontrib/spelling/filters.py
@@ -7,7 +7,7 @@
# TODO - Words with multiple uppercase letters treated as classes and ignored
import builtins
-import imp
+import importlib
import subprocess
import sys
from xmlrpc import client as xmlrpc_client
@@ -189,13 +189,18 @@ class ImportableModuleFilter(Filter):
if word not in self.sought_modules:
self.sought_modules.add(word)
try:
- module_file, _, _ = imp.find_module(word)
- except ImportError:
- pass
+ mod = importlib.util.find_spec(word)
+ except Exception as err:
+ # This could be an ImportError, some more detailed
+ # error out of distutils, or something else triggered
+ # by failing to be able to import a parent package to
+ # use the metadata to search for a subpackage.
+ logger.debug(
+ 'find_spec({!r}) failed, invalid module name: {}'.format(
+ word, err))
else:
- if module_file is not None:
- module_file.close()
- self.found_modules.add(word)
+ if mod is not None:
+ self.found_modules.add(word)
return word in self.found_modules
|
filters: do not use imp in ImportableModuleFilter
The imp module is deprecated, so replace it with calls to importlib.
|
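The replacement API is `importlib.util.find_spec`, which returns a module spec (or `None`) with no file handle to close, but which can raise more than plain `ImportError` when a parent package is broken, hence the broad except in the patch. A standalone sketch of the guarded lookup with generic names, not the sphinxcontrib filter itself:

```python
import importlib.util
import logging

logger = logging.getLogger(__name__)

def is_importable(name):
    """Return True if `name` resolves to an importable module."""
    try:
        spec = importlib.util.find_spec(name)
    except Exception as err:
        # find_spec can raise ImportError, ValueError, or whatever a broken
        # parent package raises on import; treat them all as "not a module".
        logger.debug("find_spec(%r) failed: %s", name, err)
        return False
    return spec is not None

print(is_importable("json"))          # True
print(is_importable("not_a_module"))  # False
```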
diff --git a/packages/avet-shared/src/httpclient.js b/packages/avet-shared/src/httpclient.js
index <HASH>..<HASH> 100644
--- a/packages/avet-shared/src/httpclient.js
+++ b/packages/avet-shared/src/httpclient.js
@@ -18,6 +18,8 @@ export function getHttpClient(ctx, options = {}) {
Object.keys(headers).forEach(key => {
axios.defaults.headers.common[key] = headers[key];
});
+ // Set IP
+ axios.defaults.headers.common['X-Forwarded-For'] = ctx.ip;
}
const baseURL = getBaseURL(ctx);
|
feat: httpclient sets the X-Forwarded-For IP header
|
diff --git a/examples/slurm_at_maxwell/analysis/framework.py b/examples/slurm_at_maxwell/analysis/framework.py
index <HASH>..<HASH> 100644
--- a/examples/slurm_at_maxwell/analysis/framework.py
+++ b/examples/slurm_at_maxwell/analysis/framework.py
@@ -54,6 +54,12 @@ class SlurmWorkflow(law.slurm.SlurmWorkflow):
significant=False,
description="target queue partition; default: cms-uhh",
)
+ max_runtime = law.DurationParameter(
+ default=1.0,
+ unit="h",
+ significant=False,
+ description="the maximum job runtime; default unit is hours; default: 1h",
+ )
def slurm_output_directory(self):
# the directory where submission meta data should be stored
@@ -69,10 +75,11 @@ class SlurmWorkflow(law.slurm.SlurmWorkflow):
config.render_variables["analysis_path"] = os.getenv("ANALYSIS_PATH")
# useful defaults
- config.custom_content.append(("time", "00:10:00"))
+ job_time = law.util.human_duration(
+ seconds=law.util.parse_duration(self.max_runtime, input_unit="h") - 1,
+ colon_format=True,
+ )
+ config.custom_content.append(("time", job_time))
config.custom_content.append(("nodes", 1))
- # # copy the entire environment
- # config.custom_content.append(("export", "ALL"))
-
return config
|
Update slurm example.
|
diff --git a/contrib/externs/chrome_extensions.js b/contrib/externs/chrome_extensions.js
index <HASH>..<HASH> 100644
--- a/contrib/externs/chrome_extensions.js
+++ b/contrib/externs/chrome_extensions.js
@@ -317,6 +317,21 @@ chrome.app.window.AppWindow.prototype.getBounds = function() {};
chrome.app.window.AppWindow.prototype.setBounds = function(bounds) {};
+/**
+ * @return {boolean}
+ * @see http://developer.chrome.com/apps/app.window.html#type-AppWindow
+ */
+chrome.app.window.AppWindow.prototype.isAlwaysOnTop = function() {};
+
+
+/**
+ * @param {boolean} alwaysOnTop Set whether the window should stay above most
+ * other windows.
+ * @see http://developer.chrome.com/apps/app.window.html#type-AppWindow
+ */
+chrome.app.window.AppWindow.prototype.setAlwaysOnTop = function(alwaysOnTop) {};
+
+
/** @type {ChromeEvent} */
chrome.app.window.AppWindow.prototype.onBoundsChanged;
|
Add alwaysOnTop methods to Chrome app window extern definitions.
-------------
Created by MOE: <URL>
|
diff --git a/engine.go b/engine.go
index <HASH>..<HASH> 100644
--- a/engine.go
+++ b/engine.go
@@ -101,13 +101,18 @@ func (eng *Engine) Observe(name string, value interface{}, tags ...Tag) {
// Clock returns a new clock identified by name and tags.
func (eng *Engine) Clock(name string, tags ...Tag) *Clock {
+ return eng.ClockAt(name, time.Now(), tags...)
+}
+
+// ClockAt returns a new clock identified by name and tags with a specified
+// start time.
+func (eng *Engine) ClockAt(name string, start time.Time, tags ...Tag) *Clock {
cpy := make([]Tag, len(tags), len(tags)+1) // clock always appends a stamp.
copy(cpy, tags)
- now := time.Now()
return &Clock{
name: name,
- first: now,
- last: now,
+ first: start,
+ last: start,
tags: cpy,
eng: eng,
}
|
feat(engine): add ClockAt for creating a clock with a specific start time (#<I>)
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -63,7 +63,7 @@ setup(
* Repository: https://github.com/datamade/dedupe
* Issues: https://github.com/datamade/dedupe/issues
* Mailing list: https://groups.google.com/forum/#!forum/open-source-deduplication
- * IRC channel, #dedupe on irc.freenode.net
+ * IRC channel, `#dedupe on irc.freenode.net <http://webchat.freenode.net/?channels=dedupe>`_
* Examples: https://github.com/datamade/dedupe-examples
"""
)
|
add link to webchat for irc channel in long description
|
diff --git a/pytorch2keras/operation_layers.py b/pytorch2keras/operation_layers.py
index <HASH>..<HASH> 100644
--- a/pytorch2keras/operation_layers.py
+++ b/pytorch2keras/operation_layers.py
@@ -139,9 +139,13 @@ def convert_clip(params, w_name, scope_name, inputs, layers, weights, names):
"""
print('Converting clip ...')
- def target_layer(x, vmin=params['min'], vmax=params['max']):
- import tensorflow as tf
- return tf.clip_by_value(x, vmin, vmax)
+ if params['min'] == 0:
+ print("using ReLU({0})".format(params['max']))
+ layer = keras.layers.ReLU(max_value=params['max'])
+ else:
+ def target_layer(x, vmin=params['min'], vmax=params['max']):
+ import tensorflow as tf
+ return tf.clip_by_value(x, vmin, vmax)
+ layer = keras.layers.Lambda(target_layer)
- lambda_layer = keras.layers.Lambda(target_layer)
- layers[scope_name] = lambda_layer(layers[inputs[0]])
+ layers[scope_name] = layer(layers[inputs[0]])
|
Converting clip with a min value of 0 as a ReLU
|
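The shortcut relies on `ReLU(max_value=m)` computing exactly `clip(x, 0, m)`, which keeps the converted model free of an opaque `Lambda` wrapper around `tf.clip_by_value`. A small equivalence check, assuming TensorFlow/Keras is installed; this is an illustration, not pytorch2keras code:

```python
import numpy as np
import tensorflow as tf

x = np.array([[-2.0, -0.5, 0.0, 3.0, 7.5]], dtype=np.float32)

relu_capped = tf.keras.layers.ReLU(max_value=6.0)     # clip(x, 0, 6) as a named layer
clipped = tf.clip_by_value(tf.constant(x), 0.0, 6.0)  # the Lambda-layer equivalent

np.testing.assert_allclose(relu_capped(x).numpy(), clipped.numpy())
print(relu_capped(x).numpy())  # [[0. 0. 0. 3. 6.]]
```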
diff --git a/user/edit.php b/user/edit.php
index <HASH>..<HASH> 100644
--- a/user/edit.php
+++ b/user/edit.php
@@ -19,7 +19,7 @@
require_login($course);
} else if (!isloggedin()) {
if (empty($SESSION->wantsurl)) {
- $SESSION->wantsurl = $CFG->httpswwwroot.'/edit/user.php';
+ $SESSION->wantsurl = $CFG->httpswwwroot.'/user/edit.php';
}
redirect($CFG->httpswwwroot.'/login/index.php');
}
|
MDL-<I> edit/user.php typo; merged from MOODLE_<I>_STABLE
|
diff --git a/datatable/dt.py b/datatable/dt.py
index <HASH>..<HASH> 100644
--- a/datatable/dt.py
+++ b/datatable/dt.py
@@ -175,18 +175,16 @@ class DataTable(object):
if names is None:
names = srcdt.names
self._fill_from_dt(srcdt.internal, names=names)
- elif is_type(src, PandasDataFrame_t):
- self._fill_from_pandas(src, names)
- elif is_type(src, PandasSeries_t):
- self._fill_from_pandas(src, names)
- elif is_type(src, NumpyArray_t):
- self._fill_from_numpy(src, names=names)
elif src is None:
self._fill_from_list([])
elif is_type(src, DataTable_t):
if names is None:
names = src.names
self._fill_from_dt(src.internal, names=names)
+ elif is_type(src, PandasDataFrame_t, PandasSeries_t):
+ self._fill_from_pandas(src, names)
+ elif is_type(src, NumpyArray_t):
+ self._fill_from_numpy(src, names=names)
else:
raise TTypeError("Cannot create DataTable from %r" % src)
|
Make import datatable faster (#<I>)
|
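The reordering moves the common cases (None, another DataTable) ahead of the pandas and numpy checks. A related general pattern, deferring an expensive optional import until the cheaper branches have failed, sketched generically; this is not datatable's actual implementation:

```python
def build_frame(src):
    """Toy dispatcher: cheap checks first, heavy optional imports last."""
    if src is None:
        return []                 # common case, no third-party import touched
    if isinstance(src, (list, tuple)):
        return list(src)          # still no heavy dependency needed
    # Only now pay for the optional dependency, and only if it is installed.
    try:
        import pandas as pd
    except ImportError:
        pd = None
    if pd is not None and isinstance(src, (pd.DataFrame, pd.Series)):
        return src.values.tolist()
    raise TypeError("Cannot build a frame from %r" % (src,))

print(build_frame(None))       # []
print(build_frame([1, 2, 3]))  # [1, 2, 3]
```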
diff --git a/anyconfig/backend/xml.py b/anyconfig/backend/xml.py
index <HASH>..<HASH> 100644
--- a/anyconfig/backend/xml.py
+++ b/anyconfig/backend/xml.py
@@ -386,7 +386,7 @@ def container_to_etree(obj, parent=None, **options):
_str = str if options.get("ac_parse_value") else anyconfig.utils.noop
if not anyconfig.mdicts.is_dict_like(obj):
- obj = _str(obj)
+ obj = False if obj is None else _str(obj)
if parent is not None and obj:
parent.text = obj # Parent is a leaf text node.
return # All attributes and text should be set already.
|
fix: correctly process the case where the value is None (no text) in the XML backend
|
diff --git a/src/naomi.js b/src/naomi.js
index <HASH>..<HASH> 100644
--- a/src/naomi.js
+++ b/src/naomi.js
@@ -37,11 +37,11 @@ exports.create = function (type, props) {
if (/postgres/i.test(type)) {
return new PostgresDatabase({
- host: props.host || process.env.MYSQL_HOST || 'localhost',
- port: props.port || parseInt(process.env.MYSQL_PORT, 10) || 5432,
- user: props.user || process.env.MYSQL_USER || 'root',
- password: props.password || process.env.MYSQL_PASSWORD || '',
- database: props.database || process.env.MYSQL_DATABASE || null,
+ host: props.host || process.env.POSTGRES_HOST || 'localhost',
+ port: props.port || parseInt(process.env.POSTGRES_PORT, 10) || 5432,
+ user: props.user || process.env.POSTGRES_USER || 'root',
+ password: props.password || process.env.POSTGRES_PASSWORD || '',
+ database: props.database || process.env.POSTGRES_DATABASE || null,
connectionLimit: props.connectionLimit || props.poolSize || 10, // connectionLimit used to be poolSize
poolIdleTimeout: 30000,
reapIntervalMillis: 1000
|
This is what happens when you don't run the unit tests. Shaaaaaaa....
|
diff --git a/quilt/refresh.py b/quilt/refresh.py
index <HASH>..<HASH> 100644
--- a/quilt/refresh.py
+++ b/quilt/refresh.py
@@ -63,6 +63,11 @@ class Refresh(Command):
with TmpFile(prefix="pquilt-") as tmpfile:
f = tmpfile.open()
+
+ if patch_file.exists():
+ header = patch.get_header(self.quilt_patches)
+ tmpfile.write(header)
+
for file_name in files:
if file_name == ".timestamp":
continue
|
Don't omit header from current patch in refresh
Add the current header when refreshing a patch.
|
diff --git a/test/activerecord_test.rb b/test/activerecord_test.rb
index <HASH>..<HASH> 100644
--- a/test/activerecord_test.rb
+++ b/test/activerecord_test.rb
@@ -174,10 +174,11 @@ describe Enumerize::ActiveRecordSupport do
end
it 'sets value to enumerized field from db when record is reloaded' do
- user = InterestsRequiredUser.create!(interests: [:music])
- user.update(interests: [])
+ user = User.create!(interests: [:music])
+ User.find(user.id).update(interests: %i[music dancing])
+ user.interests.must_equal %w[music]
user.reload
- user.interests.must_equal ['music']
+ user.interests.must_equal %w[music dancing]
end
it 'validates inclusion when using write_attribute with string attribute' do
|
make AR reload support test more obvious
|
diff --git a/src/js/Node.js b/src/js/Node.js
index <HASH>..<HASH> 100644
--- a/src/js/Node.js
+++ b/src/js/Node.js
@@ -571,8 +571,10 @@ Node.prototype.clone = function() {
clone.field = this.field;
clone.fieldInnerText = this.fieldInnerText;
clone.fieldEditable = this.fieldEditable;
+ clone.previousField = this.previousField;
clone.value = this.value;
clone.valueInnerText = this.valueInnerText;
+ clone.previousValue = this.previousValue;
clone.expanded = this.expanded;
clone.visibleChilds = this.visibleChilds;
|
Fixed `clone` not copying `previousField` and `previousValue`
|
diff --git a/src/index.js b/src/index.js
index <HASH>..<HASH> 100644
--- a/src/index.js
+++ b/src/index.js
@@ -222,7 +222,7 @@ function rebuild(psModule) {
if (!Array.isArray(res.result)) {
return res.resultType === 'success'
? resolve(psModule)
- : reject(res.result)
+ : reject(res)
}
Promise.map(res.result, (item, i) => {
@@ -232,7 +232,7 @@ function rebuild(psModule) {
.then(compileMessages => {
if (res.resultType === 'error') {
cache.errors = compileMessages
- reject(res.result)
+ reject(res)
} else {
cache.warnings = compileMessages
resolve(psModule)
|
Recompile when psc-client-ide receives UnknownModule.
|
diff --git a/src/Console/Application.php b/src/Console/Application.php
index <HASH>..<HASH> 100644
--- a/src/Console/Application.php
+++ b/src/Console/Application.php
@@ -31,10 +31,6 @@ class Application extends BaseApplication
*/
protected $directoryRoot;
/**
- * @var array
- */
- protected $errorMessages = [];
- /**
* @var \Composer\Autoload\ClassLoader
* The Drupal autoload file.
*/
@@ -249,12 +245,4 @@ class Application extends BaseApplication
$defaultHelperset->set($helper, is_int($alias) ? null : $alias);
}
}
-
- /**
- * @param array $errorMessages
- */
- public function addErrorMessages(array $errorMessages)
- {
- $this->errorMessages = $errorMessages;
- }
}
|
#<I> Remove messages feature from Application class
|
diff --git a/lib/multi_exiftool/values.rb b/lib/multi_exiftool/values.rb
index <HASH>..<HASH> 100644
--- a/lib/multi_exiftool/values.rb
+++ b/lib/multi_exiftool/values.rb
@@ -1,5 +1,7 @@
# coding: utf-8
require 'date'
+require 'set'
+
module MultiExiftool
# Representing (tag, value) pairs of metadata.
@@ -7,9 +9,13 @@ module MultiExiftool
# method_missing.
class Values
+ attr_reader :tags
+
def initialize values
@values = {}
+ @tags = Set.new
values.map do |tag,val|
+ @tags << tag
val = val.kind_of?(Hash) ? Values.new(val) : val
@values[Values.unify_tag(tag)] = val
end
diff --git a/test/test_values.rb b/test/test_values.rb
index <HASH>..<HASH> 100644
--- a/test/test_values.rb
+++ b/test/test_values.rb
@@ -79,4 +79,14 @@ class TestValues < Test::Unit::TestCase
end
+ context 'tags' do
+
+ test 'tags preserves the original tag names' do
+ hash = {'FNumber' => 8, 'Author' => 'janfri', 'E-MailAddress' => 'janfri26@gmail.com'}
+ @values = MultiExiftool::Values.new(hash)
+ assert_equal hash.keys, @values.tags.to_a
+ end
+
+ end
+
end
|
New method Values#tags to get access to unmodified tag names.
|
diff --git a/lib/algoliasearch-rails.rb b/lib/algoliasearch-rails.rb
index <HASH>..<HASH> 100644
--- a/lib/algoliasearch-rails.rb
+++ b/lib/algoliasearch-rails.rb
@@ -658,7 +658,7 @@ module AlgoliaSearch
algolia_object_id_of(hit)
end
results = json['hits'].map do |hit|
- o = results_by_id[hit['objectID']]
+ o = results_by_id[hit['objectID'].to_s]
if o
o.highlight_result = hit['_highlightResult']
o.snippet_result = hit['_snippetResult']
|
Convert objectID to string when using as hash key
|
diff --git a/languagetool-language-modules/br/src/main/java/org/languagetool/tagging/br/BretonTagger.java b/languagetool-language-modules/br/src/main/java/org/languagetool/tagging/br/BretonTagger.java
index <HASH>..<HASH> 100644
--- a/languagetool-language-modules/br/src/main/java/org/languagetool/tagging/br/BretonTagger.java
+++ b/languagetool-language-modules/br/src/main/java/org/languagetool/tagging/br/BretonTagger.java
@@ -70,6 +70,14 @@ public class BretonTagger extends BaseTagger {
Matcher matcher;
for (String word : sentenceTokens) {
String probeWord = word;
+ if (probeWord.length() > 50) {
+ // avoid excessively long computation times for long (probably artificial) tokens:
+ List<AnalyzedToken> l = new ArrayList<>();
+ l.add(new AnalyzedToken(word, null, null));
+ tokenReadings.add(new AnalyzedTokenReadings(l, pos));
+ pos += word.length();
+ continue;
+ }
// This loop happens when we need to retry probing the dictionary
// which happens rarely when trying to remove suffixes -mañ, -se, etc.
|
[br] avoid excessively long computation times for long (probably artificial) tokens
|
diff --git a/angr/vexer.py b/angr/vexer.py
index <HASH>..<HASH> 100644
--- a/angr/vexer.py
+++ b/angr/vexer.py
@@ -171,6 +171,8 @@ class VEXer:
:return:
'''
+ block.statements = [ x for x in block.statements if x.tag != 'Ist_NoOp' ]
+
funcname = "_post_process_%s" % self.arch.name
if hasattr(self, funcname):
block = getattr(self, funcname)(block)
|
Move de-nopping into vexer
|
diff --git a/lib/linguist/language.rb b/lib/linguist/language.rb
index <HASH>..<HASH> 100644
--- a/lib/linguist/language.rb
+++ b/lib/linguist/language.rb
@@ -9,6 +9,7 @@ module Linguist
# Langages are defined in `lib/linguist/langages.yml`.
class Language
@languages = []
+ @index = {}
@name_index = {}
@alias_index = {}
@extension_index = {}
@@ -30,7 +31,7 @@ module Linguist
end
# Language name index
- @name_index[language.name] = language
+ @index[language.name] = @name_index[language.name] = language
language.aliases.each do |name|
# All Language aliases should be unique. Warn if there is a duplicate.
@@ -38,7 +39,7 @@ module Linguist
warn "Duplicate alias: #{name}"
end
- @alias_index[name] = language
+ @index[name] = @alias_index[name] = language
end
language.extensions.each do |extension|
@@ -134,6 +135,8 @@ module Linguist
#
# name - The String name of the Language
#
+ # TODO: Consider returning nil instead of Text
+ #
# Examples
#
# Language['Ruby']
@@ -142,9 +145,9 @@ module Linguist
# Language['ruby']
# # => #<Language name="Ruby">
#
- # Returns the Language or nil if none was found.
+ # Returns the Language or Text if none was found.
def self.[](name)
- find_by_name(name) || find_by_alias(name) || self['Text']
+ @index[name] || self['Text']
end
# Public: A List of popular languages
|
Create a separate index for lookup
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -22,5 +22,5 @@ setup(
py_modules=["SAM","utilities"],
- install_requires=['pandas','numpy','scikit-learn','matplotlib','scipy']
+ install_requires=['pandas','numpy','scikit-learn','matplotlib','scipy','anndata','scanpy']
)
|
added anndata and scanpy requirements
|
diff --git a/lib/index.js b/lib/index.js
index <HASH>..<HASH> 100644
--- a/lib/index.js
+++ b/lib/index.js
@@ -7,6 +7,8 @@ module.exports = {
'avoid-leaking-state-in-components': require('./rules/avoid-leaking-state-in-components'),
'avoid-leaking-state-in-ember-objects': require('./rules/avoid-leaking-state-in-ember-objects'),
'avoid-using-needs-in-controllers': require('./rules/avoid-using-needs-in-controllers'),
+ 'classic-decorator-hooks': require('./rules/classic-decorator-hooks'),
+ 'classic-decorator-no-classic-methods': require('./rules/classic-decorator-no-classic-methods'),
'closure-actions': require('./rules/closure-actions'),
'jquery-ember-run': require('./rules/jquery-ember-run'),
'local-modules': require('./rules/local-modules'),
|
fix: add missing rules `classic-decorator-hooks` and `classic-decorator-no-classic-methods` to index.js
|
diff --git a/src/cli/cli.js b/src/cli/cli.js
index <HASH>..<HASH> 100644
--- a/src/cli/cli.js
+++ b/src/cli/cli.js
@@ -110,7 +110,7 @@ class Cli extends mix(Configurable, Emitter) {
exec(){
_.forEach(requireAll(this._commandsDir), c => this.command(c.command, c.action, c.config || {}));
- return arguments.length ? this._execFromString.apply(Array.from(arguments)) : this._execFromArgv();
+ return arguments.length ? this._execFromString.apply(this, Array.from(arguments)) : this._execFromArgv();
}
theme(theme) {
|
Fix to failing exec() method
|
diff --git a/lib/yard/parser/ruby/legacy/ruby_lex.rb b/lib/yard/parser/ruby/legacy/ruby_lex.rb
index <HASH>..<HASH> 100644
--- a/lib/yard/parser/ruby/legacy/ruby_lex.rb
+++ b/lib/yard/parser/ruby/legacy/ruby_lex.rb
@@ -599,7 +599,7 @@ module YARD
end
@OP.def_rules(" ", "\t", "\f", "\r", "\13") do |chars, _io|
- @space_seen = TRUE
+ @space_seen = true
while (ch = getc) =~ /[ \t\f\r\13]/
chars << ch
end
@@ -642,9 +642,9 @@ module YARD
@colonblock_seen = false
case @lex_state
when EXPR_BEG, EXPR_FNAME, EXPR_DOT
- @continue = TRUE
+ @continue = true
else
- @continue = FALSE
+ @continue = false
@lex_state = EXPR_BEG
end
Token(TkNL).set_text("\n")
@@ -1166,8 +1166,8 @@ module YARD
end
type = TkINTEGER
- allow_point = TRUE
- allow_e = TRUE
+ allow_point = true
+ allow_e = true
while ch = getc
case ch
when /[0-9_]/
|
Replace deprecated TRUE and FALSE constants with their equivalents.
|
diff --git a/addon/file.js b/addon/file.js
index <HASH>..<HASH> 100644
--- a/addon/file.js
+++ b/addon/file.js
@@ -74,11 +74,11 @@ function upload(file, url, opts, uploadFn) {
}
request.onprogress = function (evt) {
- if (evt.lengthComputable) {
- set(file, 'loaded', evt.loaded);
- set(file, 'size', evt.total);
- set(file, 'progress', (evt.loaded / evt.total) * 100);
- }
+ if (!evt.lengthComputable || evt.total === 0) return;
+
+ set(file, 'loaded', evt.loaded);
+ set(file, 'size', evt.total);
+ set(file, 'progress', (evt.loaded / evt.total) * 100);
};
request.ontimeout = function () {
|
Ignore progress events when the total file size is reported as 0 bytes (#<I>)
Have tested this manually using the docs site. Wasn't able to reproduce a NaN value.
### Before
Drop a file
Progresses from 0 => <I>%
Progress set back to 0% for a moment
Finishes
### After
Drop a file
Progresses from 0 => <I>%
Progress stays at <I>% for a moment
Finishes
Fixes #<I>
|
diff --git a/maintain/release/cocoapods.py b/maintain/release/cocoapods.py
index <HASH>..<HASH> 100644
--- a/maintain/release/cocoapods.py
+++ b/maintain/release/cocoapods.py
@@ -39,7 +39,7 @@ class CocoaPodsReleaser(Releaser):
def bump(self, new_version):
with open(self.podspec) as fp:
spec = json.load(fp, object_pairs_hook=collections.OrderedDict)
- spec['version'] = new_version
+ spec['version'] = str(new_version)
with open(self.podspec, 'w') as fp:
json.dump(spec, fp, indent=2, separators=(',', ': '))
diff --git a/maintain/release/npm.py b/maintain/release/npm.py
index <HASH>..<HASH> 100644
--- a/maintain/release/npm.py
+++ b/maintain/release/npm.py
@@ -14,7 +14,7 @@ class NPMReleaser(Releaser):
def bump(self, new_version):
with open('package.json') as fp:
spec = json.load(fp, object_pairs_hook=collections.OrderedDict)
- spec['version'] = new_version
+ spec['version'] = str(new_version)
with open('package.json', 'w') as fp:
json.dump(spec, fp, indent=2, separators=(',', ': '))
|
[release] Fix issue releasing CP and NPM
|
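The underlying issue is that `json.dump` only serializes built-in types, so a version object from a semver-style library raises `TypeError` until it is converted with `str()`. A tiny illustration; the `Version` class here is a hypothetical stand-in, the json behaviour is not:

```python
import json

class Version:
    """Stand-in for a semver-style version object."""
    def __init__(self, major, minor, patch):
        self.major, self.minor, self.patch = major, minor, patch
    def __str__(self):
        return "%d.%d.%d" % (self.major, self.minor, self.patch)

new_version = Version(1, 2, 3)
spec = {"name": "example", "version": new_version}

try:
    json.dumps(spec)
except TypeError as err:
    print("without str():", err)      # not JSON serializable

spec["version"] = str(new_version)    # the fix applied in this commit
print(json.dumps(spec, indent=2, separators=(",", ": ")))
```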
diff --git a/src/components/zoombuttonlist/zoombuttonlist.js b/src/components/zoombuttonlist/zoombuttonlist.js
index <HASH>..<HASH> 100644
--- a/src/components/zoombuttonlist/zoombuttonlist.js
+++ b/src/components/zoombuttonlist/zoombuttonlist.js
@@ -77,12 +77,6 @@ var ZoomButtonList = Component.extend({
}
this.model_binds = {};
-
- this._super(config, context);
-
- if(this.model.ui.cursorMode == undefined) {
- this.model.ui.set('cursorMode', null, false, false);
- }
Object.keys(this._available_buttons).forEach(function(buttonId) {
var button = _this._available_buttons[buttonId];
@@ -93,7 +87,8 @@ var ZoomButtonList = Component.extend({
}
});
-
+ this._super(config, context);
+
},
readyOnce: function() {
|
Fix zoomButtonList unresponsive button styling
|
diff --git a/h2o-py/h2o/job.py b/h2o-py/h2o/job.py
index <HASH>..<HASH> 100644
--- a/h2o-py/h2o/job.py
+++ b/h2o-py/h2o/job.py
@@ -79,11 +79,12 @@ class H2OJob(object):
symbols_remaining = width - last_display_amnt
if estimated_finish_time > last_display_time:
display_speed = symbols_remaining / (estimated_finish_time - last_display_time)
- next_display_time = last_display_time + 1 / max(display_speed, 1)
+ next_display_time = last_display_time + 1 / max(min(display_speed, 100), 1)
else:
display_speed = 0
next_display_time = next_poll_time + 1 # Force polling before displaying an update
- if next_poll_time <= next_display_time:
+ # Polling should always occur if it is past due -- takes precedence over displaying
+ if next_poll_time <= min(current_time, next_display_time):
if next_poll_time > current_time:
time.sleep(next_poll_time - current_time)
poll_interval = min(1, poll_interval + 0.2)
|
Prevent accidental deadlocking in job.py poll()
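
A minimal sketch, in Python, of the clamping half of this change: the estimated display speed is bounded to [1, 100] before being inverted, so the computed redraw interval can never collapse toward zero and starve the actual polling.

```python
def redraw_interval(display_speed):
    """Delay until the next progress-bar redraw, clamping the estimated display
    speed into [1, 100] as the fix does, so the interval stays in [0.01s, 1s]."""
    return 1 / max(min(display_speed, 100), 1)


assert redraw_interval(0.5) == 1.0         # slow jobs: at most 1s between redraws
assert redraw_interval(10) == 0.1
assert redraw_interval(1_000_000) == 0.01  # a huge estimate no longer yields a ~0s interval
```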
|
diff --git a/chess/__init__.py b/chess/__init__.py
index <HASH>..<HASH> 100644
--- a/chess/__init__.py
+++ b/chess/__init__.py
@@ -1376,7 +1376,7 @@ class Board(BaseBoard):
Use :func:`~chess.Board.is_valid()` to detect invalid positions.
"""
- aliases = ["Standard", "Chess", "Classical", "Normal", "Illegal"]
+ aliases = ["Standard", "Chess", "Classical", "Normal", "Illegal", "From Position"]
uci_variant: ClassVar[Optional[str]] = "chess"
xboard_variant: ClassVar[Optional[str]] = "normal"
starting_fen = STARTING_FEN
|
Add From Position as alias for standard chess (fixes #<I>)
|
diff --git a/lib/monadic/maybe.rb b/lib/monadic/maybe.rb
index <HASH>..<HASH> 100644
--- a/lib/monadic/maybe.rb
+++ b/lib/monadic/maybe.rb
@@ -26,7 +26,7 @@ module Monadic
# @return [true, false] true if the underlying value is true
def truly?
- @value == true
+ !!@value == true
end
end
|
Use a double bang (double negation) to force @value to a boolean
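
A rough Python analogue of the idiom (of the coercion only, not of Ruby's truthiness rules — 0 and "" are truthy in Ruby but falsy in Python): the value is coerced to a boolean before the comparison, so any truthy value satisfies the predicate, not just the literal true.

```python
def truly(value):
    """Coerce the wrapped value to a boolean (Ruby's !!value, Python's bool())
    before comparing with True, so any truthy value passes."""
    return bool(value) is True


assert truly(True)
assert truly("something")   # the old `@value == true` style check failed here
assert not truly(None)
assert not truly(False)
```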
|
diff --git a/build/webpack.dev.config.js b/build/webpack.dev.config.js
index <HASH>..<HASH> 100644
--- a/build/webpack.dev.config.js
+++ b/build/webpack.dev.config.js
@@ -62,7 +62,10 @@ module.exports = {
},
devServer: {
contentBase: resolve('../dev'),
- publicPath: '/dev/'
+ publicPath: '/dev/',
+ host: process.env.HOST || 'localhost',
+ port: process.env.PORT || '8080',
+ disableHostCheck: true
},
plugins: [
new ExtractTextPlugin({
|
build(dev): Added ability to change port and host
Added the ability to specify a HOST or PORT through environment variables for the dev script
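
The same fallback pattern, sketched in Python under the assumption of identical defaults: read HOST and PORT from the environment and fall back to 'localhost' and 8080 when they are unset.

```python
import os


def dev_server_address():
    """Resolve the dev-server host and port from the environment, using the same
    defaults as the webpack config when the variables are unset."""
    host = os.environ.get("HOST", "localhost")
    port = int(os.environ.get("PORT", "8080"))
    return host, port

# e.g. HOST=0.0.0.0 PORT=3000 -> ("0.0.0.0", 3000); with nothing set -> ("localhost", 8080)
```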
|
diff --git a/test/marker-index.test.js b/test/marker-index.test.js
index <HASH>..<HASH> 100644
--- a/test/marker-index.test.js
+++ b/test/marker-index.test.js
@@ -39,15 +39,19 @@ describe('MarkerIndex', () => {
if (MarkerIndex === NativeMarkerIndex) verifyHighestPossiblePaths()
}
- verifyRanges()
- testDump()
- testFindIntersecting()
- testFindContaining()
- testFindContainedIn()
- testFindStartingIn()
- testFindEndingIn()
- testFindStartingAt()
- testFindEndingAt()
+ const verifications = [
+ verifyRanges,
+ testDump,
+ testFindIntersecting,
+ testFindContaining,
+ testFindContainedIn,
+ testFindStartingIn,
+ testFindEndingIn,
+ testFindStartingAt,
+ testFindEndingAt,
+ ].sort((a, b) => random.intBetween(-1, 1))
+
+ verifications.forEach(verification => verification())
}
}
|
Perform test verifications in random order
This way, if any read-only method writes an invalid value to the cache, the
test will catch the error.
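
A minimal sketch of the idea in Python, using hypothetical callbacks; it shuffles with random.shuffle (Fisher–Yates) rather than sorting with a random comparator, but the property the test relies on is the same: every verification runs exactly once, in an unpredictable order.

```python
import random


def run_in_random_order(verifications, rng=random):
    """Run every verification callback exactly once, in a random order."""
    order = list(verifications)
    rng.shuffle(order)
    for verification in order:
        verification()


# Hypothetical callbacks standing in for verifyRanges, testDump, ...
calls = []
run_in_random_order([lambda: calls.append("ranges"),
                     lambda: calls.append("dump"),
                     lambda: calls.append("intersecting")])
assert sorted(calls) == ["dump", "intersecting", "ranges"]
```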
|
diff --git a/test/extended/util/test.go b/test/extended/util/test.go
index <HASH>..<HASH> 100644
--- a/test/extended/util/test.go
+++ b/test/extended/util/test.go
@@ -534,6 +534,8 @@ var (
// SDN-587: OVN-Kubernetes doesn't support hairpin services
`\[sig-network\] Services should allow pods to hairpin back to themselves through services`,
`\[sig-network\] Networking Granular Checks: Services should function for endpoint-Service`,
+ // https://github.com/ovn-org/ovn-kubernetes/issues/928
+ `\[sig-network\] Services should be rejected when no endpoints exist`,
},
"[Suite:openshift/scalability]": {},
// tests that replace the old test-cmd script
|
test/extended: skip "Services should be rejected when no endpoints exist" for OVNKubernetes
Not expected to work yet. It was only recently re-enabled for SDN too, so clearly
it is not a huge issue that the test fails (at least for now).
|
diff --git a/integration/images/volume-ownership/tools/get_owner_windows.go b/integration/images/volume-ownership/tools/get_owner_windows.go
index <HASH>..<HASH> 100644
--- a/integration/images/volume-ownership/tools/get_owner_windows.go
+++ b/integration/images/volume-ownership/tools/get_owner_windows.go
@@ -26,7 +26,7 @@ import (
func main() {
if len(os.Args) != 2 {
- fmt.Printf("Usage: %s file_or_directory\n", os.Args[0])
+ fmt.Println("Usage: get_owner_windows.exe file_or_directory")
os.Exit(1)
}
|
integration/images/volume-ownership: strip path information from usage output
The POSIX guidelines describe this; see <URL>
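
The Go fix simply hardcodes the executable name; an alternative way to get the same effect, sketched here in Python with a hypothetical helper, is to strip the path from the program name at runtime.

```python
import os
import sys


def usage():
    """Print a usage line with the path stripped from the program name, then exit 1."""
    prog = os.path.basename(sys.argv[0])
    print(f"Usage: {prog} file_or_directory")
    raise SystemExit(1)
```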
|
diff --git a/lib/stripe/util.rb b/lib/stripe/util.rb
index <HASH>..<HASH> 100644
--- a/lib/stripe/util.rb
+++ b/lib/stripe/util.rb
@@ -17,23 +17,26 @@ module Stripe
def self.object_classes
@object_classes ||= {
+ # data structures
+ 'list' => ListObject,
+
+ # business objects
+ 'application_fee' => ApplicationFee,
'balance' => Balance,
'balance_transaction' => BalanceTransaction,
+ 'card' => Card,
'charge' => Charge,
+ 'coupon' => Coupon,
'customer' => Customer,
+ 'event' => Event,
+ 'fee_refund' => ApplicationFeeRefund,
'invoiceitem' => InvoiceItem,
'invoice' => Invoice,
'plan' => Plan,
- 'coupon' => Coupon,
- 'event' => Event,
- 'transfer' => Transfer,
'recipient' => Recipient,
- 'card' => Card,
- 'subscription' => Subscription,
- 'list' => ListObject,
'refund' => Refund,
- 'application_fee' => ApplicationFee,
- 'fee_refund' => ApplicationFeeRefund
+ 'subscription' => Subscription,
+ 'transfer' => Transfer
}
end
|
Formatting: alphabetize key-value pairs by key and add section comments.
|
diff --git a/state/presence/presence_test.go b/state/presence/presence_test.go
index <HASH>..<HASH> 100644
--- a/state/presence/presence_test.go
+++ b/state/presence/presence_test.go
@@ -692,4 +692,4 @@ func (s *PresenceSuite) TestRobustness(c *gc.C) {
}
}
c.Check(atomic.LoadUint32(&observed), gc.Equals, uint32(numKeys))
-}
\ No newline at end of file
+}
|
Drive-by `go vet` fix: add missing newline at end of file
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100755
--- a/setup.py
+++ b/setup.py
@@ -32,7 +32,7 @@ setup(name="karabo_bridge",
long_description=read("README.rst"),
license="BSD-3-Clause",
install_requires=[
- 'msgpack>=0.5.4',
+ 'msgpack==0.5.4',
'msgpack-numpy',
'numpy',
'pyzmq>=17.0.0',
|
Pin msgpack to 0.5.4: the new msgpack version breaks tests for now.
|
diff --git a/examples/vueds/styleguide/vueds-theme.js b/examples/vueds/styleguide/vueds-theme.js
index <HASH>..<HASH> 100644
--- a/examples/vueds/styleguide/vueds-theme.js
+++ b/examples/vueds/styleguide/vueds-theme.js
@@ -40,7 +40,8 @@ module.exports = {
'& & a': {
fontSize: '13px',
fontWeight: 'normal',
- color: '#258aef'
+ color: '#258aef',
+ cursor: 'pointer'
}
}
},
|
docs: cursor: pointer
|
diff --git a/dingo/core/__init__.py b/dingo/core/__init__.py
index <HASH>..<HASH> 100644
--- a/dingo/core/__init__.py
+++ b/dingo/core/__init__.py
@@ -134,7 +134,7 @@ class NetworkDingo:
orm_GridDistrict.geom, srid)).\
label('poly_geom'),
func.ST_AsText(func.ST_Transform(
- orm_EgoDeuSubstation.geom, srid)).\
+ orm_EgoDeuSubstation.point, srid)).\
label('subs_geom')).\
join(orm_EgoDeuSubstation, orm_GridDistrict.subst_id==
orm_EgoDeuSubstation.id).\
|
modify column for substation import
Column 'geom' was renamed to 'point'. See here for context: <URL>
|
diff --git a/manifest.php b/manifest.php
index <HASH>..<HASH> 100755
--- a/manifest.php
+++ b/manifest.php
@@ -29,7 +29,7 @@ return array(
'label' => 'QTI test model',
'description' => 'TAO QTI test implementation',
'license' => 'GPL-2.0',
- 'version' => '15.5.1',
+ 'version' => '15.5.2',
'author' => 'Open Assessment Technologies',
'requires' => array(
'taoTests' => '>=6.4.0',
diff --git a/scripts/update/Updater.php b/scripts/update/Updater.php
index <HASH>..<HASH> 100644
--- a/scripts/update/Updater.php
+++ b/scripts/update/Updater.php
@@ -1589,6 +1589,6 @@ class Updater extends \common_ext_ExtensionUpdater {
$this->setVersion('14.1.5');
}
- $this->skip('14.1.5', '15.5.1');
+ $this->skip('14.1.5', '15.5.2');
}
}
|
Bump to version <I>
|
diff --git a/simpleai/search/models.py b/simpleai/search/models.py
index <HASH>..<HASH> 100644
--- a/simpleai/search/models.py
+++ b/simpleai/search/models.py
@@ -79,16 +79,16 @@ class SearchProblem(object):
def state_representation(self, state):
"""
Returns a string representation of a state.
- By default it returns repr(state).
+ By default it returns str(state).
"""
- return repr(state)
+ return str(state)
def action_representation(self, action):
"""
Returns a string representation of an action.
- By default it returns repr(action).
+ By default it returns str(action).
"""
- return repr(action)
+ return str(action)
class SearchNode(object):
|
The default representations for actions and states use str instead of repr
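
A short Python illustration of the difference the change relies on: str() yields a human-readable form suitable for displaying states and actions, while repr() keeps quotes and, for objects without a custom __repr__, falls back to the unhelpful <ClassName object at 0x...> form.

```python
state = "initial"
print(str(state))    # initial    -- friendlier in search traces and viewers
print(repr(state))   # 'initial'  -- keeps the quotes, aimed at debugging


class State:
    """Hypothetical state object that only defines __str__."""
    def __init__(self, name):
        self.name = name

    def __str__(self):
        return self.name


print(str(State("goal")))    # goal
print(repr(State("goal")))   # <__main__.State object at 0x7f...>
```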
|
diff --git a/lib/rubocop/cop/variable_force.rb b/lib/rubocop/cop/variable_force.rb
index <HASH>..<HASH> 100644
--- a/lib/rubocop/cop/variable_force.rb
+++ b/lib/rubocop/cop/variable_force.rb
@@ -284,11 +284,7 @@ module RuboCop
def process_scope(node)
if TWISTED_SCOPE_TYPES.include?(node.type)
# See the comment at the end of file for this behavior.
- twisted_nodes = [node.children[0]]
- twisted_nodes << node.children[1] if node.class_type?
- twisted_nodes.compact!
-
- twisted_nodes.each do |twisted_node|
+ twisted_nodes(node).each do |twisted_node|
process_node(twisted_node)
scanned_nodes << twisted_node
end
@@ -298,6 +294,12 @@ module RuboCop
skip_children!
end
+ def twisted_nodes(node)
+ twisted_nodes = [node.children[0]]
+ twisted_nodes << node.children[1] if node.class_type?
+ twisted_nodes.compact
+ end
+
def process_send(node)
_receiver, method_name, args = *node
return unless method_name == :binding
|
Reduce ABC size of VariableForce
|
diff --git a/pytypes/type_util.py b/pytypes/type_util.py
index <HASH>..<HASH> 100644
--- a/pytypes/type_util.py
+++ b/pytypes/type_util.py
@@ -777,7 +777,7 @@ def _funcsigtypes(func0, slf, func_class = None, globs = None, prop_getter = Fal
for t in argNames)], retTp if not retTp is None else type(None))
if infer_defaults:
resType = _handle_defaults(resType, argSpecs, unspecIndices)
- if not pytypes.annotations_override_typestring and not (tpStr is None or tpStr[0] is None):
+ if not pytypes.annotations_override_typestring and not (tpStr is None or tpStr[0] is None or tpStr[0] == 'ignore'):
if pytypes.strict_annotation_collision_check:
raise TypeError('%s.%s has multiple type declarations.'
% (func.__module__, func.__name__))
|
Do not complain about type mismatch for "type: ignore".
Previously, 'ignore' was treated as a type itself.
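
A minimal sketch, with a hypothetical helper, of the behaviour the fix wants: when a '# type:' comment is parsed, the special 'ignore' marker (from PEP 484) is not interpreted as a declared type.

```python
import re

TYPE_COMMENT = re.compile(r"#\s*type:\s*(.+)")


def declared_type(line):
    """Return the type expression from a '# type:' comment, or None if there is
    no comment or it is the special 'ignore' marker, which is not a type name."""
    match = TYPE_COMMENT.search(line)
    if not match:
        return None
    expr = match.group(1).strip()
    return None if expr == "ignore" else expr


assert declared_type("x = f()  # type: List[int]") == "List[int]"
assert declared_type("x = f()  # type: ignore") is None
assert declared_type("x = f()") is None
```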
|
diff --git a/js/bybit.js b/js/bybit.js
index <HASH>..<HASH> 100644
--- a/js/bybit.js
+++ b/js/bybit.js
@@ -2035,7 +2035,7 @@ module.exports = class bybit extends Exchange {
if (limit !== undefined) {
request['limit'] = limit;
}
- const response = await this.openapiGetWalletWithdrawList (this.extend (request, params));
+ const response = await this.v2PrivateGetWalletWithdrawList (this.extend (request, params));
//
// {
// "ret_code": 0,
@@ -2158,7 +2158,7 @@ module.exports = class bybit extends Exchange {
if (limit !== undefined) {
request['limit'] = limit;
}
- const response = await this.openapiGetWalletFundRecords (this.extend (request, params));
+ const response = await this.v2PrivateGetWalletFundRecords (this.extend (request, params));
//
// {
// "ret_code": 0,
|
bybit: use v2 private endpoints for fetchWithdrawals and fetchLedger
|
diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -3,10 +3,8 @@ var path = require('path');
function routesResolver(filePath) {
var ngModule = require(filePath).default || require(filePath);
- if (ngModule.routes && ngModule.routes.length) {
- return ngModule.routes;
- }
- return [];
+ var routes = ngModule.routes || require(filePath).routes || [];
+ return routes;
}
function resolveNgRoute(srcPath, config, defaultFile, resolver) {
|
refactor: check for routes on both the default export and the module's exports
|
diff --git a/lib/rubocop/cop/util.rb b/lib/rubocop/cop/util.rb
index <HASH>..<HASH> 100644
--- a/lib/rubocop/cop/util.rb
+++ b/lib/rubocop/cop/util.rb
@@ -157,8 +157,7 @@ module RuboCop
end
def begins_its_line?(range)
- source_before_range = range.source_buffer.source[0...range.begin_pos]
- source_before_range.rpartition("\n").last.strip.empty?
+ (range.source_line =~ /\S/) == range.column
end
def within_node?(inner, outer)
|
Much faster implementation of Util#begins_its_line?
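
A rough Python translation of the new check, to show why it is cheaper: instead of slicing everything before the range and scanning backwards for a newline, it only compares the range's column with the column of the first non-whitespace character on the range's own line.

```python
def begins_its_line(line, column):
    """True when `column` points at the first non-whitespace character of `line`
    -- the constant-time check the new implementation uses."""
    stripped_offset = len(line) - len(line.lstrip())
    return line.strip() != "" and stripped_offset == column


assert begins_its_line("    foo.bar", 4) is True
assert begins_its_line("    foo.bar", 8) is False
assert begins_its_line("foo", 0) is True
```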
|
diff --git a/mtools/test/test_mlogfilter.py b/mtools/test/test_mlogfilter.py
index <HASH>..<HASH> 100644
--- a/mtools/test/test_mlogfilter.py
+++ b/mtools/test/test_mlogfilter.py
@@ -175,5 +175,13 @@ class TestMLogFilter(object):
(ll.datetime >= event2 - padding and ll.datetime <= event2 + padding)
)
+ @raises(SystemExit)
+ def test_no_logfile(self):
+ """ mlogfilter: test that not providing at least 1 log file throws clean error. """
+
+ self.tool.run('--from Jan 1')
+
+
+
# output = sys.stdout.getvalue().strip()
\ No newline at end of file
|
Add a test that mlogfilter errors out cleanly when no log file is provided.
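
The test uses nose's @raises(SystemExit) decorator; the equivalent assertion in pytest, sketched here against a hypothetical stand-in for the tool, looks like this.

```python
import pytest


def run_cli(argv):
    """Hypothetical stand-in for the tool under test: exit cleanly when no log file is given."""
    if not argv:
        raise SystemExit("at least one log file is required")


def test_no_logfile():
    # Same intent as @raises(SystemExit): the tool must error out cleanly.
    with pytest.raises(SystemExit):
        run_cli([])
```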
|
diff --git a/agent/core/src/main/java/org/jolokia/http/AgentServlet.java b/agent/core/src/main/java/org/jolokia/http/AgentServlet.java
index <HASH>..<HASH> 100644
--- a/agent/core/src/main/java/org/jolokia/http/AgentServlet.java
+++ b/agent/core/src/main/java/org/jolokia/http/AgentServlet.java
@@ -350,9 +350,9 @@ public class AgentServlet extends HttpServlet {
long now = System.currentTimeMillis();
pResp.setDateHeader("Date",now);
// 1h in the past since it seems, that some servlet set the date header on their
- // own so that it cannot be guaranteed that these heades are really equals.
- // It happend on Tomcat that Date: was finally set *before* Expires: in the final
- // answers some times which seems to be an implementation percularity from Tomcat
+ // own so that it cannot be guaranteed that these headers are really equals.
+ // It happened on Tomcat that Date: was finally set *before* Expires: in the final
+ // answers some times which seems to be an implementation peculiarity from Tomcat
pResp.setDateHeader("Expires",now - 3600000);
}
|
Cleared up JavaDoc a bit.
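
A small Python sketch of the caching trick the corrected comment describes (hypothetical helper, standard library only): the Expires header is deliberately set one hour before the Date header, so the response is always treated as stale even if the container later rewrites Date.

```python
import time
from email.utils import formatdate


def no_cache_headers(now=None):
    """Build Date/Expires headers with Expires one hour in the past."""
    now = time.time() if now is None else now
    return {
        "Date": formatdate(now, usegmt=True),
        "Expires": formatdate(now - 3600, usegmt=True),
    }
```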
|
diff --git a/js/reportico.js b/js/reportico.js
index <HASH>..<HASH> 100755
--- a/js/reportico.js
+++ b/js/reportico.js
@@ -334,6 +334,8 @@ reportico_jquery(document).on('click', '.swAdminButton, .swAdminButton2, .swMenu
params += "&reportico_ajax_called=1";
csvpdfoutput = false;
+
+ if ( reportico_jquery(this).prop("name") != "submit_design_mode" )
reportico_jquery(reportico_container).find("input:radio").each(function() {
d = 0;
nm = reportico_jquery(this).prop("value");
|
Fix: after running a PDF report, entering design mode executed the report in a new window
instead of entering design mode
|
diff --git a/lib/simpletestlib.php b/lib/simpletestlib.php
index <HASH>..<HASH> 100644
--- a/lib/simpletestlib.php
+++ b/lib/simpletestlib.php
@@ -37,7 +37,8 @@ function recurseFolders($path, $callback, $fileregexp = '/.*/', $exclude = false
foreach ($files as $file) {
$filepath = $path .'/'. $file;
- if ($file == '.' || $file == '..') {
+ if (strpos($file, '.') === 0) {
+ /// Don't check hidden files.
continue;
} else if (is_dir($filepath)) {
if (!in_array($filepath, $ignorefolders)) {
|
Unit tests: in recurseFolders, skip files/folders whose names begin with '.'.
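
The same filter sketched in Python: any entry whose name begins with '.' — which covers '.', '..' and hidden files alike — is skipped.

```python
def visible_entries(names):
    """Filter out entries whose names begin with '.', mirroring the
    strpos($file, '.') === 0 check: '.', '..' and hidden files are all skipped."""
    return [name for name in names if not name.startswith(".")]


assert visible_entries([".", "..", ".git", "lib", "README"]) == ["lib", "README"]
```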
|