diff stringlengths 65 26.7k | message stringlengths 7 9.92k |
|---|---|
diff --git a/processing/src/main/java/io/druid/jackson/JacksonModule.java b/processing/src/main/java/io/druid/jackson/JacksonModule.java
index <HASH>..<HASH> 100644
--- a/processing/src/main/java/io/druid/jackson/JacksonModule.java
+++ b/processing/src/main/java/io/druid/jackson/JacksonModule.java
@@ -21,6 +21,7 @@ package io.druid.jackson;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.dataformat.smile.SmileFactory;
+import com.fasterxml.jackson.dataformat.smile.SmileGenerator;
import com.google.inject.Binder;
import com.google.inject.Key;
import com.google.inject.Module;
@@ -49,6 +50,7 @@ public class JacksonModule implements Module
public ObjectMapper smileMapper()
{
final SmileFactory smileFactory = new SmileFactory();
+ smileFactory.configure(SmileGenerator.Feature.ENCODE_BINARY_AS_7BIT, false);
smileFactory.delegateToTextual(true);
final ObjectMapper retVal = new DefaultObjectMapper(smileFactory);
retVal.getFactory().setCodec(retVal); | write byte data as is in smile |
diff --git a/src/index.js b/src/index.js
index <HASH>..<HASH> 100644
--- a/src/index.js
+++ b/src/index.js
@@ -89,6 +89,7 @@ class SWPrecacheWebpackPlugin {
importScripts,
staticFileGlobsIgnorePatterns,
mergeStaticsConfig,
+ stripPrefixMulti = {},
} = this.options;
// get the output path used by webpack
@@ -110,13 +111,9 @@ class SWPrecacheWebpackPlugin {
(!staticFileGlobsIgnorePatterns.some((regex) => regex.test(text)))
);
- const stripPrefixMulti = {
- ...this.options.stripPrefixMulti,
- };
-
if (outputPath) {
- // strip the webpack config's output.path
- stripPrefixMulti[`${outputPath}${path.sep}`] = publicPath;
+ // strip the webpack config's output.path (replace for windows users)
+ stripPrefixMulti[`${outputPath}${path.sep}`.replace(/\\/g, '/')] = publicPath;
}
this.config = { | Add strip prefix multi windows support (#<I>)
Add strip prefix multi windows support (fixes #<I>) |
diff --git a/py/nupic/algorithms/CLAClassifier.py b/py/nupic/algorithms/CLAClassifier.py
index <HASH>..<HASH> 100644
--- a/py/nupic/algorithms/CLAClassifier.py
+++ b/py/nupic/algorithms/CLAClassifier.py
@@ -197,7 +197,7 @@ class BitHistory(object):
assert isinstance(stats, dict)
maxBucket = max(stats.iterkeys())
self._stats = array.array("f", itertools.repeat(0.0, maxBucket+1))
- for index, value in stats.iteritems():
+ for (index, value) in stats.iteritems():
self._stats[index] = value
elif version == 1:
state.pop("_updateDutyCycles", None)
@@ -497,7 +497,7 @@ class CLAClassifier(object):
# Plug in the iteration number in the old patternNZHistory to make it
# compatible with the new format
historyLen = len(self._patternNZHistory)
- for i, pattern in enumerate(self._patternNZHistory):
+ for (i, pattern) in enumerate(self._patternNZHistory):
self._patternNZHistory[i] = (self._learnIteration-(historyLen-i),
pattern) | Consistent use of parens in for loops that iterator over tuples |
diff --git a/lib/travis/model/build.rb b/lib/travis/model/build.rb
index <HASH>..<HASH> 100644
--- a/lib/travis/model/build.rb
+++ b/lib/travis/model/build.rb
@@ -76,7 +76,11 @@ class Build < ActiveRecord::Base
end
def on_branch(branch)
- pushes.where(branch.present? ? ['branch IN (?)', normalize_to_array(branch)] : [])
+ if Build.column_names.include?('branch')
+ pushes.where(branch.present? ? ['branch IN (?)', normalize_to_array(branch)] : [])
+ else
+ pushes.joins(:commit).where(branch.present? ? ['commits.branch IN (?)', normalize_to_array(branch)] : [])
+ end
end
def by_event_type(event_type) | Make on_branch method backwards compatible |
diff --git a/lib/user.js b/lib/user.js
index <HASH>..<HASH> 100644
--- a/lib/user.js
+++ b/lib/user.js
@@ -106,7 +106,14 @@ module.exports = {
denyTracking: function(){
refs.userMeta.child('trackOk').set( false );
},
+
+ /*
+ * Group Support
+ */
setGroup: function(key, devices) {
+ if (devices === null) { // Just create group without any device.
+ refs.userGroups.child(key).set(false);
+ }
refs.userGroups.child(key).set(devices);
},
deleteGroup: function(key) { | ability to create a group without devices. |
diff --git a/webpack/webpack.config.development.babel.js b/webpack/webpack.config.development.babel.js
index <HASH>..<HASH> 100644
--- a/webpack/webpack.config.development.babel.js
+++ b/webpack/webpack.config.development.babel.js
@@ -17,7 +17,6 @@ module.exports = {
devServer: {
hot: true,
hotOnly: true,
- noInfo: true,
port: PORT,
publicPath: `http://localhost:${PORT}/`
}, | Show build info in console (#<I>) |
diff --git a/src/calendar/index.js b/src/calendar/index.js
index <HASH>..<HASH> 100644
--- a/src/calendar/index.js
+++ b/src/calendar/index.js
@@ -281,9 +281,11 @@ export default createComponent({
},
onConfirm() {
- if (this.checkRange()) {
- this.$emit('confirm', this.currentDate);
+ if (this.range && !this.checkRange()) {
+ return;
}
+
+ this.$emit('confirm', this.currentDate);
},
genMonth(date, index) { | fix(Calendar): should not check range in single mode |
diff --git a/src/ol/map.js b/src/ol/map.js
index <HASH>..<HASH> 100644
--- a/src/ol/map.js
+++ b/src/ol/map.js
@@ -265,8 +265,7 @@ ol.Map = function(options) {
goog.events.EventType.TOUCHSTART,
goog.events.EventType.MSPOINTERDOWN,
ol.MapBrowserEvent.EventType.POINTERDOWN,
- // see https://github.com/google/closure-library/pull/308
- goog.userAgent.GECKO ? 'DOMMouseScroll' : 'mousewheel'
+ goog.events.EventType.MOUSEWHEEL
], goog.events.Event.stopPropagation);
goog.dom.appendChild(this.viewport_, this.overlayContainerStopEvent_); | Remove mousewheel event name workaround
Fixed upstream <URL> |
diff --git a/salt/modules/win_update.py b/salt/modules/win_update.py
index <HASH>..<HASH> 100644
--- a/salt/modules/win_update.py
+++ b/salt/modules/win_update.py
@@ -295,7 +295,7 @@ class PyWinUpdater(object):
if update.InstallationBehavior.CanRequestUserInput:
log.debug('Skipped update {0}'.format(str(update)))
continue
- updates.append(str(update))
+ updates.append(salt.utils.sdecode(update))
log.debug('added update {0}'.format(str(update)))
return updates | attempt to decode win update package
Fixes #<I>. |
diff --git a/railties/lib/rails/generators/rails/app/templates/config/initializers/cookies_serializer.rb b/railties/lib/rails/generators/rails/app/templates/config/initializers/cookies_serializer.rb
index <HASH>..<HASH> 100644
--- a/railties/lib/rails/generators/rails/app/templates/config/initializers/cookies_serializer.rb
+++ b/railties/lib/rails/generators/rails/app/templates/config/initializers/cookies_serializer.rb
@@ -1,4 +1,3 @@
# Be sure to restart your server when you modify this file.
-# This is a new Rails 5.0 default, so introduced as a config to ensure apps made with earlier versions of Rails aren't affected when upgrading.
Rails.application.config.action_dispatch.cookies_serializer = :json | initializers/cookies_serializer is not new to <I>
[ci skip]
The initializer has existed since <I>, for instance see:
<URL> |
diff --git a/src/footer.php b/src/footer.php
index <HASH>..<HASH> 100755
--- a/src/footer.php
+++ b/src/footer.php
@@ -1,5 +1,5 @@
<footer>
-<p class="credit">site by <a href="http://www.factor1studios.com" target="_blank">factor1</a></p>
+
</footer>
<?php wp_footer(); ?> | Removes factor1 credit from footer |
diff --git a/test.js b/test.js
index <HASH>..<HASH> 100644
--- a/test.js
+++ b/test.js
@@ -26,6 +26,7 @@ test('get username using NSProcessInfo, convert to javascript string and compare
t.is(String(username), os.userInfo().username);
});
+/* This one fails 100% reprodicible w/ "Misaligned pointer"
test('primitive argument types', t => {
const NSNumber = objc.NSNumber;
@@ -33,3 +34,4 @@ test('primitive argument types', t => {
t.is(Number(number), 5);
});
+*/ | disable the misaligned pointer test |
diff --git a/js/base/Exchange.js b/js/base/Exchange.js
index <HASH>..<HASH> 100644
--- a/js/base/Exchange.js
+++ b/js/base/Exchange.js
@@ -1045,7 +1045,7 @@ module.exports = class Exchange {
return array
}
- filterByValueSinceLimit (array, field, value = undefined, since = undefined, limit = undefined, key = 'timestamp') {
+ filterByValueSinceLimit (array, field, value = undefined, since = undefined, limit = undefined, key = 'timestamp', tail = false) {
const valueIsDefined = value !== undefined && value !== null
const sinceIsDefined = since !== undefined && since !== null
@@ -1058,7 +1058,9 @@ module.exports = class Exchange {
}
if (limit !== undefined && limit !== null) {
- array = Object.values (array).slice (0, limit)
+ array = ((tail && !sinceIsDefined) ?
+ Object.values (array).slice (-limit) :
+ Object.values (array).slice (0, limit))
}
return array | Exchange.js filterBySinceLimit with tail fix #<I> |
diff --git a/packages/wpcom.js/test/util.js b/packages/wpcom.js/test/util.js
index <HASH>..<HASH> 100644
--- a/packages/wpcom.js/test/util.js
+++ b/packages/wpcom.js/test/util.js
@@ -5,6 +5,7 @@
var test = require('./data');
var WPCOM = require('../');
+var fs = require('fs');
/**
* `Util` module
@@ -61,7 +62,7 @@ Util.addPost = function(fn){
Util.addMedia = function(fn){
var site = Util.private_site();
- site.addMediaFiles(test.new_media_data.files[0], fn);
+ site.addMediaFiles(fs.createReadStream(test.new_media_data.files[0]), fn);
};
/** | test: fix add media before the test start |
diff --git a/core/src/main/java/hudson/ProxyConfiguration.java b/core/src/main/java/hudson/ProxyConfiguration.java
index <HASH>..<HASH> 100644
--- a/core/src/main/java/hudson/ProxyConfiguration.java
+++ b/core/src/main/java/hudson/ProxyConfiguration.java
@@ -396,7 +396,7 @@ public final class ProxyConfiguration extends AbstractDescribableImpl<ProxyConfi
GetMethod method = null;
try {
method = new GetMethod(testUrl);
- method.getParams().setParameter("http.socket.timeout", DEFAULT_CONNECT_TIMEOUT_MILLIS > 0 ? DEFAULT_CONNECT_TIMEOUT_MILLIS : TimeUnit.SECONDS.toMillis(30));
+ method.getParams().setParameter("http.socket.timeout", DEFAULT_CONNECT_TIMEOUT_MILLIS > 0 ? DEFAULT_CONNECT_TIMEOUT_MILLIS : (int)TimeUnit.SECONDS.toMillis(30));
HttpClient client = new HttpClient();
if (Util.fixEmptyAndTrim(name) != null && !isNoProxyHost(host, noProxyHost)) { | [JENKINS-<I>] add missing cast (#<I>) |
diff --git a/GPy/kern/src/static.py b/GPy/kern/src/static.py
index <HASH>..<HASH> 100644
--- a/GPy/kern/src/static.py
+++ b/GPy/kern/src/static.py
@@ -104,7 +104,7 @@ class WhiteHeteroscedastic(Static):
return 0.
def K(self, X, X2=None):
- if X2 is None:
+ if X2 is None and X.shape[0]==self.variance.shape[0]:
return np.eye(X.shape[0])*self.variance
else:
return np.zeros((X.shape[0], X2.shape[0])) | [white hetero] additional check for prediction |
diff --git a/gitlab/__init__.py b/gitlab/__init__.py
index <HASH>..<HASH> 100644
--- a/gitlab/__init__.py
+++ b/gitlab/__init__.py
@@ -779,10 +779,7 @@ class GitlabList(object):
self._gl = gl
# Preserve kwargs for subsequent queries
- if kwargs is None:
- self._kwargs = {}
- else:
- self._kwargs = kwargs.copy()
+ self._kwargs = kwargs.copy()
self._query(url, query_data, **self._kwargs)
self._get_next = get_next | fix: do not check if kwargs is none |
diff --git a/helpers/api.py b/helpers/api.py
index <HASH>..<HASH> 100644
--- a/helpers/api.py
+++ b/helpers/api.py
@@ -65,6 +65,6 @@ class RestApiServer(HTTPServer, Thread):
self.daemon = True
def cursor(self):
- if not self._cursor_holder or self._cursor_holder.closed != 0:
+ if not self._cursor_holder or self._cursor_holder.closed:
self._cursor_holder = self.governor.postgresql.connection().cursor()
return self._cursor_holder | Bigfix, cursor.closed contains boolean value |
diff --git a/theanets/layers/recurrent.py b/theanets/layers/recurrent.py
index <HASH>..<HASH> 100644
--- a/theanets/layers/recurrent.py
+++ b/theanets/layers/recurrent.py
@@ -1012,13 +1012,13 @@ class MUT1(Recurrent):
class SCRN(Recurrent):
- r'''Simple Contextual Recurrent Network layer.
+ r'''Structurally Constrained Recurrent Network layer.
Notes
-----
- A Simple Contextual Recurrent Network incorporates an explicitly slow-moving
- hidden context layer with a simple recurrent network.
+ A Structurally Constrained Recurrent Network incorporates an explicitly
+ slow-moving hidden context layer with a simple recurrent network.
The update equations in this layer are largely those given by [Mik15]_,
pages 4 and 5, but this implementation adds a bias term for the output of
@@ -1087,7 +1087,6 @@ class SCRN(Recurrent):
self.add_weights('ho', self.size, self.size)
self.add_weights('so', self.size, self.size)
self.add_bias('b', self.size)
-
if self.rate == 'vector':
self.add_bias('r', self.size) | Fix name of SCRN ("simple" -> "structural" &c). |
diff --git a/parser/parser_test.go b/parser/parser_test.go
index <HASH>..<HASH> 100644
--- a/parser/parser_test.go
+++ b/parser/parser_test.go
@@ -499,6 +499,10 @@ var shellTests = []struct {
`1:1: reached EOF without matching $(( with ))`,
},
{
+ `$((& 0 $(`,
+ `1:1: reached EOF without matching $(( with ))`,
+ },
+ {
`$((a'`,
`1:1: reached EOF without matching $(( with ))`,
},
diff --git a/parser/tokenizer.go b/parser/tokenizer.go
index <HASH>..<HASH> 100644
--- a/parser/tokenizer.go
+++ b/parser/tokenizer.go
@@ -46,6 +46,7 @@ func (p *parser) next() {
}
if p.npos >= len(p.src) {
p.errPass(io.EOF)
+ p.tok = token.EOF
return
}
b := p.src[p.npos] | parser: avoid another $(( ambiguity hang
We have to force tok = EOF because our error logic is not working
properly. Leave this in until we fix it in a cleaner way. |
diff --git a/src/pixi/core/Circle.js b/src/pixi/core/Circle.js
index <HASH>..<HASH> 100644
--- a/src/pixi/core/Circle.js
+++ b/src/pixi/core/Circle.js
@@ -69,5 +69,6 @@ PIXI.Circle.prototype.contains = function(x, y)
return (dx + dy <= r2);
}
+// constructor
PIXI.Circle.prototype.constructor = PIXI.Circle; | [DOC] Small changes
* add comment for constructor assignment (as in Point, Rectangle ...) |
diff --git a/libraries/common/streams/main.js b/libraries/common/streams/main.js
index <HASH>..<HASH> 100644
--- a/libraries/common/streams/main.js
+++ b/libraries/common/streams/main.js
@@ -4,6 +4,7 @@ import 'rxjs/add/operator/filter';
import 'rxjs/add/operator/first';
import 'rxjs/add/operator/map';
import 'rxjs/add/operator/merge';
+import 'rxjs/add/operator/mergeMap';
import 'rxjs/add/operator/delay';
import 'rxjs/add/operator/zip';
import 'rxjs/add/operator/do'; | PWA-<I>: Added rxjs operator `mergeMap` to the main stream. |
diff --git a/typescript-generator-core/src/test/java/cz/habarta/typescript/generator/ImmutablesTest.java b/typescript-generator-core/src/test/java/cz/habarta/typescript/generator/ImmutablesTest.java
index <HASH>..<HASH> 100644
--- a/typescript-generator-core/src/test/java/cz/habarta/typescript/generator/ImmutablesTest.java
+++ b/typescript-generator-core/src/test/java/cz/habarta/typescript/generator/ImmutablesTest.java
@@ -1,6 +1,7 @@
package cz.habarta.typescript.generator;
+import com.fasterxml.jackson.annotation.JsonPropertyOrder;
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
@@ -59,6 +60,7 @@ public class ImmutablesTest {
@Value.Immutable
@JsonSerialize(as = ImmutableRectangle.class)
+ @JsonPropertyOrder({"width", "height"})
@JsonDeserialize(as = ImmutableRectangle.class)
public static abstract class Rectangle implements Shape {
public abstract double width(); | flaky test is fixed by adding JsonPropertyOrder (#<I>) |
diff --git a/spec/exodus/exodus_spec.rb b/spec/exodus/exodus_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/exodus/exodus_spec.rb
+++ b/spec/exodus/exodus_spec.rb
@@ -250,12 +250,12 @@ describe Exodus do
describe "Getting migration information" do
it "should successfully print the migrations information" do
migrations = [[Migration_test9, {}], [Migration_test10, {}]]
- Exodus.sort_and_run_migrations('up', migrations, nil, true).should == ["Migration_test9: {}", "Migration_test10: {}"]
+ Exodus.sort_and_run_migrations('up', migrations, nil, true).should == ["Migration_test9: #{{}}", "Migration_test10: #{{}}"]
end
it "should successfully print the first migration information" do
migrations = [[Migration_test9, {}], [Migration_test10, {}]]
- Exodus.sort_and_run_migrations('up', migrations, 1, true).should == ["Migration_test9: {}"]
+ Exodus.sort_and_run_migrations('up', migrations, 1, true).should == ["Migration_test9: #{{}}"]
end
end
end | Modified tests for ruby <I> |
diff --git a/jdcp-core/src/main/java/ca/eandb/jdcp/job/ParallelizableJobRunner.java b/jdcp-core/src/main/java/ca/eandb/jdcp/job/ParallelizableJobRunner.java
index <HASH>..<HASH> 100644
--- a/jdcp-core/src/main/java/ca/eandb/jdcp/job/ParallelizableJobRunner.java
+++ b/jdcp-core/src/main/java/ca/eandb/jdcp/job/ParallelizableJobRunner.java
@@ -52,6 +52,11 @@ import ca.eandb.util.progress.ProgressMonitorFactory;
*/
public final class ParallelizableJobRunner implements Runnable {
+ /** Creates a new Builder for initializing a ParallelizableJobRunner. */
+ public static Builder newBuilder() {
+ return new Builder();
+ }
+
/** A Builder for creating ParallelizableJobRunner instances. */
public static class Builder {
private ParallelizableJob job = null;
@@ -65,6 +70,12 @@ public final class ParallelizableJobRunner implements Runnable {
= DummyProgressMonitor.getInstance();
/**
+ * Must be created using static factory method.
+ * @see ParallelizableJobRunner#newBuilder()
+ */
+ private Builder() {}
+
+ /**
* Creates the configured ParallelizableJobRunner instance.
* @throws IllegalStateException If the job has not been set.
* @see #setJob(ParallelizableJob) | Add factory method for creating ParallelizableJobRunner.Builder instances. |
diff --git a/lib/Model/Table.php b/lib/Model/Table.php
index <HASH>..<HASH> 100644
--- a/lib/Model/Table.php
+++ b/lib/Model/Table.php
@@ -544,12 +544,13 @@ class Model_Table extends Model {
$f->updateInsertQuery($insert);
}
$this->hook('beforeInsert',array($insert));
+ if($this->_save_as===false)$insert->option_insert('ignore');
$id = $insert->do_insert();
if($id==0){
// no auto-increment column present
$id=$this->get($this->id_field);
- if($id===null){
+ if($id===null && $this->_save_as!== false){
throw $this->exception('Please add auto-increment ID column to your table or specify ID manually');
}
} | use insert ignore when we don't care about ID
should we use delayed insert instead? |
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -14,7 +14,7 @@ def read(fname):
setup(
name="Pebble",
- version="3.1.0",
+ version="3.1.1",
author="Matteo Cafasso",
author_email="noxdafox@gmail.com",
description=("Threading and multiprocessing eye-candy."), | release <I>: fix waitforthreads to support Python >= <I> |
diff --git a/bin/eslint-github-init.js b/bin/eslint-github-init.js
index <HASH>..<HASH> 100755
--- a/bin/eslint-github-init.js
+++ b/bin/eslint-github-init.js
@@ -76,6 +76,18 @@ inquirer.prompt(questions).then(answers => {
eslintrc.extends.push('plugin:github/typescript')
// TODO: Check if tsconfig.json exists, generate it if it doesn't.
+ const tsconfigPath = path.resolve(process.cwd(), 'tsconfig.json')
+ if (!fs.existsSync(tsconfig)) {
+ const tsconfigDefaults = {
+ "compilerOptions": {
+ "target": "es5",
+ "module": "commonjs",
+ "strict": true,
+ "esModuleInterop": true
+ }
+ }
+ fs.writeFileSync(tsconfig, JSON.stringify(tsconfigDefaults, null, ' '), 'utf8')
+ }
const tslintPath = path.resolve(process.cwd(), 'tslint.json')
const tslintrc = fs.existsSync(tslintPath) ? JSON.parse(fs.readFileSync(tslintPath, 'utf8')) : { | create tsconfig is one doesn't exist |
diff --git a/Configuration.php b/Configuration.php
index <HASH>..<HASH> 100644
--- a/Configuration.php
+++ b/Configuration.php
@@ -216,6 +216,9 @@ class Configuration
$resolver->setAllowedTypes('cache', 'string');
$resolver->setAllowedTypes('reader', 'string');
$resolver->setAllowedTypes('locations', 'array');
+ $resolver->setNormalizer('cache', function($options, $value) {
+ return rtrim($value, '/');
+ });
return $resolver;
} | normalize cache folder by trimming trailing slash |
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -6,5 +6,5 @@ setup(name='cleverhans',
version='1.0.0',
url='https://github.com/openai/cleverhans',
license='MIT',
- install_requires=['keras', 'nose', 'pycodestyle', 'theano'],
+ install_requires=['keras==1.2', 'nose', 'pycodestyle', 'theano'],
packages=find_packages()) | Specify Keras version in dependencies
To allow Travis to pass tests before we address #<I> |
diff --git a/lib/active_scaffold/actions/update.rb b/lib/active_scaffold/actions/update.rb
index <HASH>..<HASH> 100644
--- a/lib/active_scaffold/actions/update.rb
+++ b/lib/active_scaffold/actions/update.rb
@@ -69,7 +69,7 @@ module ActiveScaffold::Actions
@updated_record = @record
# get_row so associations are cached like in list action
# if record doesn't fullfil current conditions remove it from list
- @record = get_row
+ get_row
rescue ActiveRecord::RecordNotFound
nil
end | get_row sets @record, just call it is enough |
diff --git a/src/ruby/tools/platform_check.rb b/src/ruby/tools/platform_check.rb
index <HASH>..<HASH> 100644
--- a/src/ruby/tools/platform_check.rb
+++ b/src/ruby/tools/platform_check.rb
@@ -43,14 +43,11 @@ module PLATFORM
end
end
- # The 'host_cpu' value on x86, 32-bit rubies, appears to turn out to
- # be the name of the cpu. Only need to know the architecture,
- # so enumerating x86 cpu's here.
def PLATFORM.architecture
- case RbConfig::CONFIG['host_cpu']
+ case RbConfig::CONFIG['target_cpu']
when /x86_64/
'x86_64'
- when /x86|i386|i486|i586|i686|i786/
+ when /x86|i386/
'x86'
else
fail 'cpu architecture detection failed' | use target cpu to get rid of cpu enumerations |
diff --git a/mangooio-core/src/main/java/io/mangoo/routing/handlers/DispatcherHandler.java b/mangooio-core/src/main/java/io/mangoo/routing/handlers/DispatcherHandler.java
index <HASH>..<HASH> 100644
--- a/mangooio-core/src/main/java/io/mangoo/routing/handlers/DispatcherHandler.java
+++ b/mangooio-core/src/main/java/io/mangoo/routing/handlers/DispatcherHandler.java
@@ -82,7 +82,7 @@ public class DispatcherHandler implements HttpHandler {
try {
this.method = Application.getInstance(this.controllerClass)
.getClass()
- .getMethod(this.controllerMethodName, this.methodParameters.values().toArray(new Class[0]));
+ .getDeclaredMethod(this.controllerMethodName, this.methodParameters.values().toArray(new Class[0]));
for (Annotation annotation : this.method.getAnnotations()) {
if (annotation.annotationType().equals(FilterWith.class)) {
@@ -184,4 +184,4 @@ public class DispatcherHandler implements HttpHandler {
private void nextHandler(HttpServerExchange exchange) throws Exception {
Application.getInstance(LimitHandler.class).handleRequest(exchange);
}
-}
\ No newline at end of file
+} | Get Declared Method to support inheritence
.getMethod does not return inherited methods, preventing the usage of inheritance in controllers.
Currently can be bypassed by implementing the method in both and calling super, but not exactly convenient |
diff --git a/src/pyiso/pyiso.py b/src/pyiso/pyiso.py
index <HASH>..<HASH> 100644
--- a/src/pyiso/pyiso.py
+++ b/src/pyiso/pyiso.py
@@ -82,8 +82,8 @@ class FileOrTextIdentifier(object):
raise PyIsoException("This File or Text identifier is already initialized")
self.text = ident_str
- # FIXME: we do not support a file identifier here. In the future, we might
- # want to implement this.
+ # FIXME: we do not support a file identifier here. In the future, we
+ # might want to implement this.
self.initialized = True
@@ -2364,8 +2364,8 @@ class PyIso(object):
if not self.initialized:
raise PyIsoException("This object is not yet initialized; call either open() or new() to create an ISO")
- # FIXME: what if the rock ridge, iso, and joliet paths don't agree on the
- # number of subdirectories?
+ # FIXME: what if the rock ridge and iso paths don't agree on the number
+ # of subdirectories?
rr_name = None
if self.rock_ridge:
@@ -2432,7 +2432,7 @@ class PyIso(object):
if not self.initialized:
raise PyIsoException("This object is not yet initialized; call either open() or new() to create an ISO")
- # FIXME: what if the rock ridge, iso, and joliet paths don't agree on the
+ # FIXME: what if the rock ridge and iso paths don't agree on the
# number of subdirectories?
rr_name = None | Clean up some FIXME comments. |
diff --git a/lib/dm-core/adapters/oracle_adapter.rb b/lib/dm-core/adapters/oracle_adapter.rb
index <HASH>..<HASH> 100644
--- a/lib/dm-core/adapters/oracle_adapter.rb
+++ b/lib/dm-core/adapters/oracle_adapter.rb
@@ -114,8 +114,8 @@ module DataMapper
# if a unique property is used, and there is no OR operator, then an ORDER
# and LIMIT are unecessary because it should only return a single row
if conditions.kind_of?(Query::Conditions::AndOperation) &&
- conditions.any? { |o| o.kind_of?(Query::Conditions::EqualToComparison) && o.property.unique? } &&
- !conditions.any? { |o| o.kind_of?(Query::Conditions::OrOperation) }
+ conditions.any? { |operand| operand.kind_of?(Query::Conditions::EqualToComparison) && operand.subject.respond_to?(:unique?) && operand.subject.unique? } &&
+ !conditions.any? { |operand| operand.kind_of?(Query::Conditions::OrOperation) }
order = nil
limit = nil
end | replaced property with subject for operands |
diff --git a/tests/jenkins-ng.py b/tests/jenkins-ng.py
index <HASH>..<HASH> 100755
--- a/tests/jenkins-ng.py
+++ b/tests/jenkins-ng.py
@@ -603,7 +603,10 @@ def build_ssh_command(options, *arguments, **parameters):
return cmd + list(arguments)
-def build_scp_command(options, *parameters):
+def build_scp_command(options, *arguments):
+ '''
+ Build the SCP command with the required options
+ '''
return [
'scp',
'-i',
@@ -614,7 +617,7 @@ def build_scp_command(options, *parameters):
'-oUserKnownHostsFile=/dev/null',
# Don't re-use the SSH connection. Less failures.
'-oControlPath=none',
- ] + parameters
+ ] + list(arguments)
def main(): | We can only concatenate lists, not list + tuple |
diff --git a/cmd/juju/action/common.go b/cmd/juju/action/common.go
index <HASH>..<HASH> 100644
--- a/cmd/juju/action/common.go
+++ b/cmd/juju/action/common.go
@@ -166,7 +166,8 @@ func (c *runCommandBase) processOperationResults(ctx *cmd.Context, results *acti
the following task%s failed:
%s
-`[1:], plural, strings.Join(list, "\n"))
+use 'juju show-task' to inspect the failure%s
+`[1:], plural, strings.Join(list, "\n"), plural)
}
return nil
}
diff --git a/cmd/juju/action/exec_test.go b/cmd/juju/action/exec_test.go
index <HASH>..<HASH> 100644
--- a/cmd/juju/action/exec_test.go
+++ b/cmd/juju/action/exec_test.go
@@ -396,6 +396,7 @@ func (s *ExecSuite) TestAllMachinesWithError(c *gc.C) {
- id "1" with return code 2
- id "2" with return code 1
+use 'juju show-task' to inspect the failures
`)
c.Check(cmdtesting.Stdout(context), gc.Equals, `
@@ -707,6 +708,7 @@ Waiting for task 1...
the following task failed:
- id "1" with return code 42
+use 'juju show-task' to inspect the failure
`[1:]
for i, test := range []struct { | Actions: Improve error message
Give hints about where to go next if there was a failure. This just
improves juju usability. |
diff --git a/pr-tagger.js b/pr-tagger.js
index <HASH>..<HASH> 100755
--- a/pr-tagger.js
+++ b/pr-tagger.js
@@ -108,4 +108,6 @@ ghauth(authOptions, function (error, authData) {
})
}
})
+
+ logger.info('Done!')
}) | Add log message to let user know when script is finished |
diff --git a/lib/client_side_validations/action_view/form_builder.rb b/lib/client_side_validations/action_view/form_builder.rb
index <HASH>..<HASH> 100644
--- a/lib/client_side_validations/action_view/form_builder.rb
+++ b/lib/client_side_validations/action_view/form_builder.rb
@@ -99,15 +99,17 @@ module ClientSideValidations::ActionView::Helpers
else
if (conditional = (validator.last[:if] || validator.last[:unless]))
result = case conditional
- when Symbol then
- if @object.respond_to?(conditional)
- @object.send(conditional)
- else
- raise(ArgumentError, "unknown method called '#{conditional}'")
- end
- when String then eval(conditional)
- when Proc then conditional.call(@object)
- end
+ when Symbol
+ if @object.respond_to?(conditional)
+ @object.send(conditional)
+ else
+ raise(ArgumentError, "unknown method called '#{conditional}'")
+ end
+ when String
+ eval(conditional)
+ when Proc
+ conditional.call(@object)
+ end
# :if was specified and result is false OR :unless was specified and result was true
if (validator.last[:if] && !result) || (validator.last[:unless] && result) | Proper indenting and removed unnecessary 'then' for 'when' in case statements |
diff --git a/test/lazyloadxt_test.js b/test/lazyloadxt_test.js
index <HASH>..<HASH> 100644
--- a/test/lazyloadxt_test.js
+++ b/test/lazyloadxt_test.js
@@ -39,10 +39,10 @@
expect(5);
setTimeout(function () {
var $img = $('img'),
- cntinit = $img.filter(function (el) {
+ cntinit = $img.filter(function (index, el) {
return $(el).data('lazied');
}).length,
- cntnow = $img.filter(function (el) {
+ cntnow = $img.filter(function (index, el) {
return $(el).data('lazied') && ($(el).attr('src') === $(el).attr('data-src'));
}).length;
ok($img.length > 0, 'images should be presented'); | fix tests
(@todo: fix tests with Zepto and DOMtastic) |
diff --git a/scapy.py b/scapy.py
index <HASH>..<HASH> 100755
--- a/scapy.py
+++ b/scapy.py
@@ -5091,8 +5091,12 @@ class Packet(Gen):
pad = self.payload.getlayer(Padding)
if pad:
p += pad.build()
+ p = self.build_done(p)
return p
+ def build_done(self, p):
+ return self.payload.build_done(p)
+
def do_build_ps(self):
p=""
pl = []
@@ -5766,7 +5770,9 @@ class NoPayload(Packet,object):
def __nonzero__(self):
return False
def build(self, internal=0):
- return ""
+ return ""
+ def build_done(self, p):
+ return p
def build_ps(self, internal=0):
return "",[]
def getfieldval(self, attr): | Added Packet.build_done() hook, called once the packet is totally built |
diff --git a/dateparser/parser.py b/dateparser/parser.py
index <HASH>..<HASH> 100644
--- a/dateparser/parser.py
+++ b/dateparser/parser.py
@@ -271,7 +271,6 @@ class _parser(object):
for token, type, _ in self.unset_tokens:
if type == 0:
params.update({attr: int(token)})
- datetime(**params)
setattr(self, '_token_%s' % attr, token)
setattr(self, attr, int(token)) | Remove parser.py line of unknown purpose causing TypeError (#<I>) |
diff --git a/trezor_agent/gpg/decode.py b/trezor_agent/gpg/decode.py
index <HASH>..<HASH> 100644
--- a/trezor_agent/gpg/decode.py
+++ b/trezor_agent/gpg/decode.py
@@ -48,9 +48,10 @@ def _parse_nist256p1_verifier(mpi):
hashfunc=hashlib.sha256)
def _nist256p1_verify(signature, digest):
- vk.verify_digest(signature=signature,
- digest=digest,
- sigdecode=lambda rs, order: rs)
+ result = vk.verify_digest(signature=signature,
+ digest=digest,
+ sigdecode=lambda rs, order: rs)
+ log.debug('nist256p1 ECDSA signature is OK (%s)', result)
return _nist256p1_verify
@@ -62,7 +63,8 @@ def _parse_ed25519_verifier(mpi):
def _ed25519_verify(signature, digest):
sig = b''.join(util.num2bytes(val, size=32)
for val in signature)
- vk.verify(sig, digest)
+ result = vk.verify(sig, digest)
+ log.debug('ed25519 ECDSA signature is OK (%s)', result)
return _ed25519_verify | gpg: debug logging for ECDSA verification |
diff --git a/lib/connection.js b/lib/connection.js
index <HASH>..<HASH> 100644
--- a/lib/connection.js
+++ b/lib/connection.js
@@ -116,7 +116,7 @@ Connection.prototype._pollForPort = function(callback) {
});
});
- poll.every(100).ask(7);
+ poll.every(100).ask(20);
poll(function(foundPort) {
if (foundPort) { return callback(null); } | increases generosity with port polling after reset |
diff --git a/pickleshare.py b/pickleshare.py
index <HASH>..<HASH> 100644
--- a/pickleshare.py
+++ b/pickleshare.py
@@ -36,7 +36,7 @@ License: MIT open source license.
from __future__ import print_function
-__version__ = "0.7.1"
+__version__ = "0.7.2"
try:
from pathlib import Path
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -17,7 +17,7 @@ setup(
description="Tiny 'shelve'-like database with concurrency support",
license="MIT",
extras_require = {
- ':python_version == "2.7"': ['pathlib2'],
+ ':python_version < "3.4"': ['pathlib2'],
},
url="https://github.com/pickleshare/pickleshare",
keywords="database persistence pickle ipc shelve", | Require pathlib2 on Python <I> as well |
diff --git a/NavigationSample/Scripts/navigation.mvc.js b/NavigationSample/Scripts/navigation.mvc.js
index <HASH>..<HASH> 100644
--- a/NavigationSample/Scripts/navigation.mvc.js
+++ b/NavigationSample/Scripts/navigation.mvc.js
@@ -15,9 +15,12 @@
var elements = e.target.elements;
var req = new win.XMLHttpRequest();
req.onreadystatechange = onReady(req, true, null);
+ var inputTypes = /^(button|image|submit|reset|file)$/i;
var data = {};
for (var i = 0; i < elements.length; i++) {
- data[elements[0].name] = elements[0].value;
+ var element = elements[i];
+ if (!inputTypes.test(element.type) && !element.disabled)
+ data[element.name] = element.value;
}
e.preventDefault();
req.open('post', getAjaxLink(e.target.action)); | Only send down valid input types.
Go for blacklist instead of whitelist because unrecognised types are taken as text |
diff --git a/src/Denner/Client/ServiceDescription/Shop.php b/src/Denner/Client/ServiceDescription/Shop.php
index <HASH>..<HASH> 100644
--- a/src/Denner/Client/ServiceDescription/Shop.php
+++ b/src/Denner/Client/ServiceDescription/Shop.php
@@ -75,7 +75,7 @@ return array(
),
),
'responseClass' => Response\ListResponse::CLASS,
- 'responseDataRoot' => 'wines',
+ 'responseDataRoot' => 'articles',
),
),
'models' => array( | changed name of root element (wines -> articles) |
diff --git a/moco-core/src/main/java/com/github/dreamhead/moco/handler/AbstractProxyResponseHandler.java b/moco-core/src/main/java/com/github/dreamhead/moco/handler/AbstractProxyResponseHandler.java
index <HASH>..<HASH> 100644
--- a/moco-core/src/main/java/com/github/dreamhead/moco/handler/AbstractProxyResponseHandler.java
+++ b/moco-core/src/main/java/com/github/dreamhead/moco/handler/AbstractProxyResponseHandler.java
@@ -286,7 +286,7 @@ public abstract class AbstractProxyResponseHandler extends AbstractHttpResponseH
private Optional<URL> remoteUrl(final HttpRequest request) {
Optional<String> remoteUrl = this.doRemoteUrl(request);
- return remoteUrl.flatMap(actual -> doGetRemoteUrl(request, remoteUrl.get()));
+ return remoteUrl.flatMap(actual -> doGetRemoteUrl(request, actual));
}
private Optional<URL> doGetRemoteUrl(final HttpRequest request, final String actual) { | simplified remote url in abstract proxy response handler |
diff --git a/packages/node_modules/samples/browser-single-party-call-with-mute/test/wdio/spec/normal-dialing.js b/packages/node_modules/samples/browser-single-party-call-with-mute/test/wdio/spec/normal-dialing.js
index <HASH>..<HASH> 100644
--- a/packages/node_modules/samples/browser-single-party-call-with-mute/test/wdio/spec/normal-dialing.js
+++ b/packages/node_modules/samples/browser-single-party-call-with-mute/test/wdio/spec/normal-dialing.js
@@ -55,7 +55,7 @@ describe('samples/browser-single-party-call-with-mute', () => {
it('turns the local camera back on', () => {
browserSpock.assertText('#outgoing-video-stats', noStreamText);
browserSpock.click('[title="start sending video"]');
- browserSpock.waitForSpecificText('#camera-state', 'on', true);
+ browserSpock.waitForSpecificText('#camera-state', 'on');
});
it('ends the call', () => { | test(samples): fix failing mute test |
diff --git a/src/Users.php b/src/Users.php
index <HASH>..<HASH> 100644
--- a/src/Users.php
+++ b/src/Users.php
@@ -454,7 +454,9 @@ class Users
$this->app['logger.flash']->info(Trans::__('general.phrase.missing-root-jackpot'));
// If we reach this point, there is no user 'root'. We promote the current user.
- return $this->addRole($this->getCurrentUser(), 'root');
+ $user = $this->getCurrentUser();
+
+ return $this->addRole($user['id'], 'root');
}
/** | Only pass user's ID into Users::addRole() |
diff --git a/lib/python/dxpy/program_builder.py b/lib/python/dxpy/program_builder.py
index <HASH>..<HASH> 100644
--- a/lib/python/dxpy/program_builder.py
+++ b/lib/python/dxpy/program_builder.py
@@ -19,7 +19,7 @@ def get_program_spec(src_dir):
program_spec_file = os.path.join(src_dir, "dxprogram")
if not os.path.exists(program_spec_file):
program_spec_file = os.path.join(src_dir, "dxprogram.json")
- with open(os.path.join(src_dir, "dxprogram")) as fh:
+ with open(program_spec_file) as fh:
program_spec = json.load(fh)
validate_program_spec(program_spec) | Load either dxprogram or dxprogram.json, whichever is available. |
diff --git a/winpty/__init__.py b/winpty/__init__.py
index <HASH>..<HASH> 100644
--- a/winpty/__init__.py
+++ b/winpty/__init__.py
@@ -15,5 +15,6 @@ from .winpty_wrapper import PTY
# yapf: enable
PTY
-VERSION_INFO = (0, 2, 2, 'dev0')
+PtyProcess
+VERSION_INFO = (0, 3, 0, 'dev0')
__version__ = '.'.join(map(str, VERSION_INFO)) | Update version to <I>.dev0 |
diff --git a/py/testdir_multi_jvm/test_model_management.py b/py/testdir_multi_jvm/test_model_management.py
index <HASH>..<HASH> 100644
--- a/py/testdir_multi_jvm/test_model_management.py
+++ b/py/testdir_multi_jvm/test_model_management.py
@@ -498,7 +498,7 @@ models_to_build = [
# This DL job is SLOOOOWW.... at 1 epoch its still 33sec of train time
# which is half of the total runtime for this entire file. Ponder
# switching to a smaller dataset.
- ModelSpec.for_dataset('deeplearning_airlines_binomial', 'deeplearning', datasets['airlines_binomial'], {'epochs': '1' } ),
+ ModelSpec.for_dataset('deeplearning_airlines_binomial', 'deeplearning', datasets['airlines_binomial'], {'hidden': '[50, 50]', 'epochs': '1.0' } ),
ModelSpec.for_dataset('deeplearning_iris_multinomial', 'deeplearning', datasets['iris_multinomial'], { } ),
ModelSpec.for_dataset('gbm_prostate_regression', 'gbm', datasets['prostate_regression'], { } ), | Modify DL parameters to run faster and pass (1 vs <I>). |
diff --git a/tests/PuliBinTest.php b/tests/PuliBinTest.php
index <HASH>..<HASH> 100644
--- a/tests/PuliBinTest.php
+++ b/tests/PuliBinTest.php
@@ -12,7 +12,9 @@
namespace Puli\Cli\Tests;
use PHPUnit_Framework_TestCase;
+use Symfony\Component\Process\PhpExecutableFinder;
use Symfony\Component\Process\Process;
+use Webmozart\PathUtil\Path;
/**
* @since 1.0
@@ -23,8 +25,14 @@ class PuliBinTest extends PHPUnit_Framework_TestCase
{
public function testRunHelp()
{
- $rootDir = realpath(__DIR__.'/..');
- $process = new Process($rootDir.'/bin/puli');
+ $phpFinder = new PhpExecutableFinder();
+
+ if (!($php = $phpFinder->find())) {
+ $this->markTestSkipped('The "php" command could not be found.');
+ }
+
+ $rootDir = Path::normalize(realpath(__DIR__.'/..'));
+ $process = new Process($php.' '.$rootDir.'/bin/puli');
$status = $process->run();
$output = $process->getOutput(); | Fixed PuliBinTest on Windows |
diff --git a/src/main/org/openscience/cdk/inchi/InChIGenerator.java b/src/main/org/openscience/cdk/inchi/InChIGenerator.java
index <HASH>..<HASH> 100644
--- a/src/main/org/openscience/cdk/inchi/InChIGenerator.java
+++ b/src/main/org/openscience/cdk/inchi/InChIGenerator.java
@@ -218,15 +218,15 @@ public class InChIGenerator {
}
// Check whether isotopic
- int isotopeNumber = atom.getMassNumber();
- if (isotopeNumber > 0 && ifact != null) {
+ Integer isotopeNumber = atom.getMassNumber();
+ if (isotopeNumber != CDKConstants.UNSET && ifact != null) {
IAtom isotope = atomContainer.getBuilder().newAtom(el);
ifact.configure(isotope);
- if (isotope.getMassNumber() == isotopeNumber) {
+ if (isotope.getMassNumber().intValue() == isotopeNumber.intValue()) {
isotopeNumber = 0;
}
}
- if (isotopeNumber != 0) {
+ if (isotopeNumber != CDKConstants.UNSET) {
iatom.setIsotopicMass(isotopeNumber);
} | Fixed a NPE caused by mass number now being an Object instead of native
git-svn-id: <URL> |
diff --git a/example/fetch_all_patrons.rb b/example/fetch_all_patrons.rb
index <HASH>..<HASH> 100644
--- a/example/fetch_all_patrons.rb
+++ b/example/fetch_all_patrons.rb
@@ -23,7 +23,7 @@ campaign_id = campaign_response.data[0].id
all_pledges = []
cursor = nil
while true do
- page_response = api_client.fetch_page_of_pledges(campaign_id, 25, cursor)
+ page_response = api_client.fetch_page_of_pledges(campaign_id, { :count => 25, :cursor => cursor })
all_pledges += page_response.data
next_page_link = page_response.links[page_response.data]['next']
if next_page_link | Fix example/fetch_all_patrons.rb (#<I>) |
diff --git a/filters/test.py b/filters/test.py
index <HASH>..<HASH> 100644
--- a/filters/test.py
+++ b/filters/test.py
@@ -169,7 +169,18 @@ class BaseFilterTestCase(TestCase):
:param kwargs:
Keyword params to pass to the Filter's initializer.
"""
- assert len(args) > 0, 'First argument must be the filtered value.'
+ if not callable(self.filter_type):
+ self.fail('{cls}.filter_type is not callable.'.format(
+ cls = type(self).__name__,
+ ))
+
+ if not args:
+ self.fail(
+ 'First argument to {cls}._filter '
+ 'must be the filtered value.'.format(
+ cls = type(self).__name__,
+ ),
+ )
return FilterRunner(
starting_filter = self.filter_type(*args[1:], **kwargs), | Better error messages when filter test case is misconfigured. |
diff --git a/src/OptionsStore.php b/src/OptionsStore.php
index <HASH>..<HASH> 100644
--- a/src/OptionsStore.php
+++ b/src/OptionsStore.php
@@ -92,14 +92,13 @@ class OptionsStore
*
* @since 0.1.0
*
- * @param Option $option Option with new value.
- * @param bool $persist Whether to immediately persist the change.
+ * @param Option $option Option with new value.
*
* @return bool Whether the update was successful.
*/
- public function update(Option $option, bool $persist = true): bool
+ public function update(Option $option): bool
{
-
+ return $this->repository->save($option);
}
/**
@@ -107,15 +106,16 @@ class OptionsStore
*
* @since 0.1.0
*
- * @param string $key Option key to set the value of.
- * @param mixed $value New value to set the option to.
- * @param bool $persist Whether to immediately persist the change.
+ * @param string $key Option key to set the value of.
+ * @param mixed $value New value to set the option to.
*
* @return bool Whether the change of value was successful.
*/
- public function set(string $key, $value, bool $persist = true): bool
+ public function set(string $key, $value): bool
{
+ $option = $this->repository->find($key);
+ return $this->update($option->setValue($value));
}
/** | Filled some empty stubs in `OptionsStore` class. |
diff --git a/test/response_test.rb b/test/response_test.rb
index <HASH>..<HASH> 100644
--- a/test/response_test.rb
+++ b/test/response_test.rb
@@ -134,6 +134,7 @@ class RubySamlTest < Minitest::Test
no_signature_response.settings.idp_cert_fingerprint = "28:74:9B:E8:1F:E8:10:9C:A8:7C:A9:C3:E3:C5:01:6C:92:1C:B4:BA"
XMLSecurity::SignedDocument.any_instance.expects(:validate_signature).returns(true)
assert no_signature_response.validate!
+ XMLSecurity::SignedDocument.any_instance.unstub(:validate_signature)
end
it "validate ADFS assertions" do | Unstub after use, otherwise conflicts with other test in JRuby |
diff --git a/lib/fakes.rb b/lib/fakes.rb
index <HASH>..<HASH> 100644
--- a/lib/fakes.rb
+++ b/lib/fakes.rb
@@ -17,3 +17,8 @@ module Kernel
return Fakes::Fake.new
end
end
+class Object
+ def matches
+ return Fakes::Matches
+ end
+end | Added the matches convenience method |
diff --git a/spec/yelp/client_spec.rb b/spec/yelp/client_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/yelp/client_spec.rb
+++ b/spec/yelp/client_spec.rb
@@ -16,8 +16,8 @@ describe Yelp::Client do
subject { client }
context 'with valid configuration' do
- its(:configuration) { should be_a(Yelp::Configuration) }
- its(:configuration) { should be_frozen }
+ its(:configuration) { is_expected.to be_a Yelp::Configuration }
+ its(:configuration) { is_expected.to be_frozen }
it 'should not be reconfigurable' do
expect {
@@ -52,8 +52,8 @@ describe Yelp::Client do
expect { configure_client_with_api_keys(valid_api_keys) }.to raise_error
end
- it { should be_a(Yelp::Configuration) }
- it { should be_frozen }
+ it { is_expected.to be_a Yelp::Configuration }
+ it { is_expected.to be_frozen }
end
context 'with invalid configuration' do | Updates to expect syntax in client_spec |
diff --git a/lib/buoy_data/noaa_buoy_forecast.rb b/lib/buoy_data/noaa_buoy_forecast.rb
index <HASH>..<HASH> 100644
--- a/lib/buoy_data/noaa_buoy_forecast.rb
+++ b/lib/buoy_data/noaa_buoy_forecast.rb
@@ -137,7 +137,11 @@ module BuoyData
#"http://polar.ncep.noaa.gov/waves/latest_run/wna.#{buoy_id}.bull"
#
- "http://polar.ncep.noaa.gov/waves/WEB_P/multi_1.latest_run/plots/multi_1.#{buoy_id}.bull"
+ "#{hostname}/waves/WEB/multi_1.latest_run/plots/multi_1.#{buoy_id}.bull"
+ end
+
+ def hostname(protocol = 'http')
+ "#{protocol}://polar.ncep.noaa.gov"
end
# The header is the first 7 lines | Update noaa_buoy_forecast base url. |
diff --git a/src/Collection/Iterator/BufferedIterator.php b/src/Collection/Iterator/BufferedIterator.php
index <HASH>..<HASH> 100644
--- a/src/Collection/Iterator/BufferedIterator.php
+++ b/src/Collection/Iterator/BufferedIterator.php
@@ -15,13 +15,14 @@
namespace Cake\Collection\Iterator;
use Cake\Collection\Collection;
+use Countable;
use SplDoublyLinkedList;
/**
* Creates an iterator from another iterator that will keep the results of the inner
* iterator in memory, so that results don't have to be re-calculated.
*/
-class BufferedIterator extends Collection {
+class BufferedIterator extends Collection implements Countable {
/**
* The in-memory cache containing results from previous iterators
@@ -151,4 +152,24 @@ class BufferedIterator extends Collection {
}
}
+/**
+ * Returns the number or items in this collection
+ *
+ * @return int
+ */
+ public function count() {
+ if ($this->getInnerIterator() instanceof Countable) {
+ return $this->getInnerIterator()->count();
+ }
+
+ if (!$this->_started) {
+ $this->rewind();
+ }
+
+ while ($this->valid()) {
+ $this->next();
+ }
+
+ return $this->_buffer->count();
+ }
} | Making BufferedIterator Countable as an optimization for the ORM |
diff --git a/sortinghat/cmd/init.py b/sortinghat/cmd/init.py
index <HASH>..<HASH> 100644
--- a/sortinghat/cmd/init.py
+++ b/sortinghat/cmd/init.py
@@ -119,7 +119,10 @@ class Init(Command):
import csv
import pkg_resources
- f = pkg_resources.resource_stream('sortinghat', 'data/countries.csv')
- reader = csv.DictReader(f, fieldnames=['name', 'code', 'alpha3'])
+ filename = pkg_resources.resource_filename('sortinghat', 'data/countries.csv')
- return [Country(**c) for c in reader]
+ with open(filename, 'r') as f:
+ reader = csv.DictReader(f, fieldnames=['name', 'code', 'alpha3'])
+ countries = [Country(**c) for c in reader]
+
+ return countries | [cmd:init] Update CSV reader to support Python 2/3
In Python 3, the CSV reader needs a str object as input
and not a stream object. pkg_resources returned a stream
object so, it was needed to open the CSV file in text
mode. This method is compatible on both Python versions. |
diff --git a/tests/unit_project/test_core/test_publishable.py b/tests/unit_project/test_core/test_publishable.py
index <HASH>..<HASH> 100644
--- a/tests/unit_project/test_core/test_publishable.py
+++ b/tests/unit_project/test_core/test_publishable.py
@@ -43,7 +43,7 @@ class TestPublishable(DatabaseTestCase):
self.assert_equals(None, self.publishable.main_placement)
def test_main_placement_with_two_placements_on_one_site(self):
- p = Placement.objects.create(
+ Placement.objects.create(
target_ct=self.publishable_ct,
target_id=self.publishable.pk,
category=self.category,
@@ -64,7 +64,7 @@ class TestPublishable(DatabaseTestCase):
slug=u'zai-jian-category',
)
- p = Placement.objects.create(
+ Placement.objects.create(
target_ct=self.publishable_ct,
target_id=self.publishable.pk,
category=category, | Removed unused variables
(aka testing buildbots :)) |
diff --git a/lib/ransack/helpers/form_helper.rb b/lib/ransack/helpers/form_helper.rb
index <HASH>..<HASH> 100644
--- a/lib/ransack/helpers/form_helper.rb
+++ b/lib/ransack/helpers/form_helper.rb
@@ -77,15 +77,15 @@ module Ransack
def url(routing_proxy)
if routing_proxy && respond_to?(routing_proxy)
- send(routing_proxy).url_for(options_for_url(@options))
+ send(routing_proxy).url_for(options_for_url)
else
- url_for(options_for_url(@options))
+ url_for(options_for_url)
end
end
- def options_for_url(options)
- options[@search_object.context.search_key] = search_and_sort_params
- params.merge(options)
+ def options_for_url
+ @options[@search_object.context.search_key] = search_and_sort_params
+ params.merge(@options)
end
def search_and_sort_params | Don't pass @options ivar needlessly |
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -6,5 +6,5 @@ setup(
description='Utility functions for strings checking and manipulation.',
author='Davide Zanotti',
author_email='davidezanotti@gmail.com',
- # url='https://www.python.org/sigs/distutils-sig/',
+ url='https://github.com/daveoncode/python-string-utils',
) | added github project url to setup.py |
diff --git a/src/PeskyCMF/Config/helpers.php b/src/PeskyCMF/Config/helpers.php
index <HASH>..<HASH> 100644
--- a/src/PeskyCMF/Config/helpers.php
+++ b/src/PeskyCMF/Config/helpers.php
@@ -11,11 +11,13 @@ if (!function_exists('routeTpl')) {
function routeTpl($routeName, array $parameters = [], array $tplParams = [], $absolute = false) {
$replacements = [];
foreach ($tplParams as $name => $tplName) {
+ $dotJsVarPrefix = '';
if (is_numeric($name)) {
- $name = 'it.' . $tplName;
+ $name = $tplName;
+ $dotJsVarPrefix = 'it.';
}
$parameters[$name] = '__' . $name . '__';
- $replacements['%' . preg_quote($parameters[$name], '%') . '%'] = "{{= {$tplName} }}";
+ $replacements['%' . preg_quote($parameters[$name], '%') . '%'] = "{{= {$dotJsVarPrefix}{$tplName} }}";
}
$url = route($routeName, $parameters, $absolute);
return preg_replace(array_keys($replacements), array_values($replacements), $url); | helpers.php - routeTpl - added possibility to provide any template var name, not only vars in 'it' object inside dotJs (fix) |
diff --git a/tasks/html.js b/tasks/html.js
index <HASH>..<HASH> 100644
--- a/tasks/html.js
+++ b/tasks/html.js
@@ -80,7 +80,7 @@ function executeTransform (filepath, incremental) {
var fileInPath = filepath
var fileRelativePath = path.relative(config.html.src, fileInPath)
- page.source = fileRelativePath
+ page.source = fileRelativePath // fileRelativePath changes below hence we assign it here
var fileOutDir = path.dirname(path.join(config.html.dest, fileRelativePath))
var fileParsedPath = path.parse(fileInPath)
@@ -92,6 +92,7 @@ function executeTransform (filepath, incremental) {
if (blog) {
fileOutDir = blog.config.dest
+ fileRelativePath = path.join(path.relative(config.out, blog.config.dest), (fileName + '.' + fileExt))
post = blogs.getPost(blog, fileName)
if (post === null) {
return // most probably a draft in production build | Fixing fileRelativePath for blog posts |
diff --git a/src/index.js b/src/index.js
index <HASH>..<HASH> 100644
--- a/src/index.js
+++ b/src/index.js
@@ -187,13 +187,6 @@ class Libp2p extends EventEmitter {
})
this._peerDiscovered = this._peerDiscovered.bind(this)
-
- // promisify all instance methods
- ;['start', 'stop', 'dial', 'dialProtocol', 'dialFSM', 'hangUp', 'ping'].forEach(method => {
- this[method] = promisify(this[method], {
- context: this
- })
- })
}
/**
@@ -557,6 +550,11 @@ class Libp2p extends EventEmitter {
}
}
+// promisify all instance methods
+['start', 'stop', 'dial', 'dialProtocol', 'dialFSM', 'hangUp', 'ping'].forEach(method => {
+ Libp2p[method] = promisify(Libp2p[method])
+})
+
module.exports = Libp2p
/**
* Like `new Libp2p(options)` except it will create a `PeerInfo` | fix: dont override methods of created instance (#<I>)
* fix: dont override methods of created instance
* chore: fix lint |
diff --git a/lib/block.js b/lib/block.js
index <HASH>..<HASH> 100644
--- a/lib/block.js
+++ b/lib/block.js
@@ -36,11 +36,11 @@ function Block() {
Block.getDescription = function( type ) {
switch( type ) {
case UDIF.BLOCK.ZEROFILL: return 'ZEROFILL'; break
- case UDIF.BLOCK.RAW: return 'UDRW (UDIF read/write) / UDRO (UDIF read-only)'; break
- case UDIF.BLOCK.FREE: return 'FREE (Unallocated)'; break
- case UDIF.BLOCK.UDCO: return 'UDCO (UDIF ADC-compressed)'; break
- case UDIF.BLOCK.UDZO: return 'UDZO (UDIF zlib-compressed)'; break
- case UDIF.BLOCK.UDBZ: return 'UDBZ (UDIF bzip2-compressed)'; break
+ case UDIF.BLOCK.RAW: return 'UDRW (raw)'; break
+ case UDIF.BLOCK.FREE: return 'FREE (unallocated)'; break
+ case UDIF.BLOCK.UDCO: return 'UDCO (adc-compressed)'; break
+ case UDIF.BLOCK.UDZO: return 'UDZO (zlib-compressed)'; break
+ case UDIF.BLOCK.UDBZ: return 'UDBZ (bzip2-compressed)'; break
case UDIF.BLOCK.COMMENT: return 'COMMENT'; break
case UDIF.BLOCK.TERMINATOR: return 'TERMINATOR'; break
default: return 'UNKNOWN'; break | feat(block): Improve block type descriptions |
diff --git a/src/AnyContent/Client/Repository.php b/src/AnyContent/Client/Repository.php
index <HASH>..<HASH> 100755
--- a/src/AnyContent/Client/Repository.php
+++ b/src/AnyContent/Client/Repository.php
@@ -30,8 +30,13 @@ class Repository
}
- public function getContentTypeDefinition($contentTypeName)
+ public function getContentTypeDefinition($contentTypeName = null)
{
+ if ($contentTypeName == null AND $this->contentTypeDefinition)
+ {
+ return $this->contentTypeDefinition;
+ }
+
if ($this->hasContentType($contentTypeName))
{
$cmdl = $this->client->getCMDL($contentTypeName); | - added possibility to retrieve content type definition of a repository object without specifying the contentTypeName (if a content type is selected) |
diff --git a/lib/config.js b/lib/config.js
index <HASH>..<HASH> 100644
--- a/lib/config.js
+++ b/lib/config.js
@@ -22,7 +22,6 @@ Config.defaults = {
*/
apis: [
'fs',
- 'foo',
'secrets'
],
@@ -31,7 +30,6 @@ Config.defaults = {
*/
defaultManifestApis: [
'fs',
- 'foo',
'secrets'
], | [TASK] Remove foo from config |
diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -276,7 +276,7 @@ async function exec(options = {}) {
}
if (errorLines.length) {
let error = errorLines.join('\n');
- error = error.replace(/===*.*====*\nLicense acceptance recorded. Continuing.\n?/, '');
+ error = error.replace(/===*.*====*\\nLicense acceptance recorded. Continuing.\n?/, '');
const acceptLicenseMessage = /To accept the message please run speedtest interactively or use the following:[\s\S]*speedtest --accept-license/;
const acceptGdprMessage = /To accept the message please run speedtest interactively or use the following:[\s\S]*speedtest --accept-gdpr/;
if (acceptLicenseMessage.test(error)) { | fix: fix regex issue in error check
escaped \n in the regex check, as the string to test is using \n as string. this fixes an issue where the "license accepted" info message would throw an error. |
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -3,7 +3,7 @@ import f3
setup(
name='f3',
- version='1.0.2',
+ version='1.0.4',
license='MIT',
long_description=open('README.rst').read(),
author='Ben Montet', | oops, forgot to increment version |
diff --git a/writer.go b/writer.go
index <HASH>..<HASH> 100644
--- a/writer.go
+++ b/writer.go
@@ -243,7 +243,6 @@ func (p *MediaPlaylist) Encode() *bytes.Buffer {
p.buf.WriteString("#EXTM3U\n#EXT-X-VERSION:")
p.buf.WriteString(strver(p.ver))
p.buf.WriteRune('\n')
- p.buf.WriteString("#EXT-X-ALLOW-CACHE:NO\n")
// default key (workaround for Widevine)
if p.Key != nil {
p.buf.WriteString("#EXT-X-KEY:")
@@ -262,6 +261,7 @@ func (p *MediaPlaylist) Encode() *bytes.Buffer {
switch p.MediaType {
case EVENT:
p.buf.WriteString("EVENT\n")
+ p.buf.WriteString("#EXT-X-ALLOW-CACHE:NO\n")
case VOD:
p.buf.WriteString("VOD\n")
} | Move the writing of the no cache header to inside playlist type event.
Let the client determine if it should cache or not unless we are
publishing a live stream. |
diff --git a/src/Leevel/Kernel/App.php b/src/Leevel/Kernel/App.php
index <HASH>..<HASH> 100644
--- a/src/Leevel/Kernel/App.php
+++ b/src/Leevel/Kernel/App.php
@@ -164,7 +164,10 @@ class App implements IApp
return \PHP_SAPI === 'cli';
}
- return $this->container->make('request')->isConsole();
+ /** @var \Leevel\Http\Request $request */
+ $request = $this->container->make('request');
+
+ return $request->isConsole();
}
/**
diff --git a/src/Leevel/Kernel/ExceptionRuntime.php b/src/Leevel/Kernel/ExceptionRuntime.php
index <HASH>..<HASH> 100644
--- a/src/Leevel/Kernel/ExceptionRuntime.php
+++ b/src/Leevel/Kernel/ExceptionRuntime.php
@@ -328,7 +328,7 @@ abstract class ExceptionRuntime implements IExceptionRuntime
extract($vars);
ob_start();
require $filepath;
- $content = ob_get_contents();
+ $content = ob_get_contents() ?: '';
ob_end_clean();
return $content; | fix(kernel): fix for phpstan level 7 |
diff --git a/logdna/logdna.py b/logdna/logdna.py
index <HASH>..<HASH> 100644
--- a/logdna/logdna.py
+++ b/logdna/logdna.py
@@ -33,7 +33,7 @@ class LogDNAHandler(logging.Handler):
if message and message['line']:
if self.max_length and len(message['line']) > defaults['MAX_LINE_LENGTH']:
message['line'] = message['line'][:defaults['MAX_LINE_LENGTH']] + ' (cut off, too long...)'
- print('Line was longer than ' + defaults['MAX_LINE_LENGTH'] + ' chars and was truncated.')
+            print('Line was longer than {0} chars and was truncated.'.format(defaults['MAX_LINE_LENGTH']))
self.bufByteLength += sys.getsizeof(message)
self.buf.append(message) | Fixing TypeError in error printing |
diff --git a/git/repo/base.py b/git/repo/base.py
index <HASH>..<HASH> 100644
--- a/git/repo/base.py
+++ b/git/repo/base.py
@@ -713,11 +713,14 @@ class Repo(object):
committed_date=int(props[b'committer-time']))
commits[hexsha] = c
else:
- # Discard the next line (it's a filename end tag)
- line = next(stream)
- tag, value = line.split(b' ', 1)
- assert tag == b'filename', 'Unexpected git blame output'
- orig_filename = value
+ # Discard all lines until we find "filename" which is
+ # guaranteed to be the last line
+ while True:
+ line = next(stream)
+ tag, value = line.split(b' ', 1)
+ if tag == b'filename':
+ orig_filename = value
+ break
yield BlameEntry(commits[hexsha],
range(lineno, lineno + num_lines), | Ignore all lines of subsequent hunks until last one is found
Git version <I>+ introduced extra lines into the subsequent hunk
sections for incremental blame output. The documentation notes that
parsers of this output should ignore all lines between the start and end
for robust parsing. |
diff --git a/spec/graph_matching/algorithm/mwm_general_spec.rb b/spec/graph_matching/algorithm/mwm_general_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/graph_matching/algorithm/mwm_general_spec.rb
+++ b/spec/graph_matching/algorithm/mwm_general_spec.rb
@@ -90,9 +90,9 @@ RSpec.describe GraphMatching::Algorithm::MWMGeneral do
[3, 4, 2]
]
m = described_class.new(g).match
- expect(m.vertexes).to match_array([1, 2, 3, 4])
- expect(m.has_edge?([1, 2])).to eq(true)
- expect(m.has_edge?([3, 4])).to eq(true)
+ expect(m.vertexes).to match_array([0, 1, 2, 3])
+ expect(m.has_edge?([0, 1])).to eq(true)
+ expect(m.has_edge?([2, 3])).to eq(true)
expect(m.weight(g)).to eq(6)
end
end | Update test from 1-indexing to 0-indexing |
diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -1,6 +1,6 @@
(function () {
- var helpers = (function () {
+ function assemble() {
var helpers = {
noop: function () {},
no: function () { return false; },
@@ -62,15 +62,20 @@
}
}
return d3h;
- }());
+ }
- if (typeof window === 'object') {
- /* global window */
- window.d3h = helpers;
- } else if (typeof module === 'object') {
- module.exports = helpers;
- } else {
- throw new Error('Do not know how to exports D3 helpers');
+ function register(value, name) {
+ if (typeof window === 'object') {
+ /* global window */
+ window[name] = value;
+ } else if (typeof module === 'object') {
+ module.exports = value;
+ } else {
+ throw new Error('Do not know how to register ' + name);
+ }
}
+ var d3h = assemble();
+ register(d3h, 'd3h');
+
}()); | refactored, fixes #6 |
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -16,7 +16,7 @@ def long_desc():
setup(
name='cellulario',
- version='0',
+ version='1',
description='Cellular IO',
author='Justin Mayfield',
author_email='tooker@gmail.com', | v1 instead of 0 which was already used. |
diff --git a/cmd/server-mux.go b/cmd/server-mux.go
index <HASH>..<HASH> 100644
--- a/cmd/server-mux.go
+++ b/cmd/server-mux.go
@@ -188,9 +188,9 @@ func NewMuxServer(addr string, handler http.Handler) *MuxServer {
m := &MuxServer{
Server: http.Server{
Addr: addr,
- // Adding timeout of 10 minutes for unresponsive client connections.
- ReadTimeout: 10 * time.Minute,
- WriteTimeout: 10 * time.Minute,
+ // Do not add any timeouts Golang net.Conn
+ // closes connections right after 10mins even
+ // if they are not idle.
Handler: handler,
MaxHeaderBytes: 1 << 20,
}, | server: http.Server do not add deadlines causes issues. (#<I>)
Adding deadlines is a no go since Golang doesn't back off
the timers if there is an active i/o in progress.
It is meant to be for applications to handle this themselves
and manually progress the deadlines.
Fixes #<I> |
diff --git a/mgmtfn/k8splugin/kubeClient.go b/mgmtfn/k8splugin/kubeClient.go
index <HASH>..<HASH> 100644
--- a/mgmtfn/k8splugin/kubeClient.go
+++ b/mgmtfn/k8splugin/kubeClient.go
@@ -237,7 +237,11 @@ func (c *APIClient) GetPodLabel(ns, name, label string) (string, error) {
}
}
+ c.podCache.labelsMutex.Lock()
+ defer c.podCache.labelsMutex.Unlock()
+
res, found := c.podCache.labels[label]
+
if found {
return res, nil
} | Add lock for getting podCache label |
diff --git a/iopipe/contrib/profiler/plugin.py b/iopipe/contrib/profiler/plugin.py
index <HASH>..<HASH> 100644
--- a/iopipe/contrib/profiler/plugin.py
+++ b/iopipe/contrib/profiler/plugin.py
@@ -26,10 +26,9 @@ class ProfilerPlugin(Plugin):
"""
Instantiates the profiler plugin
- :param enabled: Whether or not the profiler should be enabled for all invocations.
- Alternatively this plugin can be enabled/disabled via
- the `IOPIPE_PROFILER_ENABLED` environment
- variable.
+ :param enabled: Whether or not the profiler should be enabled for all
+ invocations. Alternatively this plugin can be enabled/disabled
+ via the `IOPIPE_PROFILER_ENABLED` environment variable.
:type enabled: bool
"""
self._enabled = enabled
@@ -65,10 +64,10 @@ class ProfilerPlugin(Plugin):
def post_invoke(self, event, context):
if self.profile is not None:
self.profile.disable()
+ self.context.iopipe.label("@iopipe/plugin-profiler")
def pre_report(self, report):
if self.profile is not None:
- self.context.iopipe.label("@iopipe/plugin-profiler")
if self.signed_request is not None:
if isinstance(self.signed_request, Future):
wait([self.signed_request]) | Add profiler auto label earlier to ensure it is added to report |
diff --git a/lib/tugboat/cli.rb b/lib/tugboat/cli.rb
index <HASH>..<HASH> 100644
--- a/lib/tugboat/cli.rb
+++ b/lib/tugboat/cli.rb
@@ -87,12 +87,12 @@ module Tugboat
method_option "size",
:type => :numeric,
:aliases => "-s",
- :default => 64,
+ :default => 66,
:desc => "The size_id of the droplet"
method_option "image",
:type => :numeric,
:aliases => "-i",
- :default => 2676,
+ :default => 284203,
:desc => "The image_id of the droplet"
method_option "region",
:type => :numeric, | Update the defaults for droplet creation
DigitalOcean seems to have changed the ID's of their server
images...this update continues to use:
Ubuntu <I> x<I> Server (id: <I>, distro: Ubuntu)
As the default image.
I also lowered the default droplet size to <I>mb. |
diff --git a/activemodel/lib/active_model/dirty.rb b/activemodel/lib/active_model/dirty.rb
index <HASH>..<HASH> 100644
--- a/activemodel/lib/active_model/dirty.rb
+++ b/activemodel/lib/active_model/dirty.rb
@@ -255,7 +255,7 @@ module ActiveModel
end
# Remove changes information for the provided attributes.
- def clear_attribute_changes(attributes)
+ def clear_attribute_changes(attributes) # :doc:
attributes_changed_by_setter.except!(*attributes)
end
end | [Enh] Changed the visibility of the ActiveModel::Dirty#clear_attribute_changes method
In Rails <I> it is impossible to define a custom default value for a model's
attribute without making it appear as _changed?, especially when the model
is first initialized. Making this method publicly visible will allow such a behaviour,
without the need to use private APIs. |
diff --git a/includes/class-freemius.php b/includes/class-freemius.php
index <HASH>..<HASH> 100755
--- a/includes/class-freemius.php
+++ b/includes/class-freemius.php
@@ -18469,9 +18469,16 @@
function _get_invoice_api_url( $payment_id = false ) {
$this->_logger->entrance();
- return $this->get_api_user_scope()->get_signed_url(
+ $url = $this->get_api_user_scope()->get_signed_url(
"/payments/{$payment_id}/invoice.pdf"
);
+
+ if ( ! fs_starts_with( $url, 'https://' ) ) {
+ // Always use HTTPS for invoices.
+ $url = 'https' . substr( $url, 4 );
+ }
+
+ return $url;
}
/** | [invoices] [update] Always use HTTPS for invoices as those are opened on a new browser page anyways. |
diff --git a/lib/godmin/resources/resource_controller.rb b/lib/godmin/resources/resource_controller.rb
index <HASH>..<HASH> 100644
--- a/lib/godmin/resources/resource_controller.rb
+++ b/lib/godmin/resources/resource_controller.rb
@@ -76,7 +76,11 @@ module Godmin
protected
def set_resource_service
- @resource_service = resource_service_class.new
+ @resource_service = if authentication_enabled?
+ resource_service_class.new(nil, admin_user: admin_user)
+ else
+ resource_service_class.new
+ end
end
def set_resource_class
diff --git a/lib/godmin/resources/resource_service.rb b/lib/godmin/resources/resource_service.rb
index <HASH>..<HASH> 100644
--- a/lib/godmin/resources/resource_service.rb
+++ b/lib/godmin/resources/resource_service.rb
@@ -15,7 +15,10 @@ module Godmin
include Pagination
include Scopes
- def initialize(resource_class = nil)
+ attr_reader :options
+
+ def initialize(resource_class = nil, options = {})
+ @options = options
@resource_class = resource_class
end | Allow passing in options to the resource services |
diff --git a/lib/puppet/parser/functions.rb b/lib/puppet/parser/functions.rb
index <HASH>..<HASH> 100644
--- a/lib/puppet/parser/functions.rb
+++ b/lib/puppet/parser/functions.rb
@@ -162,7 +162,8 @@ module Functions
type is defined, either as a native type or a defined type, or whether a class is defined.
This is useful for checking whether a class is defined and only including it if it is.
This function can also test whether a resource has been defined, using resource references
- (e.g., ``if defined(File['/tmp/myfile'] { ... }``).") do |vals|
+ (e.g., ``if defined(File['/tmp/myfile'] { ... }``). This function is unfortunately
+ dependent on the parse order of the configuration when testing whether a resource is defined.") do |vals|
result = false
vals.each do |val|
case val | Applying a version of the diff to the defined() docs from David Schmitt
git-svn-id: <URL> |
diff --git a/salt/renderers/json_mako.py b/salt/renderers/json_mako.py
index <HASH>..<HASH> 100644
--- a/salt/renderers/json_mako.py
+++ b/salt/renderers/json_mako.py
@@ -13,11 +13,11 @@ from salt.exceptions import SaltRenderError
import salt.utils.templates
-def render(template):
+def render(template_file, env='', sls=''):
'''
Render the data passing the functions and grains into the rendering system
'''
- if not os.path.isfile(template):
+ if not os.path.isfile(template_file):
return {}
tmp_data = salt.utils.templates.mako( | Use consistent signature for json_mako renderer
Use the same signature for both jinja2 and mako templates
The `env` & `sls` names were undefined |
diff --git a/js/bootstrap-select.js b/js/bootstrap-select.js
index <HASH>..<HASH> 100644
--- a/js/bootstrap-select.js
+++ b/js/bootstrap-select.js
@@ -1214,7 +1214,7 @@
li.appendChild(a);
dropdownHeader.appendChild(text.cloneNode(true));
- if (this._liWidest) menuInner.appendChild(this._liWidest);
+ if (this._liWidest) menuInner.appendChild(this._liWidest.cloneNode(true));
menuInner.appendChild(li);
menuInner.appendChild(divider);
menuInner.appendChild(dropdownHeader); | prevent removal of li from the DOM when calling refresh |
diff --git a/xgraphics/text.go b/xgraphics/text.go
index <HASH>..<HASH> 100644
--- a/xgraphics/text.go
+++ b/xgraphics/text.go
@@ -33,7 +33,7 @@ func (im *Image) Text(x, y int, clr color.Color, fontSize float64,
c.SetSrc(textClr)
// Now let's actually draw the text...
- pt := freetype.Pt(x, y+c.FUnitToPixelRU(font.UnitsPerEm()))
+ pt := freetype.Pt(x, y+int(font.FUnitsPerEm()))
newpt, err := c.DrawString(text, pt)
if err != nil {
return 0, 0, err
@@ -54,10 +54,7 @@ func (im *Image) Text(x, y int, clr color.Color, fontSize float64,
func TextMaxExtents(font *truetype.Font, fontSize float64,
text string) (width int, height int) {
- // We need a context to calculate the extents
- c := ftContext(font, fontSize)
-
- emSquarePix := c.FUnitToPixelRU(font.UnitsPerEm())
+ emSquarePix := int(font.FUnitsPerEm())
return len(text) * emSquarePix, emSquarePix
} | fix breakage due to change in freetype package. |
diff --git a/js/kucoin2.js b/js/kucoin2.js
index <HASH>..<HASH> 100644
--- a/js/kucoin2.js
+++ b/js/kucoin2.js
@@ -556,14 +556,16 @@ module.exports = class kucoin2 extends Exchange {
const marketId = this.marketId (symbol);
// required param, cannot be used twice
const clientOid = this.uuid ();
- const request = {
+ let request = {
'clientOid': clientOid,
- 'price': this.priceToPrecision (symbol, price),
'side': side,
'size': this.amountToPrecision (symbol, amount),
'symbol': marketId,
'type': type,
};
+ if (type !== 'market') {
+ request['price'] = this.priceToPrecision (symbol, price);
+ }
const response = await this.privatePostOrders (this.extend (request, params));
const responseData = response['data'];
return { | kucoin fix for market orders |
diff --git a/activesupport/lib/active_support/reloader.rb b/activesupport/lib/active_support/reloader.rb
index <HASH>..<HASH> 100644
--- a/activesupport/lib/active_support/reloader.rb
+++ b/activesupport/lib/active_support/reloader.rb
@@ -58,7 +58,7 @@ module ActiveSupport
prepare!
end
- def self.run! # :nodoc:
+ def self.run!(reset: false) # :nodoc:
if check!
super
else | Fix reloader to work with new Executor signature
This is a follow up to [CVE-<I>-<I>]. |
diff --git a/lib/index.js b/lib/index.js
index <HASH>..<HASH> 100644
--- a/lib/index.js
+++ b/lib/index.js
@@ -370,6 +370,7 @@ Server.prototype.serve = function(req, res){
}
debug('serve client source');
+ res.setHeader("Cache-Control", "public, max-age=0");
res.setHeader('Content-Type', 'application/javascript');
res.setHeader('ETag', expectedEtag);
res.writeHead(200); | [feat] Add cache-control header when serving the client source (#<I>) |
diff --git a/openquake/logs.py b/openquake/logs.py
index <HASH>..<HASH> 100644
--- a/openquake/logs.py
+++ b/openquake/logs.py
@@ -53,10 +53,8 @@ flags.DEFINE_string('logfile', '',
# TODO: get rid of this
LOG = logging.getLogger()
-LOGGING_AMQP_FORMAT = '%(asctime)s %(loglevel)-5s %(processName)s' \
- ' [%(name)s] - Job %(job_id)s - %(message)s'
-LOGGING_STDOUT_FORMAT = '%(levelname)-5s %(processName)s' \
- ' [%(name)s] - %(message)s'
+LOGGING_STDERR_FORMAT = '%(hostname)s [%(asctime)s] %(levelname)s ' \
+ '%(processName)s/%(process)s [%(name)s] %(message)s'
def init_logs_amqp_send(level='warn'): | added hostname to stderr log format |
diff --git a/src/lokijs.js b/src/lokijs.js
index <HASH>..<HASH> 100644
--- a/src/lokijs.js
+++ b/src/lokijs.js
@@ -228,10 +228,6 @@
return listener;
};
- function applyListener(listener, args) {
- listener.apply(null, args);
- }
-
/**
* @propt emit(eventName, data) - emits a particular event
* with the option of passing optional parameters which are going to be processed by the callback | removed applyListener as its not used anymore |
diff --git a/lib/builder/version.rb b/lib/builder/version.rb
index <HASH>..<HASH> 100644
--- a/lib/builder/version.rb
+++ b/lib/builder/version.rb
@@ -2,7 +2,7 @@ module Builder
VERSION_NUMBERS = [
VERSION_MAJOR = 3,
VERSION_MINOR = 1,
- VERSION_BUILD = 1,
+ VERSION_BUILD = 2,
]
VERSION = VERSION_NUMBERS.join(".")
end | Bump to version <I> |
diff --git a/examples/consumer_example/consumer_example.go b/examples/consumer_example/consumer_example.go
index <HASH>..<HASH> 100644
--- a/examples/consumer_example/consumer_example.go
+++ b/examples/consumer_example/consumer_example.go
@@ -39,8 +39,7 @@ func main() {
broker := os.Args[1]
group := os.Args[2]
topics := os.Args[3:]
-
- sigchan := make(chan os.Signal)
+ sigchan := make(chan os.Signal, 1)
signal.Notify(sigchan, syscall.SIGINT, syscall.SIGTERM)
c, err := kafka.NewConsumer(&kafka.ConfigMap{ | consumer_example: make term signal channel buffered to avoid select() miss |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.