diff stringlengths 65 26.7k | message stringlengths 7 9.92k |
|---|---|
diff --git a/spec/features/thredded/user_replies_to_post_spec.rb b/spec/features/thredded/user_replies_to_post_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/features/thredded/user_replies_to_post_spec.rb
+++ b/spec/features/thredded/user_replies_to_post_spec.rb
@@ -73,8 +73,10 @@ feature 'User replying to topic' do
end
def posts_exist_in_a_topic
- topic = create(:topic, messageboard: messageboard)
- posts = create_list(:post, 2, postable: topic, messageboard: messageboard)
- PageObject::Posts.new(posts)
+ topic = create(:topic,
+ with_posts: 2,
+ messageboard: messageboard,
+ user: create(:user, name: 'R2D2'))
+ PageObject::Posts.new(topic.posts)
end
end | Fix flaky user_replies_to_post_spec.rb
Avoid creating users with the same prefix as the user we auto-complete.
Example failure: <URL> |
diff --git a/dvc/tree/base.py b/dvc/tree/base.py
index <HASH>..<HASH> 100644
--- a/dvc/tree/base.py
+++ b/dvc/tree/base.py
@@ -3,7 +3,7 @@ import json
import logging
import tempfile
from concurrent.futures import ThreadPoolExecutor, as_completed
-from functools import partial, wraps
+from functools import partial
from multiprocessing import cpu_count
from operator import itemgetter
from urllib.parse import urlparse
@@ -55,24 +55,6 @@ class RemoteMissingDepsError(DvcException):
pass
-class DirCacheError(DvcException):
- def __init__(self, hash_):
- super().__init__(
- f"Failed to load dir cache for hash value: '{hash_}'."
- )
-
-
-def index_locked(f):
- @wraps(f)
- def wrapper(obj, named_cache, remote, *args, **kwargs):
- if hasattr(remote, "index"):
- with remote.index:
- return f(obj, named_cache, remote, *args, **kwargs)
- return f(obj, named_cache, remote, *args, **kwargs)
-
- return wrapper
-
-
class BaseRemoteTree:
scheme = "base"
REQUIRES = {} | tree: remove duplicated code (#<I>)
Leftover from remote/tree separation. |
diff --git a/php/WP_CLI/CommandWithTranslation.php b/php/WP_CLI/CommandWithTranslation.php
index <HASH>..<HASH> 100644
--- a/php/WP_CLI/CommandWithTranslation.php
+++ b/php/WP_CLI/CommandWithTranslation.php
@@ -178,6 +178,7 @@ abstract class CommandWithTranslation extends \WP_CLI_Command {
'language' => 'en_US',
'english_name' => 'English (United States)',
'native_name' => 'English (United States)',
+ 'updated' => '',
);
	array_push( $translations, $en_us ); | Prevent error caused by #<I> by ensuring item conforms to expected |
diff --git a/plugins/providers/virtualbox/action/destroy_unused_network_interfaces.rb b/plugins/providers/virtualbox/action/destroy_unused_network_interfaces.rb
index <HASH>..<HASH> 100644
--- a/plugins/providers/virtualbox/action/destroy_unused_network_interfaces.rb
+++ b/plugins/providers/virtualbox/action/destroy_unused_network_interfaces.rb
@@ -13,8 +13,8 @@ module VagrantPlugins
if env[:machine].provider_config.destroy_unused_network_interfaces
@logger.info("Destroying unused network interfaces...")
env[:machine].provider.driver.delete_unused_host_only_networks
- @app.call(env)
end
+ @app.call(env)
end
end
end | Call the rest of the middleware stack all the time. |
diff --git a/lib/dm-core/associations/many_to_many.rb b/lib/dm-core/associations/many_to_many.rb
index <HASH>..<HASH> 100644
--- a/lib/dm-core/associations/many_to_many.rb
+++ b/lib/dm-core/associations/many_to_many.rb
@@ -233,6 +233,12 @@ module DataMapper
raise NotImplementedError
end
+ def save
+ # TODO: create the new intermediaries
+ # TODO: destroy the orphaned intermediaries
+ raise NotImplementedError
+ end
+
def destroy
# TODO: destroy the intermediaries
# TODO: destroy the resources in the child model
@@ -245,6 +251,17 @@ module DataMapper
raise NotImplementedError
end
+ private
+
+ def relate_resource(resource)
+ # TODO: queue up new intermediaries for creation
+ raise NotImplementedError
+ end
+
+ def orphan_resource(resource)
+ # TODO: queue up orphaned intermediaries for destruction
+ raise NotImplementedError
+ end
end # class Collection
end # module ManyToMany
end # module Associations | Stubbed more of ManyToMany::Collection |
diff --git a/src/lib/generator.js b/src/lib/generator.js
index <HASH>..<HASH> 100644
--- a/src/lib/generator.js
+++ b/src/lib/generator.js
@@ -14,6 +14,7 @@ import generateEditorconfig from './subgenerators/generate-swap-editorconfig/gen
import generateNpmrc from './subgenerators/generate-swap-npmrc/generator'
import generateContribute from './subgenerators/generate-swap-contribute/generator'
import generateLicense from './subgenerators/generate-swap-license/generator'
+import generateMain from './subgenerators/generate-swap-main/generator'
import promptTask from './tasks/prompt'
@@ -41,6 +42,7 @@ export default function (app) {
app.register('npmrc', generateNpmrc)
app.register('contribute', generateContribute)
app.register('license', generateLicense)
+ app.register('main', generateMain)
/**
* Scaffold out a(n) swap-project project. Also aliased as the [default](#default) task. | register swap-main as sub-generator |
diff --git a/lib/lentil/instagram_harvester.rb b/lib/lentil/instagram_harvester.rb
index <HASH>..<HASH> 100644
--- a/lib/lentil/instagram_harvester.rb
+++ b/lib/lentil/instagram_harvester.rb
@@ -20,7 +20,7 @@ module Lentil
def configure_connection(opts = {})
opts['client_id'] ||= Lentil::Engine::APP_CONFIG["instagram_client_id"]
opts['client_secret'] ||= Lentil::Engine::APP_CONFIG["instagram_client_secret"]
- opts['access_token'] ||= nil
+ opts['access_token'] ||= Lentil::Engine::APP_CONFIG["instagram_access_token"] || nil
Instagram.configure do |config|
config.client_id = opts['client_id']
@@ -218,7 +218,7 @@ module Lentil
response.body
end
-
+
#
# Retrieve the binary video data for a given Image object
# | Look for access_token in config file |
diff --git a/src/Extension/ConfigTrait.php b/src/Extension/ConfigTrait.php
index <HASH>..<HASH> 100644
--- a/src/Extension/ConfigTrait.php
+++ b/src/Extension/ConfigTrait.php
@@ -49,7 +49,11 @@ trait ConfigTrait
$filesystem->getFile(sprintf('config://extensions/%s.%s.yml', strtolower($this->getName()), strtolower($this->getVendor())), $file);
if (!$file->exists()) {
- $this->copyDistFile($file);
+ try {
+ $this->copyDistFile($file);
+ } catch (\RuntimeException $e) {
+ return $this->config;
+ }
}
$this->addConfig($file);
@@ -98,7 +102,7 @@ trait ConfigTrait
/** @var YamlFile $distFile */
$distFile = $filesystem->get(sprintf('%s/config/config.yml.dist', $this->getBaseDirectory()->getPath()), new YamlFile());
if (!$distFile->exists()) {
- return;
+ throw new \RuntimeException(sprintf('No config.yml.dist file found at extensions://%s', $this->getBaseDirectory()->getPath()));
}
$file->write($distFile->read());
         $app['logger.system']->info( | Throw & catch an exception if an extension is missing config.yml.dist |
diff --git a/lib/chef/http.rb b/lib/chef/http.rb
index <HASH>..<HASH> 100644
--- a/lib/chef/http.rb
+++ b/lib/chef/http.rb
@@ -242,12 +242,8 @@ class Chef
raise Chef::Exceptions::RedirectLimitExceeded if @redirects_followed >= redirect_limit
@redirects_followed += 1
Chef::Log.debug("Following redirect #{@redirects_followed}/#{redirect_limit}")
- if @sign_on_redirect
- yield
- else
- @authenticator.sign_request = false
- yield
- end
+
+ yield
ensure
@redirects_followed = 0
@authenticator.sign_request = true
diff --git a/lib/chef/rest.rb b/lib/chef/rest.rb
index <HASH>..<HASH> 100644
--- a/lib/chef/rest.rb
+++ b/lib/chef/rest.rb
@@ -192,6 +192,14 @@ class Chef
end
end
+ def follow_redirect
+ unless @sign_on_redirect
+ @authenticator.sign_request = false
+ end
+ super
+ ensure
+ @authenticator.sign_request = true
+ end
private
def stream_to_tempfile(url, response) | Remove references to authenticator from base HTTP class |
diff --git a/GPy/testing/link_function_tests.py b/GPy/testing/link_function_tests.py
index <HASH>..<HASH> 100644
--- a/GPy/testing/link_function_tests.py
+++ b/GPy/testing/link_function_tests.py
@@ -7,7 +7,6 @@ _lim_val_exp = np.log(_lim_val)
_lim_val_square = np.sqrt(_lim_val)
_lim_val_cube = cbrt(_lim_val)
from GPy.likelihoods.link_functions import Identity, Probit, Cloglog, Log, Log_ex_1, Reciprocal, Heaviside
-#np.seterr(over='raise')
class LinkFunctionTests(np.testing.TestCase):
def setUp(self):
@@ -80,8 +79,7 @@ class LinkFunctionTests(np.testing.TestCase):
assert np.isinf(np.exp(np.log(self.f_upper_lim)))
#Check the clipping works
np.testing.assert_almost_equal(link.transf(self.f_lower_lim), 0, decimal=5)
- #Need to look at most significant figures here rather than the decimals
- np.testing.assert_approx_equal(link.transf(self.f_upper_lim), _lim_val, significant=5)
+ self.assertTrue(np.isfinite(link.transf(self.f_upper_lim)))
self.check_overflow(link, lim_of_inf)
#Check that it would otherwise fail | tidied link_fn tests |
diff --git a/src/semantic_tree/semantic_tree.js b/src/semantic_tree/semantic_tree.js
index <HASH>..<HASH> 100644
--- a/src/semantic_tree/semantic_tree.js
+++ b/src/semantic_tree/semantic_tree.js
@@ -1028,11 +1028,16 @@ sre.SemanticTree.prototype.processRelationsInRow_ = function(nodes) {
var children = partition.comp.map(
goog.bind(this.processOperationsInRow_, this));
if (partition.rel.some(
- function(x) {return x.role !== firstRel.role;})) {
- return this.makeBranchNode_(
+ function(x) {return !x.equals(firstRel);})) {
+ var node = this.makeBranchNode_(
sre.SemanticAttr.Type.MULTIREL, children, partition.rel);
+ if (partition.rel.every(
+ function(x) {return x.role === firstRel.role;})) {
+ node.role = firstRel.role;
+ }
+ return node;
}
- var node = this.makeBranchNode_(sre.SemanticAttr.Type.RELSEQ,
+ node = this.makeBranchNode_(sre.SemanticAttr.Type.RELSEQ,
children, partition.rel,
sre.SemanticTree.getEmbellishedInner_(firstRel).textContent);
node.role = firstRel.role; | Slightly less intrusive change of semantic tree structure:
Giving the multirel element a role if all the children roles are the same. |
diff --git a/wallace/nodes.py b/wallace/nodes.py
index <HASH>..<HASH> 100644
--- a/wallace/nodes.py
+++ b/wallace/nodes.py
@@ -4,6 +4,7 @@ from wallace.models import Node, Info
from wallace.information import State
import random
from sqlalchemy import and_
+from sqlalchemy.ext.hybrid import hybrid_property
class Agent(Node):
@@ -12,16 +13,21 @@ class Agent(Node):
__mapper_args__ = {"polymorphic_identity": "agent"}
- def set_fitness(self, fitness):
- self.property1 = repr(fitness)
-
- @property
+ @hybrid_property
def fitness(self):
if self.property1 is None:
return None
else:
return float(self.property1)
+ @fitness.setter
+ def fitness(self, fitness):
+ self.property1 = repr(fitness)
+
+ @fitness.expression
+ def generation(self):
+ return self.property1.label('fitness')
+
class ReplicatorAgent(Agent): | make fitness a hybrid_property with a setter |
diff --git a/lib/Gitlab/Api/Projects.php b/lib/Gitlab/Api/Projects.php
index <HASH>..<HASH> 100644
--- a/lib/Gitlab/Api/Projects.php
+++ b/lib/Gitlab/Api/Projects.php
@@ -828,7 +828,7 @@ class Projects extends AbstractApi
*/
public function removeShare($project_id, $group_id)
{
- return $this->delete($this->getProjectPath($project_id, 'services/'.$group_id));
+ return $this->delete($this->getProjectPath($project_id, 'share/' . $group_id));
}
/** | fix Projects::removeShare (service/ was used instead of share/) |
diff --git a/pypsa/linopf.py b/pypsa/linopf.py
index <HASH>..<HASH> 100644
--- a/pypsa/linopf.py
+++ b/pypsa/linopf.py
@@ -244,6 +244,10 @@ def define_ramp_limit_constraints(n, sns, c):
return
fix_i = get_non_extendable_i(n, c)
ext_i = get_extendable_i(n, c)
+ if "committable" in n.df(c):
+ com_i = n.df(c).query('committable').index.difference(ext_i)
+ else:
+ com_i = []
p = get_var(n, c, 'p').loc[sns[1:]]
p_prev = get_var(n, c, 'p').shift(1).loc[sns[1:]]
active = get_activity_mask(n, c, sns[1:])
@@ -282,10 +286,7 @@ def define_ramp_limit_constraints(n, sns, c):
kwargs = dict(spec='ext.', mask=active[gens_i])
define_constraints(n, lhs, '>=', 0, c, 'mu_ramp_limit_down', **kwargs)
- if "committable" in n.df(c):
- com_i = n.df(c).query('committable').index.difference(ext_i)
- else:
- com_i = []
+
# com up
gens_i = rup_i.intersection(com_i) | moved the `committable` conditional |
diff --git a/pkg/minikube/download/preload.go b/pkg/minikube/download/preload.go
index <HASH>..<HASH> 100644
--- a/pkg/minikube/download/preload.go
+++ b/pkg/minikube/download/preload.go
@@ -41,7 +41,7 @@ const (
// PreloadVersion is the current version of the preloaded tarball
//
// NOTE: You may need to bump this version up when upgrading auxiliary docker images
- PreloadVersion = "v5"
+ PreloadVersion = "v6"
// PreloadBucket is the name of the GCS bucket where preloaded volume tarballs exist
PreloadBucket = "minikube-preloaded-volume-tarballs"
) | Increase preload version to v6
We upgraded the dashboard image in #<I>, this will rebuild tarballs to include the latest version.
This should also fix a small performance regression, where start time was increased by <I>% on kvm2, probably because we were waiting for the new dashboard image to download. [PR bot comment](<URL>) |
diff --git a/django_extensions/management/commands/sqldiff.py b/django_extensions/management/commands/sqldiff.py
index <HASH>..<HASH> 100644
--- a/django_extensions/management/commands/sqldiff.py
+++ b/django_extensions/management/commands/sqldiff.py
@@ -449,7 +449,7 @@ class SQLDiff(object):
table_name = meta.db_table
app_label = meta.app_label
- if meta.proxy:
+ if self.options.get('include_proxy_models', False) and meta.proxy:
continue
if cur_app_label != app_label:
@@ -918,6 +918,10 @@ to check/debug ur models compared to the real database tables and columns."""
'--output_text', '-t', action='store_false', dest='sql',
default=True,
help="Outputs the differences as descriptive text instead of SQL")
+ parser.add_argument(
+ '--include-proxy-models', action='store_true', dest='include_proxy_models',
+ default=False,
+ help="Include proxy models in the graph")
def __init__(self, *args, **kwargs):
super(Command, self).__init__(*args, **kwargs) | add cli support for adding proxy models back in the diff |
diff --git a/mesh_tensorflow/transformer/transformer.py b/mesh_tensorflow/transformer/transformer.py
index <HASH>..<HASH> 100644
--- a/mesh_tensorflow/transformer/transformer.py
+++ b/mesh_tensorflow/transformer/transformer.py
@@ -997,7 +997,7 @@ class Unitransformer(object):
mesh_shape=self.mesh_shape,
layout=self.layout)
return mtf.gather(
- beams, mtf.constant(inputs.mesh, 2, dtype=tf.int32), beam_dim)
+ beams, mtf.constant(inputs.mesh, 0, dtype=tf.int32), beam_dim)
@gin.configurable | Revert a mistake I made in cl/<I> . This should fix beam search.
PiperOrigin-RevId: <I> |
diff --git a/baggageclaimcmd/driver_linux.go b/baggageclaimcmd/driver_linux.go
index <HASH>..<HASH> 100644
--- a/baggageclaimcmd/driver_linux.go
+++ b/baggageclaimcmd/driver_linux.go
@@ -51,7 +51,7 @@ func (cmd *BaggageclaimCommand) driver(logger lager.Logger) (volume.Driver, erro
volumesDir := cmd.VolumesDir.Path()
- if cmd.Driver == "btrfs" && fsStat.Type != btrfsFSType {
+ if cmd.Driver == "btrfs" && uint32(fsStat.Type) != btrfsFSType {
volumesImage := volumesDir + ".img"
filesystem := fs.New(logger.Session("fs"), volumesImage, volumesDir, cmd.MkfsBin) | baggageclaimcmd: fix build issues on <I>bit platforms
fsStat.Type is int<I> on <I>bit platforms and 0x<I>e overflows it. |
diff --git a/code/plugins/system/koowa/pattern/observer.php b/code/plugins/system/koowa/pattern/observer.php
index <HASH>..<HASH> 100644
--- a/code/plugins/system/koowa/pattern/observer.php
+++ b/code/plugins/system/koowa/pattern/observer.php
@@ -25,7 +25,7 @@ interface KPatternObserver
* @param object An associative array of arguments
* @return mixed
*/
- public function update(ArrayObject $args);
+ public function update(KConfig $args);
/**
* This function returns an unique identifier for the object. This id can be used as | Update method should use KConfig instead of ArrayObject as parameter. |
diff --git a/core-bundle/contao/library/Contao/Database/Updater.php b/core-bundle/contao/library/Contao/Database/Updater.php
index <HASH>..<HASH> 100644
--- a/core-bundle/contao/library/Contao/Database/Updater.php
+++ b/core-bundle/contao/library/Contao/Database/Updater.php
@@ -375,6 +375,14 @@ class Updater extends \Controller
->execute(serialize($arrCss), $objLayout->id);
}
+ // Add the jQuery fields if they do not yet exist (see #5689)
+ if (!$this->Database->fieldExists('addJQuery', 'tl_layout'))
+ {
+ $this->Database->query("ALTER TABLE `tl_layout` ADD `addJQuery` char(1) NOT NULL default ''");
+ $this->Database->query("ALTER TABLE `tl_layout` ADD `jSource` varchar(16) NOT NULL default ''");
+ $this->Database->query("ALTER TABLE `tl_layout` ADD `jquery` text NULL");
+ }
+
// Get all page layouts that use the moo_mediabox template
$objLayout = $this->Database->query("SELECT `id`, `addJQuery`, `jquery`, `mootools` FROM `tl_layout` WHERE `addMooTools`=1 AND `mootools` LIKE '%\"moo_mediaelement\"%'"); | [Core] Make sure the jQuery related database fields exist when the version <I> update is executed (see <I>) |
diff --git a/tests/test_pipenv.py b/tests/test_pipenv.py
index <HASH>..<HASH> 100644
--- a/tests/test_pipenv.py
+++ b/tests/test_pipenv.py
@@ -843,6 +843,29 @@ requests = "*"
c = p.pipenv('install')
assert c.return_code == 0
+ @pytest.mark.extras
+ @pytest.mark.lock
+ @pytest.mark.requirements
+ @pytest.mark.complex
+ def test_complex_lock_deep_extras(self):
+ # records[pandas] requires tablib[pandas] which requires pandas.
+
+ with PipenvInstance() as p:
+ with open(p.pipfile_path, 'w') as f:
+ contents = """
+[packages]
+records = {extras = ["pandas"], version = "==0.5.2"}
+ """.strip()
+ f.write(contents)
+
+ c = p.pipenv('lock')
+ assert c.return_code == 0
+ assert 'tablib' in p.lockfile['default']
+ assert 'pandas' in p.lockfile['default']
+
+ c = p.pipenv('install')
+ assert c.return_code == 0
+
@pytest.mark.lock
@pytest.mark.deploy
def test_deploy_works(self): | Test confirming that deep extras resolves works |
diff --git a/cheroot/test/test_ssl.py b/cheroot/test/test_ssl.py
index <HASH>..<HASH> 100644
--- a/cheroot/test/test_ssl.py
+++ b/cheroot/test/test_ssl.py
@@ -31,8 +31,6 @@ from ..testing import (
IS_LIBRESSL_BACKEND = ssl.OPENSSL_VERSION.startswith('LibreSSL')
PY27 = sys.version_info[:2] == (2, 7)
-PY37 = sys.version_info[:2] == (3, 7)
-PY38 = sys.version_info[:2] == (3, 8)
fails_under_py3 = pytest.mark.xfail(
@@ -323,7 +321,7 @@ def test_http_over_https_error(ca, adapter_type, tls_http_server, ip_addr):
expect_fallback_response_over_plain_http = (
(adapter_type == 'pyopenssl'
- and (IS_ABOVE_OPENSSL10 or PY37 or PY38))
+ and (IS_ABOVE_OPENSSL10 or six.PY3))
or PY27
)
if expect_fallback_response_over_plain_http: | Expect nice behavior from py3+pyopenssl |
diff --git a/lib/data_mapper.rb b/lib/data_mapper.rb
index <HASH>..<HASH> 100644
--- a/lib/data_mapper.rb
+++ b/lib/data_mapper.rb
@@ -108,4 +108,7 @@ module DataMapper
Repository.context.pop
end
end
+
+ # A logger should always be present.
+ Logger.new(nil, 7)
end | Don't blow up when there's no logger set. |
diff --git a/lib/mongo/types/objectid.rb b/lib/mongo/types/objectid.rb
index <HASH>..<HASH> 100644
--- a/lib/mongo/types/objectid.rb
+++ b/lib/mongo/types/objectid.rb
@@ -124,6 +124,12 @@ module Mongo
legacy
end
+ # Returns the utc time at which this ObjectID was generated. This may
+ # be used in lieu of a created_at timestamp.
+ def generation_time
+ Time.at(@data.pack("C4").unpack("N")[0])
+ end
+
private
begin
diff --git a/test/test_objectid.rb b/test/test_objectid.rb
index <HASH>..<HASH> 100644
--- a/test/test_objectid.rb
+++ b/test/test_objectid.rb
@@ -121,4 +121,10 @@ class ObjectIDTest < Test::Unit::TestCase
assert_equal s, ObjectID.legacy_string_convert(l)
end
+ def test_generation_time
+ time = Time.now
+ id = ObjectID.new
+
+ assert_in_delta time.to_i, id.generation_time.to_i, 2
+ end
end | Added generation_time method on ObjectID |
diff --git a/RemexHtml/DOM/DOMBuilder.php b/RemexHtml/DOM/DOMBuilder.php
index <HASH>..<HASH> 100644
--- a/RemexHtml/DOM/DOMBuilder.php
+++ b/RemexHtml/DOM/DOMBuilder.php
@@ -58,7 +58,7 @@ class DOMBuilder implements TreeHandler {
private $isFragment;
/** @var bool */
- private $coerced;
+ private $coerced = false;
/**
* @param array $options An associative array of options: | Fix missing initialization for DOMBuilder::$coerced
Because this wasn't initialized, DOMBuilder::isCoerced would return
`null` instead of `false`. That's still a falsey value, so would only
bite the called if (eg) they explicitly tested against `false`.
Change-Id: Ia<I>b<I>e1d3d<I>efd<I>af3ffb4dabc<I> |
diff --git a/lib/src/main/java/com/ibm/mobilefirstplatform/clientsdk/android/analytics/api/MFPAnalytics.java b/lib/src/main/java/com/ibm/mobilefirstplatform/clientsdk/android/analytics/api/MFPAnalytics.java
index <HASH>..<HASH> 100644
--- a/lib/src/main/java/com/ibm/mobilefirstplatform/clientsdk/android/analytics/api/MFPAnalytics.java
+++ b/lib/src/main/java/com/ibm/mobilefirstplatform/clientsdk/android/analytics/api/MFPAnalytics.java
@@ -161,7 +161,7 @@ public class MFPAnalytics {
metadata.remove(KEY_METADATA_START_TIME);
- log(metadata);Get rid of
+ log(metadata);R
lifecycleEvents.remove(TAG_SESSION);
} catch (JSONException e) { | Delete unused text from MFPAnalytics. |
diff --git a/proso_common/middleware.py b/proso_common/middleware.py
index <HASH>..<HASH> 100644
--- a/proso_common/middleware.py
+++ b/proso_common/middleware.py
@@ -11,7 +11,7 @@ class ToolbarMiddleware(object):
def process_response(self, request, response):
- if not request.user.is_staff:
+ if not hasattr(request, "user") or not request.user.is_staff:
return response
# Check for responses where the config_bar can't be inserted. | fix Toolbar middleware error raised when request is not standard (WSGI request) |
diff --git a/tensorboard/uploader/uploader_main.py b/tensorboard/uploader/uploader_main.py
index <HASH>..<HASH> 100644
--- a/tensorboard/uploader/uploader_main.py
+++ b/tensorboard/uploader/uploader_main.py
@@ -92,7 +92,7 @@ def _define_flags(parser):
parser.add_argument(
'--endpoint',
type=str,
- default='localhost:10000',
+ default='api.tensorboard.dev:443',
help='URL for the API server accepting write requests.')
parser.add_argument( | uploader: set default endpoint to production (#<I>)
Test Plan:
Build the Pip package and install it into a new virtualenv. Using the
new package, revoke auth (`tensorboard dev auth revoke`), then upload an
experiment. Note that the correct Terms of Service and Privacy Policy
documents are clearly displayed in the consent prompt.
wchargin-branch: uploader-prod |
diff --git a/src/CodeGen/Utils.php b/src/CodeGen/Utils.php
index <HASH>..<HASH> 100644
--- a/src/CodeGen/Utils.php
+++ b/src/CodeGen/Utils.php
@@ -4,25 +4,27 @@ use Twig_Loader_String;
use Twig_Environment;
use Closure;
+
+
class Utils
{
static $stringloader = null;
+
static $twig;
- static public function renderStringTemplate($templateContent, array $args = array())
+ static public function renderStringTemplate($templateContent, array $args = array(), Twig_Environment $env = null)
{
- if (!self::$stringloader) {
- self::$stringloader = new Twig_Loader_String();
- }
- if (!self::$twig) {
- self::$twig = new Twig_Environment(self::$stringloader);
+ if (!$env) {
+ $env = new Twig_Environment;
}
+ $template = $twig->createTemplate($templateContent);
+
if (is_callable($args)) {
$args = call_user_func($args);
} elseif ($args instanceof Closure) {
$args = $args();
}
- return self::$twig->render($templateContent, $args);
+ return $template->render($args);
}
static public function evalCallback($cb) | Rewrite render string method to support newer Twig engine |
diff --git a/AlphaTwirl/Counter/GenericKeyComposerB.py b/AlphaTwirl/Counter/GenericKeyComposerB.py
index <HASH>..<HASH> 100755
--- a/AlphaTwirl/Counter/GenericKeyComposerB.py
+++ b/AlphaTwirl/Counter/GenericKeyComposerB.py
@@ -2,7 +2,7 @@
##____________________________________________________________________________||
class GenericKeyComposerB(object):
- """This class is a faster of GenericKeyComposer.
+ """This class is a faster version of GenericKeyComposer.
This class can be used with BEvents. | update the docstring in GenericKeyComposerB |
diff --git a/ryu/ofproto/nx_actions.py b/ryu/ofproto/nx_actions.py
index <HASH>..<HASH> 100644
--- a/ryu/ofproto/nx_actions.py
+++ b/ryu/ofproto/nx_actions.py
@@ -28,8 +28,6 @@ from ryu.ofproto.ofproto_parser import StringifyMixin
def generate(ofp_name, ofpp_name):
import sys
- import string
- import functools
ofp = sys.modules[ofp_name]
ofpp = sys.modules[ofpp_name]
@@ -582,10 +580,10 @@ def generate(ofp_name, ofpp_name):
kwargs['range_ipv6_max'] = (
type_desc.IPv6Addr.to_user(rest[:16]))
rest = rest[16:]
- if range_present & NX_NAT_RANGE_PROTO_MIN:
+ if range_present & nicira_ext.NX_NAT_RANGE_PROTO_MIN:
kwargs['range_proto_min'] = type_desc.Int2.to_user(rest[:2])
rest = rest[2:]
- if range_present & NX_NAT_RANGE_PROTO_MAX:
+ if range_present & nicira_ext.NX_NAT_RANGE_PROTO_MAX:
kwargs['range_proto_max'] = type_desc.Int2.to_user(rest[:2])
return cls(flags, **kwargs) | ofproto/nx_actions: Flake8 Fixes |
diff --git a/autotls.go b/autotls.go
index <HASH>..<HASH> 100644
--- a/autotls.go
+++ b/autotls.go
@@ -19,7 +19,17 @@ func RunWithManager(r http.Handler, m *autocert.Manager) error {
Handler: r,
}
- go http.ListenAndServe(":http", m.HTTPHandler(nil))
+ go http.ListenAndServe(":http", m.HTTPHandler(http.HandlerFunc(redirect)))
return s.ListenAndServeTLS("", "")
}
+
+func redirect(w http.ResponseWriter, req *http.Request) {
+ target := "https://" + req.Host + req.URL.Path
+
+ if len(req.URL.RawQuery) > 0 {
+ target += "?" + req.URL.RawQuery
+ }
+
+ http.Redirect(w, req, target, http.StatusTemporaryRedirect)
+} | chore: support http redirect to https (#<I>) |
diff --git a/raiden/transfer/state.py b/raiden/transfer/state.py
index <HASH>..<HASH> 100644
--- a/raiden/transfer/state.py
+++ b/raiden/transfer/state.py
@@ -95,7 +95,7 @@ class PaymentNetworkState(State):
__slots__ = (
'address',
- 'tokensidentifiers_to_tokennetworks',
+ 'tokenidentifiers_to_tokennetworks',
'tokenaddresses_to_tokennetworks',
)
@@ -108,7 +108,7 @@ class PaymentNetworkState(State):
raise ValueError('address must be an address instance')
self.address = address
- self.tokensidentifiers_to_tokennetworks = {
+ self.tokenidentifiers_to_tokennetworks = {
token_network.address: token_network
for token_network in token_network_list
}
@@ -125,7 +125,7 @@ class PaymentNetworkState(State):
isinstance(other, PaymentNetworkState) and
self.address == other.address and
self.tokenaddresses_to_tokennetworks == other.tokenaddresses_to_tokennetworks and
- self.tokensidentifiers_to_tokennetworks == other.tokensidentifiers_to_tokennetworks
+ self.tokenidentifiers_to_tokennetworks == other.tokenidentifiers_to_tokennetworks
)
def __ne__(self, other): | Remove extra `s` from tokensidentifiers_to_tokennetworks |
diff --git a/hangups/__main__.py b/hangups/__main__.py
index <HASH>..<HASH> 100644
--- a/hangups/__main__.py
+++ b/hangups/__main__.py
@@ -37,8 +37,10 @@ class UserInterface(object):
exit(1)
tornado_loop = urwid.TornadoEventLoop(ioloop.IOLoop.instance())
- self._urwid_loop = urwid.MainLoop(LoadingWidget(), URWID_PALETTE,
- event_loop=tornado_loop)
+ self._urwid_loop = urwid.MainLoop(
+ LoadingWidget(), URWID_PALETTE, event_loop=tornado_loop,
+ handle_mouse=False
+ )
self._client = hangups.Client(cookies, self.on_event)
future = self._client.connect()
ioloop.IOLoop.instance().add_future(future, lambda f: f.result()) | Disable mouse handling
Fixes copy and paste on chromebook's terminal. |
diff --git a/keyring/tests/backends/test_Google.py b/keyring/tests/backends/test_Google.py
index <HASH>..<HASH> 100644
--- a/keyring/tests/backends/test_Google.py
+++ b/keyring/tests/backends/test_Google.py
@@ -2,6 +2,7 @@ import codecs
import base64
import cPickle
+import keyring.py27compat
from ..py30compat import unittest
from ..test_backend import BackendBasicTests
from keyring.backends import Google
@@ -18,7 +19,7 @@ def is_gdata_supported():
return True
def init_google_docs_keyring(client, can_create=True,
- input_getter=raw_input):
+ input_getter=keyring.py27compat.input):
credentials = SimpleCredential('foo', 'bar')
return Google.DocsKeyring(credentials,
'test_src', | Fix NameError on Python 3 in tests |
diff --git a/upnpclient/soap.py b/upnpclient/soap.py
index <HASH>..<HASH> 100644
--- a/upnpclient/soap.py
+++ b/upnpclient/soap.py
@@ -102,12 +102,12 @@ class SOAP(object):
# If the body of the error response contains XML then it should be a UPnP error,
# otherwise reraise the HTTPError.
try:
- err_xml = etree.fromstring(exc.response.text)
+ err_xml = etree.fromstring(exc.response.content)
except etree.XMLSyntaxError:
raise exc
raise SOAPError(*self._extract_upnperror(err_xml))
- xml_str = resp.text.strip()
+ xml_str = resp.content.strip()
try:
xml = etree.fromstring(xml_str)
except etree.XMLSyntaxError: | Use resp.content in soap client too. |
diff --git a/endpoints/cinema/index.js b/endpoints/cinema/index.js
index <HASH>..<HASH> 100644
--- a/endpoints/cinema/index.js
+++ b/endpoints/cinema/index.js
@@ -24,7 +24,7 @@ var getMovies = function (req, res, next) {
try {
$ = cheerio.load( body );
} catch (e) {
- exports.logError( e );
+ throw new Error( e );
}
// Base object to be added to | Throws a new Error |
diff --git a/tests/test_model_relations.py b/tests/test_model_relations.py
index <HASH>..<HASH> 100644
--- a/tests/test_model_relations.py
+++ b/tests/test_model_relations.py
@@ -35,15 +35,15 @@ class TestModelRelations:
self.prepare_testbed()
user = User(name='Joe').save()
employee = Employee(eid='E1', usr=user).save()
+ # need to wait a sec because we will query solr in the
+ # _save_backlinked_models of User object
+ sleep(1)
employee_from_db = Employee.objects.get(employee.key)
assert employee_from_db.usr.name == user.name
user_from_db = User.objects.get(user.key)
+
user_from_db.name = 'Joen'
- # pprint(user_from_db.clean_value())
- # FIXME: this 1 sec wait shouldn't be required
- sleep(1)
- # pprint(user_from_db.clean_value())
user_from_db.save()
employee_from_db = Employee.objects.get(employee.key)
assert employee_from_db.usr.name == user_from_db.name | we need to wait a sec to give enough time for riak/solr sync |
diff --git a/worker/uniter/util_test.go b/worker/uniter/util_test.go
index <HASH>..<HASH> 100644
--- a/worker/uniter/util_test.go
+++ b/worker/uniter/util_test.go
@@ -36,6 +36,7 @@ import (
"github.com/juju/juju/core/leadership"
corelease "github.com/juju/juju/core/lease"
"github.com/juju/juju/core/machinelock"
+ "github.com/juju/juju/core/model"
"github.com/juju/juju/core/status"
"github.com/juju/juju/juju/sockets"
"github.com/juju/juju/juju/testing"
@@ -958,7 +959,7 @@ func (s updateStatusHookTick) step(c *gc.C, ctx *context) {
type changeConfig map[string]interface{}
func (s changeConfig) step(c *gc.C, ctx *context) {
- err := ctx.application.UpdateCharmConfig(corecharm.Settings(s))
+ err := ctx.application.UpdateCharmConfig(model.GenerationCurrent, corecharm.Settings(s))
c.Assert(err, jc.ErrorIsNil)
} | Fixes uniter worker tests for generational charm config. |
diff --git a/gardenrunner/gardenrunner.go b/gardenrunner/gardenrunner.go
index <HASH>..<HASH> 100644
--- a/gardenrunner/gardenrunner.go
+++ b/gardenrunner/gardenrunner.go
@@ -140,7 +140,6 @@ func (r *Runner) Run(signals <-chan os.Signal, ready chan<- struct{}) error {
} else { // garden-runc
gardenArgs = appendDefaultFlag(gardenArgs, "--init-bin", r.binPath+"/init")
gardenArgs = appendDefaultFlag(gardenArgs, "--dadoo-bin", r.binPath+"/dadoo")
- gardenArgs = appendDefaultFlag(gardenArgs, "--kawasaki-bin", r.binPath+"/kawasaki")
gardenArgs = appendDefaultFlag(gardenArgs, "--nstar-bin", r.binPath+"/nstar")
gardenArgs = appendDefaultFlag(gardenArgs, "--tar-bin", r.binPath+"/tar")
gardenArgs = appendDefaultFlag(gardenArgs, "--runc-bin", r.binPath+"/runc") | Remove garden-runc kawasaki flag
* We need to add this back once the kawasaki dependency hits master of
garden-runc-release |
diff --git a/searx/search.py b/searx/search.py
index <HASH>..<HASH> 100644
--- a/searx/search.py
+++ b/searx/search.py
@@ -414,6 +414,9 @@ class Search(object):
self.categories.remove(category)
if not load_default_categories:
+ if not self.categories:
+ self.categories = list(set(engine['category']
+ for engine in self.engines))
return
# if no category is specified for this search, | [fix] display categories of the selected engines |
diff --git a/bliss/core/seq.py b/bliss/core/seq.py
index <HASH>..<HASH> 100644
--- a/bliss/core/seq.py
+++ b/bliss/core/seq.py
@@ -56,7 +56,7 @@ class Seq (object):
self.pathname = pathname
self.cmddict = cmddict or cmd.getDefaultCmdDict()
self.crc32 = None
- self.seqid = id
+ self.seqid = int(id)
self.lines = [ ]
self.header = { }
self.version = version
@@ -227,12 +227,12 @@ class Seq (object):
if 'seqid' in self.header:
self.seqid = self.header['seqid']
- else:
+ elif self.seqid is None:
self.log.error('No sequence id present in header.')
if 'version' in self.header:
self.version = self.header['version']
- else:
+ elif self.version is None:
self.log.warning('No version present in header. Defaulting to zero (0).')
self.version = 0 | Issue #<I>: Update seq.py to more gracefully handle errors |
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100755
--- a/setup.py
+++ b/setup.py
@@ -5,7 +5,7 @@ from setuptools import setup
setup(
name='laws',
packages=['laws'],
- version='0.6.2',
+ version='0.6.3',
description='-ls- for AWS EC2 instances',
long_description='https://github.com/cyrillk/laws/blob/master/README.md',
author='Kirill Kulikov', | Version bumped to <I> |
diff --git a/src/datatable/main.js b/src/datatable/main.js
index <HASH>..<HASH> 100644
--- a/src/datatable/main.js
+++ b/src/datatable/main.js
@@ -209,6 +209,14 @@ class DataTable extends Relation {
}
/**
+ * Returns index and field details in an object where key is the field name.
+ * @return {Object} field definitions
+ */
+ getFieldMap () {
+ return this.fieldMap;
+ }
+
+ /**
* It helps to define the sorting order of the returned data.
* This is similar to the orderBy functionality of the database
* you have to pass the array of array [['columnName', 'sortType(asc|desc)']] and the
diff --git a/src/datatable/relation.js b/src/datatable/relation.js
index <HASH>..<HASH> 100644
--- a/src/datatable/relation.js
+++ b/src/datatable/relation.js
@@ -24,6 +24,13 @@ class Relation {
// This will create a new fieldStore with the fields
nameSpace = fieldStore.createNameSpace(fieldArr, name);
this.columnNameSpace = nameSpace;
+ this.fieldMap = schema.reduce((acc, fieldDef, i) => {
+ acc[fieldDef.name] = {
+ index: i,
+ def: fieldDef
+ };
+ return acc;
+ }, {});
// If data is provided create the default colIdentifier and rowDiffset
this.rowDiffset = `0-${normalizeData[0] ? (normalizeData[0].length - 1) : 0}`;
this.colIdentifier = (schema.map(_ => _.name)).join(); | FSB-<I>: Add field definitions |
diff --git a/src/main/java/com/zandero/rest/data/RouteDefinition.java b/src/main/java/com/zandero/rest/data/RouteDefinition.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/zandero/rest/data/RouteDefinition.java
+++ b/src/main/java/com/zandero/rest/data/RouteDefinition.java
@@ -588,8 +588,13 @@ public class RouteDefinition {
public boolean requestHasBody() {
- // TODO: fix ... DELETE also has no body
- return !(HttpMethod.GET.equals(method) || HttpMethod.HEAD.equals(method));
+ // https://developer.mozilla.org/en-US/docs/Web/HTTP/Methods/
+ // also see:
+ // https://www.owasp.org/index.php/Test_HTTP_Methods_(OTG-CONFIG-006)
+ return HttpMethod.POST.equals(method) ||
+ HttpMethod.PUT.equals(method) ||
+ HttpMethod.PATCH.equals(method) ||
+ HttpMethod.TRACE.equals(method);
}
public boolean hasBodyParameter() { | Fixed request has body method according to specs |
diff --git a/src/createRequest.js b/src/createRequest.js
index <HASH>..<HASH> 100644
--- a/src/createRequest.js
+++ b/src/createRequest.js
@@ -105,9 +105,11 @@ var parseFMResponse = function (fmresultset) {
*/
var remapFields = function (fields) {
var obj = {};
- fields.forEach(function (field) {
- obj[field.$.name] = field.data[0]
- });
+ if (fields) {
+ fields.forEach(function (field) {
+ obj[field.$.name] = field.data[0]
+ });
+ }
return obj
}; | Fixing remaping field function for avoiding error |
diff --git a/packages/vaex-jupyter/vaex/jupyter/_version.py b/packages/vaex-jupyter/vaex/jupyter/_version.py
index <HASH>..<HASH> 100644
--- a/packages/vaex-jupyter/vaex/jupyter/_version.py
+++ b/packages/vaex-jupyter/vaex/jupyter/_version.py
@@ -1,2 +1,2 @@
-__version_tuple__ = (0, 5, 2, "dev.0")
-__version__ = '0.5.2-dev.0'
+__version_tuple__ = (0, 5, 2)
+__version__ = '0.5.2'
diff --git a/packages/vaex-meta/setup.py b/packages/vaex-meta/setup.py
index <HASH>..<HASH> 100644
--- a/packages/vaex-meta/setup.py
+++ b/packages/vaex-meta/setup.py
@@ -22,7 +22,7 @@ install_requires = [
'vaex-hdf5>=0.6.0,<0.7',
'vaex-astro>=0.7.0,<0.8',
'vaex-arrow>=0.5.0,<0.6',
- 'vaex-jupyter>=0.5.2-dev.0,<0.6',
+ 'vaex-jupyter>=0.5.2,<0.6',
'vaex-ml>=0.10.0,<0.11',
# vaex-graphql it not on conda-forge yet
] | Release <I> of vaex-jupyter |
diff --git a/doubles/proxy.py b/doubles/proxy.py
index <HASH>..<HASH> 100644
--- a/doubles/proxy.py
+++ b/doubles/proxy.py
@@ -22,6 +22,3 @@ class Proxy(object):
else:
method_double = self._method_doubles[method_name] = MethodDouble(method_name, self._obj)
return method_double
-
- def __repr__(self):
- return "<Proxy({!r})>".format(self._obj) | Remove unused repr for Proxy. |
diff --git a/unicum/server.py b/unicum/server.py
index <HASH>..<HASH> 100644
--- a/unicum/server.py
+++ b/unicum/server.py
@@ -111,6 +111,8 @@ class Session(object):
item = dict(zip(keys, values))
elif isinstance(item, list):
item = [_prepickle(i) for i in item]
+ elif isinstance(item, tuple):
+ item = (_prepickle(i) for i in item)
elif isinstance(item, (bool, int, long, float, str)):
pass
else: | tuples work in _prepickle in server.py |
diff --git a/lib/metasploit_data_models/active_record_models/host.rb b/lib/metasploit_data_models/active_record_models/host.rb
index <HASH>..<HASH> 100755
--- a/lib/metasploit_data_models/active_record_models/host.rb
+++ b/lib/metasploit_data_models/active_record_models/host.rb
@@ -23,10 +23,11 @@ module MetasploitDataModels::ActiveRecordModels::Host
validates_presence_of :workspace
scope :alive, where({'hosts.state' => 'alive'})
- scope :search,
- lambda { |*args| where(
- [%w{address hosts.name os_name os_flavor os_sp mac purpose comments}.map { |c| "#{c} ILIKE ?" }.join(" OR ")] + ["%#{args[0]}%"] * 8)
- }
+ scope :search, lambda { |*args| {:conditions =>
+ [ %w{hosts.name os_name os_flavor os_sp mac purpose comments}.map{|c| "#{c} ILIKE ?"}.join(" OR ") ] + [ "%#{args[0]}%" ] * 7 }
+ }
+ scope :address_search, lambda { |*args| {:conditions =>
+ [ "address=?",args[0]]}}
scope :tag_search,
lambda { |*args| where("tags.name" => args[0]).includes(:tags) } | Attempting to fix issue with hosts search |
diff --git a/src/Package.php b/src/Package.php
index <HASH>..<HASH> 100644
--- a/src/Package.php
+++ b/src/Package.php
@@ -65,7 +65,8 @@ trait Package
{
try {
$v = self::getPackageVendor();
- } catch (LogicException $vendorException) {}
+ } catch (LogicException $vendorException) {
+ }
try {
$p = self::getPackageName(); | Apply fixes from StyleCI (#5) |
diff --git a/test/test.js b/test/test.js
index <HASH>..<HASH> 100644
--- a/test/test.js
+++ b/test/test.js
@@ -83,7 +83,6 @@ function getManifest(server, callback) {
assert('COMMENT' in manifest, 'Comment expected in manifest');
assert('CACHE' in manifest, 'Cache expected in manifest');
assert('NETWORK' in manifest, 'Network section expected in manifest');
- assert.deepEqual(manifest['NETWORK'], ['*'], 'Network section doesn\'t hold expected value'); //TODO: pull this from opts
assert.deepEqual(manifest['CACHE'].slice().sort(), manifest['CACHE'], 'Cache entries should be soted');
callback(null, manifest);
@@ -188,14 +187,18 @@ describe('Check initial data', function() {
getManifest(server, function(err, manifest) {
if (err) {
done(err);
+ server.stop();
return;
}
try {
console.log('CACHE is ' + JSON.stringify(manifest['CACHE']));
- server.stop();
+ assert.deepEqual(manifest['NETWORK'], ['*'], 'Network section doesn\'t hold expected value'); //TODO: pull this from opts
+ assert.deepEqual(manifest['CACHE'], INITIAL_URLS, 'Cache section doesn\'t hold expected value(s)');
done();
} catch (err) {
done(err);
+ } finally {
+ server.stop();
}
});
}); | Make sure server gets stopped even in error cases. Assert that initial url list is as expected |
diff --git a/src/Http/Requests/Frontarea/RegistrationRequest.php b/src/Http/Requests/Frontarea/RegistrationRequest.php
index <HASH>..<HASH> 100644
--- a/src/Http/Requests/Frontarea/RegistrationRequest.php
+++ b/src/Http/Requests/Frontarea/RegistrationRequest.php
@@ -5,14 +5,14 @@ declare(strict_types=1);
namespace Cortex\Tenants\Http\Requests\Frontarea;
use Illuminate\Foundation\Http\FormRequest;
-use Rinvex\Fort\Exceptions\GenericException;
+use Cortex\Foundation\Exceptions\GenericException;
class RegistrationRequest extends FormRequest
{
/**
* Determine if the user is authorized to make this request.
*
- * @throws \Rinvex\Fort\Exceptions\GenericException
+ * @throws \Cortex\Foundation\Exceptions\GenericException
*
* @return bool
*/ | Move GenericException to cortex/foundation from rinvex/fort |
diff --git a/lib/fog/libvirt/compute.rb b/lib/fog/libvirt/compute.rb
index <HASH>..<HASH> 100644
--- a/lib/fog/libvirt/compute.rb
+++ b/lib/fog/libvirt/compute.rb
@@ -46,10 +46,10 @@ module Fog
require 'libvirt'
begin
- if options[:libvirt_user] and options[:libvirt_password]
+ if options[:libvirt_username] and options[:libvirt_password]
@connection = ::Libvirt::open_auth(@uri.uri, [::Libvirt::CRED_AUTHNAME, ::Libvirt::CRED_PASSPHRASE]) do |cred|
if cred['type'] == ::Libvirt::CRED_AUTHNAME
- res = options[:libvirt_user]
+ res = options[:libvirt_username]
elsif cred["type"] == ::Libvirt::CRED_PASSPHRASE
res = options[:libvirt_password]
else | Made libvirt username param consistent with other providers libvirt_user -> libvirt_username |
diff --git a/src/LabelCollection.php b/src/LabelCollection.php
index <HASH>..<HASH> 100644
--- a/src/LabelCollection.php
+++ b/src/LabelCollection.php
@@ -138,14 +138,9 @@ class LabelCollection implements \Countable
*/
public function filter(callable $filterFunction)
{
- $filteredLabels = [];
- foreach ($this->labels as $label) {
- if ($filterFunction($label)) {
- $filteredLabels[] = $label;
- }
- }
-
- return new LabelCollection($filteredLabels);
+ return new LabelCollection(
+ array_filter($this->labels, $filterFunction)
+ );
}
/** | III-<I> Use array_filter inside the filter method of LabelCollection. |
diff --git a/tools/diagnose.py b/tools/diagnose.py
index <HASH>..<HASH> 100644
--- a/tools/diagnose.py
+++ b/tools/diagnose.py
@@ -110,6 +110,8 @@ def check_mxnet():
print('Commit Hash :', ch)
except ImportError:
print('No MXNet installed.')
+ except FileNotFoundError:
+ print('Hashtag not found. Not installed from pre-built package.')
except Exception as e:
import traceback
if not isinstance(e, IOError): | fix diagnose if hashtag not found. (#<I>) |
diff --git a/peri/fft.py b/peri/fft.py
index <HASH>..<HASH> 100644
--- a/peri/fft.py
+++ b/peri/fft.py
@@ -100,6 +100,15 @@ if hasfftw:
@atexit.register
def goodbye():
save_wisdom(conf.get_wisdom())
+
+ # need to provide a function which conditionally normalizes the result of
+ # an ifft because fftw does not norm while numpy does
+ def fftnorm(arr):
+ return arr * arr.size
+
else:
fftkwargs = {}
fft = np.fft
+
+ def fftnorm(arr):
+ return arr | fft: adding fftnorm function so that fftw and numpy normalization are equal |
diff --git a/src/Commands/BootpackCreatePackage.php b/src/Commands/BootpackCreatePackage.php
index <HASH>..<HASH> 100644
--- a/src/Commands/BootpackCreatePackage.php
+++ b/src/Commands/BootpackCreatePackage.php
@@ -185,8 +185,8 @@ class BootpackCreatePackage extends Command
$this->comment('Registering the service provider in the current laravel application...');
Helpers::strReplaceFile(
- 'ConsoleTVs\\Bootpack\\BootpackServiceProvider::class',
- "ConsoleTVs\\Bootpack\\BootpackServiceProvider::class,\n\t\t"
+ 'App\\Providers\\RouteServiceProvider::class,',
+ "App\\Providers\\RouteServiceProvider::class,\n\t\t"
. $package->namespace . "\\" . ucfirst($p_name) . 'ServiceProvider::class',
base_path('config/app.php')
); | Update BootpackCreatePackage.php |
diff --git a/test/permissions.js b/test/permissions.js
index <HASH>..<HASH> 100644
--- a/test/permissions.js
+++ b/test/permissions.js
@@ -5,23 +5,21 @@ var access = require('../lib/middleware/access')
var assert = require('assertmessage')
var http = require('http')
-var sandbox
-beforeEach(function (done) {
- if (sandbox) {
- sandbox.restore()
- }
- sandbox = sinon.sandbox.create()
- done()
-})
-
-describe('permissions', function () {
+describe('Permissions', function () {
var noop = function () {}
var res = { send: noop }
+ var sandbox
+
+ before(function () {
+ sandbox = sinon.sandbox.create()
+ })
beforeEach(function () {
+ sandbox.restore()
this.mock = sandbox.mock(res)
- this.mock.expects('send').once()
- .withArgs(403, { msg: http.STATUS_CODES[403] })
+ this.mock.expects('send').once().withArgs(403, {
+ msg: http.STATUS_CODES[403]
+ })
})
describe('with access that returns', function () { | refactor(tests): permissions |
diff --git a/zappa/zappa.py b/zappa/zappa.py
index <HASH>..<HASH> 100644
--- a/zappa/zappa.py
+++ b/zappa/zappa.py
@@ -749,7 +749,7 @@ class Zappa(object):
integration.Credentials = credentials
integration.IntegrationHttpMethod = 'POST'
integration.IntegrationResponses = []
- # integration.PassthroughBehavior = 'NEVER'
+ integration.PassthroughBehavior = 'NEVER'
# integration.RequestParameters = {}
integration.RequestTemplates = content_mapping_templates
integration.Type = 'AWS'
@@ -1057,7 +1057,13 @@ class Zappa(object):
# Something has gone wrong.
# Is raising enough? Should we also remove the Lambda function?
- if result['Stacks'][0]['StackStatus'] == 'ROLLBACK_IN_PROGRESS':
+ if result['Stacks'][0]['StackStatus'] in [
+ 'DELETE_COMPLETE',
+ 'DELETE_IN_PROGRESS',
+ 'ROLLBACK_IN_PROGRESS',
+ 'UPDATE_ROLLBACK_COMPLETE_CLEANUP_IN_PROGRESS',
+ 'UPDATE_ROLLBACK_COMPLETE'
+ ]:
raise EnvironmentError("Stack creation failed. Please check your CloudFormation console. You may also need to `undeploy`.")
count = 0 | put passthrough back, add more stack failure cases |
diff --git a/activerecord/lib/active_record/attribute_methods.rb b/activerecord/lib/active_record/attribute_methods.rb
index <HASH>..<HASH> 100644
--- a/activerecord/lib/active_record/attribute_methods.rb
+++ b/activerecord/lib/active_record/attribute_methods.rb
@@ -150,7 +150,7 @@ module ActiveRecord
BLACKLISTED_CLASS_METHODS.include?(method_name.to_s) || class_method_defined_within?(method_name, Base)
end
- def class_method_defined_within?(name, klass, superklass = klass.superclass) # :nodoc
+ def class_method_defined_within?(name, klass, superklass = klass.superclass) # :nodoc:
if klass.respond_to?(name, true)
if superklass.respond_to?(name, true)
klass.method(name).owner != superklass.method(name).owner | Fixed syntax error in RDoc directive |
diff --git a/core/src/main/java/hudson/tasks/MailSender.java b/core/src/main/java/hudson/tasks/MailSender.java
index <HASH>..<HASH> 100644
--- a/core/src/main/java/hudson/tasks/MailSender.java
+++ b/core/src/main/java/hudson/tasks/MailSender.java
@@ -363,5 +363,5 @@ public class MailSender {
public static boolean debug = false;
- private static final int MAX_LOG_LINES = 250;
+ private static final int MAX_LOG_LINES = Integer.getInteger(MailSender.class.getName()+".maxLogLines",250);
} | allowed this to be configured via a system property. Interim solution until we really fix the e-mail notification.
git-svn-id: <URL> |
diff --git a/src/Persistence/DbRepositoryBase.php b/src/Persistence/DbRepositoryBase.php
index <HASH>..<HASH> 100644
--- a/src/Persistence/DbRepositoryBase.php
+++ b/src/Persistence/DbRepositoryBase.php
@@ -145,14 +145,9 @@ abstract class DbRepositoryBase implements IObjectSetWithLoadCriteriaSupport
$this->validateHasRequiredColumns($sql, reset($rows));
- $table = $this->mapper->getPrimaryTable();
- $rowObjects = [];
+ $rowSet = $this->connection->getPlatform()->mapResultSetToPhpForm($this->mapper->getSelect()->getResultSetTableStructure(), $rows);
- foreach ($rows as $row) {
- $rowObjects[] = new Row($table, $row);
- }
-
- return $this->mapper->loadAll($this->loadingContext, $rowObjects);
+ return $this->mapper->loadAll($this->loadingContext, $rowSet->getRows());
}
protected function replaceQueryPlaceholders(string $sql) | Fix issue with loading objects from custom SQL query |
diff --git a/lib/buildpack/packager/package.rb b/lib/buildpack/packager/package.rb
index <HASH>..<HASH> 100644
--- a/lib/buildpack/packager/package.rb
+++ b/lib/buildpack/packager/package.rb
@@ -13,7 +13,7 @@ module Buildpack
def copy_buildpack_to_temp_dir(temp_dir)
FileUtils.cp_r(File.join(options[:root_dir], '.'), temp_dir)
- a_manifest = YAML.load_file(options[:manifest_path])
+ a_manifest = YAML.load_file(options[:manifest_path]).with_indifferent_access
unless options[:stack] == :any_stack
a_manifest = edit_manifest_for_stack(a_manifest)
end | Make symbols and strings both work.
Unclear how to predict which way a given manifest will get read.
[#<I>] |
diff --git a/jquery.i18n.properties.js b/jquery.i18n.properties.js
index <HASH>..<HASH> 100644
--- a/jquery.i18n.properties.js
+++ b/jquery.i18n.properties.js
@@ -254,7 +254,7 @@
/** Language reported by browser, normalized code */
$.i18n.browserLang = function () {
- return normaliseLanguageCode(navigator.language /* Mozilla */ || navigator.userLanguage /* IE */);
+ return normaliseLanguageCode(navigator.languages[0] /* Mozilla 32+ */ || navigator.language /* Mozilla */ || navigator.userLanguage /* IE */);
}; | Issue <I>: Language detection should use navigator.languages when able
In Chrome / Firefox <I>+ the list of preferred languages is set in
the navigator.languages array, where the primary preferred language is
placed in index 0.
<URL> |
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -3,8 +3,17 @@ import os
import sys
from setuptools import setup, Extension, find_packages
-from Cython.Build import cythonize
+try:
+ from Cython.Build import cythonize
+except ImportError:
+ # note: This is only to cheat the RTD builds from single requirement file
+ # with -e flag. This package will be either distributed with C
+ # sources or as properly built wheels from Travis CI and Appveyor.
+ if os.environ.get('READTHEDOCS', None) == 'True':
+ cythonize = lambda x: x
+ else:
+ raise
try:
from pypandoc import convert | docs: try to hackaround the installation issues on RTD |
diff --git a/mock/mock.py b/mock/mock.py
index <HASH>..<HASH> 100644
--- a/mock/mock.py
+++ b/mock/mock.py
@@ -357,7 +357,11 @@ class _SentinelObject(object):
return 'sentinel.%s' % self.name
def __reduce__(self):
- return 'sentinel.%s' % self.name
+ return _unpickle_sentinel, (self.name, )
+
+
+def _unpickle_sentinel(name):
+ return getattr(sentinel, name)
class _Sentinel(object): | Serhiy's approach didn't work on Python 2 or <I> |
diff --git a/lib/rules/no-multiple-empty-lines.js b/lib/rules/no-multiple-empty-lines.js
index <HASH>..<HASH> 100644
--- a/lib/rules/no-multiple-empty-lines.js
+++ b/lib/rules/no-multiple-empty-lines.js
@@ -31,7 +31,7 @@ module.exports = function(context) {
lastLocation,
blankCounter = 0,
location,
- trimmedLines = lines.map(function(str){
+ trimmedLines = lines.map(function(str) {
return str.trim();
}); | Fix: resolve linting issue in (fixes #<I>) |
diff --git a/s2reader/s2reader.py b/s2reader/s2reader.py
index <HASH>..<HASH> 100644
--- a/s2reader/s2reader.py
+++ b/s2reader/s2reader.py
@@ -49,6 +49,12 @@ class SentinelDataSet(object):
if self.is_zip:
self._zipfile = zipfile.ZipFile(self.path, 'r')
self._zip_root = os.path.basename(filename)
+ if not self._zip_root in self._zipfile.namelist():
+ self._zip_root = os.path.basename(filename) + ".SAFE/"
+ try:
+ assert self._zip_root in self._zipfile.namelist()
+ except:
+ raise IOError("unknown zipfile structure")
self.manifest_safe_path = os.path.join(
self._zip_root, "manifest.safe")
else: | handling different ZIP structure, if SAFE folder is root |
diff --git a/dcf/creditcurve.py b/dcf/creditcurve.py
index <HASH>..<HASH> 100644
--- a/dcf/creditcurve.py
+++ b/dcf/creditcurve.py
@@ -17,9 +17,24 @@ from interpolation import constant, linear, loglinear, logconstant
class CreditCurve(RateCurve):
""" generic curve for default probabilities (under construction) """
-
_forward_tenor = '1Y'
+ def cast(self, cast_type, **kwargs):
+ old_domain = kwargs.get('domain', self.domain)
+
+ if issubclass(cast_type, (SurvivalProbabilityCurve,)):
+ domain = kwargs.get('domain', self.domain)
+ origin = kwargs.get('origin', self.origin)
+ new_domain = list(domain) + [origin + '1d']
+ kwargs['domain'] = sorted(set(new_domain))
+
+ if issubclass(cast_type, (SurvivalProbabilityCurve,)):
+ domain = kwargs.get('domain', self.domain)
+ new_domain = list(domain) + [max(domain) + '1d']
+ kwargs['domain'] = sorted(set(new_domain))
+
+ return super(CreditCurve, self).cast(cast_type, **kwargs)
+
def get_survival_prob(self, start, stop=None): # aka get_discount_factor
if stop is None:
return self.get_survival_prob(self.origin, start) | adding credit curves + unit tests + cast |
diff --git a/tldap/backend/base.py b/tldap/backend/base.py
index <HASH>..<HASH> 100644
--- a/tldap/backend/base.py
+++ b/tldap/backend/base.py
@@ -165,6 +165,9 @@ class LDAPbase(object):
# Loop over list of search results
for result_item in result_list:
+ # skip searchResRef for now
+ if result_item['type'] != "searchResEntry":
+ continue
dn = result_item['dn']
attributes = result_item['raw_attributes']
# did we already retrieve this from cache? | Skip searchResRef results.
We don't support referrals. Yet.
Change-Id: Ieb4a<I>bb<I>b9ae4c5e<I>fbf9d<I>b<I> |
diff --git a/lib/dataflow/nodes/compute_node.rb b/lib/dataflow/nodes/compute_node.rb
index <HASH>..<HASH> 100644
--- a/lib/dataflow/nodes/compute_node.rb
+++ b/lib/dataflow/nodes/compute_node.rb
@@ -221,6 +221,12 @@ module Dataflow
on_computing_started
start_time = Time.now
+ if data_node.present? && clear_data_on_compute != data_node.use_double_buffering
+ # make sure the data node has a compatible settings
+ data_node.use_double_buffering = clear_data_on_compute
+ data_node.save
+ end
+
# update this node's schema with the necessary fields
data_node&.update_schema(required_schema) | Force the data node to use double buffering when necessary. |
diff --git a/extensions/helper/Number.php b/extensions/helper/Number.php
index <HASH>..<HASH> 100644
--- a/extensions/helper/Number.php
+++ b/extensions/helper/Number.php
@@ -12,7 +12,8 @@
namespace cms_core\extensions\helper;
-use lihtium\core\Environment;
+use lithium\core\Environment;
+use NumberFormatter;
class Number extends \lithium\template\Helper { | Add missing deps to number helper. |
diff --git a/core/server/worker/src/main/java/alluxio/worker/block/UnderFileSystemBlockReader.java b/core/server/worker/src/main/java/alluxio/worker/block/UnderFileSystemBlockReader.java
index <HASH>..<HASH> 100644
--- a/core/server/worker/src/main/java/alluxio/worker/block/UnderFileSystemBlockReader.java
+++ b/core/server/worker/src/main/java/alluxio/worker/block/UnderFileSystemBlockReader.java
@@ -161,7 +161,7 @@ public final class UnderFileSystemBlockReader implements BlockReader {
}
byte[] data = new byte[(int) bytesToRead];
int bytesRead = 0;
- Preconditions.checkNotNull(mUnderFileSystemInputStream);
+ Preconditions.checkNotNull(mUnderFileSystemInputStream, "mUnderFileSystemInputStream");
while (bytesRead < bytesToRead) {
int read;
try { | Passing variable name to Preconditions.checkNotNull (#<I>) |
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -45,14 +45,14 @@ extras = {
"ml": ["numpy>=1.16,<1.17", "torch==1.0.0"],
"ner": ["sklearn-crfsuite>=0.3.6"],
"ssg": ["ssg>=0.0.6"],
- "thai2fit": ["emoji==0.5.1", "gensim==3.0", "numpy>=1.16,<1.17"],
+ "thai2fit": ["emoji==0.5.1", "gensim==3.1.0", "numpy>=1.16,<1.17"],
"thai2rom": ["torch==1.0.0", "numpy>=1.16,<1.17"],
"full": [
"artagger>=0.1.0.3",
"attacut>=1.0.4",
"emoji==0.5.1",
"epitran>=1.1",
- "gensim==3.0",
+ "gensim==3.1.0",
"numpy>=1.16,<1.17",
"pandas>=0.24,<0.25",
"pyicu>=2.3", | Move gensim up to <I> |
diff --git a/docs/conf.py b/docs/conf.py
index <HASH>..<HASH> 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -22,6 +22,8 @@ import sys
sys.path.insert(0, os.path.abspath('..'))
+import pypika
+
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
@@ -35,7 +37,7 @@ extensions = [
'sphinx.ext.doctest',
'sphinx.ext.todo',
'sphinx.ext.coverage',
- 'sphinx.ext.pngmath',
+ 'sphinx.ext.ingmath',
'sphinx.ext.viewcode',
]
@@ -63,9 +65,9 @@ author = 'Timothy Heys'
# built documents.
#
# The short X.Y version.
-version = '0.0.1'
+version = pypika.__version__
# The full version, including alpha/beta/rc tags.
-release = '0.0.1'
+release = pypika.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages. | Updated pypika version in docs |
diff --git a/src/animations/animatedImage/index.js b/src/animations/animatedImage/index.js
index <HASH>..<HASH> 100644
--- a/src/animations/animatedImage/index.js
+++ b/src/animations/animatedImage/index.js
@@ -90,12 +90,13 @@ class AnimatedImage extends BaseComponent {
render() {
const {containerStyle, loader, ...others} = this.props;
return (
- <View testID={this.testID} style={containerStyle}>
+ <View style={containerStyle}>
<UIAnimatedImage
{...others}
style={[{opacity: this.state.opacity}, this.style]}
source={this.source}
onLoad={() => this.onLoad()}
+ testID={this.testID}
/>
{this.state.isLoading && loader && (
<View style={{...StyleSheet.absoluteFillObject, justifyContent: 'center'}}> | Fixed duplicate testID in AnimatedImage passed by Avatar (#<I>) |
diff --git a/src/defaults.js b/src/defaults.js
index <HASH>..<HASH> 100644
--- a/src/defaults.js
+++ b/src/defaults.js
@@ -44,7 +44,7 @@ module.exports = async (argv = {}) => {
const opts = mixinDeep(
{ project: { name, description: desc } },
defaults,
- { locals: await latestDeps() },
+ { locals: await latestDeps(argv.pkg) },
options,
); | fix: allow passing settings.pkg |
diff --git a/ELiDE/ELiDE/calendar/__init__.py b/ELiDE/ELiDE/calendar/__init__.py
index <HASH>..<HASH> 100644
--- a/ELiDE/ELiDE/calendar/__init__.py
+++ b/ELiDE/ELiDE/calendar/__init__.py
@@ -43,7 +43,6 @@ class CalendarDropMenuButton(CalendarWidget, Button):
self.modalview.add_widget(container)
container.size = container.minimum_size
-
def on_options(self, *args):
if not self.modalview:
Clock.schedule_once(self.on_options, 0) | Delete extraneous whitespace |
diff --git a/failure.go b/failure.go
index <HASH>..<HASH> 100644
--- a/failure.go
+++ b/failure.go
@@ -38,13 +38,18 @@ type FailureRecord struct {
Error string
}
-// Record a failure for the currently running test. Most users will want to use
-// ExpectThat, ExpectEq, etc. instead of this function. Those that do want to
-// report arbitrary errors will probably be satisfied with AddFailure, which is
-// easier to use.
+// Record a failure for the currently running test (and continue running it).
+// Most users will want to use ExpectThat, ExpectEq, etc. instead of this
+// function. Those that do want to report arbitrary errors will probably be
+// satisfied with AddFailure, which is easier to use.
func AddFailureRecord(r FailureRecord)
// Call AddFailureRecord with a record whose file name and line number come
// from the caller of this function, and whose error string is created by
// calling fmt.Sprintf using the arguments to this function.
func AddFailure(format string, a ...interface{})
+
+// Immediately stop executing the running test, causing it to fail with the
+// failures previously recorded. Behavior is undefined if no failures have been
+// recorded.
+func AbortTest() | Declared AbortTest too. |
diff --git a/lib/index.js b/lib/index.js
index <HASH>..<HASH> 100644
--- a/lib/index.js
+++ b/lib/index.js
@@ -1,5 +1,10 @@
-var Strategy = require('./strategy');
+'use strict';
+
+var Strategy = require('./strategy'),
+ ExtractJwt = require('./extract_jwt.js');
+
module.exports = {
- Strategy: Strategy
+ Strategy: Strategy,
+ ExtractJwt : ExtractJwt
}; | Export the jwt extraction functions. |
diff --git a/nomenclate/ui/token_widget.py b/nomenclate/ui/token_widget.py
index <HASH>..<HASH> 100644
--- a/nomenclate/ui/token_widget.py
+++ b/nomenclate/ui/token_widget.py
@@ -12,14 +12,11 @@ print(QtWidgets.__file__)
class CustomCompleter(QtWidgets.QCompleter):
def __init__(self, options, parent=None):
self.options = QtCore.QStringListModel(options)
- super(CustomCompleter, self).__init__(parent=parent)
+ super(CustomCompleter, self).__init__(self.options, parent)
self.popup().setStyleSheet(str('QListView{ color: rgb(200, 200, 200); '
- 'background-color: rgba(200, 200, 200, .1);'
- '}'
- 'QListView::item:selected{ '
- 'background-color: rgba(255, 0, 0); }'))
- # always show all (filtered) completions
- self.setCompletionMode(self.PopupCompletion)
+ 'background-color: rgba(200, 200, 200, .4);'
+ '}'))
+ self.setCompletionMode(self.UnfilteredPopupCompletion)
class TokenLineEdit(QtWidgets.QLineEdit): | Just left Completer showing unfiltered results and moving on. |
diff --git a/danceschool/core/forms.py b/danceschool/core/forms.py
index <HASH>..<HASH> 100644
--- a/danceschool/core/forms.py
+++ b/danceschool/core/forms.py
@@ -75,7 +75,7 @@ class CheckboxSelectMultipleWithDisabled(CheckboxSelectMultiple):
To make an option part of a separate "override" choice set, add a dictionary key {'override': True}
"""
- def render(self, name, value, attrs=None, choices=()):
+ def render(self, name, value, attrs=None, choices=(), renderer=None):
if value is None:
value = []
has_id = attrs and 'id' in attrs | Added renderer option for Django <I>+ compatibility |
diff --git a/api/python/quilt3/api.py b/api/python/quilt3/api.py
index <HASH>..<HASH> 100644
--- a/api/python/quilt3/api.py
+++ b/api/python/quilt3/api.py
@@ -25,7 +25,7 @@ def copy(src, dest):
-ApiTelemetry("api.delete_package")
+@ApiTelemetry("api.delete_package")
def delete_package(name, registry=None, top_hash=None):
"""
Delete a package. Deletes only the manifest entries and not the underlying files. | Fixed missing @ for decorator (#<I>) |
diff --git a/lib/Cake/Controller/Component/SecurityComponent.php b/lib/Cake/Controller/Component/SecurityComponent.php
index <HASH>..<HASH> 100644
--- a/lib/Cake/Controller/Component/SecurityComponent.php
+++ b/lib/Cake/Controller/Component/SecurityComponent.php
@@ -16,6 +16,8 @@
* @since CakePHP(tm) v 0.10.8.2156
* @license MIT License (http://www.opensource.org/licenses/mit-license.php)
*/
+
+App::uses('Component', 'Controller');
App::uses('String', 'Utility');
App::uses('Security', 'Utility');
diff --git a/lib/Cake/tests/cases/libs/controller/components/security.test.php b/lib/Cake/tests/cases/libs/controller/components/security.test.php
index <HASH>..<HASH> 100644
--- a/lib/Cake/tests/cases/libs/controller/components/security.test.php
+++ b/lib/Cake/tests/cases/libs/controller/components/security.test.php
@@ -16,8 +16,9 @@
* @since CakePHP(tm) v 1.2.0.5435
* @license MIT License (http://www.opensource.org/licenses/mit-license.php)
*/
+
+App::uses('SecurityComponent', 'Controller/Component');
App::uses('Controller', 'Controller');
-App::uses('SecurityComponent', 'Component');
/**
* TestSecurityComponent | Fixing sSecurity component tests |
diff --git a/src/main/java/com/simpligility/maven/plugins/android/phase08preparepackage/DexMojo.java b/src/main/java/com/simpligility/maven/plugins/android/phase08preparepackage/DexMojo.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/simpligility/maven/plugins/android/phase08preparepackage/DexMojo.java
+++ b/src/main/java/com/simpligility/maven/plugins/android/phase08preparepackage/DexMojo.java
@@ -634,7 +634,7 @@ public class DexMojo extends AbstractAndroidMojo
Set< File> inputFiles = getDexInputFiles();
StringBuilder sb = new StringBuilder();
- sb.append( '"' ).append( StringUtils.join( inputFiles, File.pathSeparatorChar ) ).append( '"' );
+ sb.append( StringUtils.join( inputFiles, File.pathSeparatorChar ) );
commands.add( sb.toString() );
String executable = getAndroidSdk().getMainDexClasses().getAbsolutePath(); | Fix quotes problem with dex and multidex
- see <URL> |
diff --git a/lib/graphql/client/hash_with_indifferent_access.rb b/lib/graphql/client/hash_with_indifferent_access.rb
index <HASH>..<HASH> 100644
--- a/lib/graphql/client/hash_with_indifferent_access.rb
+++ b/lib/graphql/client/hash_with_indifferent_access.rb
@@ -1,5 +1,6 @@
# frozen_string_literal: true
require "active_support/inflector"
+require "forwardable"
module GraphQL
class Client | fix bug - missing require forwardable |
diff --git a/tweepy/cursor.py b/tweepy/cursor.py
index <HASH>..<HASH> 100644
--- a/tweepy/cursor.py
+++ b/tweepy/cursor.py
@@ -180,7 +180,7 @@ class PageIterator(BaseIterator):
def __init__(self, method, *args, **kwargs):
BaseIterator.__init__(self, method, *args, **kwargs)
- self.current_page = 0
+ self.current_page = 1
def next(self):
if self.limit > 0: | Start on page 1 for PageIterator |
diff --git a/lib/model/txproposal.js b/lib/model/txproposal.js
index <HASH>..<HASH> 100644
--- a/lib/model/txproposal.js
+++ b/lib/model/txproposal.js
@@ -169,6 +169,25 @@ TxProposal.prototype.getRawTx = function() {
return t.uncheckedSerialize();
};
+TxProposal.prototype.estimateFee = function(walletN) {
+ // Note: found empirically based on all multisig P2SH inputs and within m & n allowed limits.
+ var safetyMargin = 0.05;
+
+ var walletM = this.requiredSignatures;
+
+ var overhead = 4 + 4 + 9 + 9;
+ var inputSize = walletM * 72 + walletN * 36 + 44;
+ var outputSize = 34;
+ var nbInputs = this.inputs.length;
+ var nbOutputs = (_.isArray(this.outputs) ? this.outputs.length : 1) + 1;
+
+ var size = overhead + inputSize * nbInputs + outputSize * nbOutputs;
+
+ var fee = this.feePerKb * (size * (1 + safetyMargin)) / 1000;
+
+ // Round up to nearest bit
+ this.fee = parseInt((Math.ceil(fee / 100) * 100).toFixed(0));
+};
/**
* getTotalAmount | add fee estimation method to txp |
diff --git a/lib/moodle2cc/canvas/question.rb b/lib/moodle2cc/canvas/question.rb
index <HASH>..<HASH> 100644
--- a/lib/moodle2cc/canvas/question.rb
+++ b/lib/moodle2cc/canvas/question.rb
@@ -115,7 +115,7 @@ module Moodle2CC::Canvas
material = question.text
material = question.content || '' if material.nil?
- material.gsub!(/\{(.*?)\}/, '[\1]')
+ material = material.gsub(/\{(.*?)\}/, '[\1]')
material = RDiscount.new(material).to_html if question.format == 4 # markdown
@material = material | Don't modify question text in place |
diff --git a/python/test/bigdl/dlframes/test_dl_image_transformer.py b/python/test/bigdl/dlframes/test_dl_image_transformer.py
index <HASH>..<HASH> 100644
--- a/python/test/bigdl/dlframes/test_dl_image_transformer.py
+++ b/python/test/bigdl/dlframes/test_dl_image_transformer.py
@@ -49,7 +49,7 @@ class TestDLImageTransformer():
# test, and withhold the support for Spark 1.5, until the unit test failure reason
# is clarified.
- if not self.sc.version.startswith("1.5"):
+ if not self.sc.version.startswith("1.5" and "3.0"):
image_frame = DLImageReader.readImages(self.image_path, self.sc)
transformer = DLImageTransformer(
Pipeline([Resize(256, 256), CenterCrop(224, 224), | [WIP] spark <I> (#<I>)
* spark <I> |
diff --git a/go/libkb/secret_store_file.go b/go/libkb/secret_store_file.go
index <HASH>..<HASH> 100644
--- a/go/libkb/secret_store_file.go
+++ b/go/libkb/secret_store_file.go
@@ -8,6 +8,7 @@ import (
"io/ioutil"
"os"
"path/filepath"
+ "runtime"
)
var ErrSecretForUserNotFound = NotFoundError{Msg: "No secret found for user"}
@@ -40,8 +41,11 @@ func (s *SecretStoreFile) StoreSecret(username NormalizedUsername, secret []byte
if err != nil {
return err
}
- if err := f.Chmod(0600); err != nil {
- return err
+ if runtime.GOOS != "windows" {
+ // os.Fchmod not supported on windows
+ if err := f.Chmod(0600); err != nil {
+ return err
+ }
}
if _, err := f.Write(secret); err != nil {
return err | Don't run chmod on file ptr in windows. |
diff --git a/lib/transports/polling-jsonp.js b/lib/transports/polling-jsonp.js
index <HASH>..<HASH> 100644
--- a/lib/transports/polling-jsonp.js
+++ b/lib/transports/polling-jsonp.js
@@ -67,6 +67,13 @@ function JSONPPolling (opts) {
// append to query string
this.query.j = this.index;
+
+ // prevent spurious errors from being emitted when the window is unloaded
+ if (global.document && global.addEventListener) {
+ global.addEventListener('beforeunload', function () {
+ if (self.script) self.script.onerror = empty;
+ });
+ }
}
/**
@@ -82,7 +89,7 @@ util.inherits(JSONPPolling, Polling);
JSONPPolling.prototype.supportsBinary = false;
/**
- * Closes the socket
+ * Closes the socket.
*
* @api private
*/
@@ -108,7 +115,7 @@ JSONPPolling.prototype.doClose = function () {
*/
JSONPPolling.prototype.doPoll = function () {
- var self = this;
+ var self = this;
var script = document.createElement('script');
if (this.script) {
@@ -126,7 +133,6 @@ JSONPPolling.prototype.doPoll = function () {
insertAt.parentNode.insertBefore(script, insertAt);
this.script = script;
-
if (util.ua.gecko) {
setTimeout(function () {
var iframe = document.createElement('iframe'); | polling-jsonp: prevent spurious errors from being emitted when the window is unloaded |
diff --git a/pantsbuild_migration.py b/pantsbuild_migration.py
index <HASH>..<HASH> 100644
--- a/pantsbuild_migration.py
+++ b/pantsbuild_migration.py
@@ -257,7 +257,7 @@ def handle_path(path):
print('PROCESSING: %s' % path)
srcfile = BuildFile(path)
srcfile.process()
- elif path.endswith('.rst') or path.endswith('.sh') or path.endswith('pants.bootstrap') or path.endswith('taskdev.asc'):
+ elif path.endswith('.rst') or path.endswith('.sh') or path.endswith('pants.bootstrap'):
print('PROCESSING: %s' % path)
with open(path, 'r') as infile:
content = infile.read() | Don't try to edit the asc file.
(sapling split of <I>ef3ca<I>cee<I>e9dcaefeb7d1dd3ccd<I>ed5) |
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -9,7 +9,7 @@ from setuptools import setup
if TkVersion <= 8.5:
- message = "This version of ttkthemes does not support Tk 8.5 and earlier. Please install an earlier version."
+ message = "This version of ttkthemes does not support Tk 8.5 and earlier. Please install a later version."
raise RuntimeError(message) | Fix RuntimeError message upon installation with Tk <I> or earlier (#<I>) |
diff --git a/DependencyInjection/Compiler/AddDashboardWidgetsPass.php b/DependencyInjection/Compiler/AddDashboardWidgetsPass.php
index <HASH>..<HASH> 100644
--- a/DependencyInjection/Compiler/AddDashboardWidgetsPass.php
+++ b/DependencyInjection/Compiler/AddDashboardWidgetsPass.php
@@ -25,17 +25,12 @@ class AddDashboardWidgetsPass implements CompilerPassInterface
*/
public function process(ContainerBuilder $container): void
{
- $ids = $container->findTaggedServiceIds('darvin_admin.dashboard_widget');
-
- if (empty($ids)) {
- return;
- }
+ $blacklist = $container->getParameter('darvin_admin.dashboard.blacklist');
+ $dashboard = $container->getDefinition('darvin_admin.dashboard');
+ $ids = $container->findTaggedServiceIds('darvin_admin.dashboard_widget');
(new TaggedServiceIdsSorter())->sort($ids);
- $dashboard = $container->getDefinition('darvin_admin.dashboard');
- $blacklist = $container->getParameter('darvin_admin.dashboard.blacklist');
-
foreach (array_keys($ids) as $id) {
if (!in_array($id, $blacklist)) {
$dashboard->addMethodCall('addWidget', [new Reference($id)]); | Simplify add dashboard widgets compiler pass. |
diff --git a/estnltk/estnltk/taggers/system/dict_taggers/phrase_tagger.py b/estnltk/estnltk/taggers/system/dict_taggers/phrase_tagger.py
index <HASH>..<HASH> 100644
--- a/estnltk/estnltk/taggers/system/dict_taggers/phrase_tagger.py
+++ b/estnltk/estnltk/taggers/system/dict_taggers/phrase_tagger.py
@@ -140,8 +140,8 @@ class PhraseTagger(Tagger):
for s in input_layer[i:i + len(tail) + 1])
span = EnvelopingSpan(base_span=base_span, layer=layer)
for record in self.vocabulary[phrase]:
- print(record)
- print(output_attributes)
+ #print(record)
+ #print(output_attributes)
annotation = Annotation(span, **{attr: record[attr]
for attr in output_attributes})
is_valid = self.decorator(span, annotation) | Removed printing from PhraseTaggr |
diff --git a/yalla-core.js b/yalla-core.js
index <HASH>..<HASH> 100644
--- a/yalla-core.js
+++ b/yalla-core.js
@@ -222,13 +222,12 @@ var yalla = (function () {
framework.getParentComponent = function(node){
var _node = node;
- while(_node.parentNode){
- var _parentNode = _node.parentNode;
- if('element' in _parentNode.attributes || _parentNode.nodeName == 'BODY'){
- return _parentNode;
+ do{
+ if('element' in _node.attributes || _node.nodeName == 'BODY'){
+ return _node;
}
_node = _node.parentNode;
- }
+ }while(_node)
return null;
}; | Enrich this object in .trigger. Todo for .bind and expression |
diff --git a/core/src/main/java/com/orientechnologies/orient/core/iterator/ORecordIteratorClass.java b/core/src/main/java/com/orientechnologies/orient/core/iterator/ORecordIteratorClass.java
index <HASH>..<HASH> 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/iterator/ORecordIteratorClass.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/iterator/ORecordIteratorClass.java
@@ -192,6 +192,10 @@ public class ORecordIteratorClass<REC extends ORecordInternal<?>> extends ORecor
current.clusterPosition = firstClusterPosition - 1;
}
+ record = getTransactionEntry();
+ if (record != null)
+ return (REC) record;
+
throw new NoSuchElementException();
} | Fixed bug on iterating class records inside a transaction |
diff --git a/polyaxon/polypod/notebook.py b/polyaxon/polypod/notebook.py
index <HASH>..<HASH> 100644
--- a/polyaxon/polypod/notebook.py
+++ b/polyaxon/polypod/notebook.py
@@ -130,7 +130,7 @@ class NotebookSpawner(ProjectJobSpawner):
"--port={port} "
"--ip=0.0.0.0 "
"--allow-root "
- "--NotebookApp.allow_origin='*' "
+ "--NotebookApp.allow_origin=* "
"--NotebookApp.token={token} "
"--NotebookApp.trust_xheaders=True "
"--NotebookApp.base_url={base_url} " | Remove quote around `allow_origin=*` |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.