| diff (string, lengths 65–26.7k) | message (string, lengths 7–9.92k) |
|---|---|
diff --git a/lib/dexter/indexer.rb b/lib/dexter/indexer.rb
index <HASH>..<HASH> 100644
--- a/lib/dexter/indexer.rb
+++ b/lib/dexter/indexer.rb
@@ -527,7 +527,7 @@ module Dexter
# as an extra defense against SQL-injection attacks.
# https://www.postgresql.org/docs/current/static/libpq-exec.html
query = squish(query) if pretty
- log colorize("SQL: #{query}", :cyan) if @log_sql
+ log colorize("[sql] #{query}", :cyan) if @log_sql
@mutex.synchronize do
conn.exec_params(query, []).to_a
|
Improved SQL output [skip ci]
|
diff --git a/spec/javascripts/unit/connection/protocol_wrapper_spec.js b/spec/javascripts/unit/connection/protocol_wrapper_spec.js
index <HASH>..<HASH> 100644
--- a/spec/javascripts/unit/connection/protocol_wrapper_spec.js
+++ b/spec/javascripts/unit/connection/protocol_wrapper_spec.js
@@ -244,8 +244,12 @@ describe("ProtocolWrapper", function() {
});
it("should emit an error after receiving invalid JSON", function() {
+ var error = {};
+
var onMessage = jasmine.createSpy("onMessage");
- var onError = jasmine.createSpy("onError");
+ var onError = jasmine.createSpy("onError").andCallFake(function(e) {
+ error = e;
+ });
this.wrapper.bind("message", onMessage);
this.wrapper.bind("error", onError);
@@ -253,11 +257,8 @@ describe("ProtocolWrapper", function() {
data: "this is not json"
})
expect(onMessage).not.toHaveBeenCalled();
- expect(onError).toHaveBeenCalledWith({
- type: "MessageParseError",
- error: {},
- data: "this is not json"
- });
+ expect(error.type).toEqual("MessageParseError");
+ expect(error.data).toEqual("this is not json");
});
});
|
Fix JSON error test to work on all browsers
|
diff --git a/ActiveRecord.php b/ActiveRecord.php
index <HASH>..<HASH> 100644
--- a/ActiveRecord.php
+++ b/ActiveRecord.php
@@ -4,6 +4,28 @@ namespace kato;
class ActiveRecord extends \yii\db\ActiveRecord
{
+
+ /**
+ * Attached Content Media, by type
+ * @return static
+ */
+ public function getContentMedia()
+ {
+ return $this->hasMany(\backend\models\ContentMedia::className(), ['content_id' => 'id'])
+ ->where('media_type = :type', [':type' => $this->className()]);
+ }
+
+ /**
+ * Relate Media
+ * Usage: $model->media();
+ * @return static
+ */
+ public function getMedia()
+ {
+ return $this->hasMany(\backend\models\Media::className(), ['id' => 'media_id'])
+ ->via('contentMedia');
+ }
+
/**
* Actions to be taken before saving the record.
* @param bool $insert
|
Added model relation to activerecords
|
diff --git a/law/decorator.py b/law/decorator.py
index <HASH>..<HASH> 100644
--- a/law/decorator.py
+++ b/law/decorator.py
@@ -476,10 +476,14 @@ def localize(fn, opts, task, *args, **kwargs):
input_kwargs = opts["input_kwargs"] or {}
output_kwargs = opts["output_kwargs"] or {}
+ # default modes
+ input_kwargs.setdefault("mode", "r")
+ output_kwargs.setdefault("mode", "w")
+
try:
# localize both target structs
- with localize_file_targets(input_struct, "r", **input_kwargs) as localized_inputs, \
- localize_file_targets(output_struct, "w", **output_kwargs) as localized_outputs:
+ with localize_file_targets(input_struct, **input_kwargs) as localized_inputs, \
+ localize_file_targets(output_struct, **output_kwargs) as localized_outputs:
# patch the input method to always return the localized inputs
if opts["input"]:
def input_patched(self):
|
Make file modes in localize decorator configurable.
|
diff --git a/spec/pcore/arith_spec.rb b/spec/pcore/arith_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/pcore/arith_spec.rb
+++ b/spec/pcore/arith_spec.rb
@@ -70,9 +70,12 @@ describe 'Flor procedures' do
+ 1 "nada"
})
- expect(r['point']).to eq('failed')
- expect(r['error']['kla']).to eq('TypeError')
- expect(r['error']['msg']).to eq("String can't be coerced into Integer")
+ expect(r['point']
+ ).to eq('failed')
+ expect(r['error']['kla']
+ ).to eq('TypeError')
+ expect(r['error']['msg']
+ ).to match(/\AString can't be coerced into (Integer|Fixnum)\z/)
end
it 'turns numbers intro strings when adding to a strings' do
|
Tighten "+" fail spec
|
diff --git a/pyghmi/ipmi/oem/lookup.py b/pyghmi/ipmi/oem/lookup.py
index <HASH>..<HASH> 100755
--- a/pyghmi/ipmi/oem/lookup.py
+++ b/pyghmi/ipmi/oem/lookup.py
@@ -21,8 +21,6 @@ import pyghmi.ipmi.oem.lenovo.handler as lenovo
oemmap = {
20301: lenovo, # IBM x86 (and System X at Lenovo)
19046: lenovo, # Lenovo x86 (e.g. Thinkserver)
- 7154: lenovo, # Technically, standard IPMI, but give lenovo a chance
- # to check for MegaRAC
}
|
Do not hook generic vendor identifier
A prerelease system was using <I> vendor id internally,
but this is not going to be the case when released. Stop
hooking generic value to avoid being overly aggressive.
Change-Id: I<I>e<I>c<I>b<I>c9a4a<I>eaa1de<I>c<I>
|
diff --git a/search/documents/user_document.php b/search/documents/user_document.php
index <HASH>..<HASH> 100644
--- a/search/documents/user_document.php
+++ b/search/documents/user_document.php
@@ -284,14 +284,14 @@ function user_single_document($id, $itemtype) {
return new UserSearchDocument($userhash, $user->id, 'user', null);
}
} elseif ($itemtype == 'post') {
- if ($post = $DB->get_records('post', array('id' => $id))){
+ if ($post = $DB->get_record('post', array('id' => $id))){
$texts = array();
$texts[] = $post->subject;
$texts[] = $post->summary;
$texts[] = $post->content;
$post->description = implode(" ", $texts);
$posthash = get_object_vars($post);
- return new UserPostSearchDocument($posthash, $user->id, 'post', null);
+ return new UserPostSearchDocument($posthash, $post->userid, 'post', null);
}
} elseif ($itemtype == 'attachment' && @$CFG->block_search_enable_file_indexing) {
if ($post = $DB->get_records('post', array('id' => $id))){
|
global search MDL-<I>: fixes to blog post search documents so that new posts are added to the search index successfully
|
diff --git a/spec/unit/util/filetype.rb b/spec/unit/util/filetype.rb
index <HASH>..<HASH> 100644
--- a/spec/unit/util/filetype.rb
+++ b/spec/unit/util/filetype.rb
@@ -27,20 +27,10 @@ describe Puppet::Util::FileType do
@file.backup
end
- it "should use the filebucket named 'puppet' if it finds one" do
+ it "should use the default filebucket" do
bucket = mock 'bucket'
bucket.expects(:bucket).returns "mybucket"
- Puppet::Type.type(:filebucket).expects(:[]).with("puppet").returns bucket
-
- @file.bucket.should == "mybucket"
- end
-
- it "should use the default filebucket if none named 'puppet' is found" do
- bucket = mock 'bucket'
- bucket.expects(:bucket).returns "mybucket"
-
- Puppet::Type.type(:filebucket).expects(:[]).with("puppet").returns nil
Puppet::Type.type(:filebucket).expects(:mkdefaultbucket).returns bucket
@file.bucket.should == "mybucket"
|
Fixing broken filetype tests resulting from the loss of Type[]
|
diff --git a/NewIntegrationTest.py b/NewIntegrationTest.py
index <HASH>..<HASH> 100644
--- a/NewIntegrationTest.py
+++ b/NewIntegrationTest.py
@@ -226,7 +226,7 @@ class Repository( TestCase ):
self.assertEqual( self.r.html_url, "https://github.com/jacquev6/PyGithub" )
self.assertEqual( self.r.id, 3544490 )
self.assertEqual( self.r.language, "Python" )
- self.assertEqual( self.r.master_branch, None ) ### @todo Why does this trigger a new request to github ?
+ self.assertEqual( self.r.master_branch, None ) ### @todo Why does this trigger a new request to github ? Because the object does not know that it is already completed, and it tries to de-None-ify master_branch
self.assertEqual( self.r.mirror_url, None )
self.assertEqual( self.r.name, "PyGithub" )
self.assertEqual( self.r.open_issues, 15 )
|
Explanation about a todo
|
diff --git a/lib/classes/Variables.js b/lib/classes/Variables.js
index <HASH>..<HASH> 100644
--- a/lib/classes/Variables.js
+++ b/lib/classes/Variables.js
@@ -828,10 +828,10 @@ class Variables {
}
getValueToBool(variableString) {
- if (/^true$/.test(variableString)) {
+ if (/^toBool:true$/.test(variableString)) {
return BbPromise.resolve(true);
}
- else if (/^false$/.test(variableString)) {
+ else if (/^toBool:false$/.test(variableString)) {
return BbPromise.resolve(false);
}
return BbPromise.resolve(true);
|
Bug fix in toBool casting
|
diff --git a/blueflood-kafka/src/main/java/com/rackspacecloud/blueflood/kafkaproducer/KafkaConfig.java b/blueflood-kafka/src/main/java/com/rackspacecloud/blueflood/kafkaproducer/KafkaConfig.java
index <HASH>..<HASH> 100644
--- a/blueflood-kafka/src/main/java/com/rackspacecloud/blueflood/kafkaproducer/KafkaConfig.java
+++ b/blueflood-kafka/src/main/java/com/rackspacecloud/blueflood/kafkaproducer/KafkaConfig.java
@@ -31,7 +31,7 @@ class KafkaConfig {
try {
init();
} catch (IOException ex) {
- log.error("Error encountered while loading the Kafka Config");
+ log.error("Error encountered while loading the Kafka Config", ex);
throw new RuntimeException(ex);
}
}
|
Log exceptions while loading configs
|
diff --git a/src/js/Node.js b/src/js/Node.js
index <HASH>..<HASH> 100644
--- a/src/js/Node.js
+++ b/src/js/Node.js
@@ -68,7 +68,7 @@ define(['./ContextMenu', './appendNodeFactory', './util'], function (ContextMenu
var node = this;
var path = [];
while (node) {
- var field = node.field || node.index;
+ var field = node.field != undefined ? node.field : node.index;
if (field !== undefined) {
path.unshift(field);
}
|
Use `""` as valid path in JSON - fix for josdejong/jsoneditor#<I>
|
diff --git a/core/src/main/java/org/xillium/core/util/RemoteService.java b/core/src/main/java/org/xillium/core/util/RemoteService.java
index <HASH>..<HASH> 100644
--- a/core/src/main/java/org/xillium/core/util/RemoteService.java
+++ b/core/src/main/java/org/xillium/core/util/RemoteService.java
@@ -74,6 +74,7 @@ public class RemoteService {
URL url = new URL(server + '/' + service);
URLConnection connection = url.openConnection();
connection.setDoOutput(true);
+ connection.setRequestProperty("Content-Type", "application/x-www-form-urlencoded;charset=utf-8");
PrintWriter pw = new PrintWriter(connection.getOutputStream());
for (String param: params) {
_logger.fine(param);
|
force charset=utf-8 when making URLConnection
|
diff --git a/pbs.py b/pbs.py
index <HASH>..<HASH> 100644
--- a/pbs.py
+++ b/pbs.py
@@ -362,7 +362,7 @@ def run_repl(env):
try: line = raw_input("pbs> ")
except (ValueError, EOFError): break
- try: exec compile(line, "<dummy>", "single") in env, env
+ try: exec(compile(line, "<dummy>", "single"), env, env)
except SystemExit: break
except: print(traceback.format_exc())
@@ -428,7 +428,7 @@ from anywhere other than a stand-alone script. Do a 'from pbs import program' i
source = "".join(source)
exit_code = 0
- try: exec source in env, env
+ try: exec(source, env, env)
except SystemExit, e: exit_code = e.code
except: print(traceback.format_exc())
|
exec -> py2/3
|
diff --git a/Raven/Raven.php b/Raven/Raven.php
index <HASH>..<HASH> 100644
--- a/Raven/Raven.php
+++ b/Raven/Raven.php
@@ -2,6 +2,7 @@
namespace Kunstmaan\SentryBundle\Raven;
use Raven_Client;
+use Symfony\Component\HttpKernel\Kernel;
/**
* Raven
@@ -26,6 +27,11 @@ class Raven extends Raven_Client
if (isset($_SERVER["SERVER_NAME"])) {
$options['name'] = $_SERVER["SERVER_NAME"];
}
+ $options['tags'] = array(
+ 'php_version' => phpversion(),
+ 'symfony_version' => Kernel::VERSION
+ );
+ $options['trace'] = true;
parent::__construct($dsn, $options);
}
|
Update the integration with raven to use the latest features in Raven
|
diff --git a/freemius/includes/class-freemius.php b/freemius/includes/class-freemius.php
index <HASH>..<HASH> 100644
--- a/freemius/includes/class-freemius.php
+++ b/freemius/includes/class-freemius.php
@@ -5082,6 +5082,9 @@
// Sync licenses.
$this->_sync_licenses();
+ // Sync plans.
+ $this->_sync_plans();
+
// Check if plan / license changed.
if ( ! FS_Entity::equals( $site->plan, $this->_site->plan ) ||
// Check if trial started.
|
[plans] [caching] [bug-fix] Sync plans once in <I> hours, otherwise dashboard changes are never populated to the plugin's licensing logic.
|
diff --git a/src/PasswordHasherFactory.php b/src/PasswordHasherFactory.php
index <HASH>..<HASH> 100644
--- a/src/PasswordHasherFactory.php
+++ b/src/PasswordHasherFactory.php
@@ -14,7 +14,8 @@
*/
namespace Auth;
-use Auth\PasswordHasher\PasswordHasherInterface;
+use Auth\PasswordHasher\AbstractPasswordHasher;
+use Cake\Core\App;
use RuntimeException;
/**
@@ -43,9 +44,14 @@ class PasswordHasherFactory
unset($config['className']);
}
- $hasher = new $class($config);
- if (!($hasher instanceof PasswordHasherInterface)) {
- throw new RuntimeException('Password hasher must implment PasswordHasherInterface.');
+ $className = App::className($class, 'Auth\PasswordHasher', 'PasswordHasher');
+ if ($className === false) {
+ throw new RuntimeException(sprintf('Password hasher class "%s" was not found.', $class));
+ }
+
+ $hasher = new $className($config);
+ if (!($hasher instanceof AbstractPasswordHasher)) {
+ throw new RuntimeException('Password hasher must extend AbstractPasswordHasher class.');
}
return $hasher;
|
Updating the PasswordHasherFactory
|
diff --git a/src/test/java/org/cactoos/iterator/PartitionedTest.java b/src/test/java/org/cactoos/iterator/PartitionedTest.java
index <HASH>..<HASH> 100644
--- a/src/test/java/org/cactoos/iterator/PartitionedTest.java
+++ b/src/test/java/org/cactoos/iterator/PartitionedTest.java
@@ -55,6 +55,7 @@ public final class PartitionedTest {
}
@Test
+ @SuppressWarnings("unchecked")
public void partitionedOne() {
MatcherAssert.assertThat(
"Can't generate a Partitioned of partition size 1.",
@@ -73,6 +74,7 @@ public final class PartitionedTest {
}
@Test
+ @SuppressWarnings("unchecked")
public void partitionedEqualSize() {
MatcherAssert.assertThat(
"Can't generate a Partitioned of partition size 2.",
|
fix remaining checkstyle warnings with suppress annotation
|
diff --git a/Branch-SDK/src/io/branch/referral/SystemObserver.java b/Branch-SDK/src/io/branch/referral/SystemObserver.java
index <HASH>..<HASH> 100644
--- a/Branch-SDK/src/io/branch/referral/SystemObserver.java
+++ b/Branch-SDK/src/io/branch/referral/SystemObserver.java
@@ -533,7 +533,7 @@ public class SystemObserver {
public int getUpdateState(boolean updatePrefs) {
PrefHelper pHelper = PrefHelper.getInstance(context_);
String currAppVersion = getAppVersion();
- if (pHelper.getAppVersion() == PrefHelper.NO_STRING_VALUE) {
+ if (PrefHelper.NO_STRING_VALUE.equals(pHelper.getAppVersion())) {
// if no app version is in storage, this must be the first time Branch is here
if (updatePrefs) {
pHelper.setAppVersion(currAppVersion);
|
Never. Ever. Do. This.
An equality check in Java will do a reference equality check on == for strings.
You almost always want to use .equals, which performs a value equality
check.
|
diff --git a/classes/ServiceProvider.php b/classes/ServiceProvider.php
index <HASH>..<HASH> 100644
--- a/classes/ServiceProvider.php
+++ b/classes/ServiceProvider.php
@@ -29,7 +29,8 @@ class ServiceProvider extends BaseServiceProvider {
$this->registerDirectories();
// Register the routes.
- if (!$this->app->routesAreCached()) {
+ if (config('decoy.core.register_routes')
+ && !$this->app->routesAreCached()) {
$this->app['decoy.router']->registerAll();
}
diff --git a/config/core.php b/config/core.php
index <HASH>..<HASH> 100644
--- a/config/core.php
+++ b/config/core.php
@@ -25,4 +25,9 @@
// Allow regex in redirect rules
'allow_regex_in_redirects' => false,
+ // Register routes automatically in ServiceProvider->boot(). You might set
+ // this to false if the App needed to register some /admin routes and didn't
+ // want them to get trampled by the Decoy wildcard capture.
+ 'register_routes' => true,
+
);
|
Adding a config that can be used to disable the automatic registration of routes
#<I>
|
diff --git a/src/Controller/Controller.php b/src/Controller/Controller.php
index <HASH>..<HASH> 100644
--- a/src/Controller/Controller.php
+++ b/src/Controller/Controller.php
@@ -62,7 +62,6 @@ use Cake\View\ViewVarsTrait;
* a redirect is done.
* - `afterFilter(Event $event)` - Called after each action is complete and after the view is rendered.
*
- * @property \Cake\Controller\Component\AclComponent $Acl
* @property \Cake\Controller\Component\AuthComponent $Auth
* @property \Cake\Controller\Component\CookieComponent $Cookie
* @property \Cake\Controller\Component\CsrfComponent $Csrf
|
Deleted @property reference to removed AclComponent
Was forgotten in <URL>
|
diff --git a/src/main/java/skadistats/clarity/processor/entities/Entities.java b/src/main/java/skadistats/clarity/processor/entities/Entities.java
index <HASH>..<HASH> 100644
--- a/src/main/java/skadistats/clarity/processor/entities/Entities.java
+++ b/src/main/java/skadistats/clarity/processor/entities/Entities.java
@@ -418,7 +418,7 @@ public class Entities {
}
private void emitCreatedEvent(Entity entity) {
- if (resetInProgress || !evUpdated.isListenedTo()) return;
+ if (resetInProgress || !evCreated.isListenedTo()) return;
debugUpdateEvent("CREATE", entity);
evCreated.raise(entity);
}
|
Fix bug where EntityCreated would only be raised if EntityUpdated was listened to
|
diff --git a/datatableview/helpers.py b/datatableview/helpers.py
index <HASH>..<HASH> 100644
--- a/datatableview/helpers.py
+++ b/datatableview/helpers.py
@@ -88,6 +88,7 @@ def link_to_model(instance, text=None, *args, **kwargs):
@keyed_helper
def make_boolean_checkmark(value, true_value="✔", false_value="✘", *args, **kwargs):
+ value = kwargs.get('default_value', value)
if value:
return true_value
return false_value
|
Fix bug with make_boolean_checkmark's value
One had to always use a key function to get the right value from it as
a column callback.
|
diff --git a/packages/origin.js/src/index.js b/packages/origin.js/src/index.js
index <HASH>..<HASH> 100644
--- a/packages/origin.js/src/index.js
+++ b/packages/origin.js/src/index.js
@@ -9,13 +9,6 @@ var resources = {
class Origin {
constructor() {
- // Give each resource access to the origin services.
- // By having a single origin, its configuration can be changed
- // and all contracts will follow it
- for (let resourceName in resources) {
- resources[resourceName].origin = this
- this[resourceName] = resources[resourceName]
- }
this.contractService = new ContractService()
this.ipfsService = new IpfsService()
this.originService = new OriginService({
@@ -23,6 +16,15 @@ class Origin {
ipfsService: this.ipfsService
})
this.userRegistryService = new UserRegistryService()
+
+ // Instantiate each resource and give it access to contracts and IPFS
+ for (let resourceName in resources) {
+ let Resource = resources[resourceName]
+ this[resourceName] = new Resource({
+ contractService: this.contractService,
+ ipfsService: this.ipfsService
+ })
+ }
}
}
|
Instantiate each resource with access to ipfs and contract services
This no longer gives resources access to a global `origin` object.
Shared global objects are generally a bad idea, creating a cluttered
design that implies that all of these things will be used together. This
makes things more modular which will make it more extensible and also
simplify testing.
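The design principle here is language-agnostic; below is a minimal sketch in Python (hypothetical class names, not the actual origin.js API) of constructor injection replacing a shared global.

```python
class ContractService:          # stand-in collaborator
    pass

class IpfsService:              # stand-in collaborator
    pass

class Listings:
    """A resource that receives its collaborators explicitly."""
    def __init__(self, contract_service, ipfs_service):
        self.contract_service = contract_service
        self.ipfs_service = ipfs_service

class Origin:
    def __init__(self):
        self.contract_service = ContractService()
        self.ipfs_service = IpfsService()
        # Each resource gets only what it needs, so it can be
        # constructed and tested in isolation.
        self.listings = Listings(self.contract_service, self.ipfs_service)

origin = Origin()
assert origin.listings.ipfs_service is origin.ipfs_service
```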
|
diff --git a/router.go b/router.go
index <HASH>..<HASH> 100644
--- a/router.go
+++ b/router.go
@@ -68,12 +68,21 @@ func (r *Router) addRoute(m, p, t string, fn Handle) {
// needed
if r.parent != nil {
// We have a subrouter, let the main router handle it
- r.parent.router.Handle(m, path, wf)
+ r.getRoot().router.Handle(m, path, wf)
} else {
r.router.Handle(m, path, wf)
}
}
+// getRoot returns the root router
+func (r *Router) getRoot() *Router {
+ if r.parent != nil {
+ return r.parent.getRoot()
+ }
+
+ return r
+}
+
// Get adds a GET route
func (r *Router) Get(path, title string, fn Handle) {
r.addRoute("GET", path, title, fn)
|
Add getRoot to get top level router
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -1,10 +1,14 @@
from setuptools import setup
+from sys import version_info
with open('README.rst') as f:
long_description = f.read()
+install_requires = ['futures >== 3.0.0'] if version_info.major == 2 else []
+
+
setup(
name='tornado_sqlalchemy',
version='0.1.0',
@@ -15,5 +19,6 @@ setup(
license='MIT',
url='https://github.com/siddhantgoel/tornado-sqlalchemy',
packages=['tornado_sqlalchemy'],
- keywords=['tornado', 'sqlalchemy']
+ keywords=['tornado', 'sqlalchemy'],
+ install_requires=install_requires
)
|
use sys.version_info to detect futures requirement
|
diff --git a/lib/svtplay_dl/service/oppetarkiv.py b/lib/svtplay_dl/service/oppetarkiv.py
index <HASH>..<HASH> 100644
--- a/lib/svtplay_dl/service/oppetarkiv.py
+++ b/lib/svtplay_dl/service/oppetarkiv.py
@@ -40,7 +40,7 @@ class OppetArkiv(Service, OpenGraphThumbMixin):
if "subtitleReferences" in data:
for i in data["subtitleReferences"]:
if i["format"] == "websrt":
- yield subtitle(copy.copy(self.config), "wrst", i["url"])
+ yield subtitle(copy.copy(self.config), "wrst", i["url"], output=self.output)
if len(data["videoReferences"]) == 0:
yield ServiceError("Media doesn't have any associated videos (yet?)")
|
öppetarkiv: this needs to have output set
|
diff --git a/build-support/bin/generate_docs.py b/build-support/bin/generate_docs.py
index <HASH>..<HASH> 100644
--- a/build-support/bin/generate_docs.py
+++ b/build-support/bin/generate_docs.py
@@ -308,7 +308,9 @@ class ReferenceGenerator:
for goal, goal_info in help_info["name_to_goal_info"].items():
consumed_scopes = sorted(goal_info["consumed_scopes"])
linked_consumed_scopes = [
- f"[{cs}]({cls._link(cs, sync=sync)})" for cs in consumed_scopes if cs
+ f"[{cs}]({cls._link(cs, sync=sync)})"
+ for cs in consumed_scopes
+ if cs and cs != goal_info.name
]
comma_separated_consumed_scopes = ", ".join(linked_consumed_scopes)
scope_to_help_info[goal][
|
[Docs] Filter out self from list of related subsystems. (#<I>)
[ci skip-rust]
[ci skip-build-wheels]
|
diff --git a/builtin/providers/azurerm/resource_arm_virtual_machine_test.go b/builtin/providers/azurerm/resource_arm_virtual_machine_test.go
index <HASH>..<HASH> 100644
--- a/builtin/providers/azurerm/resource_arm_virtual_machine_test.go
+++ b/builtin/providers/azurerm/resource_arm_virtual_machine_test.go
@@ -3031,7 +3031,7 @@ resource "azurerm_virtual_machine" "test" {
location = "West US 2"
resource_group_name = "${azurerm_resource_group.test.name}"
network_interface_ids = ["${azurerm_network_interface.test.id}"]
- vm_size = "Standard_DS1_v2_v2"
+ vm_size = "Standard_DS1_v2"
storage_image_reference {
publisher = "kemptech"
|
fixed typo in vm_size
|
diff --git a/tests/test_django.py b/tests/test_django.py
index <HASH>..<HASH> 100644
--- a/tests/test_django.py
+++ b/tests/test_django.py
@@ -114,16 +114,14 @@ class WithFileFactory(factory.django.DjangoModelFactory):
class Meta:
model = models.WithFile
- if django is not None:
- afile = factory.django.FileField()
+ afile = factory.django.FileField()
class WithImageFactory(factory.django.DjangoModelFactory):
class Meta:
model = models.WithImage
- if django is not None:
- animage = factory.django.ImageField()
+ animage = factory.django.ImageField()
class WithSignalsFactory(factory.django.DjangoModelFactory):
|
Don't test Django's presence in tests
Django is a test requirement and there are imports at the top of the
test_django.py file, so there's no need to test whether django is defined in
the factories.
|
diff --git a/pex/resolver.py b/pex/resolver.py
index <HASH>..<HASH> 100644
--- a/pex/resolver.py
+++ b/pex/resolver.py
@@ -167,7 +167,8 @@ class Resolver(object):
if dist is None:
raise Untranslateable('Package %s is not translateable by %s' % (package, translator))
if not distribution_compatible(dist, self._interpreter, self._platform):
- raise Untranslateable('Could not get distribution for %s on appropriate platform.' % package)
+ raise Untranslateable(
+ 'Could not get distribution for %s on platform %s.' % (package, self._platform))
return dist
def resolve(self, resolvables, resolvable_set=None):
|
Improve error messaging for platform constrained Untranslateable errors.
|
diff --git a/lib/server.js b/lib/server.js
index <HASH>..<HASH> 100644
--- a/lib/server.js
+++ b/lib/server.js
@@ -614,6 +614,8 @@ Server.prototype._run = function _run(req, res, route, chain, callback) {
});
d = domain.create();
+ d.add(req);
+ d.add(res);
d.on('error', function onError(err) {
log.trace({err: err}, 'uncaughtException');
self.emit('uncaughtException', req, res, route, err);
|
add req/res to route domain
|
diff --git a/src/internalHelpers/_endsWith.js b/src/internalHelpers/_endsWith.js
index <HASH>..<HASH> 100644
--- a/src/internalHelpers/_endsWith.js
+++ b/src/internalHelpers/_endsWith.js
@@ -1,3 +1,5 @@
+// @flow
+
/**
* Check if a string ends with something
*/
|
chore(internal): add @flow to endsWith
|
diff --git a/src/Controller/Base.php b/src/Controller/Base.php
index <HASH>..<HASH> 100644
--- a/src/Controller/Base.php
+++ b/src/Controller/Base.php
@@ -20,18 +20,19 @@ use ReflectionException;
/**
* Allow the app to add functionality, if needed
+ * Negative conditional helps with static analysis
*/
-if (class_exists('\App\Cron\Controller\Base')) {
- abstract class BaseMiddle extends \App\Cron\Controller\Base
- {
- }
-} else {
+if (!class_exists('\App\Cron\Controller\Base')) {
abstract class BaseMiddle
{
public function __construct()
{
}
}
+} else {
+ abstract class BaseMiddle extends \App\Cron\Controller\Base
+ {
+ }
}
// --------------------------------------------------------------------------
|
chore: Swap middle class to help static analysis
|
diff --git a/SingularityBase/src/main/java/com/hubspot/singularity/api/SingularityRunNowRequest.java b/SingularityBase/src/main/java/com/hubspot/singularity/api/SingularityRunNowRequest.java
index <HASH>..<HASH> 100644
--- a/SingularityBase/src/main/java/com/hubspot/singularity/api/SingularityRunNowRequest.java
+++ b/SingularityBase/src/main/java/com/hubspot/singularity/api/SingularityRunNowRequest.java
@@ -17,6 +17,16 @@ public class SingularityRunNowRequest {
private final Optional<Resources> resources;
private final Optional<Long> runAt;
+ public SingularityRunNowRequest(
+ Optional<String> message,
+ Optional<Boolean> skipHealthchecks,
+ Optional<String> runId,
+ Optional<List<String>> commandLineArgs,
+ Optional<Resources> resources
+ ) {
+ this(message, skipHealthchecks, runId, commandLineArgs, resources, Optional.<Long>absent());
+ }
+
@JsonCreator
public SingularityRunNowRequest(@JsonProperty("message") Optional<String> message,
@JsonProperty("skipHealthchecks") Optional<Boolean> skipHealthchecks,
|
Keep original constructor for backwards compatibility.
|
diff --git a/pyvodb/load.py b/pyvodb/load.py
index <HASH>..<HASH> 100644
--- a/pyvodb/load.py
+++ b/pyvodb/load.py
@@ -128,7 +128,7 @@ def load_from_dict(db, data, metadata):
'address': venue.get('address'),
'latitude': venue['location']['latitude'],
'longitude': venue['location']['longitude'],
- 'note': venue.get('note'),
+ 'notes': venue.get('notes'),
})
diff --git a/pyvodb/tables.py b/pyvodb/tables.py
index <HASH>..<HASH> 100644
--- a/pyvodb/tables.py
+++ b/pyvodb/tables.py
@@ -280,9 +280,9 @@ class Venue(TableBase):
slug = Column(
Unicode(), nullable=False,
doc=u"Identifier for use in URLs")
- note = Column(
+ notes = Column(
Unicode(), nullable=True,
- doc=u"Note about the venue, e.g. directions to get there")
+ doc=u"Notes about the venue, e.g. directions to get there")
city = relationship('City', backref=backref('venues'))
|
Rename venue 'note' back to 'notes'
|
diff --git a/lib/discordrb/webhooks/embeds.rb b/lib/discordrb/webhooks/embeds.rb
index <HASH>..<HASH> 100644
--- a/lib/discordrb/webhooks/embeds.rb
+++ b/lib/discordrb/webhooks/embeds.rb
@@ -144,6 +144,13 @@ module Discordrb::Webhooks
def initialize(url: nil)
@url = url
end
+
+ # @return A hash representation of this embed thumbnail, to be converted to JSON.
+ def to_hash
+ {
+ url: @url
+ }
+ end
end
# An embed's author will be shown at the top to indicate who "authored" the particular event the webhook was sent for.
|
:anchor: Add a method EmbedThumbnail#to_hash
|
diff --git a/lib/UA.js b/lib/UA.js
index <HASH>..<HASH> 100644
--- a/lib/UA.js
+++ b/lib/UA.js
@@ -81,6 +81,14 @@ var aliases = {
}
};
+// Chrome on iOS uses a UIWebView of the underlying platform to render
+// content, by stripping the CriOS string the useragent parser will alias the
+// user agent to ios_saf for the UIWebView, which is closer to the actual
+// renderer
+function stripCriOS(uaString) {
+ return uaString.replace(/(CriOS\/(\d+)\.(\d+)\.(\d+)\.(\d+))/, '');
+}
+
var UA = function(uaString) {
var semver, a;
@@ -89,7 +97,7 @@ var UA = function(uaString) {
if (normalized) {
this.ua = new useragent.Agent(normalized[1], normalized[2], (normalized[3] || 0), (normalized[4] || 0));
} else {
- this.ua = useragent.lookup(uaString);
+ this.ua = useragent.lookup(stripCriOS(uaString));
}
// For improved cache performance, remove the patch version. There are few cases in which a patch release drops the requirement for a polyfill, but if so, the polyfill can simply be served unnecessarily to the patch versions that contain the fix, and we can stop targeting at the next minor release.
|
Strip any mention of the CriOS version from the user agent string; the UA string is then parsed as the corresponding ios_saf version. (See <URL>)
|
diff --git a/docs/reference/themes/mongodb/static/js/scripts.js b/docs/reference/themes/mongodb/static/js/scripts.js
index <HASH>..<HASH> 100644
--- a/docs/reference/themes/mongodb/static/js/scripts.js
+++ b/docs/reference/themes/mongodb/static/js/scripts.js
@@ -45,7 +45,7 @@ jQuery(document).ready(function() {
});
jQuery('.body table').addClass('table').addClass('table-striped');
var siteInput = $('#search input[name="site"]');
- if (siteInput.val().substring(0, 4) != "http") {
+ if (siteInput.val().indexOf(window.location.hostname) < 0) {
siteInput.attr("value", window.location.hostname + siteInput.val());
}
jQuery("#search form").submit(function() {
|
Docs: JS fix for search form (#<I>)
As the protocol is not part of window.location.hostname, ensure that if
the browser caches the form field, the hostname isn't duplicated on
refresh.
JAVA-<I>
|
diff --git a/glue/ligolw/ilwd.py b/glue/ligolw/ilwd.py
index <HASH>..<HASH> 100644
--- a/glue/ligolw/ilwd.py
+++ b/glue/ligolw/ilwd.py
@@ -101,7 +101,7 @@ class ILWD(object):
subclassed in order to provide specific values of the class
variables "table_name", "column_name", and "index_offset".
"""
- __slots__ = ("table_name", "column_name", "n", "index_offset")
+ __slots__ = ("n",)
table_name = None
column_name = None
index_offset = None
|
Experimentation reveals that the __slots__ attribute should not list
symbol names that are defined at the class level, only those that are
defined at the instance level.
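A minimal Python sketch of that observation (the class name is a hypothetical stand-in, loosely modelled on the ILWD change above): naming a class-level attribute in `__slots__` fails at class-creation time, so only instance-level names belong there.

```python
# Putting a class-level name into __slots__ fails when the class is created:
#
#   class Broken(object):
#       __slots__ = ("table_name", "n")
#       table_name = None
#   # ValueError: 'table_name' in __slots__ conflicts with class variable
#
# Only instance-level names go into __slots__; class-level defaults remain
# ordinary class attributes.
class ILWDLike(object):
    __slots__ = ("n",)      # instance-level attribute only
    table_name = None       # class-level default, looked up on the class

    def __init__(self, n):
        self.n = n

obj = ILWDLike(7)
print(obj.n, obj.table_name)   # -> 7 None
```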
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -43,6 +43,7 @@ setup(
packages=[
'luigi',
'luigi.contrib',
+ 'luigi.tools'
],
package_data={
'luigi': luigi_package_data
|
Add 'tools' package so the package works when installed
|
diff --git a/libnetwork/drivers/macvlan/macvlan.go b/libnetwork/drivers/macvlan/macvlan.go
index <HASH>..<HASH> 100644
--- a/libnetwork/drivers/macvlan/macvlan.go
+++ b/libnetwork/drivers/macvlan/macvlan.go
@@ -17,17 +17,15 @@ const (
vethLen = 7
containerVethPrefix = "eth"
vethPrefix = "veth"
- macvlanType = "macvlan" // driver type name
- modePrivate = "private" // macvlan mode private
- modeVepa = "vepa" // macvlan mode vepa
- modeBridge = "bridge" // macvlan mode bridge
- modePassthru = "passthru" // macvlan mode passthrough
- parentOpt = "parent" // parent interface -o parent
- modeOpt = "_mode" // macvlan mode ux opt suffix
+ macvlanType = "macvlan" // driver type name
+ modePrivate = "private" // macvlan mode private
+ modeVepa = "vepa" // macvlan mode vepa
+ modeBridge = "bridge" // macvlan mode bridge
+ modePassthru = "passthru" // macvlan mode passthrough
+ parentOpt = "parent" // parent interface -o parent
+ driverModeOpt = "macvlan_mode" // macvlan mode ux opt suffix
)
-var driverModeOpt = macvlanType + modeOpt // mode --option macvlan_mode
-
type endpointTable map[string]*endpoint
type networkTable map[string]*network
|
libnetwork: macvlan: clean up some consts
|
diff --git a/spec/unit_tests/job_spec.rb b/spec/unit_tests/job_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/unit_tests/job_spec.rb
+++ b/spec/unit_tests/job_spec.rb
@@ -251,6 +251,11 @@ describe JenkinsApi::Client::Job do
"/job/test_job/build").and_return(302)
@job.build("test_job").should == 302
end
+ it "accepts the job name with params and builds the job" do
+ @client.should_receive(:api_post_request).with(
+ "/job/test_job/buildWithParameters",{:branch => 'feature/new-stuff'}).and_return(302)
+ @job.build("test_job",{:branch => 'feature/new-stuff'}).should == 302
+ end
end
describe "#get_config" do
|
added spec for buildWithParameters
|
diff --git a/resources/views/admin/formElements/defaultForm.blade.php b/resources/views/admin/formElements/defaultForm.blade.php
index <HASH>..<HASH> 100644
--- a/resources/views/admin/formElements/defaultForm.blade.php
+++ b/resources/views/admin/formElements/defaultForm.blade.php
@@ -38,7 +38,7 @@
});
$(".datetimepicker input").datetimepicker({
- format: 'YYYY-MM-DD HH:mm'
+ format: 'YYYY-MM-DD HH:mm:ss'
});
tinymce.init({
|
Fix default format for datepicker to include seconds.
|
diff --git a/src/Composer/Installer/BinaryInstaller.php b/src/Composer/Installer/BinaryInstaller.php
index <HASH>..<HASH> 100644
--- a/src/Composer/Installer/BinaryInstaller.php
+++ b/src/Composer/Installer/BinaryInstaller.php
@@ -397,9 +397,8 @@ PROXY;
return <<<PROXY
#!/usr/bin/env sh
-self=\$(realpath \$0 >/dev/null 2>&1)
-if [ -z "\$self" ]
-then
+self=\$(realpath \$0 2> /dev/null)
+if [ -z "\$self" ]; then
self="\$0"
fi
|
Fix symlink resolution in shell proxy (#<I>)
|
diff --git a/lockfile.js b/lockfile.js
index <HASH>..<HASH> 100644
--- a/lockfile.js
+++ b/lockfile.js
@@ -141,10 +141,12 @@ exports.lock = function (path, opts, cb) {
if (typeof opts.retries === 'number' && opts.retries > 0) {
debug('has retries', opts.retries)
+ var retries = opts.retries
+ opts.retries = 0
cb = (function (orig) { return function cb (er, fd) {
debug('retry-mutated callback')
- opts.retries -= 1
- if (!er || opts.retries < 0) return orig(er, fd)
+ retries -= 1
+ if (!er || retries < 0) return orig(er, fd)
debug('lock retry', path, opts)
|
manage 'retries' so it does not clash with 'wait' polling
This fixes #5 by avoiding the confusing situation where every poll can
be retried, instead of every retry doing a polling phase.
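The underlying idea, sketched below as a hedged Python example with hypothetical names (the real code is the JavaScript lockfile above): keep the retry budget in a local variable instead of mutating the shared options object, so nested polling cannot consume it.

```python
import errno

def with_retries(operation, opts):
    """Run operation(), retrying on OSError up to opts['retries'] times.

    The retry count is copied into a local variable so the caller's opts
    dict is never mutated by the retry loop (a sketch of the principle,
    not the lockfile.js API).
    """
    retries = opts.get('retries', 0)
    while True:
        try:
            return operation()
        except OSError as err:
            if retries <= 0 or err.errno != errno.EEXIST:
                raise
            retries -= 1

# Example: a flaky operation that succeeds on its third attempt.
attempts = {'n': 0}
def flaky():
    attempts['n'] += 1
    if attempts['n'] < 3:
        raise OSError(errno.EEXIST, 'already locked')
    return 'locked'

print(with_retries(flaky, {'retries': 5}))   # -> locked
```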
|
diff --git a/src/googleclouddebugger/capture_collector.py b/src/googleclouddebugger/capture_collector.py
index <HASH>..<HASH> 100644
--- a/src/googleclouddebugger/capture_collector.py
+++ b/src/googleclouddebugger/capture_collector.py
@@ -354,7 +354,10 @@ class CaptureCollector(object):
"""
try:
if not hasattr(name, '__dict__'):
- name = str(name)
+ if isinstance(name, unicode):
+ name = name.encode('unicode_escape')
+ else:
+ name = str(name)
else: # TODO(vlif): call str(name) with immutability verifier here.
name = str(id(name))
self._total_size += len(name)
|
Allow unicode values for keys in dictionaries in the python agent
-------------
Created by MOE: <URL>
|
diff --git a/src/ol/projection.js b/src/ol/projection.js
index <HASH>..<HASH> 100644
--- a/src/ol/projection.js
+++ b/src/ol/projection.js
@@ -39,7 +39,7 @@ ol.ProjectionUnits = {
ol.METERS_PER_UNIT = {};
ol.METERS_PER_UNIT[ol.ProjectionUnits.DEGREES] =
2 * Math.PI * ol.sphere.NORMAL.radius / 360;
-ol.METERS_PER_UNIT[ol.ProjectionUnits.FEET] = 0.02540005080010160020;
+ol.METERS_PER_UNIT[ol.ProjectionUnits.FEET] = 0.3048;
ol.METERS_PER_UNIT[ol.ProjectionUnits.METERS] = 1;
|
Use correct value to convert from foot to meters
(was using value for converting to inches).
Thanks @twpayne
|
diff --git a/krakenapi.go b/krakenapi.go
index <HASH>..<HASH> 100644
--- a/krakenapi.go
+++ b/krakenapi.go
@@ -6,6 +6,7 @@ import (
"crypto/sha512"
"encoding/base64"
"encoding/json"
+ "errors"
"fmt"
"io/ioutil"
"net/http"
@@ -13,7 +14,6 @@ import (
"strconv"
"strings"
"time"
- "errors"
)
const (
@@ -52,6 +52,13 @@ var privateMethods = []string{
"TradeVolume",
"AddOrder",
"CancelOrder",
+ "DepositMethods",
+ "DepositAddresses",
+ "DepositStatus",
+ "WithdrawInfo",
+ "Withdraw",
+ "WithdrawStatus",
+ "WithdrawCancel",
}
// KrakenApi represents a Kraken API Client connection
@@ -229,15 +236,15 @@ func (api *KrakenApi) Depth(pair string, count int) (*OrderBook, error) {
_, err := api.queryPublic("Depth", url.Values{
"pair": {pair}, "count": {strconv.Itoa(count)},
}, &dr)
-
+
if err != nil {
return nil, err
}
-
+
if book, found := dr[pair]; found {
return &book, nil
}
-
+
return nil, errors.New("invalid response")
}
|
feat(API): Added missing private "User funding" methods. (#<I>)
|
diff --git a/src/formatter-jsonapi/lib/link.js b/src/formatter-jsonapi/lib/link.js
index <HASH>..<HASH> 100644
--- a/src/formatter-jsonapi/lib/link.js
+++ b/src/formatter-jsonapi/lib/link.js
@@ -81,7 +81,7 @@ export default function (model, opts={}) {
id: String(related.id)
};
} else {
- link.linkage = 'null';
+ link.linkage = null;
}
if (exporter) {
exporter(related);
|
return non-string null as linkage
|
diff --git a/js/coinbasepro.js b/js/coinbasepro.js
index <HASH>..<HASH> 100644
--- a/js/coinbasepro.js
+++ b/js/coinbasepro.js
@@ -16,7 +16,7 @@ module.exports = class coinbasepro extends gdax {
'logo': 'https://user-images.githubusercontent.com/1294454/41764625-63b7ffde-760a-11e8-996d-a6328fa9347a.jpg',
'api': 'https://api.pro.coinbase.com',
'www': 'https://pro.coinbase.com/',
- 'doc': 'https://docs.pro.coinbase.com/'
+ 'doc': 'https://docs.pro.coinbase.com/',
'fees': [
'https://www.gdax.com/fees',
'https://support.gdax.com/customer/en/portal/topics/939402-depositing-and-withdrawing-funds/articles',
|
Ooops... forgot a comma!
|
diff --git a/lib/common.js b/lib/common.js
index <HASH>..<HASH> 100644
--- a/lib/common.js
+++ b/lib/common.js
@@ -485,7 +485,7 @@ function getSampleInner(orig,options,samplerOptions,api){
}
function getSample(orig,options,samplerOptions,api){
- if (orig && orig.example) return orig.example;
+ if (orig && orig.example) return clean(orig.example);
let result = getSampleInner(orig,options,samplerOptions,api);
result = clean(result);
result = strim(result,options.maxDepth);
|
fix: remove x-widdershins-oldref from examples
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -10,6 +10,7 @@ setup(
packages=[
'edx_lint',
+ 'edx_lint.cmd',
'edx_lint.pylint',
],
|
Forgot to install the cmd package
|
diff --git a/eZ/Publish/Core/MVC/Legacy/Templating/LegacyEngine.php b/eZ/Publish/Core/MVC/Legacy/Templating/LegacyEngine.php
index <HASH>..<HASH> 100644
--- a/eZ/Publish/Core/MVC/Legacy/Templating/LegacyEngine.php
+++ b/eZ/Publish/Core/MVC/Legacy/Templating/LegacyEngine.php
@@ -68,6 +68,8 @@ class LegacyEngine implements EngineInterface
$tpl = eZTemplate::factory();
foreach ( $parameters as $varName => $param )
{
+ // If $param is an array, we recursively convert all objects contained in it (if any).
+ // Scalar parameters are passed as is
if ( is_array( $param ) )
{
array_walk_recursive(
|
Added comments in Templating\LegacyEngine
|
diff --git a/src/ossos/core/ossos/gui/controllers.py b/src/ossos/core/ossos/gui/controllers.py
index <HASH>..<HASH> 100644
--- a/src/ossos/core/ossos/gui/controllers.py
+++ b/src/ossos/core/ossos/gui/controllers.py
@@ -288,12 +288,12 @@ class ProcessRealsController(AbstractController):
if key != 'h':
source_cutout.update_pixel_location((cen_x, cen_y), hdulist_index)
except Exception as er:
- print "DAOPhot failure: {}".format(er)
+ print("DAOPhot failure: {}".format(er))
logger.critical("PHOT ERROR: {}".format(er))
phot_failure = sky_failure = cen_failure = True
obs_mag = None
obs_mag_err = None
- band = None
+ band = ''
default_comment = str(er)
obs_mag = phot_failure and None or obs_mag
@@ -352,7 +352,7 @@ class ProcessRealsController(AbstractController):
previous_observations.append(this_observation)
print Orbfit(previous_observations).summarize()
except Exception as ex:
- logger.error(type(ex), str(ex))
+ logger.error(str(type(ex))+" "+str(ex))
print "Failed to compute preliminary orbit."
if obs_mag < 24 and auto is not False:
|
Set band to blank if no magnitude measured
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -7,7 +7,7 @@ with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
setup(
name='django-localized-fields',
- version='1.6',
+ version='1.7',
packages=find_packages(),
include_package_data=True,
license='MIT License',
|
Bumped version to <I>
|
diff --git a/draco/spec.py b/draco/spec.py
index <HASH>..<HASH> 100644
--- a/draco/spec.py
+++ b/draco/spec.py
@@ -6,6 +6,7 @@ import json
import os
from collections import defaultdict
from typing import Any, Dict, Iterable, List, Optional, Tuple, Union
+from copy import deepcopy
import agate
import numpy as np
@@ -481,6 +482,8 @@ class Query():
@staticmethod
def from_vegalite(full_spec: Dict) -> 'Query':
''' Parse from Vega-Lite spec that uses map for encoding. '''
+ full_spec = deepcopy(full_spec)
+
encodings: List[Encoding] = []
for channel, enc in full_spec.get('encoding', {}).items():
|
deepcopy spec in `from_vegalite` to avoid modifying input spec
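A minimal hedged Python sketch of that defensive-copy pattern (the function name here is hypothetical, not the draco API): deep-copy the incoming dict before mutating it, so the caller's spec is left untouched.

```python
from copy import deepcopy

def parse_spec(full_spec):
    """Consume the 'encoding' block without modifying the caller's dict."""
    full_spec = deepcopy(full_spec)           # defensive copy of the input
    encoding = full_spec.pop('encoding', {})  # safe: only the copy is mutated
    return list(encoding.keys())

spec = {'mark': 'bar', 'encoding': {'x': {'field': 'a'}}}
channels = parse_spec(spec)
print(channels)             # -> ['x']
print('encoding' in spec)   # -> True, the original spec is untouched
```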
|
diff --git a/lib/rim/command/status.rb b/lib/rim/command/status.rb
index <HASH>..<HASH> 100644
--- a/lib/rim/command/status.rb
+++ b/lib/rim/command/status.rb
@@ -73,6 +73,7 @@ class Status < Command
out = gs.execute "git rev-list --format=oneline -n 1 #{stat.git_rev}"
if out =~ /^(\w+) (.*)/
sha1, comment = $1, $2
+ comment = comment[0..76]+"..." if comment.size > 80
headline += "#{stat_info} #{sha1[0..6]} #{comment}"
end
else
|
cut comments in rim status output when too long
|
diff --git a/fsnotify_bsd.go b/fsnotify_bsd.go
index <HASH>..<HASH> 100644
--- a/fsnotify_bsd.go
+++ b/fsnotify_bsd.go
@@ -212,14 +212,12 @@ func (w *Watcher) readEvents() {
if errno != nil && errno != syscall.EINTR {
w.Error <- os.NewSyscallError("kevent", errno)
continue
- } else {
- events = eventbuf[0:n]
}
- }
- // Timeout, no big deal
- if n == 0 {
- continue
+ // Received some events
+ if n > 0 {
+ events = eventbuf[0:n]
+ }
}
// Flush the events we recieved to the events channel
|
BSD - fix for rob (EINTR)
|
diff --git a/src/directives/dxTree.js b/src/directives/dxTree.js
index <HASH>..<HASH> 100644
--- a/src/directives/dxTree.js
+++ b/src/directives/dxTree.js
@@ -25,7 +25,7 @@
post: function (scope, elm, attr, ctrl) {
scope.$dxLevel = isRoot ? 0 : scope.$dxLevel + 1;
scope.$dxIsRoot = isRoot;
- scope.$dxParent = isRoot
+ scope.$dxPrior = scope.$dxParent = isRoot
? parse(attr.dxTree || attr.root)(scope)
: parse(attr.dxNode || attr.node)(scope);
@@ -54,4 +54,7 @@
comp.directive('dxTree', $NodeDirective(true));
comp.directive('dxNode', $NodeDirective(false));
+
+ comp.directive('dxStartWith', $NodeDirective(true));
+ comp.directive('dxConnect', $NodeDirective(false));
}());
\ No newline at end of file
|
added Start With - Connect syntax.
|
diff --git a/src/location.js b/src/location.js
index <HASH>..<HASH> 100644
--- a/src/location.js
+++ b/src/location.js
@@ -64,7 +64,7 @@ if (global.history && global.location) {
_.filter(handler, _.locationFilter);
};
_.locationFilter = function(event, handler) {
- var matches = (event.uri || current).match(handler.uriRE);
+ var matches = (event.location || current).match(handler.uriRE);
if (matches) {
this.args.splice.apply(this.args, [1,0].concat(matches));
if (handler.keys) {
|
.uri was replaced with .location
|
diff --git a/lib/TaskRouterClient.js b/lib/TaskRouterClient.js
index <HASH>..<HASH> 100644
--- a/lib/TaskRouterClient.js
+++ b/lib/TaskRouterClient.js
@@ -40,6 +40,7 @@ function TaskRouterClient(sid, tkn, workspaceSid, options) {
//REST Resource - shorthand for just "workspace" and "workspaces" to match the REST API
var workspaceResource = require('./resources/task_router/Workspaces')(this);
this.workspaces = workspaceResource;
+ this.workspace = new workspaceResource(workspaceSid);
//mix the account object in with the client object - assume master account for resources
_.extend(this, workspaceResource);
|
exposed workspace attribute for TaskRouterClient
|
diff --git a/clientconn.go b/clientconn.go
index <HASH>..<HASH> 100644
--- a/clientconn.go
+++ b/clientconn.go
@@ -1304,7 +1304,7 @@ func (ac *addrConn) createTransport(addr resolver.Address, copts transport.Conne
//
// LB channel health checking is enabled when all requirements below are met:
// 1. it is not disabled by the user with the WithDisableHealthCheck DialOption
-// 2. internal.HealthCheckFunc is set by importing the grpc/healthcheck package
+// 2. internal.HealthCheckFunc is set by importing the grpc/health package
// 3. a service config with non-empty healthCheckConfig field is provided
// 4. the load balancer requests it
//
|
cleanup: fix mention of healthcheck to health (#<I>)
|
diff --git a/app/controllers/organizations_controller.rb b/app/controllers/organizations_controller.rb
index <HASH>..<HASH> 100644
--- a/app/controllers/organizations_controller.rb
+++ b/app/controllers/organizations_controller.rb
@@ -91,7 +91,11 @@ class OrganizationsController < ApplicationController
end
def destroy
- if Organization.count > 1
+ if current_organization == @organization
+ errors [_("Could not delete organization '#{params[:id]}'."), _("The current organization cannot be deleted. Please switch to a different organization before deleting.")]
+
+ render :text => "The current organization cannot be deleted. Please switch to a different organization before deleting.", :status=>:bad_request and return
+ elsif Organization.count > 1
@id = @organization.id
begin
@organization.destroy
|
<I> - Enforced the can't-delete-current-org restriction on the controller side
|
diff --git a/structr-ui/src/main/java/org/structr/web/resource/LoginResource.java b/structr-ui/src/main/java/org/structr/web/resource/LoginResource.java
index <HASH>..<HASH> 100644
--- a/structr-ui/src/main/java/org/structr/web/resource/LoginResource.java
+++ b/structr-ui/src/main/java/org/structr/web/resource/LoginResource.java
@@ -158,7 +158,7 @@ public class LoginResource extends FilterableResource {
if (returnedMethodResult == null) {
// should not happen
- returnedMethodResult = new RestMethodResult(401);
+ throw new AuthenticationException(AuthHelper.STANDARD_ERROR_MSG);
}
return returnedMethodResult;
|
Minor: Do not return empty result for login action with empty username and password
|
diff --git a/ipuz/direction.py b/ipuz/direction.py
index <HASH>..<HASH> 100644
--- a/ipuz/direction.py
+++ b/ipuz/direction.py
@@ -3,7 +3,9 @@
def validate_direction(direction):
splitted = direction.split(':')
direction_name = direction
- if len(splitted) > 1:
+ if len(splitted) > 2:
+ return False
+ if len(splitted) <= 2:
direction_name = splitted[0]
return direction_name in (
"Across",
diff --git a/tests/test_ipuz.py b/tests/test_ipuz.py
index <HASH>..<HASH> 100644
--- a/tests/test_ipuz.py
+++ b/tests/test_ipuz.py
@@ -510,6 +510,13 @@ class IPUZCrosswordValueTestCase(IPUZBaseTestCase):
"Invalid CrosswordValue in saved element found"
)
+ def test_validate_crosswordvalue_with_invalid_direction(self):
+ self.puzzle["saved"] = [[{"Across:Horizontal:and_something": "A"}]]
+ self.validate_puzzle(
+ self.puzzle,
+ "Invalid CrosswordValue in saved element found"
+ )
+
class IPUZWriteTestCase(IPUZBaseTestCase):
|
Fixed issue with Direction validation with multiple colons
|
diff --git a/pkg/client/unversioned/fake/fake.go b/pkg/client/unversioned/fake/fake.go
index <HASH>..<HASH> 100644
--- a/pkg/client/unversioned/fake/fake.go
+++ b/pkg/client/unversioned/fake/fake.go
@@ -47,6 +47,7 @@ func (f roundTripperFunc) RoundTrip(req *http.Request) (*http.Response, error) {
type RESTClient struct {
Client *http.Client
NegotiatedSerializer runtime.NegotiatedSerializer
+ GroupName string
Req *http.Request
Resp *http.Response
@@ -94,8 +95,14 @@ func (c *RESTClient) request(verb string) *restclient.Request {
ns := c.NegotiatedSerializer
serializer, _ := ns.SerializerForMediaType(runtime.ContentTypeJSON, nil)
streamingSerializer, _ := ns.StreamingSerializerForMediaType(runtime.ContentTypeJSON, nil)
+
+ groupName := api.GroupName
+ if c.GroupName != "" {
+ groupName = c.GroupName
+ }
+
internalVersion := unversioned.GroupVersion{
- Group: registered.GroupOrDie(api.GroupName).GroupVersion.Group,
+ Group: registered.GroupOrDie(groupName).GroupVersion.Group,
Version: runtime.APIVersionInternal,
}
internalVersion.Version = runtime.APIVersionInternal
|
Make the fake RESTClient usable by all the API groups, not just core.
|
diff --git a/test/TextFilter/TextFilterTest.php b/test/TextFilter/TextFilterTest.php
index <HASH>..<HASH> 100644
--- a/test/TextFilter/TextFilterTest.php
+++ b/test/TextFilter/TextFilterTest.php
@@ -2,11 +2,13 @@
namespace Anax\TextFilter;
+use \PHPUnit\Framework\TestCase;
+
/**
* A testclass
*
*/
-class TextFilterTest extends \PHPUnit_Framework_TestCase
+class TextFilterTest extends TestCase
{
/**
* Supported filters.
diff --git a/test/TextFilter/TextFilterUtilitiesTest.php b/test/TextFilter/TextFilterUtilitiesTest.php
index <HASH>..<HASH> 100644
--- a/test/TextFilter/TextFilterUtilitiesTest.php
+++ b/test/TextFilter/TextFilterUtilitiesTest.php
@@ -2,11 +2,13 @@
namespace Anax\TextFilter;
+use \PHPUnit\Framework\TestCase;
+
/**
* A testclass
*
*/
-class TextFilterUtilitiesTest extends \PHPUnit_Framework_TestCase
+class TextFilterUtilitiesTest extends TestCase
{
/**
* Provider for TextWithLinks
|
Upgrade phpunit testclass to support newer version of phpunit.
|
diff --git a/src/js/pannellum.js b/src/js/pannellum.js
index <HASH>..<HASH> 100644
--- a/src/js/pannellum.js
+++ b/src/js/pannellum.js
@@ -1452,11 +1452,15 @@ function loadScene(sceneId, targetPitch, targetYaw) {
// Set new pointing
if (targetPitch === 'same') {
workingPitch = config.pitch;
+ } else {
+ workingPitch = targetPitch;
}
if (targetYaw === 'same') {
workingYaw = config.yaw;
} else if (targetYaw === 'sameAzimuth') {
workingYaw = config.yaw + config.northOffset - tourConfig.scenes[sceneId].northOffset;
+ } else {
+ workingYaw = targetYaw;
}
// Destroy hot spots from previous scene
|
Fix target yaw (fixes #<I>, thanks trumpton).
|
diff --git a/bpm/src/main/java/org/jboss/pnc/bpm/BpmManager.java b/bpm/src/main/java/org/jboss/pnc/bpm/BpmManager.java
index <HASH>..<HASH> 100644
--- a/bpm/src/main/java/org/jboss/pnc/bpm/BpmManager.java
+++ b/bpm/src/main/java/org/jboss/pnc/bpm/BpmManager.java
@@ -162,7 +162,7 @@ public class BpmManager {
public void cleanup() {
log.debug("Bpm manager tasks cleanup started");
Map<Integer, BpmTask> clonnedTasks = null;
- synchronized(this.tasks) {
+ synchronized(this) {
clonnedTasks = new HashMap<>(this.tasks);
}
|
Fix synchronization in BpmManager to avoid concurrency issues on the tasks field
|
diff --git a/imhotep/tools.py b/imhotep/tools.py
index <HASH>..<HASH> 100644
--- a/imhotep/tools.py
+++ b/imhotep/tools.py
@@ -94,4 +94,3 @@ class Tool(object):
run on over stdin.
"""
raise NotImplementedError()
-
|
W<I> blank line at end of file
|
diff --git a/api/src/opentrons/hardware_control/api.py b/api/src/opentrons/hardware_control/api.py
index <HASH>..<HASH> 100644
--- a/api/src/opentrons/hardware_control/api.py
+++ b/api/src/opentrons/hardware_control/api.py
@@ -551,9 +551,16 @@ class API(HardwareAPILike):
"""
Resume motion after a call to :py:meth:`pause`.
"""
+ # Resume must be called immediately to awaken thread running hardware
+ # methods (ThreadManager)
self._backend.resume()
- asyncio.run_coroutine_threadsafe(self._execution_manager.resume(),
- self._loop)
+
+ async def _chained_calls():
+ # mirror what happens API.pause.
+ await self._execution_manager.resume()
+ self._backend.resume()
+
+ asyncio.run_coroutine_threadsafe(_chained_calls(), self._loop)
def halt(self):
""" Immediately stop motion.
|
fix(api): fix race condition causing pause/running state mismatch. (#<I>)
closes #<I>
|
diff --git a/src/cf/commands/logout_test.go b/src/cf/commands/logout_test.go
index <HASH>..<HASH> 100644
--- a/src/cf/commands/logout_test.go
+++ b/src/cf/commands/logout_test.go
@@ -13,7 +13,7 @@ func TestLogoutClearsAccessTokenOrgAndSpace(t *testing.T) {
config, _ := configRepo.Get()
config.AccessToken = "MyAccessToken"
config.Organization = cf.Organization{Name: "MyOrg"}
- config.Space = cf.Space{Name: "MyOrg"}
+ config.Space = cf.Space{Name: "MySpace"}
ui := new(testhelpers.FakeUI)
|
Gave the space a better name
|
diff --git a/molo/core/migrations/0033_bannerindexpage_footerindexpage_sectionindexpage.py b/molo/core/migrations/0033_bannerindexpage_footerindexpage_sectionindexpage.py
index <HASH>..<HASH> 100644
--- a/molo/core/migrations/0033_bannerindexpage_footerindexpage_sectionindexpage.py
+++ b/molo/core/migrations/0033_bannerindexpage_footerindexpage_sectionindexpage.py
@@ -9,7 +9,6 @@ class Migration(migrations.Migration):
dependencies = [
('wagtailcore', '0028_merge'),
- ('wagtailcore', '0040_page_draft_title'),
('core', '0032_sitesettings_ga_tag_manager'),
]
|
Remove dependency on wagtailcore migration
|
diff --git a/parsl/tests/test_threads/test_error_code_bash.py b/parsl/tests/test_threads/test_error_code_bash.py
index <HASH>..<HASH> 100644
--- a/parsl/tests/test_threads/test_error_code_bash.py
+++ b/parsl/tests/test_threads/test_error_code_bash.py
@@ -45,7 +45,7 @@ def test_div_0(test_fn=div_0):
assert f.result() == err_code, "{0} expected err_code:{1} but got {2}".format(test_fn.__name__,
err_code,
f.result())
- os.listdir('.')
+ print(os.listdir('.'))
os.remove('std.err')
os.remove('std.out')
return True
|
Minor change. Will be rolled back later.
|
diff --git a/tests/RouteTest.php b/tests/RouteTest.php
index <HASH>..<HASH> 100644
--- a/tests/RouteTest.php
+++ b/tests/RouteTest.php
@@ -258,6 +258,7 @@ class RouteTest extends PHPUnit_Framework_TestCase
*/
public function testInvokeWhenDisablingOutputBuffer()
{
+ ob_start();
$callable = function ($req, $res, $args) {
echo 'foo';
return $res->write('bar');
@@ -277,5 +278,8 @@ class RouteTest extends PHPUnit_Framework_TestCase
$response = $route->__invoke($request, $response);
$this->assertEquals('bar', (string)$response->getBody());
+
+ $output = ob_get_clean();
+ $this->assertEquals('foo', $output);
}
}
|
Ensure that echo doesn't leak to phpunit
|
diff --git a/src/Dudulina/Scheduling/ScheduledCommandsDispatcher.php b/src/Dudulina/Scheduling/ScheduledCommandsDispatcher.php
index <HASH>..<HASH> 100644
--- a/src/Dudulina/Scheduling/ScheduledCommandsDispatcher.php
+++ b/src/Dudulina/Scheduling/ScheduledCommandsDispatcher.php
@@ -56,11 +56,12 @@ class ScheduledCommandsDispatcher
$this->logger->error(
'Scheduled command exception',
[
- 'exceptionClass' => \get_class($exception),
- 'trace' => $exception->getTrace(),
- 'dueDate' => $scheduledCommand->getFireDate()->format('c'),
- 'commandClass' => \get_class($scheduledCommand),
- 'commandDump' => print_r($scheduledCommand, true),
+ 'exceptionClass' => \get_class($exception),
+ 'exceptionMessage' => $exception->getMessage(),
+ 'trace' => $exception->getTraceAsString(),
+ 'dueDate' => $scheduledCommand->getFireDate()->format('c'),
+ 'commandClass' => \get_class($scheduledCommand),
+ 'commandDump' => print_r($scheduledCommand, true),
]
);
}
|
restructured exception handling in ScheduledCommandDispatcher
|
diff --git a/model/DataList.php b/model/DataList.php
index <HASH>..<HASH> 100644
--- a/model/DataList.php
+++ b/model/DataList.php
@@ -978,9 +978,8 @@ class DataList extends ViewableData implements SS_List, SS_Filterable, SS_Sortab
*/
public function remove($item) {
// By default, we remove an item from a DataList by deleting it.
- if($item instanceof $this->dataClass) $item->delete();
-
- }
+ $this->removeByID($item->ID);
+ }
/**
* Remove an item from this DataList by ID
|
FIX Make sure you can only remove items from a DataList that are actually in it
|
diff --git a/Resources/public/js/typeaheadbundle.js b/Resources/public/js/typeaheadbundle.js
index <HASH>..<HASH> 100644
--- a/Resources/public/js/typeaheadbundle.js
+++ b/Resources/public/js/typeaheadbundle.js
@@ -260,7 +260,7 @@
li = $( this.$id.data('prototype') );
li.data('value', data.id)
.find('input:hidden').val(data.id).attr('id', _id + '_' + data.id).attr('name', name).end()
- .find('a').text(text).end()
+ .find('.lifo-typeahead-item').text(text).end()
.appendTo(list)
;
}
|
use selector '.lifo-typeahead-item' instead of 'a' to add text to list items to allow for more flexible customizations.
|
diff --git a/internal/pixels/pixels.go b/internal/pixels/pixels.go
index <HASH>..<HASH> 100644
--- a/internal/pixels/pixels.go
+++ b/internal/pixels/pixels.go
@@ -177,6 +177,7 @@ func (p *Pixels) CreateImage(context *opengl.Context, width, height int, filter
return nil, err
}
}
+ p.image = gimg
p.basePixels, err = gimg.Pixels(context)
if err != nil {
return nil, err
|
pixels: Bug fix: Update image member when creating a new image
|
diff --git a/salt/master.py b/salt/master.py
index <HASH>..<HASH> 100644
--- a/salt/master.py
+++ b/salt/master.py
@@ -903,13 +903,13 @@ class AESFuncs(object):
return {}
if 'mine_get' in self.opts:
# If master side acl defined.
- if not isinstance(self.opts['mine_get'],dict):
+ if not isinstance(self.opts['mine_get'], dict):
return {}
perms = set()
for match in self.opts['mine_get']:
if re.match(match, load['id']):
if isinstance(self.opts['mine_get'][match], list):
- perms.update(self.opts['mine_get'][match])
+ perms.update(self.opts['mine_get'][match])
good = False
for perm in perms:
if re.match(perm, load['fun']):
|
Fix pylint issues in master.py
|
diff --git a/src/main/java/com/google/cloud/tools/managedcloudsdk/components/SdkComponent.java b/src/main/java/com/google/cloud/tools/managedcloudsdk/components/SdkComponent.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/google/cloud/tools/managedcloudsdk/components/SdkComponent.java
+++ b/src/main/java/com/google/cloud/tools/managedcloudsdk/components/SdkComponent.java
@@ -36,7 +36,9 @@ public enum SdkComponent {
GCD_EMULATOR("gcd-emulator"),
GSUTIL("gsutil"),
KUBECTL("kubectl"),
- PUBSUB_EMULATOR("pubsub-emulator");
+ PUBSUB_EMULATOR("pubsub-emulator"),
+ MINIKUBE("minikube"),
+ SKAFFOLD("skaffold");
private final String value;
|
Add minikube and skaffold to list of gcloud components (#<I>)
|
diff --git a/src/index.js b/src/index.js
index <HASH>..<HASH> 100644
--- a/src/index.js
+++ b/src/index.js
@@ -54,22 +54,20 @@ export function createContext (history, location, state) {
* given context object.
*
* @param {Function[]} middleware
- * @return {Function}
+ * @param {Object} context
*/
-export function runMiddleware (middleware) {
- return function (ctx) {
- var mw = middleware.slice(0);
- const callNext = function () {
- var next = mw.shift();
- if (!next) return;
- try {
- return Promise.resolve(next(context, callNext));
- } catch (err) {
- return Promise.reject(err);
- }
+export function runMiddleware (middleware, context) {
+ var mw = middleware.slice(0);
+ const callNext = function () {
+ var next = mw.shift();
+ if (!next) return;
+ try {
+ return Promise.resolve(next(context, callNext));
+ } catch (err) {
+ return Promise.reject(err);
}
- callNext();
}
+ callNext();
}
/**
|
runMiddleware() is no longer a higher-order function
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100755
--- a/setup.py
+++ b/setup.py
@@ -19,7 +19,7 @@ history = open('HISTORY.rst').read().replace('.. :changelog:', '')
setup(
name='clarify_python',
- version='1.0.1',
+ version='1.0.0',
description='The Clarify Python 3 Helper Library wraps the entire Clarify API in Python 3.x function calls.',
long_description=readme + '\n\n' + history,
author='Paul Murphy',
|
Changed version number to <I>
|
diff --git a/src/search/FindReplace.js b/src/search/FindReplace.js
index <HASH>..<HASH> 100644
--- a/src/search/FindReplace.js
+++ b/src/search/FindReplace.js
@@ -720,11 +720,13 @@ define(function (require, exports, module) {
this._panel.$panel
.off(".replaceAll")
.on("click.replaceAll", ".replace-checked", function (e) {
- self.matches.reverse().forEach(function (match) {
- if (match.isChecked) {
- var rw = typeof self.replaceWhat === "string" ? self.replaceWith : FindUtils.parseDollars(self.replaceWith, match.result);
- self.editor.document.replaceRange(rw, match.start, match.end, "+replaceAll");
- }
+ self.editor.document.batchOperation(function () {
+ self.matches.reverse().forEach(function (match) {
+ if (match.isChecked) {
+ var rw = typeof self.replaceWhat === "string" ? self.replaceWith : FindUtils.parseDollars(self.replaceWith, match.result);
+ self.editor.document.replaceRange(rw, match.start, match.end);
+ }
+ });
});
self.hideResults();
});
|
Use batchOperation() instead of edit op merging for replace all
Logically, this is a single operation, not a set of merged adjacent
operations, and we wouldn't want a second replaceAll done immediately
afterward to merge with it (though it would be difficult to get into that
case).
|
diff --git a/src/readers/csv/csv.js b/src/readers/csv/csv.js
index <HASH>..<HASH> 100644
--- a/src/readers/csv/csv.js
+++ b/src/readers/csv/csv.js
@@ -72,7 +72,7 @@ const CSVReader = Reader.extend({
try {
const { delimiter = this._guessDelimiter(text) } = this;
const parser = d3.dsvFormat(delimiter);
- const data = parser.parse(text);
+ const data = parser.parse(text, row => Object.keys(row).every(key => !row[key]) ? null : row);
const result = { columns: data.columns, data };
cached[path] = result;
|
Skip empty rows in csv (#<I>)
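A standalone sketch of the mechanism, using a made-up two-column input rather than the reader's real data (d3-dsv is assumed to be installed; the original code reaches it through the bundled d3 object). d3-dsv's parse() accepts a row-conversion callback, and returning null from it drops that row from the result:

```js
const { dsvFormat } = require('d3-dsv');

const text = 'a,b\n1,2\n,\n3,4\n';
const parser = dsvFormat(',');

// Rows whose every cell is empty are mapped to null, which d3-dsv skips.
const rows = parser.parse(text, row =>
  Object.keys(row).every(key => !row[key]) ? null : row
);

console.log(rows.length); // 2, the blank middle line is gone
```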
|
diff --git a/sprd/model/Currency.js b/sprd/model/Currency.js
index <HASH>..<HASH> 100644
--- a/sprd/model/Currency.js
+++ b/sprd/model/Currency.js
@@ -1,6 +1,6 @@
define(["sprd/data/SprdModel", "underscore"], function (Model, _) {
- var dotCurrencyCodes = ['USD', 'GBP'];
+ var dotCurrencyCodes = ['USD', 'GBP', 'AUD'];
return Model.inherit('sprd.model.Currency', {
|
DEV-<I> - Tablomat uses wrong pricing format in Australia (comma instead of period)
|
diff --git a/src/ServiceFactory.php b/src/ServiceFactory.php
index <HASH>..<HASH> 100644
--- a/src/ServiceFactory.php
+++ b/src/ServiceFactory.php
@@ -57,11 +57,6 @@ abstract /* static final (fuck fuck fuuuck!!) */ class ServiceFactory
$serviceMethodArguments = null;
$serviceAliases = $app->configValue('app.service.aliases', []);
- // redirect main method
- if ($requestUri->segment(1) == Service::METHOD_MAIN) {
- return $response->redirect('/'. $serviceName)->end();
- }
-
// main
if (empty($serviceName)) {
$serviceName = Service::SERVICE_MAIN;
|
Move "main" method check to Service.
|
diff --git a/internal/services/recoveryservices/recovery_services_vault_resource_test.go b/internal/services/recoveryservices/recovery_services_vault_resource_test.go
index <HASH>..<HASH> 100644
--- a/internal/services/recoveryservices/recovery_services_vault_resource_test.go
+++ b/internal/services/recoveryservices/recovery_services_vault_resource_test.go
@@ -286,6 +286,9 @@ resource "azurerm_recovery_services_vault" "test" {
soft_delete_enabled = false
storage_mode_type = "LocallyRedundant"
+ tags = {
+ ENV = "test"
+ }
}
`, data.RandomInteger, data.Locations.Primary, data.RandomInteger)
}
|
add test for azurerm_recovery_services_vault
|
diff --git a/pkg/envoy/xds/cache.go b/pkg/envoy/xds/cache.go
index <HASH>..<HASH> 100644
--- a/pkg/envoy/xds/cache.go
+++ b/pkg/envoy/xds/cache.go
@@ -109,9 +109,7 @@ func (c *Cache) tx(typeURL string, upsertedResources map[string]proto.Message, d
// If the value is unchanged, don't update the entry, to preserve its
// lastModifiedVersion. This allows minimizing the frequency of
// responses in GetResources.
- // Calling proto.Message.String is not very cheap, but we assume that
- // the reduced churn between the clients and the server is worth it.
- if !found || oldV.resource.String() != value.String() {
+ if !found || !proto.Equal(oldV.resource, value) {
if found {
cacheLog.WithField(logfields.XDSResourceName, name).Debug("updating resource in cache")
|
pkg/envoy: use proto.Equal instead of comparing strings
On medium-large clusters with lots of endpoints, converting the
resources to strings in order to compare them can be expensive.
Instead, we should use protobuf's Equal function, which should
give the same result.
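A minimal standalone sketch of the trade-off; the wrapper message and values are placeholders for illustration, not the actual xDS resources:

```go
package main

import (
	"fmt"

	"google.golang.org/protobuf/proto"
	"google.golang.org/protobuf/types/known/wrapperspb"
)

func main() {
	a := wrapperspb.String("cluster-1")
	b := wrapperspb.String("cluster-1")

	// proto.Equal walks the messages field by field via reflection,
	// so no intermediate text representation is allocated.
	fmt.Println(proto.Equal(a, b)) // true

	// The old approach: render both messages to strings, then compare.
	// Same answer, but it formats and allocates two strings per check.
	fmt.Println(a.String() == b.String()) // true
}
```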
|
diff --git a/__mocks__/react-easy-state.js b/__mocks__/react-easy-state.js
index <HASH>..<HASH> 100644
--- a/__mocks__/react-easy-state.js
+++ b/__mocks__/react-easy-state.js
@@ -2,7 +2,7 @@
export * from '../src'
// use this to test the es6 modules build
-// export * from '../dist/es6'
+// export * from '../dist/easyState.module'
// use to test the commonJS build
-// export * from '../dist/es5'
+// export * from '../dist/easyState.commonJS'
|
fix (test): fix mocks for modules and commonJS build test
|
diff --git a/lib/resque/worker.rb b/lib/resque/worker.rb
index <HASH>..<HASH> 100644
--- a/lib/resque/worker.rb
+++ b/lib/resque/worker.rb
@@ -476,7 +476,7 @@ module Resque
# Returns an array of string pids of all the other workers on this
# machine. Useful when pruning dead workers on startup.
def worker_pids
- `ps -A -o pid,command | grep [r]esque | grep -v "resque-web"`.split("\n").map do |line|
+ `ps -A -o pid,command | grep resque | grep -v "resque-web"`.split("\n").map do |line|
line.split(' ')[0]
end
end
diff --git a/test/worker_test.rb b/test/worker_test.rb
index <HASH>..<HASH> 100644
--- a/test/worker_test.rb
+++ b/test/worker_test.rb
@@ -267,6 +267,11 @@ context "Resque::Worker" do
end
end
+ test "worker_pids returns pids" do
+ known_workers = @worker.worker_pids
+ assert !known_workers.empty?
+ end
+
test "Processed jobs count" do
@worker.work(0)
assert_equal 1, Resque.info[:processed]
|
Fix issue with finding worker pids on JRuby
- Old pattern returned an empty array of worker pids on JRuby
|
diff --git a/internetarchive/config.py b/internetarchive/config.py
index <HASH>..<HASH> 100644
--- a/internetarchive/config.py
+++ b/internetarchive/config.py
@@ -26,6 +26,7 @@ internetarchive.config
"""
from __future__ import absolute_import
+import errno
import os
from collections import defaultdict
from six.moves import configparser
@@ -97,8 +98,15 @@ def write_config_file(auth_config, config_file=None):
# The XDG Base Dir spec requires that the XDG_CONFIG_HOME directory be created with mode 700.
# is_xdg will be True iff config_file is ${XDG_CONFIG_HOME}/internetarchive/ia.ini.
# So create grandparent first if necessary then parent to ensure both have the right mode.
- os.makedirs(os.path.dirname(config_directory), mode=0o700, exist_ok=True)
- os.mkdir(config_directory, mode=0o700)
+ try:
+ os.makedirs(os.path.dirname(config_directory), mode=0o700, exist_ok=True)
+ except TypeError: # Python 2 doesn't have exist_ok
+ try:
+ os.makedirs(os.path.dirname(config_directory), mode=0o700)
+ except OSError as e:
+ if e.errno != errno.EEXIST or not os.path.isdir(os.path.dirname(config_directory)):
+ raise
+ os.mkdir(config_directory, 0o700)
# Write config file.
with open(config_file, 'w') as fh:
|
Fix write_config_file directory creation under Python 2 due to missing os.makedirs exist_ok kwarg
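A sketch of the fallback pattern as a hypothetical helper; the real fix is inlined in write_config_file rather than factored out like this. Python 3.2+ accepts exist_ok, while Python 2 raises TypeError for the unknown keyword, so the fallback retries and tolerates EEXIST manually:

```python
import errno
import os


def ensure_private_dir(path):
    """Create ``path`` (and parents) with mode 0o700, tolerating existing dirs.

    Hypothetical helper for illustration only.
    """
    try:
        os.makedirs(path, mode=0o700, exist_ok=True)
    except TypeError:  # Python 2: os.makedirs has no exist_ok keyword
        try:
            os.makedirs(path, mode=0o700)
        except OSError as e:
            if e.errno != errno.EEXIST or not os.path.isdir(path):
                raise
```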
|
diff --git a/tests/Unit/DirectoryTests/ProductTest.php b/tests/Unit/DirectoryTests/ProductTest.php
index <HASH>..<HASH> 100644
--- a/tests/Unit/DirectoryTests/ProductTest.php
+++ b/tests/Unit/DirectoryTests/ProductTest.php
@@ -108,4 +108,18 @@ class ProductTest extends \Test\TestCase
$items = $product->getAllItems();
$this->assertCount(0, $items);
}
+
+ public function testEmptyAgreements()
+ {
+ $product = new \Connect\Product();
+ $agreements = $product->getAllAgreements();
+ $this->assertCount(0, $agreements);
+ }
+
+ public function testEmptyActions()
+ {
+ $product = new \Connect\Product();
+ $actions = $product->getAllActions();
+ $this->assertCount(0, $actions);
+ }
}
|
Added tests to ensure <I>% coverage on Products
|
diff --git a/lib/neovim/ruby_provider.rb b/lib/neovim/ruby_provider.rb
index <HASH>..<HASH> 100644
--- a/lib/neovim/ruby_provider.rb
+++ b/lib/neovim/ruby_provider.rb
@@ -28,7 +28,7 @@ module Neovim
def self.__define_setup(plug)
plug.__send__(:setup) do |client|
$stdout.define_singleton_method(:write) do |string|
- client.out_write(string)
+ client.out_write(string + "\n")
end
$stderr.define_singleton_method(:write) do |string|
|
Add newline to stdout writes
|
diff --git a/neo4j/meta.py b/neo4j/meta.py
index <HASH>..<HASH> 100644
--- a/neo4j/meta.py
+++ b/neo4j/meta.py
@@ -19,4 +19,4 @@
# limitations under the License.
-version = "1.5.0"
+version = "1.6.0"
|
Updated version to <I>
|
diff --git a/Collection.php b/Collection.php
index <HASH>..<HASH> 100644
--- a/Collection.php
+++ b/Collection.php
@@ -1023,7 +1023,13 @@ class Collection extends Object
} else {
$inValues = $values[$column];
}
- $result[$column][$operator] = array_values($inValues);
+
+ $inValues = array_values($inValues);
+ if (count($inValues) == 1) {
+ $result[$column] = $inValues[0];
+ } else {
+ $result[$column][$operator] = $inValues;
+ }
}
return $result;
|
update buildInCondition for arrays with a single element
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -15,7 +15,7 @@ long_description = ""
with codecs.open(os.path.join(here, "README.md"), encoding="utf-8") as readme:
long_description = readme.read()
-tests_require = ["pytest", "pytest-cov", "codecov", "flake8", "black", "psutil"]
+tests_require = ["pytest>=5,<6", "pytest-cov>=2,<3", "codecov>=2,<3", "flake8>=3,<4", "black", "psutil"]
class BaseCommand(Command):
|
Set a specific range of test dependency versions to make builds more stable
|
diff --git a/ui/src/plugins/Loading.js b/ui/src/plugins/Loading.js
index <HASH>..<HASH> 100644
--- a/ui/src/plugins/Loading.js
+++ b/ui/src/plugins/Loading.js
@@ -5,6 +5,7 @@ import { isSSR } from './Platform.js'
import uid from '../utils/uid.js'
let
+ hiding = false,
vm = null,
timeout,
props = {},
@@ -32,6 +33,11 @@ export default {
props.customClass += ` text-${props.backgroundColor}`
+ // Force hide(don't wait for transition to finish) to show the next loading
+ if (hiding && vm !== null) {
+ vm.$emit('destroy')
+ }
+
if (this.isActive) {
if (vm) {
if (!vm.isActive) {
@@ -101,7 +107,7 @@ export default {
},
hide () {
- if (!this.isActive) {
+ if (!this.isActive || hiding) {
return
}
@@ -111,12 +117,14 @@ export default {
this.isActive = false
}
else {
+ hiding = true
vm.isActive = false
vm.$on('destroy', () => {
if (vm !== null) {
vm.$destroy()
}
this.isActive = false
+ hiding = false
})
}
},
|
Fixed an issue where sequential Loadings were not showing
When hide() is called, the plugin enters the hiding state. The hiding state ends when the leave transition finishes, just as before. If `show()` is called while the loading is hiding, the current loading is force-hidden and then the new loading is shown.
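A framework-agnostic sketch of the pattern, using a plain Node EventEmitter and a made-up 300 ms delay in place of the real Vue instance and leave transition:

```js
const { EventEmitter } = require('events');

let hiding = false;
let vm = null;

function show() {
  if (hiding && vm !== null) {
    vm.emit('destroy'); // force-hide: don't wait for the leave transition
  }
  vm = new EventEmitter();
  console.log('loading shown');
}

function hide() {
  if (vm === null || hiding) { return; }
  hiding = true;
  const current = vm;
  current.once('destroy', () => {
    vm = null;
    hiding = false;
    console.log('loading destroyed');
  });
  // stand-in for the leave transition finishing after ~300 ms
  setTimeout(() => current.emit('destroy'), 300);
}

show();
hide();
show(); // called while the first instance is still hiding, so it gets force-destroyed
```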
|
diff --git a/ethereum.go b/ethereum.go
index <HASH>..<HASH> 100644
--- a/ethereum.go
+++ b/ethereum.go
@@ -65,7 +65,8 @@ func main() {
go func() {
for {
res := dagger.Search(ethutil.Big("01001"), ethutil.BigPow(2, 36))
- server.Broadcast("blockmine", ethutil.Encode(res.String()))
+ log.Println("Res dagger", res)
+ //server.Broadcast("blockmine", ethutil.Encode(res.String()))
}
}()
}
diff --git a/testing.go b/testing.go
index <HASH>..<HASH> 100644
--- a/testing.go
+++ b/testing.go
@@ -16,11 +16,11 @@ func Testing() {
bm := NewBlockManager()
tx := NewTransaction("\x00", 20, []string{"PUSH"})
- txData := tx.MarshalRlp()
+ txData := tx.RlpEncode()
//fmt.Printf("%q\n", txData)
copyTx := &Transaction{}
- copyTx.UnmarshalRlp(txData)
+ copyTx.RlpDecode(txData)
//fmt.Println(tx)
//fmt.Println(copyTx)
|
Removed dagger broadcasting to the net
|