diff
stringlengths 65
26.7k
| message
stringlengths 7
9.92k
|
|---|---|
diff --git a/lib/jsduck/util/singleton.rb b/lib/jsduck/util/singleton.rb
index <HASH>..<HASH> 100644
--- a/lib/jsduck/util/singleton.rb
+++ b/lib/jsduck/util/singleton.rb
@@ -1,3 +1,4 @@
+require 'singleton'
module JsDuck
module Util
@@ -20,9 +21,7 @@ module JsDuck
module Singleton
def self.included(base)
base.class_eval do
- def self.instance
- @instance ||= self.new
- end
+ include ::Singleton
# Redirect calls from MyClass.method to MyClass.instance.method
def self.method_missing(meth, *args, &block)
|
Use the Ruby Singleton class inside Util::Singleton.
Although our simple implementation also worked just fine, the builtin
Singleton has some more checks built into it - it's just good to rely on
something that's well tried and tested.
|
diff --git a/salt/client/ssh/ssh_py_shim.py b/salt/client/ssh/ssh_py_shim.py
index <HASH>..<HASH> 100644
--- a/salt/client/ssh/ssh_py_shim.py
+++ b/salt/client/ssh/ssh_py_shim.py
@@ -230,7 +230,7 @@ def main(argv): # pylint: disable=W0613
subprocess.call(salt_argv)
shutil.rmtree(OPTIONS.saltdir)
else:
- os.execv(sys.executable, salt_argv)
+ subprocess.call(salt_argv)
if OPTIONS.cmd_umask is not None:
os.umask(old_umask)
|
Some environments refuse to return the command output.
This results in the output from salt-ssh being empty;
this fix ensures that the output is always there.
|
diff --git a/lib/instrumental/agent.rb b/lib/instrumental/agent.rb
index <HASH>..<HASH> 100644
--- a/lib/instrumental/agent.rb
+++ b/lib/instrumental/agent.rb
@@ -473,6 +473,7 @@ module Instrumental
# or we cannot reach the server
# or the connection state of this socket is in a race
logger.error "unable to connect to Instrumental, hanging up with #{@queue.size} messages remaining"
+ logger.debug "Exception: #{err.inspect}\n#{err.backtrace.join("\n")}"
allow_reconnect = false
else
report_exception(err)
|
Add some debug logging to help with test issues
|
diff --git a/airflow/operators/hive_to_druid.py b/airflow/operators/hive_to_druid.py
index <HASH>..<HASH> 100644
--- a/airflow/operators/hive_to_druid.py
+++ b/airflow/operators/hive_to_druid.py
@@ -68,7 +68,7 @@ class HiveToDruidTransfer(BaseOperator):
def execute(self, context):
hive = HiveCliHook(hive_cli_conn_id=self.hive_cli_conn_id)
logging.info("Extracting data from Hive")
- hive_table = 'druid.' + context['task_instance_key_str']
+ hive_table = 'druid.' + context['task_instance_key_str'].replace('.', '_')
sql = self.sql.strip().strip(';')
hql = """\
set mapred.output.compress=false;
|
there could be dot in the key string, which is illegal in the hive table name
|
diff --git a/plugins/outputs/amqp/amqp.go b/plugins/outputs/amqp/amqp.go
index <HASH>..<HASH> 100644
--- a/plugins/outputs/amqp/amqp.go
+++ b/plugins/outputs/amqp/amqp.go
@@ -249,6 +249,7 @@ func (q *AMQP) Write(metrics []telegraf.Metric) error {
if q.sentMessages >= q.MaxMessages && q.MaxMessages > 0 {
log.Printf("D! Output [amqp] sent MaxMessages; closing connection")
+ q.client.Close()
q.client = nil
}
diff --git a/plugins/outputs/amqp/client.go b/plugins/outputs/amqp/client.go
index <HASH>..<HASH> 100644
--- a/plugins/outputs/amqp/client.go
+++ b/plugins/outputs/amqp/client.go
@@ -55,7 +55,7 @@ func Connect(config *ClientConfig) (*client, error) {
log.Printf("D! Output [amqp] connected to %q", broker)
break
}
- log.Printf("D! Output [amqp] error connecting to %q", broker)
+ log.Printf("D! Output [amqp] error connecting to %q - %s", broker, err.Error())
}
if client.conn == nil {
|
Prevent connection leak by closing unused connections in amqp output (#<I>)
|
diff --git a/doc/source/conf.py b/doc/source/conf.py
index <HASH>..<HASH> 100644
--- a/doc/source/conf.py
+++ b/doc/source/conf.py
@@ -151,7 +151,7 @@ html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
-#html_last_updated_fmt = '%b %d, %Y'
+html_last_updated_fmt = '%Y-%m-%d %T%z'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
@@ -177,7 +177,7 @@ html_static_path = ['_static']
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
-#html_show_sphinx = True
+html_show_sphinx = False
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
|
added build date/time to HTML footer; due to sphinx restrictions, use local time
|
diff --git a/lib/homesick/shell.rb b/lib/homesick/shell.rb
index <HASH>..<HASH> 100644
--- a/lib/homesick/shell.rb
+++ b/lib/homesick/shell.rb
@@ -1,3 +1,5 @@
+require 'thor'
+
module Homesick
# Hack in support for diffing symlinks
class Shell < Thor::Shell::Color
|
Require Thor in Homesick::Shell
|
diff --git a/lib/protocol/ColoringEngine.js b/lib/protocol/ColoringEngine.js
index <HASH>..<HASH> 100644
--- a/lib/protocol/ColoringEngine.js
+++ b/lib/protocol/ColoringEngine.js
@@ -235,7 +235,10 @@ ColoringEngine.prototype._computeAssetIds = function (inputs, markerOutputIndex,
} else {
result.push(new TransactionOutput(
outputs[i].v.readInt32LE(0),
- outputs[i].s));
+ outputs[i].s,
+ null,
+ null,
+ OutputType.ISSUANCE));
}
}
|
Ensure issuance outputs always have output type 'issuance'
|
diff --git a/segno/writers.py b/segno/writers.py
index <HASH>..<HASH> 100644
--- a/segno/writers.py
+++ b/segno/writers.py
@@ -332,9 +332,6 @@ def write_eps(matrix, version, out, scale=1, border=None, color='#000',
raise ValueError('Invalid color "{0}". Not in range 0 .. 1'
.format(c))
return c
- if not 0 <= c <= 255:
- raise ValueError('Invalid color "{0}". Not in range 0 .. 255'
- .format(c))
return 1/255.0 * c if c != 1 else c
return tuple([to_float(i) for i in colors.color_to_rgb(clr)])
|
Removed unused check. colors should handle that
|
diff --git a/client/driver/executor_plugin.go b/client/driver/executor_plugin.go
index <HASH>..<HASH> 100644
--- a/client/driver/executor_plugin.go
+++ b/client/driver/executor_plugin.go
@@ -18,7 +18,7 @@ var HandshakeConfig = plugin.HandshakeConfig{
func GetPluginMap(w io.Writer) map[string]plugin.Plugin {
p := new(ExecutorPlugin)
- p.logger = log.New(w, "executor-plugin-server:", log.LstdFlags)
+ p.logger = log.New(w, "", log.LstdFlags)
return map[string]plugin.Plugin{"executor": p}
}
|
removing the prefix of the logger
|
diff --git a/pyflakes/test/test_undefined_names.py b/pyflakes/test/test_undefined_names.py
index <HASH>..<HASH> 100644
--- a/pyflakes/test/test_undefined_names.py
+++ b/pyflakes/test/test_undefined_names.py
@@ -372,13 +372,20 @@ class Test(harness.Test):
class A:
T = range(10)
- X = {x for x in T}
- Y = {x:x for x in T}
Z = (x for x in T)
L = [x for x in T]
B = dict((i, str(i)) for i in T)
''')
+ if version_info >= (2, 7):
+ self.flakes('''
+ class A:
+ T = range(10)
+
+ X = {x for x in T}
+ Y = {x:x for x in T}
+ ''')
+
class NameTests(TestCase):
"""
|
Skip dict/set comprehension with Python <I> and <I>
|
diff --git a/tests/Kwf/Validate/PasswordTest.php b/tests/Kwf/Validate/PasswordTest.php
index <HASH>..<HASH> 100644
--- a/tests/Kwf/Validate/PasswordTest.php
+++ b/tests/Kwf/Validate/PasswordTest.php
@@ -2,7 +2,7 @@
/**
* @group Validate
*/
-class Kwf_Validate_EmailAddressSimpleTest extends Kwf_Test_TestCase
+class Kwf_Validate_PasswordTest extends Kwf_Test_TestCase
{
public function test3of4()
{
|
fix test name (did this test ever run?)
|
diff --git a/pysat/_params.py b/pysat/_params.py
index <HASH>..<HASH> 100644
--- a/pysat/_params.py
+++ b/pysat/_params.py
@@ -11,8 +11,6 @@ import os
from portalocker import Lock
-import pysat.utils
-
class Parameters(object):
"""Stores user parameters used by pysat.
|
STY: Removed now unused import
|
diff --git a/src/Input.php b/src/Input.php
index <HASH>..<HASH> 100644
--- a/src/Input.php
+++ b/src/Input.php
@@ -349,7 +349,7 @@ class Input implements \Countable
*/
public function getMethod()
{
- return strtoupper($this->server->getRaw('REQUEST_METHOD'));
+ return strtoupper($this->server->getCmd('REQUEST_METHOD'));
}
/**
|
Restrict the values of the request method
|
diff --git a/tests/TestCase/Controller/Component/SecurityComponentTest.php b/tests/TestCase/Controller/Component/SecurityComponentTest.php
index <HASH>..<HASH> 100644
--- a/tests/TestCase/Controller/Component/SecurityComponentTest.php
+++ b/tests/TestCase/Controller/Component/SecurityComponentTest.php
@@ -409,7 +409,7 @@ class SecurityComponentTest extends TestCase
$event = new Event('Controller.startup', $this->Controller);
$this->Controller->Security->startup($event);
- $fields = '68730b0747d4889ec2766f9117405f9635f5fd5e%3AModel.valid';
+ $fields = 'an-invalid-token';
$unlocked = '';
$this->Controller->request->env('REQUEST_METHOD', 'GET');
@@ -418,7 +418,7 @@ class SecurityComponentTest extends TestCase
'_Token' => compact('fields', 'unlocked')
];
$this->Controller->Security->startup($event);
- $this->assertFalse($this->Controller->failed);
+ $this->assertTrue($this->Controller->failed);
}
/**
|
Make test check for failure instead of pass.
|
diff --git a/lib/neo4j/active_rel/persistence/query_factory.rb b/lib/neo4j/active_rel/persistence/query_factory.rb
index <HASH>..<HASH> 100644
--- a/lib/neo4j/active_rel/persistence/query_factory.rb
+++ b/lib/neo4j/active_rel/persistence/query_factory.rb
@@ -15,9 +15,9 @@ module Neo4j::ActiveRel::Persistence
# TODO: This feels like it should also wrap the rel, but that is handled in Neo4j::ActiveRel::Persistence at the moment.
# Builds and executes the query using the objects giving during init.
# It holds the process:
- # * Execute node callbacks if needed
- # * Create and execute the query
- # * Mix the query response into the unpersisted objects given during init
+ # * Execute node callbacks if needed
+ # * Create and execute the query
+ # * Mix the query response into the unpersisted objects given during init
def build!
node_before_callbacks! do
res = query_factory(rel, rel_id, iterative_query).query.unwrapped.return(*unpersisted_return_ids).first
|
Small fix on docs compilation.
|
diff --git a/ncpol2sdpa/physics_utils.py b/ncpol2sdpa/physics_utils.py
index <HASH>..<HASH> 100644
--- a/ncpol2sdpa/physics_utils.py
+++ b/ncpol2sdpa/physics_utils.py
@@ -34,7 +34,7 @@ def get_neighbors(index, lattice_length, width=0, periodic=False):
coords = divmod(index, width)
if coords[1] < width - 1:
neighbors.append(index + 1)
- elif periodic:
+ elif periodic and width > 1:
neighbors.append(index - width + 1)
if coords[0] < lattice_length - 1:
neighbors.append(index + width)
|
get_neighbors function corrected for periodic chains
|
diff --git a/ecell4/util/parseobj.py b/ecell4/util/parseobj.py
index <HASH>..<HASH> 100644
--- a/ecell4/util/parseobj.py
+++ b/ecell4/util/parseobj.py
@@ -485,8 +485,8 @@ class SubExp(ExpBase):
def __append(self, obj):
if isinstance(obj, AnyCallable):
self._elems.append(obj._as_ParseObj())
- elif len(self._elems) > 0 and isinstance(obj, SubExp):
- self._elems.extend(obj._elements())
+ # elif len(self._elems) > 0 and isinstance(obj, SubExp):
+ # self._elems.extend(obj._elements())
else:
self._elems.append(obj)
|
Fix a critical bug related to ode again
|
diff --git a/bin/jade.js b/bin/jade.js
index <HASH>..<HASH> 100755
--- a/bin/jade.js
+++ b/bin/jade.js
@@ -190,19 +190,11 @@ function renderFile(path) {
var dir = resolve(dirname(path));
mkdirp(dir, 0755, function(err){
if (err) throw err;
- try {
- var output = options.client ? fn : fn(options);
- fs.writeFile(path, output, function(err){
- if (err) throw err;
- console.log(' \033[90mrendered \033[36m%s\033[0m', path);
- });
- } catch (e) {
- if (options.watch) {
- console.error(e.stack || e.message || e);
- } else {
- throw e
- }
- }
+ var output = options.client ? fn : fn(options);
+ fs.writeFile(path, output, function(err){
+ if (err) throw err;
+ console.log(' \033[90mrendered \033[36m%s\033[0m', path);
+ });
});
});
// Found directory
|
Remove another now-useless special exception handling for watch mode
I have already removed two in <I>b<I>e<I>e<I>f<I>aae<I>e8d9ca<I>e0b<I>.
|
diff --git a/group.go b/group.go
index <HASH>..<HASH> 100644
--- a/group.go
+++ b/group.go
@@ -11,8 +11,8 @@ type Group struct {
// Add a sub-group to this group
func (g *Group) NewGroup(path string) *Group {
- path = g.path + path
checkPath(path)
+ path = g.path + path
//Don't want trailing slash as all sub-paths start with slash
if path[len(path)-1] == '/' {
path = path[:len(path)-1]
diff --git a/group_test.go b/group_test.go
index <HASH>..<HASH> 100644
--- a/group_test.go
+++ b/group_test.go
@@ -13,6 +13,24 @@ func TestGroupMethods(t *testing.T) {
}
}
+func TestInvalidSubPath(t *testing.T) {
+ defer func() {
+ if err := recover(); err == nil {
+ t.Error("Bad sub-path should have caused a panic")
+ }
+ }()
+ New().NewGroup("/foo").NewGroup("bar")
+}
+
+func TestInvalidPath(t *testing.T) {
+ defer func() {
+ if err := recover(); err == nil {
+ t.Error("Bad path should have caused a panic")
+ }
+ }()
+ New().NewGroup("foo")
+}
+
//Liberally borrowed from router_test
func testGroupMethods(t *testing.T, reqGen RequestCreator) {
var result string
|
Fixed bug in sub-group path checks.
Added test cases for both root group and sub-group path checks.
|
diff --git a/lib/tinder/connection.rb b/lib/tinder/connection.rb
index <HASH>..<HASH> 100644
--- a/lib/tinder/connection.rb
+++ b/lib/tinder/connection.rb
@@ -1,3 +1,4 @@
+require 'uri'
require 'faraday'
module Tinder
|
requires uri library to avoid error (NameError: uninitialized constant Tinder::Connection::URI)
|
diff --git a/lfs/ntlm_test.go b/lfs/ntlm_test.go
index <HASH>..<HASH> 100644
--- a/lfs/ntlm_test.go
+++ b/lfs/ntlm_test.go
@@ -80,16 +80,17 @@ func TestNtlmHeaderParseValid(t *testing.T) {
}
func TestNtlmHeaderParseInvalidLength(t *testing.T) {
-
- defer func() {
- r := recover()
- assert.NotEqual(t, r, nil)
- }()
-
res := http.Response{}
res.Header = make(map[string][]string)
res.Header.Add("Www-Authenticate", "NTL")
- _, _ = parseChallengeResponse(&res)
+ ret, err := parseChallengeResponse(&res)
+ if ret != nil {
+ t.Errorf("Unexpected challenge response: %v", ret)
+ }
+
+ if err == nil {
+ t.Errorf("Expected error, got none!")
+ }
}
func TestNtlmHeaderParseInvalid(t *testing.T) {
|
never want to TEST for a panic
|
diff --git a/luigi/scheduler.py b/luigi/scheduler.py
index <HASH>..<HASH> 100644
--- a/luigi/scheduler.py
+++ b/luigi/scheduler.py
@@ -622,7 +622,7 @@ class CentralPlannerScheduler(Scheduler):
self._update_task_history(task_id, status)
self._state.set_status(task, PENDING if status == SUSPENDED else status, self._config)
if status == FAILED:
- task.retry = time.time() + self._config.retry_delay
+ task.retry = self._retry_time(task, self._config)
if deps is not None:
task.deps = set(deps)
@@ -697,6 +697,9 @@ class CentralPlannerScheduler(Scheduler):
return False
return True
+ def _retry_time(self, task, config):
+ return time.time() + config.retry_delay
+
def get_work(self, host=None, assistant=False, **kwargs):
# TODO: remove any expired nodes
|
Move retry_time logic to its own method.
|
diff --git a/src/Illuminate/Log/Writer.php b/src/Illuminate/Log/Writer.php
index <HASH>..<HASH> 100755
--- a/src/Illuminate/Log/Writer.php
+++ b/src/Illuminate/Log/Writer.php
@@ -10,8 +10,8 @@ use Monolog\Handler\SyslogHandler;
use Monolog\Formatter\LineFormatter;
use Monolog\Handler\ErrorLogHandler;
use Monolog\Logger as MonologLogger;
-use Monolog\Handler\RotatingFileHandler;
use Illuminate\Log\Events\MessageLogged;
+use Monolog\Handler\RotatingFileHandler;
use Illuminate\Contracts\Support\Jsonable;
use Illuminate\Contracts\Events\Dispatcher;
use Illuminate\Contracts\Support\Arrayable;
|
Apply fixes from StyleCI (#<I>)
|
diff --git a/build/broccoli/build-packages.js b/build/broccoli/build-packages.js
index <HASH>..<HASH> 100644
--- a/build/broccoli/build-packages.js
+++ b/build/broccoli/build-packages.js
@@ -116,6 +116,20 @@ function transpileAMD(pkgName, esVersion, tree) {
entry: `${pkgName}/index.js`,
external,
plugins,
+ onwarn(warning) {
+ let {code} = warning;
+ if (
+ // Suppress known error message caused by TypeScript compiled code with Rollup
+ // https://github.com/rollup/rollup/wiki/Troubleshooting#this-is-undefined
+ code === 'THIS_IS_UNDEFINED' ||
+ // Suppress errors regarding un-used exports. These may be left behind
+ // after DEBUG stripping and Rollup removed them anyway.
+ code === 'UNUSED_EXTERNAL_IMPORT'
+ ) {
+ return;
+ }
+ console.log(`Rollup warning: ${warning.message}`);
+ },
targets: [{
dest: `${pkgName}/dist/amd/${esVersion}/${bundleName}.js`,
format: 'amd',
|
Suppress Rollup warnings
Rollup was tossing out warnings in two cases where we simply don't care
about seeing them.
* Top-level this is treated as undefined. Documented in
<URL>
|
diff --git a/planet/exceptions.py b/planet/exceptions.py
index <HASH>..<HASH> 100644
--- a/planet/exceptions.py
+++ b/planet/exceptions.py
@@ -13,7 +13,12 @@
# limitations under the License.
-class APIException(Exception):
+class PlanetException(Exception):
+ """Root for all exceptions thrown by the SDK"""
+ pass
+
+
+class APIException(PlanetException):
'''General unexpected API response'''
@property
def message(self):
@@ -65,11 +70,11 @@ class InvalidIdentity(APIException):
pass
-class RequestCancelled(Exception):
+class RequestCancelled(PlanetException):
'''Internal exception when a request is cancelled'''
pass
-class AuthException(Exception):
+class AuthException(PlanetException):
'''Exceptions encountered during authentication'''
pass
|
add PlanetException to capture all exceptions thrown by program
|
diff --git a/subsystem/click.js b/subsystem/click.js
index <HASH>..<HASH> 100644
--- a/subsystem/click.js
+++ b/subsystem/click.js
@@ -50,6 +50,9 @@ phoxy._.click =
,
OnClick: function (event)
{
+ if (window.event.ctrlKey)
+ return; // Ctrl + Click = open in new tab
+
var target = event.target;
while (true)
{
|
Fix "open in new tab" issue
|
diff --git a/spec/thinking_sphinx/active_record/property_sql_presenter_spec.rb b/spec/thinking_sphinx/active_record/property_sql_presenter_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/thinking_sphinx/active_record/property_sql_presenter_spec.rb
+++ b/spec/thinking_sphinx/active_record/property_sql_presenter_spec.rb
@@ -235,6 +235,17 @@ describe ThinkingSphinx::ActiveRecord::PropertySQLPresenter do
presenter.to_select.should == "CONCAT_WS(',', CAST(UNIX_TIMESTAMP(articles.created_at) AS varchar), CAST(UNIX_TIMESTAMP(articles.created_at) AS varchar)) AS created_at"
end
+ it "does not split attribute clause for timestamp casting if it looks like a function call" do
+ column.stub :__name => "COALESCE(articles.updated_at, articles.created_at)"
+ column.stub :string? => true
+
+ attribute.stub :name => 'mod_date'
+ attribute.stub :columns => [column]
+ attribute.stub :type => :timestamp
+
+ presenter.to_select.should == "UNIX_TIMESTAMP(COALESCE(articles.updated_at, articles.created_at)) AS mod_date"
+ end
+
it "returns nil for query sourced attributes" do
attribute.stub :source_type => :query
|
Added a spec for the change in commit <I>f2
|
diff --git a/lib/routing.js b/lib/routing.js
index <HASH>..<HASH> 100644
--- a/lib/routing.js
+++ b/lib/routing.js
@@ -224,8 +224,8 @@ var route = function(conf, method, pattern, action, rdy, options) {
if (sectionName === 'main' && !isMain) return
var fragment = app.sections[sectionName]
if (fragment) {
- fragment.emit('delete')
if (fragment.template !== template) {
+ fragment.emit('delete')
fragment.template = template
fragment.refresh()
}
diff --git a/lib/state.js b/lib/state.js
index <HASH>..<HASH> 100644
--- a/lib/state.js
+++ b/lib/state.js
@@ -30,8 +30,10 @@ State.prototype.register = function(name, obj) {
if (name in this) {
if (Array.isArray(this[name]) && typeof this[name].reset === 'function')
this[name].reset(obj)
- else
+ else {
this[name] = obj
+ this.emit('changed.' + name)
+ }
return
}
var that = this
|
Fix Fragment/State-Change-related Events
|
diff --git a/environment/src/main/java/jetbrains/exodus/log/Log.java b/environment/src/main/java/jetbrains/exodus/log/Log.java
index <HASH>..<HASH> 100644
--- a/environment/src/main/java/jetbrains/exodus/log/Log.java
+++ b/environment/src/main/java/jetbrains/exodus/log/Log.java
@@ -721,7 +721,7 @@ public final class Log implements Closeable {
}
}
- int getIdentity() {
+ public int getIdentity() {
return logIdentity;
}
|
Log.getIdentity() is public
|
diff --git a/cmd/taxi.go b/cmd/taxi.go
index <HASH>..<HASH> 100644
--- a/cmd/taxi.go
+++ b/cmd/taxi.go
@@ -33,6 +33,7 @@
package cmd
import (
+ "fmt"
"io"
"log"
"time"
@@ -56,8 +57,10 @@ func NewTaxiCommand(stdin io.Reader, stdout, stderr io.Writer) *cobra.Command {
if err != nil {
return err
}
- log.Println("Done: ", time.Since(start))
- select {}
+ dt := time.Since(start)
+ log.Println("Done: ", dt)
+ fmt.Printf("{\"taxi-import\": %f}\n", dt.Seconds())
+ return nil
},
}
flags := taxiCommand.Flags()
|
Minor changes to work well with benchmark framework
|
diff --git a/src/main/java/com/asual/lesscss/LessEngine.java b/src/main/java/com/asual/lesscss/LessEngine.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/asual/lesscss/LessEngine.java
+++ b/src/main/java/com/asual/lesscss/LessEngine.java
@@ -193,9 +193,9 @@ public class LessEngine {
LessEngine engine = new LessEngine();
if (args.length == 1) {
- System.out.println(engine.compile(args[0]));
+ System.out.println(engine.compile(new File(args[0])));
} else if (args.length == 2) {
- engine.compile(new File(args[0]),new File(args[1]));
+ engine.compile(new File(args[0]), new File(args[1]));
} else {
System.err.println("Usage: java -jar lesscss-engine.jar <input_file> [<output_file>]");
}
|
Fixes related to issue <I>.
|
diff --git a/application/Espo/Core/Utils/Metadata.php b/application/Espo/Core/Utils/Metadata.php
index <HASH>..<HASH> 100644
--- a/application/Espo/Core/Utils/Metadata.php
+++ b/application/Espo/Core/Utils/Metadata.php
@@ -326,6 +326,26 @@ class Metadata
$unsets = (array) $unsets;
}
+ switch ($key1) {
+ case 'entityDefs':
+ //unset related additional fields, e.g. a field with "address" type
+ $unsetList = $unsets;
+ foreach ($unsetList as $unsetItem) {
+ if (preg_match('/fields\.([^\.]+)/', $unsetItem, $matches) && isset($matches[1])) {
+ $fieldName = $matches[1];
+ $fieldPath = [$key1, $key2, 'fields', $fieldName];
+
+ $additionalFields = $this->getMetadataHelper()->getAdditionalFieldList($fieldName, $this->get($fieldPath));
+ if (is_array($additionalFields)) {
+ foreach ($additionalFields as $additionalFieldName => $additionalFieldParams) {
+ $unsets[] = 'fields.' . $additionalFieldName;
+ }
+ }
+ }
+ }
+ break;
+ }
+
$normalizedData = array(
'__APPEND__',
);
|
Metadata: delete related additional fields in entityDefs
|
diff --git a/umi_tools/whitelist.py b/umi_tools/whitelist.py
index <HASH>..<HASH> 100644
--- a/umi_tools/whitelist.py
+++ b/umi_tools/whitelist.py
@@ -176,7 +176,7 @@ whitelist-specific options
Detect CBs above the threshold which may be sequence
errors:
- "discard"
- Discard all putative error CBs
+ Discard all putative error CBs.
- "correct"
Correct putative substituion errors in CBs above the
threshold. Discard putative insertions/deletions. Note that
@@ -296,7 +296,7 @@ def main(argv=None):
choices=["discard", "correct"],
help=("Detect CBs above the threshold which may be "
"sequence errors from another CB and either "
- "'discard' or 'correct'. Default=discard"))
+ "'discard' or 'correct'. Default=None (No correction)"))
parser.add_option_group(group)
parser.set_defaults(method="reads",
|
Update whitelist.py
Change to help text for `ed-above-threshold` to note that the default is to do no correction.
|
diff --git a/salt/fileserver/__init__.py b/salt/fileserver/__init__.py
index <HASH>..<HASH> 100644
--- a/salt/fileserver/__init__.py
+++ b/salt/fileserver/__init__.py
@@ -323,10 +323,11 @@ class Fileserver(object):
if not back:
back = self.opts['fileserver_backend']
else:
- try:
- back = back.split(',')
- except AttributeError:
- back = six.text_type(back).split(',')
+ if not isinstance(back, list):
+ try:
+ back = back.split(',')
+ except AttributeError:
+ back = six.text_type(back).split(',')
ret = []
if not isinstance(back, list):
|
salt.fileserver.Fileserver: Don't try to split a list in _gen_back
This fixes a bug which causes backends passed as a python list to be
converted to a ``str`` (specifically a ``unicode`` type in PY2) and then
split, resulting in a backend that will never match anything.
This only affects runner and Salt Python API usage in which the
"backend" param to fileserver runner funcs is passed as a Python list.
|
diff --git a/system_tests/bigquery.py b/system_tests/bigquery.py
index <HASH>..<HASH> 100644
--- a/system_tests/bigquery.py
+++ b/system_tests/bigquery.py
@@ -464,4 +464,8 @@ class TestBigQuery(unittest.TestCase):
retry = RetryInstanceState(_job_done, max_tries=8)
retry(job.reload)()
- self.assertEqual(job.state.lower(), 'done')
+ # The `cancel` API doesn't leave any reliable traces on
+ # the status of the job resource, so we can't really assert for
+ # them here. The best we can do is note that the API call didn't
+ # raise an error, and that the job completed (in the `retry()`
+ # above).
|
Replace redundant 'job done' assertion with a note.
Explain why we can't make any real assertions about the status of the
cancelled job.
Addresses:
<URL>
|
diff --git a/addon/components/sl-chart.js b/addon/components/sl-chart.js
index <HASH>..<HASH> 100755
--- a/addon/components/sl-chart.js
+++ b/addon/components/sl-chart.js
@@ -23,7 +23,8 @@ export default Ember.Component.extend({
'panel',
'panel-default',
'sl-chart',
- 'sl-panel'
+ 'sl-panel',
+ 'sl-ember-components'
],
/** @type {Object} */
|
Add "sl-ember-components" class to sl-chart for loading state to be properly supported
|
diff --git a/src/Illuminate/Queue/Console/WorkCommand.php b/src/Illuminate/Queue/Console/WorkCommand.php
index <HASH>..<HASH> 100644
--- a/src/Illuminate/Queue/Console/WorkCommand.php
+++ b/src/Illuminate/Queue/Console/WorkCommand.php
@@ -168,8 +168,9 @@ class WorkCommand extends Command
protected function writeStatus(Job $job, $status, $type)
{
$this->output->writeln(sprintf(
- "<{$type}>[%s] %s</{$type}> %s",
+ "<{$type}>[%s][%s] %s</{$type}> %s",
Carbon::now()->format('Y-m-d H:i:s'),
+ $job->getJobId(),
str_pad("{$status}:", 11), $job->resolveName()
));
}
|
Include job ID in the output of the queue:work Artisan command (#<I>)
|
diff --git a/datajoint/diagram.py b/datajoint/diagram.py
index <HASH>..<HASH> 100644
--- a/datajoint/diagram.py
+++ b/datajoint/diagram.py
@@ -230,7 +230,8 @@ else:
# mark "distinguished" tables, i.e. those that introduce new primary key attributes
for name in self.nodes_to_show:
foreign_attributes = set(attr for p in self.in_edges(name, data=True) for attr in p[2]['attr_map'])
- self.node[name]['distinguished'] = foreign_attributes < self.node[name]['primary_key']
+ self.node[name]['distinguished'] = (foreign_attributes < self.node[name]['primary_key']
+ if ('primary_key' in self.node[name].keys()) else False)
# include aliased nodes that are sandwiched between two displayed nodes
gaps = set(nx.algorithms.boundary.node_boundary(self, self.nodes_to_show)).intersection(
nx.algorithms.boundary.node_boundary(nx.DiGraph(self).reverse(), self.nodes_to_show))
|
Fix diagram issue when node has no primary keys.
|
diff --git a/treeherder/model/derived/jobs.py b/treeherder/model/derived/jobs.py
index <HASH>..<HASH> 100644
--- a/treeherder/model/derived/jobs.py
+++ b/treeherder/model/derived/jobs.py
@@ -195,7 +195,7 @@ class JobsModel(TreeherderModelBase):
if not project:
project = self.project
build_systems = cache.get("build_system_by_repo", None)
- if not build_systems:
+ if not build_systems or project not in build_systems:
build_systems = dict((repo, build_system_type) for repo, build_system_type in
ReferenceDataSignatures.objects.order_by("repository"
).values_list("repository", "build_system_type").distinct()
|
Bug <I> - Delete build system cached list when project is not found
|
diff --git a/lib/trestle/resource/controller.rb b/lib/trestle/resource/controller.rb
index <HASH>..<HASH> 100644
--- a/lib/trestle/resource/controller.rb
+++ b/lib/trestle/resource/controller.rb
@@ -44,10 +44,18 @@ module Trestle
end
def show
- respond_to do |format|
- format.html
- format.json { render json: instance }
- format.js
+ if admin.singular? && instance.nil?
+ respond_to do |format|
+ format.html { redirect_to action: :new }
+ format.json { head :not_found }
+ format.js
+ end
+ else
+ respond_to do |format|
+ format.html
+ format.json { render json: instance }
+ format.js
+ end
end
end
|
Redirect to new action if singular instance not found
|
diff --git a/client_test.go b/client_test.go
index <HASH>..<HASH> 100644
--- a/client_test.go
+++ b/client_test.go
@@ -35,6 +35,7 @@ func TestingConfig() *ClientConfig {
cfg.DataDir = tempDir()
cfg.DisableTrackers = true
cfg.NoDefaultPortForwarding = true
+ cfg.DisableAcceptRateLimiting = true
return cfg
}
|
Disable accept rate limiting by default in tests
|
diff --git a/pymc/tests/test_distributions_random.py b/pymc/tests/test_distributions_random.py
index <HASH>..<HASH> 100644
--- a/pymc/tests/test_distributions_random.py
+++ b/pymc/tests/test_distributions_random.py
@@ -360,10 +360,10 @@ class BaseTestDistributionRandom(SeededTest):
)
def check_pymc_params_match_rv_op(self):
- aesera_dist_inputs = self.pymc_rv.get_parents()[0].inputs[3:]
- assert len(self.expected_rv_op_params) == len(aesera_dist_inputs)
+ aesara_dist_inputs = self.pymc_rv.get_parents()[0].inputs[3:]
+ assert len(self.expected_rv_op_params) == len(aesara_dist_inputs)
for (expected_name, expected_value), actual_variable in zip(
- self.expected_rv_op_params.items(), aesera_dist_inputs
+ self.expected_rv_op_params.items(), aesara_dist_inputs
):
assert_almost_equal(expected_value, actual_variable.eval(), decimal=self.decimal)
|
Fix aesara name (#<I>)
|
diff --git a/remote.go b/remote.go
index <HASH>..<HASH> 100644
--- a/remote.go
+++ b/remote.go
@@ -22,6 +22,11 @@ type PluginDecl struct {
Formats []Format `json:"formats"`
}
+type formatResponse struct {
+ Error Error `json:"error,omitempty"`
+ Format `json:"format"`
+}
+
type Format string
var (
@@ -57,18 +62,30 @@ func NewRemotePlugin(r io.ReadCloser, w io.WriteCloser) (pl *RemotePlugin, err e
//Pack reader in JSON decoder and decode a PluginDecl
dec := json.NewDecoder(r)
+ enc := json.NewEncoder(w)
+
err = dec.Decode(&pl.PluginDecl)
if err != nil {
return
}
var best FormatFactory = FormatFactory{-1, nil}
+ var bestFormat Format = ""
for _, format := range pl.PluginDecl.Formats {
if factory, ok := SupportedFormats[format]; ok && factory.Weight > best.Weight {
best = factory
+ bestFormat = format
}
}
+ if bestFormat == "" {
+ enc.Encode(formatResponse{Error: NoSupportedFormat})
+ err = NoSupportedFormat
+ return
+ }
+
+ enc.Encode(formatResponse{Format: bestFormat})
+
pl.enc, pl.dec = best.Construct(r, w)
return
}
|
Added an answer to the protocol where the server responds with the chosen format.
|
diff --git a/lib/zeevex_threadsafe/rails/request_globals.rb b/lib/zeevex_threadsafe/rails/request_globals.rb
index <HASH>..<HASH> 100644
--- a/lib/zeevex_threadsafe/rails/request_globals.rb
+++ b/lib/zeevex_threadsafe/rails/request_globals.rb
@@ -1,5 +1,11 @@
require_dependency "weakref"
+begin
+ require 'active_support/core_ext'
+rescue LoadError
+ require 'zeevex_threadsafe/aliasing'
+end
+
module ZeevexThreadsafe
module Rails
class RequestGlobals
|
include aliasing in request_globals
|
diff --git a/thinc/tests/shims/test_pytorch_grad_scaler.py b/thinc/tests/shims/test_pytorch_grad_scaler.py
index <HASH>..<HASH> 100644
--- a/thinc/tests/shims/test_pytorch_grad_scaler.py
+++ b/thinc/tests/shims/test_pytorch_grad_scaler.py
@@ -1,6 +1,6 @@
import pytest
-from hypothesis import given
+from hypothesis import given, settings
from hypothesis.strategies import lists, one_of, tuples
from thinc.util import has_torch, has_torch_gpu, is_torch_array
from thinc.api import PyTorchGradScaler
@@ -23,6 +23,7 @@ def tensors():
@pytest.mark.skipif(not has_torch, reason="needs PyTorch")
@pytest.mark.skipif(not has_torch_gpu, reason="needs a GPU")
@given(X=one_of(tensors(), lists(tensors()), tuples(tensors())))
+@settings(deadline=None)
def test_scale_random_inputs(X):
import torch
|
Remove deadline of test_scale_random_inputs (#<I>)
By default the hypothesis deadline is <I>ms. The
test_scale_random_inputs test would frequently fail because on the first
run the deadline was exceeded because GPU initialization takes some
time.
|
diff --git a/hpcbench/driver.py b/hpcbench/driver.py
index <HASH>..<HASH> 100644
--- a/hpcbench/driver.py
+++ b/hpcbench/driver.py
@@ -65,6 +65,7 @@ def write_yaml_report(func):
class Enumerator(six.with_metaclass(ABCMeta, object)):
"""Common class for every campaign node"""
def __init__(self, parent, name=None, logger=None):
+ self.parent = parent
self.campaign = parent.campaign
self.node = parent.node
self.name = name
|
Can now got upward in driver class hierarchy
|
diff --git a/src/scrollfire/scrollfire-patch.js b/src/scrollfire/scrollfire-patch.js
index <HASH>..<HASH> 100644
--- a/src/scrollfire/scrollfire-patch.js
+++ b/src/scrollfire/scrollfire-patch.js
@@ -1,3 +1,4 @@
+/* eslint no-new-func:0 */
export class ScrollfirePatch {
patched = false;
|
chore(lint): disable no-new-func in scrollfire-patch
|
diff --git a/test/wlang_test.rb b/test/wlang_test.rb
index <HASH>..<HASH> 100644
--- a/test/wlang_test.rb
+++ b/test/wlang_test.rb
@@ -1,4 +1,6 @@
require File.expand_path('../helper', __FILE__)
+
+begin
require 'wlang'
class WLangTest < Test::Unit::TestCase
@@ -61,4 +63,8 @@ class WLangTest < Test::Unit::TestCase
assert_body "WLang Layout!\nHello World"
end
-end
\ No newline at end of file
+end
+
+rescue LoadError
+ warn "#{$!.to_s}: skipping wlang tests"
+end
|
Wrap wlang test in a rescue clause.
|
diff --git a/lib/discordrb/voice/encoder.rb b/lib/discordrb/voice/encoder.rb
index <HASH>..<HASH> 100644
--- a/lib/discordrb/voice/encoder.rb
+++ b/lib/discordrb/voice/encoder.rb
@@ -49,6 +49,9 @@ module Discordrb::Voice
@opus.encode(buffer, 1920)
end
+ # One frame of complete silence Opus encoded
+ OPUS_SILENCE = [0xF8, 0xFF, 0xFE].freeze
+
# Adjusts the volume of a given buffer of s16le PCM data.
# @param buf [String] An unencoded PCM (s16le) buffer.
# @param mult [Float] The volume multiplier, 1 for same volume.
|
Define a constant to represent opus encoded silence
|
diff --git a/src/java/main/org/jsmpp/util/StringParameter.java b/src/java/main/org/jsmpp/util/StringParameter.java
index <HASH>..<HASH> 100644
--- a/src/java/main/org/jsmpp/util/StringParameter.java
+++ b/src/java/main/org/jsmpp/util/StringParameter.java
@@ -59,7 +59,7 @@ public enum StringParameter {
* STAT_ESME_RINVDFTMSGID, means that predefined message are not exist.
*/
FINAL_DATE(StringType.C_OCTEC_STRING, 0, 17, false, SMPPConstant.STAT_ESME_RINVDFTMSGID),
- SHORT_MESSAGE(StringType.OCTET_STRING, 0, 255, true, SMPPConstant.STAT_ESME_RINVMSGLEN), // the SMPP v3.4 max is 254
+ SHORT_MESSAGE(StringType.OCTET_STRING, 0, 254, true, SMPPConstant.STAT_ESME_RINVMSGLEN),
MESSAGE_ID(StringType.C_OCTEC_STRING, 0, 65, true, SMPPConstant.STAT_ESME_RINVMSGID),
DEL_MESSAGE_ID(StringType.C_OCTEC_STRING, 0, 0, true, SMPPConstant.STAT_ESME_RINVMSGID),
/**
|
Change the maximum length of short message to <I> (SMPP spec v <I>)
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -7,10 +7,10 @@ setup_requires = [
'setuptools>=54.2.0',
]
install_requires = [
- 'aiohttp~=3.7.4',
+ 'aiohttp~=3.8.0',
'aiotusclient~=0.1.4',
'appdirs~=1.4.4',
- 'async_timeout>=3.0',
+ 'async_timeout>=4.0',
'attrs>=21.2',
'click>=8.0.1',
'colorama>=0.4.4',
@@ -31,7 +31,7 @@ build_requires = [
'towncrier>=21.3.0',
]
test_requires = [
- 'pytest~=6.2.4',
+ 'pytest~=6.2.5',
'pytest-cov',
'pytest-mock',
'pytest-asyncio>=0.15.1',
@@ -39,7 +39,7 @@ test_requires = [
'codecov',
]
lint_requires = [
- 'flake8>=3.9.2',
+ 'flake8>=4.0.1',
'flake8-commas>=2.1',
]
typecheck_requires = [
|
setup: Upgrade aiohttp to <I> series and other deps
|
diff --git a/actions/update.js b/actions/update.js
index <HASH>..<HASH> 100644
--- a/actions/update.js
+++ b/actions/update.js
@@ -58,7 +58,7 @@ module.exports = function updateOneRecord (req, res) {
req._sails.log.warn(util.format('Unexpected output from `%s.update`.', Model.globalId));
}
- var updatedRecord = records[0];
+ var updatedRecord = pk;
// If we have the pubsub hook, use the Model's publish method
// to notify all subscribers about the update.
|
Fixed return on update
Sequelize returns the number of affect rows on update, it was returning
always [1], instead of the PK id
|
diff --git a/spec/webdav_server.rb b/spec/webdav_server.rb
index <HASH>..<HASH> 100644
--- a/spec/webdav_server.rb
+++ b/spec/webdav_server.rb
@@ -4,7 +4,11 @@ require 'rubygems'
require 'webrick'
require 'webrick/httpservlet/webdavhandler'
-# Webdav server based on:
+# Web server with WebDAV extensions
+#
+# Usage: ruby webdav_server.rb
+
+# Code based on:
# http://github.com/aslakhellesoy/webdavjs/blob/master/spec/webdav_server.rb
@@ -66,7 +70,7 @@ def webdav_server(*options)
log.level = WEBrick::Log::DEBUG if $DEBUG
serv = WEBrick::HTTPServer.new({:Port => port, :Logger => log})
- dir = Dir.pwd + '/spec/fixtures'
+ dir = File.expand_path(File.dirname(__FILE__)) + '/fixtures'
if(options and options[0][:authentication])
serv.mount("/", WEBrick::HTTPServlet::WebDAVHandlerVersion3, dir)
else
@@ -78,5 +82,6 @@ def webdav_server(*options)
end
if($0 == __FILE__)
+
webdav_server(:port => 10080,:authentication => false)
end
|
changed webdav servers shared directory
|
diff --git a/views/js/qtiCreator/model/Math.js b/views/js/qtiCreator/model/Math.js
index <HASH>..<HASH> 100755
--- a/views/js/qtiCreator/model/Math.js
+++ b/views/js/qtiCreator/model/Math.js
@@ -2,7 +2,7 @@ define([
'lodash',
'taoQtiItem/qtiCreator/model/mixin/editable',
'taoQtiItem/qtiItem/core/Math'
-], function(_, editable, Math){
+], function(_, editable, mathModel){
"use strict";
var methods = {};
_.extend(methods, editable);
@@ -14,5 +14,5 @@ define([
this.getNamespace();
}
});
- return Math.extend(methods);
+ return mathModel.extend(methods);
});
\ No newline at end of file
|
Rename Math to mathModel to avoid conflict with keyword
|
diff --git a/configs/configupgrade/upgrade_test.go b/configs/configupgrade/upgrade_test.go
index <HASH>..<HASH> 100644
--- a/configs/configupgrade/upgrade_test.go
+++ b/configs/configupgrade/upgrade_test.go
@@ -14,6 +14,8 @@ import (
)
func TestUpgradeValid(t *testing.T) {
+ t.Skip("configupgrade is not yet complete enough to run tests against")
+
// This test uses the contents of the test-fixtures/valid directory as
// a table of tests. Every directory there must have both "input" and
// "want" subdirectories, where "input" is the configuration to be
|
configs/configupgrade: Disable the tests for now
The tests in here are illustrating that this package is not yet finished,
but we plan to run a release before we finish this and so we'll skip those
tests for now with the intent of reinstating this again once we return
to finish this up.
|
diff --git a/lib/OpenLayers/Handler/Point.js b/lib/OpenLayers/Handler/Point.js
index <HASH>..<HASH> 100644
--- a/lib/OpenLayers/Handler/Point.js
+++ b/lib/OpenLayers/Handler/Point.js
@@ -247,7 +247,6 @@ OpenLayers.Handler.Point = OpenLayers.Class(OpenLayers.Handler, {
*/
finalize: function(cancel) {
var key = cancel ? "cancel" : "done";
- this.drawing = false;
this.mouseDown = false;
this.lastDown = null;
this.lastUp = null;
|
Handler.Point has no "drawing" property anymore, trivial change (references #<I>)
git-svn-id: <URL>
|
diff --git a/lib/duse/client/entity.rb b/lib/duse/client/entity.rb
index <HASH>..<HASH> 100644
--- a/lib/duse/client/entity.rb
+++ b/lib/duse/client/entity.rb
@@ -101,7 +101,7 @@ module Duse
# encryption will fail. Might improve with: http://stackoverflow.com/questions/11505547/how-calculate-size-of-rsa-cipher-text-using-key-size-clear-text-length
secret_text_in_slices_of(18).map do |secret_part|
# the selected users + current user + server
- threshold = @users.length+2
+ threshold = 2
shares = SecretSharing.split_secret(secret_part, 2, threshold)
server_share, server_sign = Duse::Encryption.encrypt(@private_key, @server_user.public_key, shares[0])
user_share, user_sign = Duse::Encryption.encrypt(@private_key, @current_user.public_key, shares[1])
|
always have a threshold of 2
if giving permissions should become necessary this can be changed
to a users input
|
diff --git a/lib/imgManip.js b/lib/imgManip.js
index <HASH>..<HASH> 100644
--- a/lib/imgManip.js
+++ b/lib/imgManip.js
@@ -54,7 +54,7 @@ exports.effect = function (query, subject, dimensions, out) {
if (opts.verbose.val) {
console.log('query', query)
}
- if (!query.outType && query.url.slice(-4).toLowerCase() === '.png' && ['mask', 'blur', 'overlayBlur'].indexOf(effect) === -1) {
+ if (!query.outType && query.url && query.url.slice(-4).toLowerCase() === '.png' && ['mask', 'blur', 'overlayBlur'].indexOf(effect) === -1) {
query.outType = 'png'
}
var format = (query.outType)
|
Update imgManip.js
Check for query.url before slicing
|
diff --git a/course/tests/courselib_test.php b/course/tests/courselib_test.php
index <HASH>..<HASH> 100644
--- a/course/tests/courselib_test.php
+++ b/course/tests/courselib_test.php
@@ -100,6 +100,8 @@ class courselib_testcase extends advanced_testcase {
$moduleinfo->requireallteammemberssubmit = true;
$moduleinfo->teamsubmissiongroupingid = true;
$moduleinfo->blindmarking = true;
+ $moduleinfo->markingworkflow = true;
+ $moduleinfo->markingallocation = true;
$moduleinfo->assignsubmission_onlinetext_enabled = true;
$moduleinfo->assignsubmission_file_enabled = true;
$moduleinfo->assignsubmission_file_maxfiles = 1;
@@ -134,6 +136,8 @@ class courselib_testcase extends advanced_testcase {
$this->assertEquals($moduleinfo->requireallteammemberssubmit, $dbmodinstance->requireallteammemberssubmit);
$this->assertEquals($moduleinfo->teamsubmissiongroupingid, $dbmodinstance->teamsubmissiongroupingid);
$this->assertEquals($moduleinfo->blindmarking, $dbmodinstance->blindmarking);
+ $this->assertEquals($moduleinfo->markingworkflow, $dbmodinstance->markingworkflow);
+ $this->assertEquals($moduleinfo->markingallocation, $dbmodinstance->markingallocation);
// The goal not being to fully test assign_add_instance() we'll stop here for the assign tests - to avoid too many DB queries.
// Advanced grading.
|
MDL-<I> courselib: Fix unit tests
The courselib tests have hardcoded test data sets for forum and assign
modules that need to be updated every time we add a new feature.
|
diff --git a/ddl/ddl_api.go b/ddl/ddl_api.go
index <HASH>..<HASH> 100644
--- a/ddl/ddl_api.go
+++ b/ddl/ddl_api.go
@@ -2035,6 +2035,16 @@ func (d *ddl) CreateIndex(ctx sessionctx.Context, ti ast.Ident, unique bool, ind
return errors.Trace(err)
}
+ // Check before put the job is put to the queue.
+ // This check is redudant, but useful. If DDL check fail before the job is put
+ // to job queue, the fail path logic is super fast.
+ // After DDL job is put to the queue, and if the check fail, TiDB will run the DDL cancel logic.
+ // The recover step causes DDL wait a few seconds, makes the unit test painfully slow.
+ _, err = buildIndexColumns(t.Meta().Columns, idxColNames)
+ if err != nil {
+ return errors.Trace(err)
+ }
+
if indexOption != nil {
// May be truncate comment here, when index comment too long and sql_mode is't strict.
indexOption.Comment, err = validateCommentLength(ctx.GetSessionVars(),
|
ddl: fast fail check for create index to accelerate CI (#<I>)
➜ session git:(master) ✗ GO<I>MODULE=on go test -check.f TestIndexMaxLength
before:
ok github.com/pingcap/tidb/session <I>s
after:
ok github.com/pingcap/tidb/session <I>s
|
diff --git a/volman/volman_executor_test.go b/volman/volman_executor_test.go
index <HASH>..<HASH> 100644
--- a/volman/volman_executor_test.go
+++ b/volman/volman_executor_test.go
@@ -227,6 +227,9 @@ var _ = Describe("Executor/Garden/Volman", func() {
err := executorClient.DeleteContainer(logger, guid)
Expect(err).NotTo(HaveOccurred())
+ err = os.RemoveAll(path.Join(componentMaker.VolmanDriverConfigDir, "_volumes", volumeId))
+ Expect(err).ToNot(HaveOccurred())
+
files, err := filepath.Glob(path.Join(componentMaker.VolmanDriverConfigDir, "_volumes", volumeId, fileName))
Expect(err).ToNot(HaveOccurred())
Expect(len(files)).To(Equal(0))
|
modify inigo tests to clean up mounted test volumes after unmount
[#<I>](<URL>)
|
diff --git a/filterpy/kalman/square_root.py b/filterpy/kalman/square_root.py
index <HASH>..<HASH> 100644
--- a/filterpy/kalman/square_root.py
+++ b/filterpy/kalman/square_root.py
@@ -334,6 +334,11 @@ class SquareRootKalmanFilter(object):
""" system uncertainty (P projected to measurement space) """
return dot(self.S1_2, self.S1_2.T)
+ @property
+ def SI(self):
+ """ inverse system uncertainty (P projected to measurement space) """
+ return dot(self.SI1_2.T, self.SI1_2)
+
def __repr__(self):
return '\n'.join([
'SquareRootKalmanFilter object',
@@ -349,6 +354,7 @@ class SquareRootKalmanFilter(object):
pretty_str('K', self.K),
pretty_str('y', self.y),
pretty_str('S', self.S),
+ pretty_str('SI', self.SI),
pretty_str('M', self.M),
pretty_str('B', self.B),
])
|
Added SI to SquareRootKalmanFilter.
|
diff --git a/scour/scour.py b/scour/scour.py
index <HASH>..<HASH> 100644
--- a/scour/scour.py
+++ b/scour/scour.py
@@ -3328,7 +3328,7 @@ def scourString(in_string, options=None):
if options.error_on_flowtext:
raise Exception(errmsg)
else:
- print("WARNING: {}".format(errmsg), file=options.ensure_value("stdout", sys.stdout))
+ print("WARNING: {}".format(errmsg), file=sys.stderr)
# remove descriptive elements
removeDescriptiveElements(doc, options)
|
Hardcode printing of "flowtext" warning to stderr
Third-party applications obviously can not handle additional output on stdout nor can they be expected to do any weird stdout/sterr redirection as we do via `options.stdout`
We probably shouldn't print anything in `scourString()` to start with unless we offer an option to disable all non-SVG output for third-party libraries to use.
|
diff --git a/src/ol/animation.js b/src/ol/animation.js
index <HASH>..<HASH> 100644
--- a/src/ol/animation.js
+++ b/src/ol/animation.js
@@ -115,7 +115,7 @@ ol.animation.rotate = function(options) {
(sourceRotation - frameState.viewState.rotation) * delta;
frameState.animate = true;
frameState.viewState.rotation += deltaRotation;
- if (!goog.isNull(anchor)) {
+ if (anchor) {
var center = frameState.viewState.center;
ol.coordinate.sub(center, anchor);
ol.coordinate.rotate(center, deltaRotation);
|
Remove goog.isNull in animation class
|
diff --git a/lib/cucumber/cli/options.rb b/lib/cucumber/cli/options.rb
index <HASH>..<HASH> 100644
--- a/lib/cucumber/cli/options.rb
+++ b/lib/cucumber/cli/options.rb
@@ -118,6 +118,7 @@ module Cucumber
"This option can be specified multiple times.") do |v|
@options[:require] << v
if(Cucumber::JRUBY && File.directory?(v))
+ require 'java'
$CLASSPATH << v
end
end
|
Must require 'java' before using $CLASSPATH in JRuby <I>. [#<I> state:resolved]
|
diff --git a/lib/database_cleaner/data_mapper/truncation.rb b/lib/database_cleaner/data_mapper/truncation.rb
index <HASH>..<HASH> 100644
--- a/lib/database_cleaner/data_mapper/truncation.rb
+++ b/lib/database_cleaner/data_mapper/truncation.rb
@@ -53,7 +53,7 @@ module DataMapper
def truncate_table(table_name)
execute("DELETE FROM #{quote_name(table_name)};")
- if uses_sequence
+ if uses_sequence?
execute("DELETE FROM sqlite_sequence where name = '#{table_name}';")
end
end
@@ -65,6 +65,16 @@ module DataMapper
yield
end
+ private
+
+ def uses_sequence?
+ sql = <<-SQL
+ SELECT name FROM sqlite_master
+ WHERE type='table' AND name='sqlite_sequence'
+ SQL
+ select(sql).first
+ end
+
end
class SqliteAdapter < DataObjectsAdapter
@@ -82,7 +92,7 @@ module DataMapper
def truncate_table(table_name)
execute("DELETE FROM #{quote_name(table_name)};")
- if uses_sequence
+ if uses_sequence?
execute("DELETE FROM sqlite_sequence where name = '#{table_name}';")
end
end
@@ -94,6 +104,16 @@ module DataMapper
yield
end
+ private
+
+ def uses_sequence?
+ sql = <<-SQL
+ SELECT name FROM sqlite_master
+ WHERE type='table' AND name='sqlite_sequence'
+ SQL
+ select(sql).first
+ end
+
end
# FIXME
|
Fixes missing #uses_sequence invokation in adapter classes for sqlite and sqlite3
|
diff --git a/tests/Integration/ReactMqttClientTest.php b/tests/Integration/ReactMqttClientTest.php
index <HASH>..<HASH> 100644
--- a/tests/Integration/ReactMqttClientTest.php
+++ b/tests/Integration/ReactMqttClientTest.php
@@ -335,6 +335,29 @@ class ReactMqttClientTest extends \PHPUnit_Framework_TestCase
}
/**
+ * Test that client's is-connected state is updated correctly
+ *
+ * @depends test_connect_success
+ */
+ public function test_is_connected_when_connect_event_emitted()
+ {
+ $client = $this->buildClient();
+
+ $client->on('connect', function(Connection $connection) use($client){
+ $this->assertTrue($client->isConnected(), 'Client is should be connected');
+ $this->stopLoop();
+ });
+
+ $client->connect(self::HOSTNAME, self::PORT, null, 1)
+ ->then(function () use ($client) {
+ $this->assertTrue($client->isConnected());
+ $this->stopLoop();
+ });
+
+ $this->startLoop();
+ }
+
+ /**
* Tests that messages can be send and received successfully.
*
* @depends test_connect_success
|
Add is connected on 'connect' event test
Proves that a client is connected once the "connect" event
is dispatched.
|
diff --git a/app/models/ldap_authentication.rb b/app/models/ldap_authentication.rb
index <HASH>..<HASH> 100644
--- a/app/models/ldap_authentication.rb
+++ b/app/models/ldap_authentication.rb
@@ -27,19 +27,21 @@ class LdapAuthentication
encryption: get_encryption)
if ::Configuration.ldap_user_dn_pattern
- session.search(
+ result = session.search(
base: get_user_dn_from_pattern,
attributes: get_attributes,
return_result: true
- ).try(:first)
+ )
+ result ? result.try(:first) : nil
elsif ::Configuration.ldap_search_base_dn && ::Configuration.ldap_search_filter
- session.bind_as(
+ result = session.bind_as(
base: ::Configuration.ldap_search_base_dn,
filter: get_search_filter_bind_as,
password: @password,
attributes: get_attributes,
return_result: true
- ).try(:first)
+ )
+ result ? result.try(:first) : nil
else
raise ArgumentError, 'LDAP authentication requires either a user_dn_pattern, or a search_base_dn and a search_filter'
end
|
Fix for failed authentication - Net::LDAP returns False, which does not expose a "first" method.
|
diff --git a/bdates/__init__.py b/bdates/__init__.py
index <HASH>..<HASH> 100644
--- a/bdates/__init__.py
+++ b/bdates/__init__.py
@@ -85,7 +85,7 @@ def get_date_from_match_group(match):
if month.isdigit():
month = int(month)
else:
- month = month_to_number[month]
+ month = month_to_number[month.title()]
try:
day = int(match.group("day_of_the_month"))
|
fixed month_to_number lookup where month isn't capitalized/titled for some reason
|
diff --git a/.babelrc.js b/.babelrc.js
index <HASH>..<HASH> 100644
--- a/.babelrc.js
+++ b/.babelrc.js
@@ -29,11 +29,4 @@ module.exports = {
],
ignore: [/\/node_modules\/(?!@interactjs\/)/],
-
- extensions: [
- '.ts',
- '.tsx',
- '.js',
- '.jsx',
- ]
}
diff --git a/scripts/bundler.js b/scripts/bundler.js
index <HASH>..<HASH> 100644
--- a/scripts/bundler.js
+++ b/scripts/bundler.js
@@ -44,6 +44,12 @@ module.exports = function (options) {
sourceType: 'module',
global: true,
...babelrc,
+ extensions: [
+ '.ts',
+ '.tsx',
+ '.js',
+ '.jsx',
+ ],
}],
],
|
chore: move babelify extensions config to bundler
|
diff --git a/src/getstream.js b/src/getstream.js
index <HASH>..<HASH> 100644
--- a/src/getstream.js
+++ b/src/getstream.js
@@ -26,7 +26,7 @@ function connect(apiKey, apiSecret, appId, options) {
* @example <caption>where streamURL looks like</caption>
* "https://thierry:pass@gestream.io/?app=1"
*/
- if (typeof process !== 'undefined' && process.env.STREAM_URL && !apiKey) {
+ if (process && process.env && process.env.STREAM_URL && !apiKey) {
const parts = /https:\/\/(\w+):(\w+)@([\w-]*).*\?app_id=(\d+)/.exec(process.env.STREAM_URL);
apiKey = parts[1];
apiSecret = parts[2];
|
fixes #<I> with undefined process.env
|
diff --git a/Test/Asserters/Crawler.php b/Test/Asserters/Crawler.php
index <HASH>..<HASH> 100644
--- a/Test/Asserters/Crawler.php
+++ b/Test/Asserters/Crawler.php
@@ -8,9 +8,9 @@ use mageekguy\atoum\asserters;
class Crawler extends asserters\object
{
- public function setWith($value)
+ public function setWith($value, $checkType = false)
{
- parent::setWith($value, false);
+ parent::setWith($value, $checkType);
if (self::isCrawler($this->value) === false) {
$this->fail(sprintf($this->getLocale()->_('%s is not a crawler'), $this));
|
Update crawler assert to fix #<I> issue
|
diff --git a/km3pipe/__version__.py b/km3pipe/__version__.py
index <HASH>..<HASH> 100644
--- a/km3pipe/__version__.py
+++ b/km3pipe/__version__.py
@@ -9,7 +9,7 @@ Pep 386 compliant version info.
(1, 2, 0, 'beta', 2) => "1.2b2"
"""
-version_info = (0, 6, 1, 'final', 0)
+version_info = (0, 6, 2, 'final', 0)
def _get_version(version_info):
"""Return a PEP 386-compliant version number."""
|
Changes version to <I>
|
diff --git a/axiom/attributes.py b/axiom/attributes.py
index <HASH>..<HASH> 100644
--- a/axiom/attributes.py
+++ b/axiom/attributes.py
@@ -101,7 +101,7 @@ class Comparable:
_likeOperators = ('LIKE', 'NOT LIKE')
def _like(self, op, *others):
if op.upper() not in self._likeOperators:
- raise ValueError, 'LIKE-style operators are: %s' % self._likeOperators
+ raise ValueError, 'LIKE-style operators are: %r' % self._likeOperators
if not others:
raise ValueError, 'Must pass at least one expression to _like'
|
use %r instead of %s when formatting the tuple of acceptable LIKE expressions
|
diff --git a/js/jquery.cloudinary.js b/js/jquery.cloudinary.js
index <HASH>..<HASH> 100644
--- a/js/jquery.cloudinary.js
+++ b/js/jquery.cloudinary.js
@@ -166,13 +166,13 @@
};
$.fn.cloudinary = function(options) {
this.filter('img').each(function() {
- options = $.extend({width: $(this).attr('width'), height: $(this).attr('height'),
+ var img_options = $.extend({width: $(this).attr('width'), height: $(this).attr('height'),
src: $(this).attr('src')},
$.extend($(this).data(), options));
- var public_id = option_consume(options, 'source', option_consume(options, 'src'));
- var url = cloudinary_url(public_id, options);
- html_only_attributes(options);
- $(this).attr({src: url, width: options['width'], height: options['height']});
+ var public_id = option_consume(img_options, 'source', option_consume(img_options, 'src'));
+ var url = cloudinary_url(public_id, img_options);
+ html_only_attributes(img_options);
+ $(this).attr({src: url, width: img_options['width'], height: img_options['height']});
});
return this;
};
|
Options were shared between iterations of $.fn.cloudinary
|
diff --git a/lib/ftp.js b/lib/ftp.js
index <HASH>..<HASH> 100644
--- a/lib/ftp.js
+++ b/lib/ftp.js
@@ -11,7 +11,7 @@ var RE_XLISTUNIX = XRegExp.cache('^(?<type>[\\-ld])(?<permission>([\\-r][\\-w][\
RE_XTIMEVAL = XRegExp.cache('^(?<year>\\d{4})(?<month>\\d{2})(?<date>\\d{2})(?<hour>\\d{2})(?<minute>\\d{2})(?<second>\\d+)$'),
RE_PASV = /([\d]+),([\d]+),([\d]+),([\d]+),([-\d]+),([-\d]+)/,
RE_EOL = /\r?\n/g,
- RE_RESEND = /(?:^|\r?\n)(\d{3}) [^\r\n]*\r?\n$/;
+ RE_RESEND = /(?:^|\r?\n)(\d{3}) [^\r\n]*\r?\n/;
var MONTHS = {
jan: 1, feb: 2, mar: 3, apr: 4, may: 5, jun: 6,
|
fix missed change to response end regexp
|
diff --git a/lib/overcommit/plugins/pre_commit/coffee_lint.rb b/lib/overcommit/plugins/pre_commit/coffee_lint.rb
index <HASH>..<HASH> 100644
--- a/lib/overcommit/plugins/pre_commit/coffee_lint.rb
+++ b/lib/overcommit/plugins/pre_commit/coffee_lint.rb
@@ -8,7 +8,8 @@ module Overcommit::GitHook
return :warn, 'Run `npm install -g coffeelint`'
end
- output = `coffeelint --quiet #{(staged.join(' '))}`.split("\n")
+ paths = staged.collect(&:path).join(' ')
+ output = `coffeelint --quiet #{paths}`.split("\n")
return ($?.success? ? :good : :bad), output
end
end
|
Fix CoffeeScript linter path processing
The path was not being created properly for coffee_lint. This commit
emulates the other plug-ins by creating a paths variable.
Submitted as a pull request here:
<URL>
|
diff --git a/code/libraries/koowa/controller/resource.php b/code/libraries/koowa/controller/resource.php
index <HASH>..<HASH> 100644
--- a/code/libraries/koowa/controller/resource.php
+++ b/code/libraries/koowa/controller/resource.php
@@ -331,6 +331,11 @@ abstract class KControllerResource extends KControllerAbstract
if(isset($state->$method) || in_array($method, array('layout', 'view', 'format')))
{
$this->$method = $args[0];
+
+ if($method == 'view') {
+ $this->setView($args[0]);
+ }
+
return $this;
}
}
|
re #<I> : Fluent interface doesn't set view properly in controllers
|
diff --git a/rackup.php b/rackup.php
index <HASH>..<HASH> 100755
--- a/rackup.php
+++ b/rackup.php
@@ -1,7 +1,6 @@
#!/usr/bin/env php
<?php
-require "lib/Rack.php";
-require "lib/RubyRack.php";
+require "autoload.php";
class App
{
diff --git a/test/app.php b/test/app.php
index <HASH>..<HASH> 100644
--- a/test/app.php
+++ b/test/app.php
@@ -1,6 +1,6 @@
<?php
-require __DIR__."/../lib/Rack.php";
+require __DIR__."/../autoload.php";
$app = function($env) {
return array(200, array('Content-Type' => 'text/html'), array('Hello World!'));
|
scripts use autoloader now.
|
diff --git a/src/scroller.js b/src/scroller.js
index <HASH>..<HASH> 100644
--- a/src/scroller.js
+++ b/src/scroller.js
@@ -76,8 +76,11 @@ var Scroller = React.createClass({displayName: "Scroller",
// Set styles
if (item._node) {
for(var prop in styleObject) {
- item._node.style[prop] = styleObject[prop];
+ if (!item._prevStyles || item._prevStyles[prop] !== styleObject[prop]) {
+ item._node.style[prop] = styleObject[prop];
+ }
}
+ item._prevStyles = styleObject;
} else {
item._pendingStyles = styleObject;
}
@@ -191,6 +194,7 @@ var Scroller = React.createClass({displayName: "Scroller",
scrollingY: self.props.scrollingY,
});
+
// Because of React batch operations and optimizations, we need to wait
// for next tick in order to all ScrollableItems initialize and have proper
// RectCache before updating containerSizer for the first time.
|
Skip setting same styles are previous frames for better performance
|
diff --git a/moco-core/src/main/java/com/github/dreamhead/moco/handler/AbstractResponseHandler.java b/moco-core/src/main/java/com/github/dreamhead/moco/handler/AbstractResponseHandler.java
index <HASH>..<HASH> 100644
--- a/moco-core/src/main/java/com/github/dreamhead/moco/handler/AbstractResponseHandler.java
+++ b/moco-core/src/main/java/com/github/dreamhead/moco/handler/AbstractResponseHandler.java
@@ -4,7 +4,7 @@ import com.github.dreamhead.moco.MocoConfig;
import com.github.dreamhead.moco.ResponseHandler;
public abstract class AbstractResponseHandler implements ResponseHandler {
- protected ResponseHandler doApply(final MocoConfig config) {
+ protected final ResponseHandler doApply(final MocoConfig config) {
return this;
}
|
added missing final to abstract response handler
|
diff --git a/lib/knex.js b/lib/knex.js
index <HASH>..<HASH> 100644
--- a/lib/knex.js
+++ b/lib/knex.js
@@ -200,10 +200,12 @@ function explainAfterQuery (app, knex) {
knex.on('query-response', (_response, { sql, bindings }) => {
sql = knex.client._formatQuery(sql, bindings).trim();
if (haveQueryPlan(sql)) {
- knex.raw(`explain ${sql}`).then(result => {
- const explains = helper.rawResult(knex.dialect, result);
- app.knexLogger.info('[egg-knex] explains of %s\n=====> result: %j', sql, explains);
- });
+ knex.raw(`explain ${sql}`)
+ .then(result => {
+ const explains = helper.rawResult(knex.dialect, result);
+ app.knexLogger.info('[egg-knex] explains of %s\n=====> result: %j', sql, explains);
+ })
+ .catch(() => app.knexLogger.info('[egg-knex] Whoops! Explain does\'n work with:', sql));
}
});
}
|
fix: raise explain exception
* Mysql <I> only support SELECT statement
|
diff --git a/GPy/plotting/gpy_plot/gp_plots.py b/GPy/plotting/gpy_plot/gp_plots.py
index <HASH>..<HASH> 100644
--- a/GPy/plotting/gpy_plot/gp_plots.py
+++ b/GPy/plotting/gpy_plot/gp_plots.py
@@ -91,7 +91,7 @@ def _plot_mean(self, canvas, helper_data, helper_prediction,
if projection == '2d':
update_not_existing_kwargs(kwargs, pl().defaults.meanplot_2d) # @UndefinedVariable
plots = dict(gpmean=[pl().contour(canvas, x[:,0], y[0,:],
- mu.reshape(resolution, resolution),
+ mu.reshape(resolution, resolution).T,
levels=levels, label=label, **kwargs)])
elif projection == '3d':
update_not_existing_kwargs(kwargs, pl().defaults.meanplot_3d) # @UndefinedVariable
|
[gp_plots] transposed plotting of 2d contours
|
diff --git a/src/adapters/S3.js b/src/adapters/S3.js
index <HASH>..<HASH> 100644
--- a/src/adapters/S3.js
+++ b/src/adapters/S3.js
@@ -1,5 +1,12 @@
+var util = require('util');
+var BaseAdapter = require(__dirname + '/../base_adapter');
+
+var S3Adapter = function(fs) {
+ this.fs = fs;
+}
+
+util.inherits(S3Adapter, BaseAdapter);
+
module.exports = function() {
- return function() {
- console.log('S3 adapter not implemented yet.');
- };
+ return new S3Adapter();
};
|
Update S3 adapter to inherit the base adapter.
|
diff --git a/ddl/partition.go b/ddl/partition.go
index <HASH>..<HASH> 100644
--- a/ddl/partition.go
+++ b/ddl/partition.go
@@ -50,8 +50,19 @@ func buildTablePartitionInfo(ctx sessionctx.Context, d *ddl, s *ast.CreateTableS
enable = false
default:
// When tidb_enable_table_partition = 'auto',
- // Partition by range expression is enabled by default.
- if s.Partition.Tp == model.PartitionTypeRange && s.Partition.ColumnNames == nil {
+ if s.Partition.Tp == model.PartitionTypeRange {
+ // Partition by range expression is enabled by default.
+ if s.Partition.ColumnNames == nil {
+ enable = true
+ }
+ // Partition by range columns and just one column.
+ if len(s.Partition.ColumnNames) == 1 {
+ enable = true
+ }
+ }
+ // Partition by hash is enabled by default.
+ // Note that linear hash is not enabled.
+ if s.Partition.Tp == model.PartitionTypeHash {
enable = true
}
}
|
ddl: enable hash partition and range columns partition by default (#<I>)
|
diff --git a/spec/unit/veritas/comparator/compare_spec.rb b/spec/unit/veritas/comparator/compare_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/unit/veritas/comparator/compare_spec.rb
+++ b/spec/unit/veritas/comparator/compare_spec.rb
@@ -28,6 +28,12 @@ describe Comparator, '#compare' do
instance.hash.should eql(object.hash ^ instance.object_id.hash ^ instance.to_s.hash)
end
+ # XXX: find out which instance is not a Fixnum
+ it { instance.hash.should be_instance_of(Fixnum) }
+ it { object.hash.should be_instance_of(Fixnum) }
+ it { instance.object_id.hash.should be_instance_of(Fixnum) }
+ it { instance.to_s.hash.should be_instance_of(Fixnum) }
+
it 'memoizes #hash' do
subject
instance.hash
|
Temporary commit to see which #hash method is returning a non-Fixnum
|
diff --git a/mdata/cache/ccache_metric.go b/mdata/cache/ccache_metric.go
index <HASH>..<HASH> 100644
--- a/mdata/cache/ccache_metric.go
+++ b/mdata/cache/ccache_metric.go
@@ -244,12 +244,11 @@ func (mc *CCacheMetric) Search(res *CCSearchResult, from, until uint32) {
if !res.Complete && res.From > res.Until {
log.Debug("CCacheMetric Search: Found from > until (%d/%d), printing chunks\n", res.From, res.Until)
- mc.debugMetric()
+ mc.debugMetric(keys)
}
}
-func (mc *CCacheMetric) debugMetric() {
- keys := mc.sortedTs()
+func (mc *CCacheMetric) debugMetric(keys []uint32) {
log.Debug("CCacheMetric debugMetric: --- debugging metric ---\n")
log.Debug("CCacheMetric debugMetric: oldest %d; newest %d\n", mc.oldest, mc.newest)
for _, key := range keys {
|
no need to rebuild the keys slice when we already have it
|
diff --git a/javacord-api/src/main/java/org/javacord/api/entity/server/BoostLevel.java b/javacord-api/src/main/java/org/javacord/api/entity/server/BoostLevel.java
index <HASH>..<HASH> 100644
--- a/javacord-api/src/main/java/org/javacord/api/entity/server/BoostLevel.java
+++ b/javacord-api/src/main/java/org/javacord/api/entity/server/BoostLevel.java
@@ -21,7 +21,7 @@ public enum BoostLevel {
/**
* Server Boost level 3.
*/
- TIER_3(2),
+ TIER_3(3),
/**
* An unknown boost level, most likely due to new added boost levels.
|
Id Bugfix
Tier 3 ID was set to "2" when it should be "3"
|
diff --git a/src/vis/image.js b/src/vis/image.js
index <HASH>..<HASH> 100644
--- a/src/vis/image.js
+++ b/src/vis/image.js
@@ -166,16 +166,18 @@
var vizjson = this.imageOptions.vizjson;
- var isHTTPS = vizjson.indexOf("https") !== -1 ? true : false;
-
this.options.tiler_domain = domain;
+ this.options.tiler_protocol = protocol;
+ this.options.tiler_port = port;
+
+ if (vizjson.indexOf("http") === 0) {
+ var isHTTPS = vizjson.indexOf("https") !== -1 ? true : false;
+
+ if (isHTTPS) {
+ this.options.tiler_protocol = "https";
+ this.options.tiler_port = 443;
+ }
- if (isHTTPS) {
- this.options.tiler_protocol = "https";
- this.options.tiler_port = 443;
- } else {
- this.options.tiler_protocol = protocol;
- this.options.tiler_port = port;
}
},
|
if we serve the vizjson without the protocol, trust the info we got from the layer
|
diff --git a/appender.go b/appender.go
index <HASH>..<HASH> 100644
--- a/appender.go
+++ b/appender.go
@@ -2,6 +2,7 @@ package golog
import (
"fmt"
+
color "github.com/ivpusic/go-clicolor/clicolor"
)
@@ -19,7 +20,7 @@ type Appender interface {
// Representing stdout appender.
type Stdout struct {
- dateformat string
+ DateFormat string
}
var (
@@ -30,7 +31,7 @@ var (
func (s *Stdout) Append(log Log) {
msg := fmt.Sprintf(" {cyan}%s {default}%s {%s}%s[%s] ▶ %s",
log.Logger.Name,
- log.Time.Format(s.dateformat),
+ log.Time.Format(s.DateFormat),
log.Level.color,
log.Level.icon,
log.Level.Name[:4],
@@ -50,7 +51,7 @@ func StdoutAppender() *Stdout {
if instance == nil {
instance = &Stdout{
- dateformat: "15:04:05",
+ DateFormat: "15:04:05",
}
}
|
exposed date format for stdout logger
|
diff --git a/lib/discordrb/data.rb b/lib/discordrb/data.rb
index <HASH>..<HASH> 100644
--- a/lib/discordrb/data.rb
+++ b/lib/discordrb/data.rb
@@ -852,7 +852,7 @@ module Discordrb
process_channels(data['channels'])
process_voice_states(data['voice_states'])
- @owner = self.member(@owner_id)
+ @owner = member(@owner_id)
end
# @return [Channel] The default channel on this server (usually called #general)
|
Remove a redundant self reference when obtaining the owner
|
diff --git a/DependencyInjection/Configuration.php b/DependencyInjection/Configuration.php
index <HASH>..<HASH> 100644
--- a/DependencyInjection/Configuration.php
+++ b/DependencyInjection/Configuration.php
@@ -106,10 +106,6 @@ return $v; })
->end()
->end()
->arrayNode('serializer')
- ->validate()
- ->ifTrue(function ($v) { return !empty($v['version']) && !empty($v['groups']); })
- ->thenInvalid('Only either a version or a groups exclusion strategy can be set')
- ->end()
->addDefaultsIfNotSet()
->children()
->scalarNode('version')->defaultNull()->end()
|
Removing conflicting validation rule.
While PR #<I> added support for using both serialization groups and versions, the configuration validation was not updated to reflect these changes. This commit simply removes the specific validation rule that is affected.
|
diff --git a/batchSystems/gridengine.py b/batchSystems/gridengine.py
index <HASH>..<HASH> 100644
--- a/batchSystems/gridengine.py
+++ b/batchSystems/gridengine.py
@@ -65,13 +65,13 @@ class MemoryString:
return cmp(self.bytes, other.bytes)
def prepareQsub(cpu, mem):
- qsubline = ["qsub","-b","y","-terse","-j" ,"y", "-cwd", "-o", "/dev/null", "-e", "/dev/null", "-v",
- "LD_LIBRARY_PATH=%s" % os.environ["LD_LIBRARY_PATH"]]
+ qsubline = ["qsub","-b","y","-terse","-j" ,"y", "-cwd", "-o", "/dev/null", "-e", "/dev/null", "-V"]
reqline = list()
if cpu is not None:
reqline.append("p="+str(cpu))
if mem is not None:
reqline.append("vf="+str(mem/ 1024)+"K")
+ reqline.append("h_vmem="+str(mem/ 1024)+"K")
if len(reqline) > 0:
qsubline.extend(["-hard","-l", ",".join(reqline)])
return qsubline
|
Corrected bugs in command line to SGE
|
diff --git a/app/models/cms/layout.rb b/app/models/cms/layout.rb
index <HASH>..<HASH> 100644
--- a/app/models/cms/layout.rb
+++ b/app/models/cms/layout.rb
@@ -81,7 +81,7 @@ protected
# Forcing page content reload
def clear_cached_page_content
- self.pages.each{ |page| page.clear_cached_content! }
+ Cms::Page.where(:id => self.pages.pluck(:id)).update_all(:content => nil)
self.children.each{ |child_layout| child_layout.clear_cached_page_content }
end
|
same idea for layout cached content busting
|
diff --git a/pug/miner/views.py b/pug/miner/views.py
index <HASH>..<HASH> 100644
--- a/pug/miner/views.py
+++ b/pug/miner/views.py
@@ -247,7 +247,7 @@ def context_from_request(request, context=None, Form=GetLagForm, delim=',', verb
'max_lag': str(max_lag),
'min_date': ', '.join(context['filter']['min_dates']),
'max_date': ', '.join(context['filter']['max_dates']),
- 'regex': ', '.context['regex'],
+ 'regex': context['regex'],
}
if verbosity > 1:
|
fix bug in get of regex from context
|
diff --git a/sc2common/__version__.py b/sc2common/__version__.py
index <HASH>..<HASH> 100644
--- a/sc2common/__version__.py
+++ b/sc2common/__version__.py
@@ -17,6 +17,6 @@ See the License for the specific language governing permissions and
limitations under the License.
"""
-VERSION = (1, 0, 13)
+VERSION = (1, 1, 0)
__version__ = '.'.join(map(str, VERSION))
|
- significant version bump owing to logic changes within RestrictedType class
|
diff --git a/iapws/iapws97.py b/iapws/iapws97.py
index <HASH>..<HASH> 100644
--- a/iapws/iapws97.py
+++ b/iapws/iapws97.py
@@ -4021,7 +4021,7 @@ def _Bound_hs(h, s):
"""
region = None
s13 = _Region1(623.15, 100)["s"]
- s13s = _Region1(623.15, Ps_623)["s"]
+ s13s = _Region1(623.15, Ps_623)["s"]
sTPmax = _Region2(1073.15, 100)["s"]
s2ab = _Region2(1073.15, 4)["s"]
|
Correct flake8 E<I> complaint.
E<I> multiple spaces after ','
|
diff --git a/springloaded/src/main/java/org/springsource/loaded/MethodInvokerRewriter.java b/springloaded/src/main/java/org/springsource/loaded/MethodInvokerRewriter.java
index <HASH>..<HASH> 100644
--- a/springloaded/src/main/java/org/springsource/loaded/MethodInvokerRewriter.java
+++ b/springloaded/src/main/java/org/springsource/loaded/MethodInvokerRewriter.java
@@ -992,10 +992,11 @@ public class MethodInvokerRewriter {
@Override
public void visitInvokeDynamicInsn(String name, String desc, Handle bsm, Object... bsmArgs) {
// TODO *shudder* what about invoke dynamic calls that target reflective APIs
- int classId = typeRegistry.getTypeIdFor(slashedclassname, false);
+ int classId = typeRegistry.getTypeIdFor(slashedclassname, true);
if (classId==-1) {
- throw new IllegalStateException();
+ throw new IllegalStateException("Unable to find classId for "+slashedclassname+" referenced from invokedynamic in "+this.methodname+"()");
}
+
// Initially only rewriting use of INVOKEDYNAMIC to support Lambda execution
// TODO support the more general invokedynamic usage
|
Allow for class not yet loaded when rewriting invokedynamic
|
diff --git a/tests/test_substrate.py b/tests/test_substrate.py
index <HASH>..<HASH> 100644
--- a/tests/test_substrate.py
+++ b/tests/test_substrate.py
@@ -828,7 +828,7 @@ class TestMAASAccountFromConfig(TestCase):
config = get_maas_env().config
with patch('subprocess.check_call', autospec=True) as cc_mock:
with maas_account_from_config(config) as maas:
- self.assertIsInstance(maas, MAASAccount)
+ self.assertIs(type(maas), MAASAccount)
self.assertEqual(maas.profile, 'mas')
self.assertEqual(maas.url, 'http://10.0.10.10/MAAS/api/2.0/')
self.assertEqual(maas.oauth, 'a:password:string')
@@ -846,7 +846,7 @@ class TestMAASAccountFromConfig(TestCase):
with patch('subprocess.check_call', autospec=True,
side_effect=[login_error, None, None]) as cc_mock:
with maas_account_from_config(config) as maas:
- self.assertIsInstance(maas, MAASAccount)
+ self.assertIs(type(maas), MAAS1Account)
self.assertEqual(maas.profile, 'mas')
self.assertEqual(maas.url, 'http://10.0.10.10/MAAS/api/1.0/')
self.assertEqual(maas.oauth, 'a:password:string')
|
Correct check on MAASAccount types in test
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.