hash
stringlengths 40
40
| diff
stringlengths 131
114k
| message
stringlengths 7
980
| project
stringlengths 5
67
| split
stringclasses 1
value |
|---|---|---|---|---|
f228691f170bbeb716d8858b81c5865752f2f433
|
diff --git a/tests/tools_dbmaint_unittest.py b/tests/tools_dbmaint_unittest.py
index <HASH>..<HASH> 100644
--- a/tests/tools_dbmaint_unittest.py
+++ b/tests/tools_dbmaint_unittest.py
@@ -272,15 +272,24 @@ class ScriptsToRunTestCase(unittest.TestCase):
def setUp(self):
self.tdir = tempfile.mkdtemp()
self.path = "%s/schema/upgrades" % self.tdir
- self.top = "%s/openquake/pshai/0.3.9-1" % self.path
- self.path1 = "%s/1" % self.top
- os.makedirs(self.path1)
- self.path1d = "%s/1/too_deep" % self.top
- os.makedirs(self.path1d)
- self.path2 = "%s/2" % self.top
- os.makedirs(self.path2)
- self.path3 = "%s/3" % self.top
- os.makedirs(self.path3)
+ self.top = "%s/openquake/pshai" % self.path
+ # older revision
+ self.path_38_1 = "%s/0.3.8/1" % self.top
+ os.makedirs(self.path_38_1)
+ self.path_38_5 = "%s/0.3.8/5" % self.top
+ os.makedirs(self.path_38_5)
+ # current revision
+ self.path_39_1 = "%s/0.3.9-1/1" % self.top
+ os.makedirs(self.path_39_1)
+ self.path_39_1d = "%s/0.3.9-1/1/too_deep" % self.top
+ os.makedirs(self.path_39_1d)
+ self.path_39_2 = "%s/0.3.9-1/2" % self.top
+ os.makedirs(self.path_39_2)
+ self.path_39_3 = "%s/0.3.9-1/3" % self.top
+ os.makedirs(self.path_39_3)
+ # newer revision
+ self.path_42_1 = "%s/0.4.2/1" % self.top
+ os.makedirs(self.path_42_1)
def tearDown(self):
shutil.rmtree(self.tdir)
@@ -291,8 +300,10 @@ class ScriptsToRunTestCase(unittest.TestCase):
rev_info = {"step": "2", "id": "3", "revision": "0.3.9-1"}
config = {"dryrun": True, "path": self.path, "host": "localhost",
"db": "openquake", "user": "postgres"}
- touch("%s/01-a.sql" % self.path1)
- touch("%s/01-a.sql" % self.path2)
+ touch("%s/01-a.sql" % self.path_38_1)
+ touch("%s/01-a.sql" % self.path_38_5)
+ touch("%s/01-a.sql" % self.path_39_1)
+ touch("%s/01-a.sql" % self.path_39_2)
self.assertEqual([], scripts_to_run(artefact, rev_info, config))
def test_scripts_to_run_with_available_upgrades(self):
@@ -301,11 +312,16 @@ class ScriptsToRunTestCase(unittest.TestCase):
rev_info = {"step": "2", "id": "3", "revision": "0.3.9-1"}
config = {"dryrun": True, "path": self.path, "host": "localhost",
"db": "openquake", "user": "postgres"}
- touch("%s/01-a.sql" % self.path1)
- touch("%s/01-b.sql" % self.path2)
- touch("%s/01-c.sql" % self.path3)
- touch("%s/02-d.sql" % self.path3)
- self.assertEqual(["0.3.9-1/3/01-c.sql", "0.3.9-1/3/02-d.sql"],
+ touch("%s/01-a.sql" % self.path_38_1)
+ touch("%s/01-a.sql" % self.path_39_1)
+ touch("%s/01-a.sql" % self.path_38_5)
+ touch("%s/01-b.sql" % self.path_39_2)
+ touch("%s/01-c.sql" % self.path_39_3)
+ touch("%s/02-d.sql" % self.path_39_3)
+ touch("%s/01-a.sql" % self.path_42_1)
+ touch("%s/02-b.sql" % self.path_42_1)
+ self.assertEqual(["0.3.9-1/3/01-c.sql", "0.3.9-1/3/02-d.sql",
+ "0.4.2/1/01-a.sql", "0.4.2/1/02-b.sql"],
scripts_to_run(artefact, rev_info, config))
diff --git a/tools/dbmaint.py b/tools/dbmaint.py
index <HASH>..<HASH> 100755
--- a/tools/dbmaint.py
+++ b/tools/dbmaint.py
@@ -151,6 +151,22 @@ def scripts_to_run(artefact, rev_info, config):
if (int(spath) > step):
result.append(os.path.join(revision, script))
+ # find upgrade scripts for revisions newer than the current one
+ path = "%s/%s" % (config['path'], artefact)
+ current_revision_array = version_array(revision)
+ if os.path.isdir(path):
+ dirs = [os.path.join(path, d)
+ for d in os.listdir(path)
+ if os.path.isdir(os.path.join(path, d))]
+
+ for dir in dirs:
+ path_revision = os.path.basename(dir)
+ dir_revision_array = version_array(path_revision)
+
+ if dir_revision_array > current_revision_array:
+ result.extend(os.path.join(path_revision, s)
+ for s in find_scripts(dir))
+
return list(sorted(result))
|
Find upgrade scripts for revisions newer than the current one.
Former-commit-id: <I>b<I>d8a<I>b8ca<I>d7c7d8e8e1dd<I>b<I>
|
gem_oq-engine
|
train
|
8da819eef5f11cc016ffa9ad747421ee50be32fa
|
diff --git a/actionpack/test/controller/filters_test.rb b/actionpack/test/controller/filters_test.rb
index <HASH>..<HASH> 100644
--- a/actionpack/test/controller/filters_test.rb
+++ b/actionpack/test/controller/filters_test.rb
@@ -213,6 +213,14 @@ class FilterTest < ActionController::TestCase
before_filter :clean_up_tmp, :if => Proc.new { |c| false }
end
+ class ConditionalOptionsSkipFilter < ConditionalFilterController
+ before_filter :ensure_login
+ before_filter :clean_up_tmp
+
+ skip_before_filter :ensure_login, if: -> { false }
+ skip_before_filter :clean_up_tmp, if: -> { true }
+ end
+
class PrependingController < TestController
prepend_before_filter :wonderful_life
# skip_before_filter :fire_flash
@@ -593,6 +601,11 @@ class FilterTest < ActionController::TestCase
assert_equal %w( ensure_login ), assigns["ran_filter"]
end
+ def test_running_conditional_skip_options
+ test_process(ConditionalOptionsSkipFilter)
+ assert_equal %w( ensure_login ), assigns["ran_filter"]
+ end
+
def test_running_collection_condition_filters
test_process(ConditionalCollectionFilterController)
assert_equal %w( ensure_login ), assigns["ran_filter"]
|
add test for skip_before_filter with condition
|
rails_rails
|
train
|
9f81cf096bfa56d22d345ff2f97473e03bc354d8
|
diff --git a/pkg/kubelet/dockershim/docker_container.go b/pkg/kubelet/dockershim/docker_container.go
index <HASH>..<HASH> 100644
--- a/pkg/kubelet/dockershim/docker_container.go
+++ b/pkg/kubelet/dockershim/docker_container.go
@@ -227,6 +227,10 @@ func (ds *dockerService) createContainerLogSymlink(containerID string) error {
if realPath != "" {
// Only create the symlink when container log path is specified and log file exists.
+ // Delete possibly existing file first
+ if err = ds.os.Remove(path); err == nil {
+ glog.Warningf("Deleted previously existing symlink file: %q", path)
+ }
if err = ds.os.Symlink(realPath, path); err != nil {
return fmt.Errorf("failed to create symbolic link %q to the container log file %q for container %q: %v",
path, realPath, containerID, err)
|
Make sure the previous symlink file is deleted
before trying to create a new one.
|
kubernetes_kubernetes
|
train
|
fefa8ae9a172835fb6b8aef7d1dd46d58eecd49f
|
diff --git a/railties/lib/rails/generators/rails/plugin/plugin_generator.rb b/railties/lib/rails/generators/rails/plugin/plugin_generator.rb
index <HASH>..<HASH> 100644
--- a/railties/lib/rails/generators/rails/plugin/plugin_generator.rb
+++ b/railties/lib/rails/generators/rails/plugin/plugin_generator.rb
@@ -185,6 +185,7 @@ task default: :test
end
public_task :set_default_accessors!
+ public_task :apply_rails_template
public_task :create_root
def create_root_files
@@ -241,7 +242,6 @@ task default: :test
build(:leftovers)
end
- public_task :apply_rails_template, :run_bundle
def name
@name ||= begin
@@ -255,6 +255,9 @@ task default: :test
end
end
+ public_task :run_bundle
+ public_task :replay_template
+
protected
def app_templates_dir
|
Fixed plugin_generator test
Template replay needs to happen after bundle.
As we are doing TemplateRecording in apply_rails_template.
here are few commits for changes
8beb<I>cfbc<I>ae4dbb<I>e<I>abcd1fb7d<I>
<I>be<I>f<I>ca<I>a<I>fc<I>bb<I>
|
rails_rails
|
train
|
0525ae21147ac44ec357135c4626005b8b594093
|
diff --git a/tests/test-validator-StringLengthValidator.js b/tests/test-validator-StringLengthValidator.js
index <HASH>..<HASH> 100644
--- a/tests/test-validator-StringLengthValidator.js
+++ b/tests/test-validator-StringLengthValidator.js
@@ -43,7 +43,8 @@ describe('sjl.validator.StringLengthValidator', function () {
});
test ('it should return a validation result object with a `result` of `false` and ' +
'one error message when the passed in value is not within length range', function () {
- const strValidator = stringLengthValidator({min: 6, max: 13}),
+ const exampleOps = {min: 6, max: 13},
+ strValidator = stringLengthValidator(exampleOps),
errMsgTmpl = (value, ops) =>
`Value is not within range ` +
`${ops.min} to ${ops.max}. ` +
@@ -55,7 +56,7 @@ describe('sjl.validator.StringLengthValidator', function () {
const {result, messages} = strValidator(value);
expect(result).to.equal(expected);
expect(messages.length).to.equal(messagesLength);
- expect(messages[0]).to.equal(errMsgTmpl(value));
+ expect(messages[0]).to.equal(errMsgTmpl(value, exampleOps));
});
});
test ('it should return a validation result object with a `result` of `true` and ' +
|
Added messages test for stringLengthValidator.
|
functional-jslib_fjl-validator
|
train
|
2f2e9803647a4be5ea44573b3758f8fe2d16976e
|
diff --git a/src/hypercorn/__about__.py b/src/hypercorn/__about__.py
index <HASH>..<HASH> 100644
--- a/src/hypercorn/__about__.py
+++ b/src/hypercorn/__about__.py
@@ -1 +1 @@
-__version__ = "0.11.0"
+__version__ = "0.11.0+dev"
|
Following the release of <I> bump to +dev
|
pgjones_hypercorn
|
train
|
537abcb52aff92657efc7df67b59206ed3ebeacd
|
diff --git a/howler.js b/howler.js
index <HASH>..<HASH> 100644
--- a/howler.js
+++ b/howler.js
@@ -133,13 +133,14 @@
var audioTest = null;
if (!noAudio) {
audioTest = new Audio();
+ var codecRegex = /^no|maybe$/;
var codecs = {
- mp3: !!audioTest.canPlayType('audio/mpeg;').replace(/^no$/,''),
- opus: !!audioTest.canPlayType('audio/ogg; codecs="opus"').replace(/^no$/,''),
- ogg: !!audioTest.canPlayType('audio/ogg; codecs="vorbis"').replace(/^no$/,''),
- wav: !!audioTest.canPlayType('audio/wav; codecs="1"').replace(/^no$/,''),
- m4a: !!(audioTest.canPlayType('audio/x-m4a;') || audioTest.canPlayType('audio/aac;')).replace(/^no$/,''),
- weba: !!audioTest.canPlayType('audio/webm; codecs="vorbis"').replace(/^no$/,'')
+ mp3: !!audioTest.canPlayType('audio/mpeg;').replace(codecRegex,''),
+ opus: !!audioTest.canPlayType('audio/ogg; codecs="opus"').replace(codecRegex,''),
+ ogg: !!audioTest.canPlayType('audio/ogg; codecs="vorbis"').replace(codecRegex,''),
+ wav: !!audioTest.canPlayType('audio/wav; codecs="1"').replace(codecRegex,''),
+ m4a: !!(audioTest.canPlayType('audio/x-m4a;') || audioTest.canPlayType('audio/aac;')).replace(codecRegex,''),
+ weba: !!audioTest.canPlayType('audio/webm; codecs="vorbis"').replace(codecRegex,'')
};
}
|
Detect 'maybe' responses as well as 'no' responses when testing codec support.
|
goldfire_howler.js
|
train
|
9cfb09763afddac19972183773a2cf8e9b69079f
|
diff --git a/lib/rules/no-curly-component-invocation.js b/lib/rules/no-curly-component-invocation.js
index <HASH>..<HASH> 100644
--- a/lib/rules/no-curly-component-invocation.js
+++ b/lib/rules/no-curly-component-invocation.js
@@ -97,6 +97,12 @@ module.exports = class NoCurlyComponentInvocation extends Rule {
if (node.path.type !== 'PathExpression' || this.isAmbiguousLocalInvocation(node)) {
return '';
}
+
+ // skip multi-part paths like `{{request.note}}`
+ if (node.path.parts.length > 1) {
+ return;
+ }
+
if (
node.type === 'BlockStatement' &&
!BUILT_IN_HELPERS.includes(name) &&
diff --git a/test/unit/rules/no-curly-component-invocation-test.js b/test/unit/rules/no-curly-component-invocation-test.js
index <HASH>..<HASH> 100644
--- a/test/unit/rules/no-curly-component-invocation-test.js
+++ b/test/unit/rules/no-curly-component-invocation-test.js
@@ -28,6 +28,9 @@ const SHARED_GOOD = [
'{{svg-jar "status"}}',
'{{t "some.translation.key"}}',
'{{#animated-if condition}}foo{{/animated-if}}',
+ '{{model.selectedTransfersCount}}',
+ '{{request.note}}',
+ '{{42}}',
];
const SHARED_BAD = [
@@ -63,7 +66,7 @@ generateRuleTests({
disallow: ['heading'],
},
- good: [...SHARED_GOOD, '{{model.selectedTransfersCount}}', '{{request.note}}'],
+ good: [...SHARED_GOOD],
bad: [
...SHARED_BAD,
|
no-curly-component-invocation: Ignore multi-part paths
|
ember-template-lint_ember-template-lint
|
train
|
753ecfedd0a2eda62fe8f398558a58f71f019bf1
|
diff --git a/filer/models/abstract.py b/filer/models/abstract.py
index <HASH>..<HASH> 100644
--- a/filer/models/abstract.py
+++ b/filer/models/abstract.py
@@ -56,21 +56,15 @@ class BaseImage(File):
iext = os.path.splitext(iname)[1].lower()
return iext in ['.jpg', '.jpeg', '.png', '.gif']
- def file_data_changed(self, post_init=False):
- attrs_updated = super(BaseImage, self).file_data_changed(post_init=post_init)
- if attrs_updated:
- # update image dimensions
+ def save(self, *args, **kwargs):
+ self.has_all_mandatory_data = self._check_validity()
+ if self.has_all_mandatory_data:
try:
self.file.seek(0)
self._width, self._height = PILImage.open(self.file).size
self.file.seek(0)
except Exception:
- # probably the image is missing, nevermind.
- pass
- return attrs_updated
-
- def save(self, *args, **kwargs):
- self.has_all_mandatory_data = self._check_validity()
+ self._width, self._height = None, None
super(BaseImage, self).save(*args, **kwargs)
def _check_validity(self):
|
postpone image size reset on invalid images until save
|
divio_django-filer
|
train
|
cfe01ed6361251fe6d8b98ab1a1a38b22958b97e
|
diff --git a/lib/time/task_timer.rb b/lib/time/task_timer.rb
index <HASH>..<HASH> 100644
--- a/lib/time/task_timer.rb
+++ b/lib/time/task_timer.rb
@@ -1,17 +1,8 @@
module BBLib
- class TaskTimer
- attr_reader :tasks, :save, :retention
-
- def initialize task = nil, opts = Hash.new
- @tasks = {}
- self.retention = opts[:retention] || 100
- if task then start task end
- end
-
- def retention= num
- @retention = num.nil? ? nil : BBLib.keep_between(num, -1, nil)
- end
+ class TaskTimer < LazyClass
+ attr_hash :tasks, default: Hash.new
+ attr_int_between -1, nil, :retention, default: 100
def time task = :default, type = :current
return nil unless @tasks.keys.include? task
@@ -65,10 +56,6 @@ module BBLib
start(task) unless stop(task).nil?
end
- def save= save
- @save = save
- end
-
def active? task
return false unless @tasks.keys.include? task
!@tasks[task][:current].nil?
@@ -109,6 +96,12 @@ module BBLib
all: [:times]
}
+ def lazy_init *args
+ if args.first.is_a?(Symbol)
+ start(args.first)
+ end
+ end
+
end
end
|
Ported TaskTimer over to LazyClass.
|
bblack16_bblib-ruby
|
train
|
c205b93629a3dd724755c6499c1014c82af5e45f
|
diff --git a/packages/posts/webpack.serverless.config.js b/packages/posts/webpack.serverless.config.js
index <HASH>..<HASH> 100644
--- a/packages/posts/webpack.serverless.config.js
+++ b/packages/posts/webpack.serverless.config.js
@@ -55,6 +55,7 @@ if (process.env.DEPLOY && process.env.SENTRY_AUTH_TOKEN) {
module.exports = {
entry: slsw.lib.entries,
mode: resolveMode(),
+ target: "node",
optimization: {
minimize: false
},
|
fix(posts): Webpack explicitly targets `node`.
|
randytarampi_me
|
train
|
01a4473c202cfe549283ddb4c1a5ff5eaddcd868
|
diff --git a/dvc/parsing/context.py b/dvc/parsing/context.py
index <HASH>..<HASH> 100644
--- a/dvc/parsing/context.py
+++ b/dvc/parsing/context.py
@@ -62,7 +62,7 @@ class MergeError(ContextError):
)
-class ParamsFileNotFound(ContextError):
+class ParamsLoadError(ContextError):
pass
@@ -349,7 +349,9 @@ class Context(CtxDict):
def load_from(cls, tree, path: PathInfo, select_keys=None) -> "Context":
file = relpath(path)
if not tree.exists(path):
- raise ParamsFileNotFound(f"'{file}' does not exist")
+ raise ParamsLoadError(f"'{file}' does not exist")
+ if tree.isdir(path):
+ raise ParamsLoadError(f"'{file}' is a directory")
_, ext = os.path.splitext(file)
loader = LOADERS[ext]
@@ -357,7 +359,7 @@ class Context(CtxDict):
data = loader(path, tree=tree)
if not isinstance(data, Mapping):
typ = type(data).__name__
- raise ContextError(
+ raise ParamsLoadError(
f"expected a dictionary, got '{typ}' in file '{file}'"
)
@@ -367,7 +369,7 @@ class Context(CtxDict):
data = {key: data[key] for key in select_keys}
except KeyError as exc:
key, *_ = exc.args
- raise ContextError(
+ raise ParamsLoadError(
f"could not find '{key}' in '{file}'"
) from exc
diff --git a/tests/unit/test_context.py b/tests/unit/test_context.py
index <HASH>..<HASH> 100644
--- a/tests/unit/test_context.py
+++ b/tests/unit/test_context.py
@@ -1,5 +1,6 @@
from dataclasses import asdict
from math import pi
+from unittest.mock import mock_open
import pytest
@@ -10,13 +11,13 @@ from dvc.parsing.context import (
CtxList,
KeyNotInContext,
MergeError,
- ParamsFileNotFound,
+ ParamsLoadError,
Value,
recurse_not_a_node,
)
from dvc.tree.local import LocalTree
from dvc.utils import relpath
-from dvc.utils.serialize import dump_yaml
+from dvc.utils.serialize import dump_yaml, dumps_yaml
def test_context():
@@ -216,17 +217,13 @@ def test_overwrite_with_setitem():
def test_load_from(mocker):
- def _yaml_load(*args, **kwargs):
- return {"x": {"y": {"z": 5}, "lst": [1, 2, 3]}, "foo": "foo"}
-
- mocker.patch("dvc.parsing.context.LOADERS", {".yaml": _yaml_load})
-
- class tree:
- def exists(self, _):
- return True
-
+ d = {"x": {"y": {"z": 5}, "lst": [1, 2, 3]}, "foo": "foo"}
+ tree = mocker.Mock(
+ open=mock_open(read_data=dumps_yaml(d)),
+ **{"exists.return_value": True, "isdir.return_value": False},
+ )
file = "params.yaml"
- c = Context.load_from(tree(), file)
+ c = Context.load_from(tree, file)
assert asdict(c["x"].meta) == {
"source": file,
@@ -430,7 +427,18 @@ def test_resolve_resolves_boolean_value():
assert context.resolve_str("--flag ${disabled}") == "--flag false"
-def test_merge_from_raises_if_file_not_exist(tmp_dir, dvc):
- context = Context(foo="bar")
- with pytest.raises(ParamsFileNotFound):
- context.merge_from(dvc.tree, DEFAULT_PARAMS_FILE, tmp_dir)
+def test_load_from_raises_if_file_not_exist(tmp_dir, dvc):
+ with pytest.raises(ParamsLoadError) as exc_info:
+ Context.load_from(dvc.tree, tmp_dir / DEFAULT_PARAMS_FILE)
+
+ assert str(exc_info.value) == "'params.yaml' does not exist"
+
+
+def test_load_from_raises_if_file_is_directory(tmp_dir, dvc):
+ data_dir = tmp_dir / "data"
+ data_dir.mkdir()
+
+ with pytest.raises(ParamsLoadError) as exc_info:
+ Context.load_from(dvc.tree, data_dir)
+
+ assert str(exc_info.value) == "'data' is a directory"
|
parametrization: fix error when the file in `vars` is a directory (#<I>)
* Fix error when the file in `vars` is a directory
* Fix on windows
* mock tree only
|
iterative_dvc
|
train
|
30d9763761a0daad22e7f1fc84b762079d966c17
|
diff --git a/sources/scalac/symtab/SymbolCloner.java b/sources/scalac/symtab/SymbolCloner.java
index <HASH>..<HASH> 100644
--- a/sources/scalac/symtab/SymbolCloner.java
+++ b/sources/scalac/symtab/SymbolCloner.java
@@ -11,7 +11,6 @@ package scalac.symtab;
import java.util.Map;
import java.util.HashMap;
-import scalac.util.FreshNameCreator;
import scalac.util.Debug;
/**
@@ -25,9 +24,6 @@ public class SymbolCloner {
//########################################################################
// Public Fields
- /** The fresh name generator used to rename cloned symbols */
- public final FreshNameCreator renamer;
-
/** A table that maps owners of symbols to owners of cloned symbols */
public final Map/*<Symbol,Symbol*/ owners;
@@ -38,18 +34,17 @@ public class SymbolCloner {
// Public Constructor
/** Initializes a new instance. */
- public SymbolCloner(FreshNameCreator renamer) {
- this(renamer, new HashMap());
+ public SymbolCloner() {
+ this(new HashMap());
}
/** Initializes a new instance. */
- public SymbolCloner(FreshNameCreator renamer, Map owners) {
- this(renamer, owners, new HashMap());
+ public SymbolCloner(Map owners) {
+ this(owners, new HashMap());
}
/** Initializes a new instance. */
- public SymbolCloner(FreshNameCreator renamer, Map owners, Map clones) {
- this.renamer = renamer;
+ public SymbolCloner(Map owners, Map clones) {
this.owners = owners;
this.clones = clones;
}
@@ -73,11 +68,6 @@ public class SymbolCloner {
/** Clones the given symbol. */
public Symbol cloneSymbol(Symbol symbol) {
- return cloneSymbol(symbol, false);
- }
-
- /** Clones the given symbol and renames it if rename is true. */
- public Symbol cloneSymbol(Symbol symbol, boolean rename) {
assert !symbol.isPrimaryConstructor() : Debug.show(symbol);
assert !symbol.isModuleClass() : Debug.show(symbol);
assert !symbol.isClass() : Debug.show(symbol);
@@ -86,10 +76,6 @@ public class SymbolCloner {
assert !clones.containsKey(symbol) :
Debug.show(symbol) + " -> " + Debug.show(clones.get(symbol));
Symbol clone = symbol.cloneSymbol(getOwnerFor(symbol));
- if (rename) {
- assert !symbol.isConstructor() : Debug.show(symbol);
- clone.name = renamer.newName(symbol.name);
- }
clones.put(symbol, clone);
clone.setType(cloneType(symbol.info()));
return clone;
@@ -97,15 +83,10 @@ public class SymbolCloner {
/** Clones the given symbols. */
public Symbol[] cloneSymbols(Symbol[] symbols) {
- return cloneSymbols(symbols, false);
- }
-
- /** Clones the given symbols and renames them if rename is true. */
- public Symbol[] cloneSymbols(Symbol[] symbols, boolean rename) {
if (symbols.length == 0) return Symbol.EMPTY_ARRAY;
Symbol[] clones = new Symbol[symbols.length];
for (int i = 0; i < clones.length; i++)
- clones[i] = cloneSymbol(symbols[i], rename);
+ clones[i] = cloneSymbol(symbols[i]);
return clones;
}
diff --git a/sources/scalac/transformer/ExpandMixinsPhase.java b/sources/scalac/transformer/ExpandMixinsPhase.java
index <HASH>..<HASH> 100644
--- a/sources/scalac/transformer/ExpandMixinsPhase.java
+++ b/sources/scalac/transformer/ExpandMixinsPhase.java
@@ -317,7 +317,7 @@ public class ExpandMixinsPhase extends Phase {
assert clasz.isClass() && !clasz.isInterface(): Debug.show(clasz);
TypeTransformer transformer =(TypeTransformer)transformers.get(clasz);
if (transformer == null) {
- transformer = new TypeTransformer(global, clasz);
+ transformer = new TypeTransformer(clasz);
transformers.put(clasz, transformer);
}
return transformer;
@@ -357,9 +357,9 @@ public class ExpandMixinsPhase extends Phase {
public final SymbolCloner cloner;
public final Map/*<Symbol,Type>*/ inlines;
- public TypeTransformer(Global global, Symbol clasz) {
+ public TypeTransformer(Symbol clasz) {
this.clasz = clasz;
- this.cloner = new SymbolCloner(global.freshNameCreator);
+ this.cloner = new SymbolCloner();
this.inlines = new HashMap();
initialize();
}
|
- Removed renamer from SymbolCloner
|
scala_scala
|
train
|
b1727f8a79502f095105c37ebbcd050b3e668dc1
|
diff --git a/ONGRFilterManagerBundle.php b/ONGRFilterManagerBundle.php
index <HASH>..<HASH> 100644
--- a/ONGRFilterManagerBundle.php
+++ b/ONGRFilterManagerBundle.php
@@ -15,6 +15,7 @@ use ONGR\FilterManagerBundle\DependencyInjection\Compiler\FilterPass;
use ONGR\FilterManagerBundle\DependencyInjection\Filter\ChoiceFilterFactory;
use ONGR\FilterManagerBundle\DependencyInjection\Filter\DateRangeFilterFactory;
use ONGR\FilterManagerBundle\DependencyInjection\Filter\DocumentValueFactory;
+use ONGR\FilterManagerBundle\DependencyInjection\Filter\DynamicAggregateFilterFactory;
use ONGR\FilterManagerBundle\DependencyInjection\Filter\FieldValueFactory;
use ONGR\FilterManagerBundle\DependencyInjection\Filter\FuzzyFilterFactory;
use ONGR\FilterManagerBundle\DependencyInjection\Filter\MatchFilterFactory;
@@ -50,6 +51,7 @@ class ONGRFilterManagerBundle extends Bundle
$extension->addFilterFactory(new DateRangeFilterFactory());
$extension->addFilterFactory(new FieldValueFactory());
$extension->addFilterFactory(new DocumentValueFactory());
+ $extension->addFilterFactory(new DynamicAggregateFilterFactory());
$container->addCompilerPass(new FilterPass());
}
|
included a vactory in the bundle
|
ongr-io_FilterManagerBundle
|
train
|
a35c893baa70623672d7ba07a30b3880724ee2d7
|
diff --git a/classes/PHPTAL/Php/Attribute/METAL/UseMacro.php b/classes/PHPTAL/Php/Attribute/METAL/UseMacro.php
index <HASH>..<HASH> 100644
--- a/classes/PHPTAL/Php/Attribute/METAL/UseMacro.php
+++ b/classes/PHPTAL/Php/Attribute/METAL/UseMacro.php
@@ -57,7 +57,8 @@ class PHPTAL_Php_Attribute_METAL_UseMacro extends PHPTAL_Php_Attribute
if ($defineAttr = $this->phpelement->getAttributeNodeNS(
'http://xml.zope.org/namespaces/metal', 'define-macro')) {
if ($defineAttr->getValue() == $macroname)
- throw new Exception("Cannot simultaneously define and use macro '$macroname'");
+ throw new PHPTAL_TemplateException("Cannot simultaneously define and use macro '$macroname'",
+ $this->phpelement->getSourceFile(), $this->phpelement->getSourceLine());
}
// local macro (no filename specified) and non dynamic macro name
|
Update to d1e0db4a: Throw PHPTal_TemplateException w/ line # & file
Now throws PHPTal_TemplateException for error thrown when simultaneously
using metal:define-macro and metal:use-macro for the same macro.
(Passes line # and file of error location so the exception will show these)<I>
|
phptal_PHPTAL
|
train
|
332a918eeac63c2832fdef1a3486818894b9a48a
|
diff --git a/src/Rocketeer/Traits/Task.php b/src/Rocketeer/Traits/Task.php
index <HASH>..<HASH> 100644
--- a/src/Rocketeer/Traits/Task.php
+++ b/src/Rocketeer/Traits/Task.php
@@ -209,27 +209,34 @@ abstract class Task extends Bash
*/
public function setPermissions($folder)
{
+ $commands = array();
+
// Get path to folder
$folder = $this->releasesManager->getCurrentReleasePath($folder);
$this->command->comment('Setting permissions for '.$folder);
// Get permissions options
$options = $this->rocketeer->getOption('remote.permissions');
- $chmod = array_get($options, 'permissions', 775);
+ $chmod = array_get($options, 'permissions');
$user = array_get($options, 'apache.user');
$group = array_get($options, 'apache.group');
// Add chmod
- $commands = array(
- sprintf('chmod -R %s %s', $chmod, $folder),
- sprintf('chmod -R g+s %s', $folder),
- );
+ if ($chmod) {
+ $commands[] = sprintf('chmod -R %s %s', $chmod, $folder);
+ $commands[] = sprintf('chmod -R g+s %s', $folder);
+ }
// And chown
if ($user and $group) {
$commands[] = sprintf('chown -R %s:%s %s', $user, $group, $folder);
}
+ // Cancel if setting of permissions is not configured
+ if (empty($commands)) {
+ return true;
+ }
+
return $this->runForCurrentRelease($commands);
}
diff --git a/src/config/config.php b/src/config/config.php
index <HASH>..<HASH> 100644
--- a/src/config/config.php
+++ b/src/config/config.php
@@ -75,6 +75,7 @@
'permissions' => array(
// The permissions to CHMOD folders to
+ // Change to null to leave the folders untouched
'permissions' => 755,
// The folders and files to set as web writable
|
Don't run chmod if not permissions are set
|
rocketeers_rocketeer
|
train
|
f2ec5f9a6b5f9b243e390e9a049c454cc342e03f
|
diff --git a/src/scs_core/sys/node.py b/src/scs_core/sys/node.py
index <HASH>..<HASH> 100644
--- a/src/scs_core/sys/node.py
+++ b/src/scs_core/sys/node.py
@@ -53,6 +53,13 @@ class Node(ABC):
# ----------------------------------------------------------------------------------------------------------------
@abstractmethod
+ def time_is_synchronized(self):
+ pass
+
+
+ # ----------------------------------------------------------------------------------------------------------------
+
+ @abstractmethod
def home_dir(self):
pass
|
Added time sync dependence to SystemDisplay
|
south-coast-science_scs_core
|
train
|
2c6079634c32b96a1f58f04c9c0aea75e30323bb
|
diff --git a/tests/builder_test.py b/tests/builder_test.py
index <HASH>..<HASH> 100644
--- a/tests/builder_test.py
+++ b/tests/builder_test.py
@@ -432,39 +432,39 @@ class ToUfosTest(unittest.TestCase):
self.assertEqual(postscriptNames, {'C-fraktur': 'uni212D'})
def test_category(self):
- data = generate_minimal_font()
- add_glyph(data, 'foo')['category'] = 'Mark'
- add_glyph(data, 'bar')
- ufo = to_ufos(data)[0]
+ font = generate_minimal_font()
+ add_glyph(font, 'foo')['category'] = 'Mark'
+ add_glyph(font, 'bar')
+ ufo = to_ufos(font)[0]
category_key = GLYPHLIB_PREFIX + 'category'
self.assertEqual(ufo['foo'].lib.get(category_key), 'Mark')
self.assertFalse(category_key in ufo['bar'].lib)
def test_subCategory(self):
- data = generate_minimal_font()
- add_glyph(data, 'foo')['subCategory'] = 'Nonspacing'
- add_glyph(data, 'bar')
- ufo = to_ufos(data)[0]
+ font = generate_minimal_font()
+ add_glyph(font, 'foo')['subCategory'] = 'Nonspacing'
+ add_glyph(font, 'bar')
+ ufo = to_ufos(font)[0]
subCategory_key = GLYPHLIB_PREFIX + 'subCategory'
self.assertEqual(ufo['foo'].lib.get(subCategory_key), 'Nonspacing')
self.assertFalse(subCategory_key in ufo['bar'].lib)
def test_mark_nonspacing_zero_width(self):
- data = generate_minimal_font()
+ font = generate_minimal_font()
- add_glyph(data, 'dieresiscomb').layers[0].width = 100
+ add_glyph(font, 'dieresiscomb').layers[0].width = 100
- foo = add_glyph(data, 'foo')
+ foo = add_glyph(font, 'foo')
foo.category = 'Mark'
foo.subCategory = 'Nonspacing'
foo.layers[0].width = 200
- bar = add_glyph(data, 'bar')
+ bar = add_glyph(font, 'bar')
bar.category = 'Mark'
bar.subCategory = 'Nonspacing'
bar.layers[0].width = 0
- ufo = to_ufos(data)[0]
+ ufo = to_ufos(font)[0]
originalWidth_key = GLYPHLIB_PREFIX + 'originalWidth'
self.assertEqual(ufo['dieresiscomb'].width, 0)
@@ -589,14 +589,14 @@ class ToUfosTest(unittest.TestCase):
to_ufos(font)[0].features.text)
def test_GDEF_custom_category_subCategory(self):
- data = generate_minimal_font()
- add_glyph(data, 'foo')['subCategory'] = 'Ligature'
- add_anchor(data, 'foo', 'top', 400, 1000)
- bar = add_glyph(data, 'bar')
+ font = generate_minimal_font()
+ add_glyph(font, 'foo')['subCategory'] = 'Ligature'
+ add_anchor(font, 'foo', 'top', 400, 1000)
+ bar = add_glyph(font, 'bar')
bar['category'], bar['subCategory'] = 'Mark', 'Nonspacing'
- baz = add_glyph(data, 'baz')
+ baz = add_glyph(font, 'baz')
baz['category'], baz['subCategory'] = 'Mark', 'Spacing Combining'
- features = to_ufos(data)[0].features.text
+ features = to_ufos(font)[0].features.text
self.assertIn('[foo], # Liga', features)
self.assertIn('[bar baz], # Mark', features)
|
builder_test.py: rename data to font
|
googlefonts_glyphsLib
|
train
|
27ce4c129fff1e3aa78ce7b650f98af61316e7de
|
diff --git a/fudge/inspector.py b/fudge/inspector.py
index <HASH>..<HASH> 100644
--- a/fudge/inspector.py
+++ b/fudge/inspector.py
@@ -1,5 +1,5 @@
-"""Value inspector that can be passed to :func:`fudge.Fake.with_args` for more
+"""Value inspectors that can be passed to :func:`fudge.Fake.with_args` for more
expressive argument matching.
As a mnemonic device,
@@ -219,6 +219,28 @@ class ValueInspector(object):
:hide:
>>> fudge.clear_expectations()
+
+ If it makes more sense to perform assertions in your test function then
+ be sure to return True :
+
+ >>> def is_valid(s):
+ ... assert s in ('active','deleted'), (
+ ... "Unexpected status value: %s" % s)
+ ... return True
+ ...
+ >>> import fudge
+ >>> from fudge.inspector import arg
+ >>> system = fudge.Fake("system")
+ >>> system = system.expects("set_status").with_args(arg.passes_test(is_valid))
+ >>> system.set_status("sleep")
+ Traceback (most recent call last):
+ ...
+ AssertionError: Unexpected status value: sleep
+
+ .. doctest::
+ :hide:
+
+ >>> fudge.clear_expectations()
"""
return PassesTest(test)
|
Added caveat example to making assertions in passes_test(test)
|
fudge-py_fudge
|
train
|
8aba68d45efc63bbb2f7830b38f24e4974ebd5d3
|
diff --git a/applications/default/extensions/field/field.js b/applications/default/extensions/field/field.js
index <HASH>..<HASH> 100644
--- a/applications/default/extensions/field/field.js
+++ b/applications/default/extensions/field/field.js
@@ -6,27 +6,6 @@ var utils = require('prana').utils;
var field = module.exports = {};
/**
- * The type() hook.
- */
-field.type = function(types, callback) {
- var newTypes = {};
-
- newTypes['field'] = {
- title: 'Field',
- description: 'Fields add schema to types and provide validation and output sanitizing.',
- access: {
- 'list': true,
- 'load': true,
- 'add': false,
- 'edit': false,
- 'delete': false
- },
- };
-
- callback(null, newTypes);
-};
-
-/**
* The field() hook.
*/
field.field = function(fields, callback) {
@@ -118,8 +97,7 @@ field.fieldCallback = function(hook) {
// Add fieldName to fieldSettings.
fieldSettings.name = fieldName;
- var Field = application.type('field');
- Field.load(fieldSettings.type, function(error, field) {
+ self.application.pick('field', fieldSettings.type, function(error, field) {
if (error) {
// Application error.
return next(error);
diff --git a/lib/model-patch.js b/lib/model-patch.js
index <HASH>..<HASH> 100644
--- a/lib/model-patch.js
+++ b/lib/model-patch.js
@@ -99,8 +99,7 @@ var modelPatch = module.exports = {
return next();
}
- var Field = self.application.type('field');
- Field.load(fieldSettings.type, function(error, field) {
+ self.application.pick('field', fieldSettings.type, function(error, field) {
if (error) {
// Application error.
return next(error);
diff --git a/package.json b/package.json
index <HASH>..<HASH> 100644
--- a/package.json
+++ b/package.json
@@ -9,8 +9,8 @@
],
"repository": "git://github.com/recidive/choko.git",
"dependencies": {
- "prana": "recidive/prana#no-orm",
"waterline": "0.10.x",
+ "prana": "recidive/prana#cache",
"waterline-prana": "recidive/waterline-prana#master",
"sails-mongo": "0.10.x",
"express": "4.8.x",
|
Making "field" not a type anymore, just a hook. Changing Prana branch to the one with the cache system. #<I>
|
recidive_choko
|
train
|
01f9e6dfdaaabf1a2feb85d96a2a1e6bc6cad824
|
diff --git a/test/fixtures/codemod/latest-node/my-app/app/controllers/application.js b/test/fixtures/codemod/latest-node/my-app/app/controllers/application.js
index <HASH>..<HASH> 100644
--- a/test/fixtures/codemod/latest-node/my-app/app/controllers/application.js
+++ b/test/fixtures/codemod/latest-node/my-app/app/controllers/application.js
@@ -1,3 +1,4 @@
+import { on } from '@ember/object/evented';
import Controller from '@ember/controller';
import Ember from 'ember';
import { assign } from '@ember/polyfills';
@@ -16,6 +17,10 @@ export default Controller.extend({
return friend[this.nameKey];
}),
+ logCompleted: on('completed', function() {
+ console.log('Job completed!');
+ }),
+
actions: {
foo(object) {
this.doStuff(object);
diff --git a/test/fixtures/codemod/local/my-app/app/controllers/application.js b/test/fixtures/codemod/local/my-app/app/controllers/application.js
index <HASH>..<HASH> 100644
--- a/test/fixtures/codemod/local/my-app/app/controllers/application.js
+++ b/test/fixtures/codemod/local/my-app/app/controllers/application.js
@@ -15,6 +15,10 @@ export default Ember.Controller.extend({
return friend[this.nameKey];
}).property('nameKey'),
+ logCompleted: function() {
+ console.log('Job completed!');
+ }.on('completed'),
+
actions: {
foo(object) {
Ember.propertyWillChange(object, 'someProperty');
diff --git a/test/fixtures/codemod/min-node/my-app/app/controllers/application.js b/test/fixtures/codemod/min-node/my-app/app/controllers/application.js
index <HASH>..<HASH> 100644
--- a/test/fixtures/codemod/min-node/my-app/app/controllers/application.js
+++ b/test/fixtures/codemod/min-node/my-app/app/controllers/application.js
@@ -1,3 +1,4 @@
+import { on } from '@ember/object/evented';
import Controller from '@ember/controller';
import Ember from 'ember';
import { assign } from '@ember/polyfills';
@@ -16,6 +17,10 @@ export default Controller.extend({
return friend[this.nameKey];
}),
+ logCompleted: on('completed', function() {
+ console.log('Job completed!');
+ }),
+
actions: {
foo(object) {
this.doStuff(object);
|
[FEAT] Add fpe-on codemod and fixtures (#<I>)
[FEAT] Add fpe-on codemod and fixtures
|
ember-cli_ember-cli-update
|
train
|
0272857ec8679429115e986bca6ef981789e4f88
|
diff --git a/usage.go b/usage.go
index <HASH>..<HASH> 100644
--- a/usage.go
+++ b/usage.go
@@ -117,7 +117,11 @@ func (f *flagGroup) writeHelp(width int, w io.Writer) {
}
func (f *flagGroup) gatherFlagSummary() (out []string) {
+ count := 0
for _, flag := range f.flagOrder {
+ if flag.name != "help" {
+ count++
+ }
if flag.required {
fb, ok := flag.value.(boolFlag)
if ok && fb.IsBoolFlag() {
@@ -127,7 +131,7 @@ func (f *flagGroup) gatherFlagSummary() (out []string) {
}
}
}
- if len(f.long) != len(out) {
+ if count != len(out) {
out = append(out, "[<flags>]")
}
return
|
Don't show [<flags>] on commands when there are none.
|
alecthomas_kingpin
|
train
|
f7f083520689954e2302273c008ae2e6fa4a5319
|
diff --git a/zeno/test/test_client.py b/zeno/test/test_client.py
index <HASH>..<HASH> 100644
--- a/zeno/test/test_client.py
+++ b/zeno/test/test_client.py
@@ -20,7 +20,8 @@ F = getMaxFailures(nodeCount)
whitelist = ['signer not configured so not signing',
'for EmptySignature',
- 'discarding message'] # warnings
+ 'discarding message',
+ 'found legacy entry'] # warnings
# noinspection PyIncorrectDocstring
|
Fixing an intermittently failing test by whitelisting a log statement
|
hyperledger_indy-plenum
|
train
|
c2c23ac0bdd41359a2a0a0f055d8eea7f2320f8d
|
diff --git a/cobra_test.go b/cobra_test.go
index <HASH>..<HASH> 100644
--- a/cobra_test.go
+++ b/cobra_test.go
@@ -208,6 +208,34 @@ func TestCommandAlias(t *testing.T) {
}
}
+func TestPrefixMatching(t *testing.T) {
+ noRRSetupTest("ech times one two")
+
+ if te != nil || tp != nil {
+ t.Error("Wrong command called")
+ }
+ if tt == nil {
+ t.Error("Wrong command called")
+ }
+ if strings.Join(tt, " ") != "one two" {
+ t.Error("Command didn't parse correctly")
+ }
+}
+
+func TestAliasPrefixMatching(t *testing.T) {
+ noRRSetupTest("sa times one two")
+
+ if te != nil || tp != nil {
+ t.Error("Wrong command called")
+ }
+ if tt == nil {
+ t.Error("Wrong command called")
+ }
+ if strings.Join(tt, " ") != "one two" {
+ t.Error("Command didn't parse correctly")
+ }
+}
+
func TestChildSameName(t *testing.T) {
c := initializeWithSameName()
c.AddCommand(cmdPrint, cmdEcho)
diff --git a/command.go b/command.go
index <HASH>..<HASH> 100644
--- a/command.go
+++ b/command.go
@@ -286,11 +286,26 @@ func (c *Command) Find(arrs []string) (*Command, []string, error) {
if len(args) > 0 && c.HasSubCommands() {
argsWOflags := stripFlags(args)
if len(argsWOflags) > 0 {
+ matches := make([]*Command, 0)
for _, cmd := range c.commands {
if cmd.Name() == argsWOflags[0] || cmd.HasAlias(argsWOflags[0]) { // exact name or alias match
return innerfind(cmd, argsMinusX(args, argsWOflags[0]))
+ } else {
+ if strings.HasPrefix(cmd.Name(), argsWOflags[0]) { // prefix match
+ matches = append(matches, cmd)
+ }
+ for _, x := range cmd.Aliases {
+ if strings.HasPrefix(x, argsWOflags[0]) {
+ matches = append(matches, cmd)
+ }
+ }
}
}
+
+ // only accept a single prefix match - multiple matches would be ambiguous
+ if len(matches) == 1 {
+ return innerfind(matches[0], argsMinusX(args, argsWOflags[0]))
+ }
}
}
|
adding support for prefix matching against aliases & names
|
spf13_cobra
|
train
|
38829c25f14af84f60e466ccc4b1a7374e9455a3
|
diff --git a/ui/src/router/beforeEach.js b/ui/src/router/beforeEach.js
index <HASH>..<HASH> 100644
--- a/ui/src/router/beforeEach.js
+++ b/ui/src/router/beforeEach.js
@@ -1,5 +1,3 @@
-/* globals pangolinBase */
-
import api from '../api'
import store from '../store'
@@ -29,8 +27,8 @@ export default async function (to, from, next) {
return
}
- // Remove base to get "pure" component path.
- const path = to.path.replace(pangolinBase, '')
+ // Remove leading `/` to get "pure" component path.
+ const path = to.path.slice(1)
// Get current page by path and save to store.
store.commit('current', store.getters.componentByPath(path))
|
fix(UI): Component render isn't resolved correctly
|
pangolinjs_core
|
train
|
223c9e07d080e4a819caa173c1bb319006e2926d
|
diff --git a/nad_receiver/__init__.py b/nad_receiver/__init__.py
index <HASH>..<HASH> 100644
--- a/nad_receiver/__init__.py
+++ b/nad_receiver/__init__.py
@@ -6,7 +6,6 @@ Functions can be found on the NAD website: http://nadelectronics.com/software
"""
from nad_receiver.nad_commands import CMDS
-from time import sleep
import serial # pylint: disable=import-error
import codecs
import socket
@@ -155,22 +154,28 @@ class D7050(object):
"""Send a command string to the amplifier."""
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect((self._host, self.PORT))
- message = codecs.decode(message, 'hex_codec')
- sock.send(message)
+ sock.send(codecs.decode(message, 'hex_codec'))
if read_reply:
reply = ''
- while len(reply) < len(message):
- reply = sock.recv(self.BUFFERSIZE)
+ tries = 0
+ max_tries = 20
+ while len(reply) < len(message) and tries < max_tries:
+ reply += codecs.encode(sock.recv(self.BUFFERSIZE), 'hex')\
+ .decode("utf-8")
+ tries += 1
sock.close()
- return reply
+ if tries >= max_tries:
+ return
+ else:
+ return reply
sock.close()
def status(self):
"""
Return the status of the device.
-
+
Returns a dictionary with keys 'volume' (int 0-200) , 'power' (bool),
- 'muted' (bool) and 'source' (str).
+ 'muted' (bool) and 'source' (str).
"""
nad_reply = self._send(self.POLL_VOLUME +
self.POLL_POWER +
@@ -178,7 +183,6 @@ class D7050(object):
self.POLL_SOURCE, read_reply=True)
if nad_reply is None:
return
- nad_reply = codecs.encode(nad_reply, 'hex').decode("utf-8")
# split reply into parts of 10 characters
num_chars = 10
|
various changes
Added timeout to sock.recv() call
send now returns the same string format as it accepts as input
removed unused import
|
joopert_nad_receiver
|
train
|
6ff3bf8541456cdac6abbe8edaf25a23fb46e219
|
diff --git a/modules/orionode/test/endpoints/test-site.js b/modules/orionode/test/endpoints/test-site.js
index <HASH>..<HASH> 100644
--- a/modules/orionode/test/endpoints/test-site.js
+++ b/modules/orionode/test/endpoints/test-site.js
@@ -16,6 +16,7 @@ var assert = require('assert'),
supertest = require('supertest'),
store = require('../../lib/metastore/fs/store'),
testData = require('../support/test_data'),
+ testHelper = require('../support/testHelper'),
sites = require('../../lib/sites');
var CONTEXT_PATH = '',
diff --git a/modules/orionode/test/metastore/test-simple.js b/modules/orionode/test/metastore/test-simple.js
index <HASH>..<HASH> 100644
--- a/modules/orionode/test/metastore/test-simple.js
+++ b/modules/orionode/test/metastore/test-simple.js
@@ -307,8 +307,9 @@ describe("Orion metastore", function() {
testHelper.withWorkspace(request, PREFIX, WORKSPACE_ID)
.end(function(err, res) {
testHelper.throwIfError(err);
+ var ws = res.body.Location;
request()
- .post(res.body.Location)
+ .post(ws)
.type('json')
.send({Name: 'testMoveProjectWith|InProjectName'})
.expect(201)
@@ -316,7 +317,7 @@ describe("Orion metastore", function() {
testHelper.throwIfError(err);
var pLoc = res.body.Location;
request()
- .post(ws.Location)
+ .post(ws)
.type('json')
.set('X-Create-Options', "move")
.set('Slug', 'testMoveProjectWith|InProjectNameMOVED')
@@ -336,8 +337,9 @@ describe("Orion metastore", function() {
testHelper.withWorkspace(request, PREFIX, WORKSPACE_ID)
.end(function(err, res) {
testHelper.throwIfError(err);
+ var ws = res.body.Location;
request()
- .post(res.body.Location)
+ .post(ws)
.type('json')
.send({Name: 'testMoveSimpleProject'})
.expect(201)
@@ -345,7 +347,7 @@ describe("Orion metastore", function() {
testHelper.throwIfError(err);
var pLoc = res.body.Location;
request()
- .post(ws.Location)
+ .post(ws)
.type('json')
.set('X-Create-Options', "move")
.set('Slug', 'testMoveSimpleProjectMOVED')
|
testHelper is missing and some variable is missing in Mocha Test
|
eclipse_orion.client
|
train
|
1b2bcaa07ea463cc77d0d1f1c83588c43ecb2864
|
diff --git a/tool/builder3d/src/main/java/org/openscience/cdk/modeling/builder3d/AtomPlacer3D.java b/tool/builder3d/src/main/java/org/openscience/cdk/modeling/builder3d/AtomPlacer3D.java
index <HASH>..<HASH> 100644
--- a/tool/builder3d/src/main/java/org/openscience/cdk/modeling/builder3d/AtomPlacer3D.java
+++ b/tool/builder3d/src/main/java/org/openscience/cdk/modeling/builder3d/AtomPlacer3D.java
@@ -567,7 +567,7 @@ public class AtomPlacer3D {
public int numberOfUnplacedHeavyAtoms(IAtomContainer ac) {
int nUnplacedHeavyAtoms=0;
for (int i = 0; i < ac.getAtomCount(); i++) {
- if (!ac.getAtom(i).getFlag(CDKConstants.ISPLACED) && !ac.getAtom(i).equals("H")) {
+ if (!ac.getAtom(i).getFlag(CDKConstants.ISPLACED) && !ac.getAtom(i).getSymbol().equals("H")) {
nUnplacedHeavyAtoms+=1;
}
}
|
Fixed bug where AtomPlacer3D().numberOfUnplacedHeavyAtoms() counts explicit hydrogens as heavy atoms.
|
cdk_cdk
|
train
|
1cde326d53b5a90ae50f006ef1b23c7422ea29e2
|
diff --git a/dynamic_scraper/spiders/django_base_spider.py b/dynamic_scraper/spiders/django_base_spider.py
index <HASH>..<HASH> 100644
--- a/dynamic_scraper/spiders/django_base_spider.py
+++ b/dynamic_scraper/spiders/django_base_spider.py
@@ -1,5 +1,6 @@
import datetime, os
from scrapy import log, signals
+from scrapy.http import Request
from scrapy.spider import Spider
from scrapy.xlib.pydispatch import dispatcher
from scrapy.exceptions import CloseSpider
@@ -42,6 +43,17 @@ class DjangoBaseSpider(Spider):
self._check_mandatory_vars()
+ def start_requests(self):
+ for url in self.start_urls:
+ meta = {}
+ if self.scraper.render_javascript:
+ meta['splash'] = {
+ 'endpoint': 'render.html',
+ 'args': {'wait': 0.5}
+ }
+ yield Request(url, self.parse, meta=meta)
+
+
def _set_ref_object(self, ref_object_class, **kwargs):
if not 'id' in kwargs:
msg = "You have to provide an ID (Command: %s)." % self.command
diff --git a/dynamic_scraper/spiders/django_spider.py b/dynamic_scraper/spiders/django_spider.py
index <HASH>..<HASH> 100644
--- a/dynamic_scraper/spiders/django_spider.py
+++ b/dynamic_scraper/spiders/django_spider.py
@@ -34,16 +34,6 @@ class DjangoSpider(DjangoBaseSpider):
self.log(msg, log.INFO)
- def start_requests(self):
- for url in self.start_urls:
- yield Request(url, self.parse, meta={
- 'splash': {
- 'endpoint': 'render.html',
- 'args': {'wait': 0.5}
- }
- })
-
-
def _set_config(self, **kwargs):
log_msg = ""
#max_items_read
@@ -244,11 +234,12 @@ class DjangoSpider(DjangoBaseSpider):
# generally no attributes scraped from detail page
meta = {}
meta['item'] = item
-
- meta['splash'] = {
- 'endpoint': 'render.html',
- 'args': { 'wait': 0.5 }
- }
+
+ if self.scraper.render_javascript:
+ meta['splash'] = {
+ 'endpoint': 'render.html',
+ 'args': { 'wait': 0.5 }
+ }
if (cnt > 0 and cnt1 == 0) or cnt2 == 0:
yield item
else:
diff --git a/tests/run_js_tests.sh b/tests/run_js_tests.sh
index <HASH>..<HASH> 100755
--- a/tests/run_js_tests.sh
+++ b/tests/run_js_tests.sh
@@ -2,11 +2,11 @@
suite='scraper.scraper_js_run_test.ScraperJSRunTest'
tests="
+test_default_no_scrapyjs_main_page
+test_default_no_scrapyjs_detail_page
test_activated_scrapyjs_main_page
test_activated_scrapyjs_detail_page
"
-#test_default_no_scrapyjs_main_page
-#test_default_no_scrapyjs_detail_page
for test in `echo $tests`
do
diff --git a/tests/scraper/scraper_js_run_test.py b/tests/scraper/scraper_js_run_test.py
index <HASH>..<HASH> 100644
--- a/tests/scraper/scraper_js_run_test.py
+++ b/tests/scraper/scraper_js_run_test.py
@@ -14,6 +14,8 @@ class ScraperJSRunTest(ScraperTest):
def setUpScraperJSDockerScraper(self):
self.event_website.url = os.path.join('http://10.0.2.2:8010/static/', 'site_with_js/event_main_docker.html')
+ self.scraper.render_javascript = True
+ self.scraper.save()
self.event_website.save()
|
First proper integration of javascript rendering switch into spider processing, adopted unit tests
|
holgerd77_django-dynamic-scraper
|
train
|
8feb67e630254a8f487beb1862991b00c07dfc39
|
diff --git a/db.go b/db.go
index <HASH>..<HASH> 100644
--- a/db.go
+++ b/db.go
@@ -92,8 +92,8 @@ type Appender interface {
// If the reference is 0 it must not be used for caching.
Add(l labels.Labels, t int64, v float64) (uint64, error)
- // Add adds a sample pair for the referenced series. It is generally faster
- // than adding a sample by providing its full label set.
+ // AddFast adds a sample pair for the referenced series. It is generally
+ // faster than adding a sample by providing its full label set.
AddFast(ref uint64, t int64, v float64) error
// Commit submits the collected samples and purges the batch.
|
Update Appender.AddFast method comment (#<I>)
|
prometheus_prometheus
|
train
|
c3fc52e70f02d9e5132e0099cfa041a1d15f67b5
|
diff --git a/extension/ezoe/ezxmltext/handlers/input/ezoexmlinput.php b/extension/ezoe/ezxmltext/handlers/input/ezoexmlinput.php
index <HASH>..<HASH> 100644
--- a/extension/ezoe/ezxmltext/handlers/input/ezoexmlinput.php
+++ b/extension/ezoe/ezxmltext/handlers/input/ezoexmlinput.php
@@ -1247,7 +1247,7 @@ class eZOEXMLInput extends eZXMLInputHandler
$objectAttr .= ' class="' . $className . '"';
$output .= '<img id="' . $idString . '" title="' . $objectName . '" src="' .
- $srcString . '" width="' . $imageWidth . '" height="' . $imageHeight .
+ htmlspecialchars( $srcString ) . '" width="' . $imageWidth . '" height="' . $imageHeight .
'" ' . $objectAttr . $customAttributePart . $styleString . ' />';
}
else if ( self::embedTagIsCompatibilityMode() )
|
Fix EZP-<I>: Image not displayed in editor if alias contains quotes
|
ezsystems_ezpublish-legacy
|
train
|
d328fe4ea5006866a182f720e5f8de364b4b0269
|
diff --git a/lib/renderer.js b/lib/renderer.js
index <HASH>..<HASH> 100644
--- a/lib/renderer.js
+++ b/lib/renderer.js
@@ -115,29 +115,22 @@ marked.setOptions({
});
module.exports = function(data, options) {
- const siteCfg = Object.assign({}, {
- config: {
- url: this.config.url,
- root: this.config.root,
- relative_link: this.config.relative_link,
- post_asset_folder: this.config.post_asset_folder
- }
- });
+ hexo = this;
// exec filter to extend renderer.
const renderer = new Renderer();
this.execFilterSync('marked:renderer', renderer, {context: this});
- let path = '';
+ let postId = '';
if (data.path && this.config.post_asset_folder && this.config.marked.prependRoot && this.config.marked.postAsset) {
const Post = this.model('Post');
// Windows compatibility, Post.findOne() requires forward slash
const source = data.path.replace(this.source_dir, '').replace('\\', '/');
const post = Post.findOne({ source });
- path = post ? post.path : '';
+ postId = post ? post._id : '';
}
return marked(data.text, Object.assign({
renderer
- }, this.config.marked, options, siteCfg, { path }));
+ }, this.config.marked, options, { postId }));
};
|
fix(postAsset): utilise PostAsset model to target existed post's assets
|
hexojs_hexo-renderer-marked
|
train
|
bf35a4a594c0323699e863f0782c32eab5fd7102
|
diff --git a/tests/lib/rules/complexity.js b/tests/lib/rules/complexity.js
index <HASH>..<HASH> 100644
--- a/tests/lib/rules/complexity.js
+++ b/tests/lib/rules/complexity.js
@@ -129,6 +129,17 @@ vows.describe(RULE_ID).addBatch({
"should report a complexity of 2": getComplexityAssertion(2)
},
+ "When evaluating an if statement": {
+ topic: "if (foo) { bar(); }",
+ "should not report a violation": function (topic) {
+ var config = { rules: {} };
+ config.rules[RULE_ID] = [1, 3];
+
+ var messages = eslint.verify(topic, config);
+ assert.equal(messages.length, 0);
+ }
+ },
+
"When evaluating a simple function with 2 complex inner functions": {
topic: "function a(x) {(function() {while(true){'foo';}})(); (function() {while(true){'bar';}})();}",
"should report 2 violations for the inner functions": function(topic) {
|
Increased test coverage for `complexity` rule.
Created a test case that increased the `complexity` test coverage to <I>%.
|
eslint_eslint
|
train
|
68003f331113bfdf02e9437da3bab942ebec3f3a
|
diff --git a/lib/MumbleSocket.js b/lib/MumbleSocket.js
index <HASH>..<HASH> 100644
--- a/lib/MumbleSocket.js
+++ b/lib/MumbleSocket.js
@@ -1,8 +1,6 @@
"use strict";
-var fs = require('fs');
-
var MumbleSocket = function (socket) {
var self = this;
this.buffers = [];
@@ -17,7 +15,6 @@ var MumbleSocket = function (socket) {
MumbleSocket.prototype.receiveData = function (data) {
- fs.writeFileSync('buffer', data);
this.buffers.push(data);
this.length += data.length;
this._checkReader();
|
Removed an old debug file write
|
Rantanen_node-mumble
|
train
|
b9726cba3a7a3cb2737a6a8ae327c3b8f8a81fd7
|
diff --git a/mqlight.js b/mqlight.js
index <HASH>..<HASH> 100644
--- a/mqlight.js
+++ b/mqlight.js
@@ -152,8 +152,8 @@ var STATE_STOPPING = 'stopping';
* Creates and returns a new instance of the Client object.
* <p>
* See README.md for more details.
- *
- * @param {Object} options - (optional) properties that define the
+ *
+ * @param {Object} options - (optional) properties that define the
* characteristics of the client.
* @param {Function} callback - (optional) callback, invoked when the client has
* attained 'started' or 'stopped' state.
@@ -423,9 +423,9 @@ var getHttpServiceFunction = function(serviceUrl) {
* service - Required; One or more URLs representing the TCP/IP
* endpoints to which the client will attempt to connect, in turn.
* When a function is specified, it is invoked each time a endpoint
- * is required and is supplied a callback, in the form
+ * is required and is supplied a callback, in the form
* function(err, service), as its only argument. The function should
- * invoked the callback supplying a URL String (or an Array of URL
+ * invoked the callback supplying a URL String (or an Array of URL
* strings) as the second argument.
* @param {String}
* id - Optional; an identifier that is used to identify this client.
@@ -1289,9 +1289,9 @@ if (process.env.NODE_ENV === 'unittest') {
/**
-* Called on reconnect or first connect to process any actions that may have
+* Called on reconnect or first connect to process any actions that may have
* been queued.
-*
+*
* @this should be set to the client object that has connected or reconnected
* @param {Error} err if an error occurred in the performConnect function that
* calls this callback.
@@ -1386,7 +1386,7 @@ Object.defineProperty(Client, 'service', {
/**
* @return {String} The current state of the client - can will be one of the
- * following string values: 'started', 'starting', 'stopped', 'stopping', or
+ * following string values: 'started', 'starting', 'stopped', 'stopping', or
* 'retrying'.
*/
Object.defineProperty(Client, 'state', {
|
Now with <I>% greater conformance to Google JS style guide
|
mqlight_nodejs-mqlight
|
train
|
9d86f6ce6efbe9b8b03c0992945bc2d056e6678e
|
diff --git a/railties/test/application/configuration_test.rb b/railties/test/application/configuration_test.rb
index <HASH>..<HASH> 100644
--- a/railties/test/application/configuration_test.rb
+++ b/railties/test/application/configuration_test.rb
@@ -274,19 +274,13 @@ module ApplicationTests
app.config.session_store :disabled
end
- class ::OmgController < ActionController::Base
- def index
- render text: Rails.application.message_verifier.generate("some_value")
- end
- end
+ message = app.message_verifier.generate("some_value")
- get "/"
-
- assert_equal 'some_value', Rails.application.message_verifier.verify(last_response.body)
+ assert_equal 'some_value', Rails.application.message_verifier.verify(message)
secret = app.key_generator.generate_key('default')
verifier = ActiveSupport::MessageVerifier.new(secret)
- assert_equal 'some_value', verifier.verify(last_response.body)
+ assert_equal 'some_value', verifier.verify(message)
end
test "application verifier can build different verifiers" do
|
Don't need to use a controller to test the verifier
|
rails_rails
|
train
|
9348ed81708a88f6d1c16868d8e3fc07c4eedd46
|
diff --git a/salt/modules/virt.py b/salt/modules/virt.py
index <HASH>..<HASH> 100644
--- a/salt/modules/virt.py
+++ b/salt/modules/virt.py
@@ -1860,3 +1860,5 @@ def snapshot(vm, name=None):
n_name.text = name
_get_domain(vm).snapshotCreateXML(ElementTree.tostring(doc))
+
+ return {'name': name}
|
Add name of the created snapshot if succeed
|
saltstack_salt
|
train
|
76b9b16369faa377ce31b4aed268066b7cfa2905
|
diff --git a/src/Connection_Oracle.php b/src/Connection_Oracle.php
index <HASH>..<HASH> 100644
--- a/src/Connection_Oracle.php
+++ b/src/Connection_Oracle.php
@@ -25,9 +25,10 @@ class Connection_Oracle extends Connection
parent::__construct($properties);
// date and datetime format should be like this for Agile Data to correctly pick it up and typecast
- $this->expr('ALTER SESSION SET NLS_DATE_FORMAT={format} NLS_NUMERIC_CHARACTERS={dec_char}', [
- 'format' => 'YYYY-MM-DD HH24:MI:SS', // datetime format
- 'dec_char' => '. ', // decimal separator, no thousands separator
+ $this->expr('ALTER SESSION SET NLS_TIMESTAMP_FORMAT={datetime_format} NLS_DATE_FORMAT={date_format} NLS_NUMERIC_CHARACTERS={dec_char}', [
+ 'datetime_format' => 'YYYY-MM-DD HH24:MI:SS', // datetime format
+ 'date_format' => 'YYYY-MM-DD', // date format
+ 'dec_char' => '. ', // decimal separator, no thousands separator
])->execute();
}
@@ -49,8 +50,8 @@ class Connection_Oracle extends Connection
}
// otherwise we have to select max(id_field) - this can be bad for performance !!!
- // Imants: Disabled for now because otherwise this will work even if database use triggers or any other mechanism
- // to automatically increment ID and we can't tell this line to not execute.
+ // Imants: Disabled for now because otherwise this will work even if database use triggers or
+ // any other mechanism to automatically increment ID and we can't tell this line to not execute.
//return $this->expr('SELECT max([field]) FROM [table]', ['field'=>$m->id_field, 'table'=>$m->table])->getOne();
}
|
fix Oracle session timestamp and date format
|
atk4_dsql
|
train
|
aa9045cdf512a07b4ed634025c274d2c26db89d3
|
diff --git a/src/Plugin/InvokeStrategy/HandleCommandStrategy.php b/src/Plugin/InvokeStrategy/HandleCommandStrategy.php
index <HASH>..<HASH> 100644
--- a/src/Plugin/InvokeStrategy/HandleCommandStrategy.php
+++ b/src/Plugin/InvokeStrategy/HandleCommandStrategy.php
@@ -21,7 +21,6 @@ use Prooph\Common\Messaging\HasMessageName;
*/
class HandleCommandStrategy extends AbstractInvokeStrategy
{
-
/**
* @param mixed $handler
* @param mixed $message
|
Fixed CS issue in `HandleCommandStrategy`.
|
prooph_service-bus
|
train
|
3d5058a81353f13f9d906cb54ef0bf0efff42c8c
|
diff --git a/lib/query-controller.js b/lib/query-controller.js
index <HASH>..<HASH> 100644
--- a/lib/query-controller.js
+++ b/lib/query-controller.js
@@ -66,7 +66,7 @@ class QueryController {
return next();
}
debug("executing current query 'post' hook: %s", currentQuery.name);
- return currentQuery.post.call(_this, next);
+ return currentQuery.post.call(_this, _this.getAnswer(currentQuery.name), next);
},
function(next) {
if (_this._retry) {
|
Pass the current answer as first arg in post hook
|
forfuturellc_mau
|
train
|
5ef2e81010d633fbc2814e7167f2a55bfb21d000
|
diff --git a/Kwf_js/EyeCandy/Lightbox/Lightbox.js b/Kwf_js/EyeCandy/Lightbox/Lightbox.js
index <HASH>..<HASH> 100644
--- a/Kwf_js/EyeCandy/Lightbox/Lightbox.js
+++ b/Kwf_js/EyeCandy/Lightbox/Lightbox.js
@@ -40,6 +40,14 @@ Kwf.onJElementReady('.kwfLightbox', function lightboxEl(el) {
Kwf.Utils.HistoryState.updateState();
l.lightboxEl = el;
l.innerLightboxEl = el.find('.kwfLightboxInner');
+ l.innerLightboxEl.getTransitionEndName = function() {
+ var transEndEventNames = {
+ 'WebkitTransition' : 'webkitTransitionEnd',
+ 'MozTransition' : 'transitionend',
+ 'transition' : 'transitionend'
+ };
+ return transEndEventNames[ Modernizr.prefixed('transition') ];
+ };
l.fetched = true;
l.initialize();
l.closeHref = window.location.href.substr(0, window.location.href.lastIndexOf('/'));
@@ -187,6 +195,14 @@ Kwf.EyeCandy.Lightbox.Lightbox.prototype = {
this.lightboxEl = lightbox;
this.innerLightboxEl = lightbox.find('.kwfLightboxInner');
var el = this.innerLightboxEl;
+ el.getTransitionEndName = function() {
+ var transEndEventNames = {
+ 'WebkitTransition' : 'webkitTransitionEnd',
+ 'MozTransition' : 'transitionend',
+ 'transition' : 'transitionend'
+ };
+ return transEndEventNames[ Modernizr.prefixed('transition') ];
+ };
var transformName = Modernizr.prefixed('transform');
@@ -312,12 +328,7 @@ Kwf.EyeCandy.Lightbox.Lightbox.prototype = {
this.fetchContent();
}
- var transEndEventNames = {
- 'WebkitTransition' : 'webkitTransitionEnd',
- 'MozTransition' : 'transitionend',
- 'transition' : 'transitionend'
- };
- var transEndEventName = transEndEventNames[ Modernizr.prefixed('transition') ];
+ var transEndEventName = this.innerLightboxEl.getTransitionEndName();
if (!this.lightboxEl.is(':visible')) {
this.lightboxEl.show();
this.lightboxEl.width(); //TODO layout trigger hack
@@ -480,12 +491,7 @@ Kwf.EyeCandy.Lightbox.Styles.Abstract.prototype = {
if (Kwf.EyeCandy.Lightbox.Styles.Abstract.masks > 0) return;
var lightboxMaskEl = $(document.body).find('.kwfLightboxMask');
$(document.body).removeClass('kwfLightboxTheaterMode');
- var transEndEventNames = {
- 'WebkitTransition' : 'webkitTransitionEnd',
- 'MozTransition' : 'transitionend',
- 'transition' : 'transitionend'
- };
- var transEndEventName = transEndEventNames[ Modernizr.prefixed('transition') ];
+ var transEndEventName = this.lightbox.innerLightboxEl.getTransitionEndName();
var transitionDurationName = Modernizr.prefixed('transitionDuration');
var duration = lightboxMaskEl.css(transitionDurationName);
lightboxMaskEl.removeClass('kwfLightboxMaskOpen');
@@ -665,12 +671,7 @@ Kwf.EyeCandy.Lightbox.Styles.CenterBox = Ext2.extend(Kwf.EyeCandy.Lightbox.Style
this._center();
},
onClose: function(options) {
- var transEndEventNames = {
- 'WebkitTransition' : 'webkitTransitionEnd',
- 'MozTransition' : 'transitionend',
- 'transition' : 'transitionend'
- };
- var transEndEventName = transEndEventNames[ Modernizr.prefixed('transition') ];
+ var transEndEventName = this.lightbox.innerLightboxEl.getTransitionEndName();
var transitionDurationName = Modernizr.prefixed('transitionDuration');
var duration = this.lightbox.innerLightboxEl.css(transitionDurationName);
if (parseFloat(duration)>0) {
|
Method for getting the TransitionEndName
|
koala-framework_koala-framework
|
train
|
e8a6f68bb7012bec15fe8c441f9384ee9f4916aa
|
diff --git a/lib/api/manipulation.js b/lib/api/manipulation.js
index <HASH>..<HASH> 100644
--- a/lib/api/manipulation.js
+++ b/lib/api/manipulation.js
@@ -11,9 +11,15 @@ var _ = require('underscore'),
parsing strings if necessary
*/
var makeCheerioArray = function(elems) {
- return _.reduce(elems, function(dom, elem) {
- return dom.concat(elem.cheerio ? elem.toArray() : evaluate(elem));
- }, []);
+ return _.chain(elems).map(function(elem) {
+ if (elem.cheerio) {
+ return elem.toArray();
+ } else if (!_.isArray(elem)) {
+ return evaluate(elem);
+ } else {
+ return elem;
+ }
+ }).flatten().value();
};
var _insert = function(concatenator) {
@@ -109,7 +115,7 @@ var remove = exports.remove = function(selector) {
};
var replaceWith = exports.replaceWith = function(content) {
- content = content.cheerio ? content.toArray() : evaluate(content);
+ content = makeCheerioArray([content]);
this.each(function(i, el) {
var siblings = el.parent.children,
diff --git a/lib/parse.js b/lib/parse.js
index <HASH>..<HASH> 100644
--- a/lib/parse.js
+++ b/lib/parse.js
@@ -29,9 +29,6 @@ exports = module.exports = function(content, options) {
var evaluate = exports.evaluate = function(content, options) {
// options = options || $.fn.options;
- if (_.isArray(content) && typeof content[0] !== 'string') {
- return content;
- }
var handler = new htmlparser.DomHandler(options),
parser = new htmlparser.Parser(handler, options);
|
Refactor Cheerio array creation
Because so many of jQuery's methods accept "mixed" arrays, the array
normalization logic should be abstracted into a function that is shared
across the manipulation methods.
This approach is also slightly more efficient because it eliminates a
conditional in the heavily-used `evaluate` method.
|
oyyd_cheerio-without-node-native
|
train
|
c0056e22270163bc7d627f72685c9f262b13b140
|
diff --git a/lib/javascript-static.js b/lib/javascript-static.js
index <HASH>..<HASH> 100644
--- a/lib/javascript-static.js
+++ b/lib/javascript-static.js
@@ -137,16 +137,18 @@ function lockoptionsallsetup(formid) {
if (master == undefined) {
continue;
}
- master.onclick = function() {return lockoptionsall(this.form.getAttribute('id'));};
- master.onblur = function() {return lockoptionsall(this.form.getAttribute('id'));};
- master.onchange = function() {return lockoptionsall(this.form.getAttribute('id'));};
+ master.formid = formid;
+ master.onclick = function() {return lockoptionsall(this.formid);};
+ master.onblur = function() {return lockoptionsall(this.formid);};
+ master.onchange = function() {return lockoptionsall(this.formid);};
}
for (var i = 0; i < form.elements.length; i++){
var formelement = form.elements[i];
if (formelement.type=='reset') {
- formelement.onclick = function() {this.form.reset();return lockoptionsall(this.form.getAttribute('id'));};
- formelement.onblur = function() {this.form.reset();return lockoptionsall(this.form.getAttribute('id'));};
- formelement.onchange = function() {this.form.reset();return lockoptionsall(this.form.getAttribute('id'));};
+ formelement.formid = formid;
+ formelement.onclick = function() {this.form.reset();return lockoptionsall(this.formid);};
+ formelement.onblur = function() {this.form.reset();return lockoptionsall(this.formid);};
+ formelement.onchange = function() {this.form.reset();return lockoptionsall(this.formid);};
}
}
return lockoptionsall(formid);
@@ -432,14 +434,14 @@ function insertAtCursor(myField, myValue) {
/*
- Call instead of setting window.onload directly or setting body onload=.
- Adds your function to a chain of functions rather than overwriting anything
- that exists.
-*/
+ Call instead of setting window.onload directly or setting body onload=.
+ Adds your function to a chain of functions rather than overwriting anything
+ that exists.
+*/
function addonload(fn) {
var oldhandler=window.onload;
window.onload=function() {
if(oldhandler) oldhandler();
- fn();
+ fn();
}
}
|
fix for MDL-<I> IE7 and IE6 on win xp error when changing "disable" state of "Start date"
|
moodle_moodle
|
train
|
f4e0ca4a30801221367707d07eeeaf1f1ea63d89
|
diff --git a/packages/sproutcore-metal/lib/system/mixin.js b/packages/sproutcore-metal/lib/system/mixin.js
index <HASH>..<HASH> 100644
--- a/packages/sproutcore-metal/lib/system/mixin.js
+++ b/packages/sproutcore-metal/lib/system/mixin.js
@@ -78,6 +78,8 @@ function mergeMixins(mixins, m, descs, values, base) {
if (!props.hasOwnProperty(key)) continue;
value = props[key];
if (value instanceof SC.Descriptor) {
+ if (value === REQUIRED && descs[key]) { continue; }
+
descs[key] = value;
values[key] = undefined;
} else {
|
Ensure that when merging mixins, the special REQUIRED value does not overwrite real values
|
emberjs_ember.js
|
train
|
918cca4731fc2046737a8955d7ee5e0c88ca861c
|
diff --git a/src/Aura/Autoload/Exception.php b/src/Aura/Autoload/Exception.php
index <HASH>..<HASH> 100644
--- a/src/Aura/Autoload/Exception.php
+++ b/src/Aura/Autoload/Exception.php
@@ -14,5 +14,7 @@ namespace Aura\Autoload;
*
* Generic package exception.
*
+ * @package Aura.Autoload
+ *
*/
class Exception extends \Exception {}
diff --git a/src/Aura/Autoload/Exception/AlreadyLoaded.php b/src/Aura/Autoload/Exception/AlreadyLoaded.php
index <HASH>..<HASH> 100644
--- a/src/Aura/Autoload/Exception/AlreadyLoaded.php
+++ b/src/Aura/Autoload/Exception/AlreadyLoaded.php
@@ -14,5 +14,7 @@ namespace Aura\Autoload\Exception;
*
* Indicates a class has already been loaded.
*
+ * @package Aura.Autoload
+ *
*/
class AlreadyLoaded extends \Aura\Autoload\Exception {}
diff --git a/src/Aura/Autoload/Exception/NotDeclared.php b/src/Aura/Autoload/Exception/NotDeclared.php
index <HASH>..<HASH> 100644
--- a/src/Aura/Autoload/Exception/NotDeclared.php
+++ b/src/Aura/Autoload/Exception/NotDeclared.php
@@ -14,5 +14,7 @@ namespace Aura\Autoload\Exception;
*
* Indicates the loader did not find a class definition after loading a file.
*
+ * @package Aura.Autoload
+ *
*/
class NotDeclared extends \Aura\Autoload\Exception {}
diff --git a/src/Aura/Autoload/Exception/NotReadable.php b/src/Aura/Autoload/Exception/NotReadable.php
index <HASH>..<HASH> 100644
--- a/src/Aura/Autoload/Exception/NotReadable.php
+++ b/src/Aura/Autoload/Exception/NotReadable.php
@@ -14,5 +14,7 @@ namespace Aura\Autoload\Exception;
*
* Indicates the Loader failed to find a class file.
*
+ * @package Aura.Autoload
+ *
*/
class NotReadable extends \Aura\Autoload\Exception {}
diff --git a/src/Aura/Autoload/Loader.php b/src/Aura/Autoload/Loader.php
index <HASH>..<HASH> 100644
--- a/src/Aura/Autoload/Loader.php
+++ b/src/Aura/Autoload/Loader.php
@@ -15,6 +15,8 @@ namespace Aura\Autoload;
* An SPL autoloader adhering to [PSR-0](https://github.com/php-fig/fig-standards/blob/master/accepted/PSR-0.md)
* and <https://wiki.php.net/rfc/splclassloader>.
*
+ * @package Aura.Autoload
+ *
*/
class Loader
{
|
phpdoc2 wants @package at page level **and** at class level
|
auraphp_Aura.Autoload
|
train
|
05c6e91c4bc71e237619e64b43124c9f7bbc2d35
|
diff --git a/pyvizio/util/const.py b/pyvizio/util/const.py
index <HASH>..<HASH> 100644
--- a/pyvizio/util/const.py
+++ b/pyvizio/util/const.py
@@ -4,7 +4,13 @@ APK_SOURCE_PATH = "src"
RESOURCE_PATH = "resources/res/raw"
APP_NAMES_FILE = "apps.json"
APP_PAYLOADS_FILE = "apps_availability.json"
+
+# File with app URLs: smartcast.apk-decompiled\res\values\strings.xml
+# Use the keys below to find the values
+
+# <string name="default_appsservice_app_server">
APP_NAMES_URL = "http://hometest.buddytv.netdna-cdn.com/appservice/vizio_apps_prod.json"
+# <string name="default_appsservice_availability_server">
APP_PAYLOADS_URL = (
"http://hometest.buddytv.netdna-cdn.com/appservice/app_availability_prod.json"
)
|
Add comments so I know where to find apps files in the future (#<I>)
* Add comments so I know where to find apps files in the future
* comments
|
vkorn_pyvizio
|
train
|
ad53ed6873e1c2e7f7a79f9a7e8f4c5ebd8a5424
|
diff --git a/ca/django_ca/static/django_ca/admin/js/profilewidget.js b/ca/django_ca/static/django_ca/admin/js/profilewidget.js
index <HASH>..<HASH> 100644
--- a/ca/django_ca/static/django_ca/admin/js/profilewidget.js
+++ b/ca/django_ca/static/django_ca/admin/js/profilewidget.js
@@ -17,6 +17,7 @@ django.jQuery(document).ready(function() {
django.jQuery(critical_selector).prop('checked', profile[ext].critical);
var value_selector = '.field-' + ext + ' select';
django.jQuery(value_selector).val(profile[ext].value);
+ django.jQuery(value_selector).change(); // so any existing callbacks are called
});
});
});
|
call change() for any callbacks (basicConstraints pathlen)
|
mathiasertl_django-ca
|
train
|
ac69564a45c178ed491d9ab98335737975804376
|
diff --git a/lib/magic_grid/collection.rb b/lib/magic_grid/collection.rb
index <HASH>..<HASH> 100644
--- a/lib/magic_grid/collection.rb
+++ b/lib/magic_grid/collection.rb
@@ -140,33 +140,34 @@ module MagicGrid
end
def apply_pagination(current_page, per_page)
- if per_page
- @reduced_collection = nil
- @paginations << {:current_page => current_page, :per_page => per_page}
- end
+ @per_page = per_page
+ @original_count = @collection.count
+ @total_pages = @original_count / @per_page
+ @current_page = current_page
+ @reduced_collection = nil
self
end
- def perform_pagination(collection, current_page, per_page)
- @original_count = @collection.count
- @total_pages = @original_count / per_page
- @current_page = current_page
+ def perform_pagination(collection)
+ return collection unless @per_page
+
if collection.respond_to? :paginate
- collection = collection.paginate(:page => current_page,
- :per_page => per_page)
+ collection = collection.paginate(:page => @current_page,
+ :per_page => @per_page)
elsif collection.respond_to? :page
- collection = collection.page(current_page).per(per_page)
+ collection = collection.page(@current_page).per(@per_page)
elsif collection.is_a?(Array) and Module.const_defined?(:Kaminari)
- collection = Kaminari.paginate_array(collection).page(current_page).per(per_page)
+ collection = Kaminari.paginate_array(collection).page(@current_page).per(@per_page)
else
collection = collection.to_enum
- collection = collection.each_slice(per_page)
- collection = collection.drop(current_page - 1)
+ collection = collection.each_slice(@per_page)
+ collection = collection.drop(@current_page - 1)
collection = collection.first.to_a
class << collection
attr_accessor :current_page, :total_pages, :original_count
end
end
+
collection
end
@@ -189,10 +190,8 @@ module MagicGrid
@post_filter_callbacks.each do |callback|
collection = callback.call(collection)
end
- @paginations.each do |params|
- collection = perform_pagination(collection, params[:current_page], params[:per_page])
- end
- collection
+ # Paginate at the very end, after all sorting, filtering, etc..
+ perform_pagination(collection)
end
def collection
diff --git a/lib/magic_grid/definition.rb b/lib/magic_grid/definition.rb
index <HASH>..<HASH> 100644
--- a/lib/magic_grid/definition.rb
+++ b/lib/magic_grid/definition.rb
@@ -145,7 +145,6 @@ module MagicGrid
if @options[:post_filter] and @options[:post_filter].respond_to?(:call)
@collection.apply_post_filter_callback @options[:post_filter]
end
- # Paginate at the very end, after all sorting, filtering, etc..
@collection.apply_pagination(current_page, @per_page)
end
diff --git a/spec/collection_spec.rb b/spec/collection_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/collection_spec.rb
+++ b/spec/collection_spec.rb
@@ -65,7 +65,8 @@ describe MagicGrid::Collection do
a.should_receive(:paginate).with(:page => 1, :per_page => 1) { a }
end
collection = MagicGrid::Collection.new(array, nil)
- collection.perform_pagination(array, 1, 1).should == array
+ collection.apply_pagination(1, 1)
+ collection.perform_pagination(array).should == array
end
end
@@ -77,7 +78,8 @@ describe MagicGrid::Collection do
a.should_receive(:page).with(1) { array }
end
collection = MagicGrid::Collection.new(array, nil)
- collection.perform_pagination(array, 1, 1).should == array
+ collection.apply_pagination(1, 1)
+ collection.perform_pagination(array).should == array
end
end
@@ -93,7 +95,8 @@ describe MagicGrid::Collection do
end
stub_const('Kaminari', kaminari)
collection = MagicGrid::Collection.new(array, nil)
- collection.perform_pagination(array, 1, 1).should == kaminaried_array
+ collection.apply_pagination(1, 1)
+ collection.perform_pagination(array).should == kaminaried_array
end
end
end
@@ -113,7 +116,8 @@ describe MagicGrid::Collection do
it "should attempt to use Enumerable methods to perform pagination" do
array = Array.new(100) { 1 }
collection = MagicGrid::Collection.new(array, nil)
- collection.perform_pagination(array, 1, 1).should == [1]
+ collection.apply_pagination(1, 1)
+ collection.perform_pagination(array).should == [1]
end
end
end
|
Better separate setup and execution of pagination
|
rmg_magic_grid
|
train
|
18f1beebfbfdf4e38666cb8fd583d4c1fe25a138
|
diff --git a/src/satosa/backends/saml2.py b/src/satosa/backends/saml2.py
index <HASH>..<HASH> 100644
--- a/src/satosa/backends/saml2.py
+++ b/src/satosa/backends/saml2.py
@@ -68,10 +68,12 @@ class SamlBackend(BackendModule):
"""
# state_key = self.store_state(authn_req, relay_state, req_args)
- disco_state = {"req_args": {
- "name_id_policy": request_info["req_args"]["name_id_policy"].to_string().decode(
- "utf-8")},
- "state": state, }
+ disco_state = {"state": state, }
+ if "req_args" in request_info and "name_id_policy" in request_info["req_args"]:
+ disco_state["req_args"] = {
+ "name_id_policy":
+ request_info["req_args"]["name_id_policy"].to_string().decode("utf-8")}
+
disco_state = urlsafe_b64encode(json.dumps(disco_state).encode("utf-8")).decode("utf-8")
# disco_state = urlencode(disco_state)
@@ -166,8 +168,9 @@ class SamlBackend(BackendModule):
return Unauthorized("You must chose an IdP")
else:
state = json.loads(state)
- state["req_args"]["name_id_policy"] = name_id_policy_from_string(
- state["req_args"]["name_id_policy"])
+ if "req_args" in state and "name_id_policy" in state["req_args"]:
+ state["req_args"]["name_id_policy"] = name_id_policy_from_string(
+ state["req_args"]["name_id_policy"])
return self.authn_request(context, entity_id, state, state["state"])
|
Discovery state does not have to contain name_id_policy.
|
IdentityPython_SATOSA
|
train
|
7341cbb169ac1b9fd23820bb9e143d407225623b
|
diff --git a/modules/syslog/module.py b/modules/syslog/module.py
index <HASH>..<HASH> 100644
--- a/modules/syslog/module.py
+++ b/modules/syslog/module.py
@@ -28,6 +28,8 @@
# to brok log into the syslog
import syslog
+import types
+from logging.handlers import SysLogHandler
from shinken.basemodule import BaseModule
from shinken.log import logger
@@ -37,26 +39,54 @@ properties = {
'type': 'syslog',
'external': False,
'phases': ['running'],
- }
+}
# called by the plugin manager to get a broker
def get_instance(plugin):
- logger.info("Get a Syslog broker for plugin %s" % plugin.get_name())
+ name = plugin.get_name()
+ logger.info("Get a Syslog broker for plugin %s" % (name))
- #Catch errors
- #path = plugin.path
- instance = Syslog_broker(plugin)
+ # syslog.syslog priority defaults to (LOG_INFO | LOG_USER)
+ facility = syslog.LOG_USER
+ priority = syslog.LOG_INFO
+
+ # Get configuration values, if any
+ if hasattr(plugin, 'facility'):
+ facility = plugin.facility
+ if hasattr(plugin, 'priority'):
+ priority = plugin.priority
+
+ # Ensure config values have a string type compatible with
+ # SysLogHandler.encodePriority
+ if type(facility) in types.StringTypes:
+ facility = types.StringType(facility)
+ if type(priority) in types.StringTypes:
+ priority = types.StringType(priority)
+
+ # Convert facility / priority (integers or strings) to aggregated
+ # priority value
+ sh = SysLogHandler()
+ try:
+ priority = sh.encodePriority(facility, priority)
+ except TypeError, e:
+ logger.error("[%s] Couldn't get syslog priority, "
+ "reverting to defaults" % (name))
+
+ logger.debug("[%s] Syslog priority: %d" % (name, priority))
+
+ instance = Syslog_broker(plugin, priority)
return instance
-# Class for the Merlindb Broker
-# Get broks and puts them in merlin database
+# Class for the Syslog Broker
+# Get log broks and send them to syslog
class Syslog_broker(BaseModule):
- def __init__(self, modconf):
+ def __init__(self, modconf, priority):
BaseModule.__init__(self, modconf)
+ self.priority = priority
- # A service check have just arrived, we UPDATE data info with this
+ # A log has just arrived, we send it to syslog
def manage_log_brok(self, b):
data = b.data
- syslog.syslog(data['log'].encode('UTF-8'))
+ syslog.syslog(self.priority, data['log'].encode('UTF-8'))
|
Allow specifying syslog facility and priority
|
Alignak-monitoring_alignak
|
train
|
627c5e11d33e2e729613f539d4901538bb01b77b
|
diff --git a/templates/module/modularity-mod-files.php b/templates/module/modularity-mod-files.php
index <HASH>..<HASH> 100644
--- a/templates/module/modularity-mod-files.php
+++ b/templates/module/modularity-mod-files.php
@@ -7,10 +7,16 @@
<ul class="files">
<?php foreach ($files as $file) : ?>
- <li><a target="_blank" class="link-item" href="<?php echo $file['url']; ?>" title="<?php echo $file['title']; ?>">
- <?php echo $file['filename']; ?>
- (<?php echo size_format(filesize(get_attached_file($file['ID'])), 2); ?>)
- </a></li>
+ <li>
+ <a target="_blank" class="link-item" href="<?php echo $file['url']; ?>" title="<?php echo $file['title']; ?>">
+ <?php echo $file['title']; ?>
+ (<?php echo pathinfo($file['url'], PATHINFO_EXTENSION); ?>, <?php echo size_format(filesize(get_attached_file($file['ID'])), 2); ?>)
+ </a>
+
+ <?php if (isset($file['description']) && !empty($file['description'])) : ?>
+ <?php echo wpautop($file['description']); ?>
+ <?php endif; ?>
+ </li>
<?php endforeach; ?>
</ul>
</div>
|
Show title and description of files in the file module
|
helsingborg-stad_Modularity
|
train
|
7adf6e79ac78228ae997a05a73fc708abce80779
|
diff --git a/HISTORY.rst b/HISTORY.rst
index <HASH>..<HASH> 100644
--- a/HISTORY.rst
+++ b/HISTORY.rst
@@ -2,6 +2,11 @@
History
=======
+0.8.1 (2018-01-01)
+------------------
+
+* Fixed issue #7 where having a ``str`` value for IPython's ``highlighting_style`` setting was not properly handled in ``prettyprinter``'s IPython integration, and raised an exception when trying to print data.
+
0.8.0 (2017-12-31)
------------------
diff --git a/prettyprinter/__init__.py b/prettyprinter/__init__.py
index <HASH>..<HASH> 100644
--- a/prettyprinter/__init__.py
+++ b/prettyprinter/__init__.py
@@ -4,7 +4,7 @@
__author__ = """Tommi Kaikkonen"""
__email__ = 'kaikkonentommi@gmail.com'
-__version__ = '0.8.0'
+__version__ = '0.8.1'
from io import StringIO
from importlib import import_module
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -31,7 +31,7 @@ test_requirements = [
setup(
name='prettyprinter',
- version='0.8.0',
+ version='0.8.1',
description="Syntax-highlighting, declarative and composable pretty printer for Python 3.6+",
long_description=readme + '\n\n' + history,
author="Tommi Kaikkonen",
|
Increment patch version to <I>, write changenotes
|
tommikaikkonen_prettyprinter
|
train
|
882c4144dbd55ee3a01a7a67ab13789c20d54d64
|
diff --git a/dedupe.py b/dedupe.py
index <HASH>..<HASH> 100644
--- a/dedupe.py
+++ b/dedupe.py
@@ -136,6 +136,35 @@ def findDuplicates(candidates, data_d, data_model, threshold) :
if __name__ == '__main__':
from test_data import init
+
+ def consoleLabel(uncertain_pairs) :
+ duplicates = []
+ nonduplicates = []
+
+ for pair in uncertain_pairs :
+ label = ''
+
+ for instance in tuple(pair) :
+ print instance.values()
+ print "Do these records refer to the same thing?"
+
+ valid_response = False
+ while not valid_response :
+ label = raw_input('yes(y)/no(n)/unsure(u)\n')
+ if label in ['y', 'n', 'u'] :
+ valid_response = True
+
+ if label == 'y' :
+ duplicates.append(pair)
+ elif label == 'n' :
+ nonduplicates.append(pair)
+ elif label != 'u' :
+ print 'Nonvalid response'
+ raise
+
+ return({0:nonduplicates, 1:duplicates})
+
+
num_training_dupes = 200
num_training_distinct = 16000
numIterations = 100
@@ -226,8 +255,7 @@ if __name__ == '__main__':
for pair in false_positives :
print ""
- for instance in tuple(pair) :
- print data_d[instance].values()
+
|
added console labeling function as user supplied function for adaptive learning
|
dedupeio_dedupe
|
train
|
1df948ff23d92c891abf28e7a98f661e05f1f3ff
|
diff --git a/indra/tools/assemble_corpus.py b/indra/tools/assemble_corpus.py
index <HASH>..<HASH> 100644
--- a/indra/tools/assemble_corpus.py
+++ b/indra/tools/assemble_corpus.py
@@ -1702,8 +1702,11 @@ def filter_by_curation(stmts_in, curations, incorrect_policy='any',
----------
stmts_in : list[indra.statements.Statement]
A list of statements to filter.
- curations : list
- A list of curations for evidences.
+ curations : list[Curation]
+ A list of curations for evidences. Curation object should have
+ (at least) the following attributes:
+ pa_hash (preassembled statement hash), source_hash (evidence hash) and
+ tag (e.g. 'correct', 'wrong_relation', etc.)
incorrect_policy : str
A policy for filtering out statements given incorrect curations. 'Any'
policy filters out a statement if at least one of its evidences is
|
Add curation object description in docstring
|
sorgerlab_indra
|
train
|
cccfba04c6618fb65808474e3d8da41289c1a94c
|
diff --git a/OAuth/ResourceOwner/VkontakteResourceOwner.php b/OAuth/ResourceOwner/VkontakteResourceOwner.php
index <HASH>..<HASH> 100644
--- a/OAuth/ResourceOwner/VkontakteResourceOwner.php
+++ b/OAuth/ResourceOwner/VkontakteResourceOwner.php
@@ -11,10 +11,13 @@
namespace HWI\Bundle\OAuthBundle\OAuth\ResourceOwner;
+use HWI\Bundle\OAuthBundle\Security\Core\Authentication\Token\OAuthToken;
+
/**
* VkontakteResourceOwner
*
* @author Adrov Igor <nucleartux@gmail.com>
+ * @author Vladislav Vlastovskiy <me@vlastv.ru>
*/
class VkontakteResourceOwner extends GenericOAuth2ResourceOwner
{
@@ -24,16 +27,22 @@ class VkontakteResourceOwner extends GenericOAuth2ResourceOwner
protected $options = array(
'authorization_url' => 'https://api.vk.com/oauth/authorize',
'access_token_url' => 'https://oauth.vk.com/access_token',
- 'infos_url' => 'https://api.vk.com/method/getUserInfoEx',
+ 'infos_url' => 'https://api.vk.com/method/users.get',
+
+ 'user_response_class' => '\HWI\Bundle\OAuthBundle\OAuth\Response\VkontakteUserResponse',
+
+ 'fields' => 'nickname,photo_50',
+ 'name_case' => null,
);
/**
* {@inheritDoc}
*/
protected $paths = array(
- 'identifier' => 'response.user_id',
- 'nickname' => 'response.user_name',
- 'realname' => 'response.user_name',
+ 'identifier' => 'response.0.uid',
+ 'nickname' => 'response.0.nickname',
+ 'last_name' => 'response.0.last_name',
+ 'first_name' => 'response.0.first_name',
);
/**
@@ -46,4 +55,26 @@ class VkontakteResourceOwner extends GenericOAuth2ResourceOwner
$this->options['scope'] = str_replace(',', ' ', $this->options['scope']);
}
}
+
+ /**
+ * {@inheritDoc}
+ */
+ public function getUserInformation(array $accessToken, array $extraParameters = array())
+ {
+ $parameters = array(
+ 'access_token' => $accessToken['access_token'],
+ 'fields' => is_array($fields = $this->getOption('fields')) ? implode(',', $fields) : $fields,
+ 'name_case' => $this->getOption('name_case'),
+ );
+ $url = $this->normalizeUrl($this->getOption('infos_url'), $parameters);
+
+ $content = $this->doGetUserInformationRequest($url)->getContent();
+
+ $response = $this->getUserResponse();
+ $response->setResponse($content);
+ $response->setResourceOwner($this);
+ $response->setOAuthToken(new OAuthToken($accessToken));
+
+ return $response;
+ }
}
|
Changed Vkontakte resource owner
Transferred receiving information about the user through the new API, as the previous method does not work.
|
hwi_HWIOAuthBundle
|
train
|
5818dce4c8dcbc2d29296440395e19318a7441fa
|
diff --git a/luigi/execution_summary.py b/luigi/execution_summary.py
index <HASH>..<HASH> 100644
--- a/luigi/execution_summary.py
+++ b/luigi/execution_summary.py
@@ -22,6 +22,8 @@ at the end of luigi invocations.
"""
import textwrap
+import collections
+import functools
def _partition_tasks(worker):
@@ -259,24 +261,15 @@ def _get_run_by_other_worker(worker):
"""
This returns a set of the tasks that are being run by other worker
"""
- worker_that_blocked_task = dict()
- get_work_response_history = worker._get_work_response_history
- for get_work_response in get_work_response_history:
- if get_work_response['task_id'] is None:
- for running_task in get_work_response['running_tasks']:
- other_worker_id = running_task['worker']
- other_task_id = running_task['task_id']
- other_task = worker._scheduled_tasks.get(other_task_id)
- if other_task:
- worker_that_blocked_task[other_task] = other_worker_id
- return set(worker_that_blocked_task.keys())
+ task_sets = _get_external_workers(worker).values()
+ return functools.reduce(lambda a, b: a | b, task_sets, set())
def _get_external_workers(worker):
"""
This returns a dict with a set of tasks for all of the other workers
"""
- worker_that_blocked_task = dict()
+ worker_that_blocked_task = collections.defaultdict(set)
get_work_response_history = worker._get_work_response_history
for get_work_response in get_work_response_history:
if get_work_response['task_id'] is None:
@@ -284,10 +277,9 @@ def _get_external_workers(worker):
other_worker_id = running_task['worker']
other_task_id = running_task['task_id']
other_task = worker._scheduled_tasks.get(other_task_id)
- if other_task:
- if other_worker_id not in worker_that_blocked_task.keys():
- worker_that_blocked_task[other_worker_id] = set()
- worker_that_blocked_task[other_worker_id].add(other_task)
+ if other_worker_id == worker._id or not other_task:
+ continue
+ worker_that_blocked_task[other_worker_id].add(other_task)
return worker_that_blocked_task
diff --git a/test/execution_summary_test.py b/test/execution_summary_test.py
index <HASH>..<HASH> 100644
--- a/test/execution_summary_test.py
+++ b/test/execution_summary_test.py
@@ -591,3 +591,20 @@ class ExecutionSummaryTest(LuigiTestCase):
self.assertIn('Luigi Execution Summary', s)
self.assertNotIn('00:00:00', s)
self.assertNotIn('\n\n\n', s)
+
+ def test_multiple_dash_dash_workers(self):
+ """
+ Don't print own worker with ``--workers 2`` setting.
+ """
+ self.worker = luigi.worker.Worker(scheduler=self.scheduler, worker_processes=2)
+
+ class Foo(RunOnceTask):
+ pass
+
+ self.run_task(Foo())
+ d = self.summary_dict()
+ self.assertEqual(set(), d['run_by_other_worker'])
+ s = self.summary()
+ self.assertNotIn('The other workers were', s)
+ self.assertIn('This progress looks :) because there were no failed ', s)
+ self.assertNotIn('\n\n\n', s)
|
Execution summary: Don't print own worker
Previously you would see your own worker being printed among the list of
other workers when running with `--workers 2` or higher.
|
spotify_luigi
|
train
|
5e6028a4603f227c1ab17d68017726688f073409
|
diff --git a/spec/helper.rb b/spec/helper.rb
index <HASH>..<HASH> 100644
--- a/spec/helper.rb
+++ b/spec/helper.rb
@@ -1,9 +1,5 @@
+# encoding: utf-8
require 'simplecov'
-# HACK - couldn't get tests to run without this, simple cov barfed with the following error:
-# .../simplecov-0.5.4/lib/simplecov/source_file.rb:157:in `block in process_skipped_lines!': invalid byte sequence in US-ASCII #(ArgumentError)
-# I intend to find a better solution before making the pull request
-Encoding.default_external = Encoding::UTF_8
-Encoding.default_internal = Encoding::UTF_8
SimpleCov.start
require 'twitter'
|
removing encoding hack on specs helper
|
sferik_twitter
|
train
|
8b5c7d7b20879dc99b8239bd8620b17417113241
|
diff --git a/tests/lib/tests.js b/tests/lib/tests.js
index <HASH>..<HASH> 100644
--- a/tests/lib/tests.js
+++ b/tests/lib/tests.js
@@ -7,44 +7,39 @@ var assert = require('assert'),
path = require('path'),
async = require('async'),
winston = require('winston'),
-
TEMPLATES_DIRECTORY = path.join(__dirname, '../templates/');
-
-
function prepare(callback) {
var raw = {},
expected = {};
- fs.readdir(TEMPLATES_DIRECTORY, function(err, files) {
- async.each(files, function(file, next) {
- fs.readFile(path.join(TEMPLATES_DIRECTORY, file), 'utf-8', function(err, html) {
- if (file.match(/\.html?/)) {
- expected[file.replace(/\.html?/, '')] = html;
- } else if (file.match(/\.tpl?/)) {
- raw[file.replace(/\.tpl?/, '')] = html;
- }
+ var files = fs.readdirSync(TEMPLATES_DIRECTORY);
- next();
- });
- }, function(err) {
- if (err) {
- throw new Error(err);
- }
+ async.each(files, function(file, next) {
+ var html = fs.readFileSync(path.join(TEMPLATES_DIRECTORY, file), 'utf-8');
- for (var key in raw) {
- if (raw.hasOwnProperty(key)) {
- if (typeof expected[key] === 'undefined') {
- winston.warn('Missing expected file: ' + key + '.html');
- delete raw[key];
- }
- }
- }
+ if (file.match(/\.html?/)) {
+ expected[file.replace(/\.html?/, '')] = html;
+ } else if (file.match(/\.tpl?/)) {
+ raw[file.replace(/\.tpl?/, '')] = html;
+ }
- callback(raw, expected);
- });
+ next();
+ }, function(err) {
+ if (err) {
+ throw new Error(err);
+ }
+ for (var key in raw) {
+ if (raw.hasOwnProperty(key)) {
+ if (typeof expected[key] === 'undefined') {
+ winston.warn('Missing expected file: ' + key + '.html');
+ delete raw[key];
+ }
+ }
+ }
+ callback(raw, expected);
});
}
|
tests - switching to sync file loading
some weird issue on my env
|
benchpressjs_benchpressjs
|
train
|
775f9b0af3902738d2212a2d53e42a6aaf8b695f
|
diff --git a/scss/sessions/aw-table_sessions.scss b/scss/sessions/aw-table_sessions.scss
index <HASH>..<HASH> 100644
--- a/scss/sessions/aw-table_sessions.scss
+++ b/scss/sessions/aw-table_sessions.scss
@@ -3,7 +3,7 @@
@import '../mixins';
.aw-table__sessions__reputation-preview {
- width: $reputation-preview-width;
+ min-width: $reputation-preview-width;
padding: 0;
@each $reputation, $color in $reputation-preview-colors {
&--#{$reputation} {
@@ -16,19 +16,21 @@
&--count {
text-align: right;
margin-right: 10px;
+ min-width: 100px;
}
&--reputationPreview {
padding: 0;
}
+ &--rule {
+ min-width: 80px;
+ }
}
@include table-body-cell() {
- &--count {
- padding-right: 25px;
- }
- &--rule {
+ &--actions {
+ min-width: 140px;
.rule-button {
- font-size: 0;
+ font-size: 10px;
border: none;
padding: 0;
@include white-to-brand-purple();
diff --git a/src/react/address/address_resolvers.js b/src/react/address/address_resolvers.js
index <HASH>..<HASH> 100644
--- a/src/react/address/address_resolvers.js
+++ b/src/react/address/address_resolvers.js
@@ -69,11 +69,6 @@ export const tableResolvers = [
resolver: inAddress('as_number'),
},
{
- id: 'networkName',
- label: 'Network name',
- resolver: inAddress('network_name'),
- },
- {
id: 'country',
resolver: entry => {
const countryComponent = countryResolver(entry);
diff --git a/src/react/sessions/resolvers.js b/src/react/sessions/resolvers.js
index <HASH>..<HASH> 100644
--- a/src/react/sessions/resolvers.js
+++ b/src/react/sessions/resolvers.js
@@ -2,6 +2,7 @@ import React from 'react';
import cx from 'classnames';
import { formatNumber, formatSpeedMin } from '../../i18n';
+import { capitalize } from '../../utilities/string';
import ActivityCell from '../activity/activity_cell';
import RuleActions from '../rules/rule_actions';
@@ -34,6 +35,12 @@ export const tableResolvers = [
{
id: 'rule',
// eslint-disable-next-line
+ resolver: ({ rule }) => (rule ? capitalize(rule.type) : ''),
+ },
+ {
+ id: 'actions',
+ label: '',
+ // eslint-disable-next-line
resolver: ({ rule, type, ...session }) => (
<RuleActions
showFullText={false}
|
Added texts for actions in SessionsTable
|
access-watch_access-watch-ui
|
train
|
8d468524d1c256c1092032bb95262052c014114f
|
diff --git a/config/karma/default.js b/config/karma/default.js
index <HASH>..<HASH> 100644
--- a/config/karma/default.js
+++ b/config/karma/default.js
@@ -86,7 +86,10 @@ module.exports = function( config ) {
plugins: [
require( 'rollup-plugin-mockr' )( require( '../mockr/default' ) ),
require( 'rollup-plugin-commonjs' )(),
- require( 'rollup-plugin-node-resolve' )()
+ require( 'rollup-plugin-node-resolve' )( {
+ jsnext: true,
+ main: false
+ } )
]
},
bundle: {
diff --git a/config/rollup/es6.js b/config/rollup/es6.js
index <HASH>..<HASH> 100644
--- a/config/rollup/es6.js
+++ b/config/rollup/es6.js
@@ -7,7 +7,13 @@ export default {
entry: 'src/index.js',
format: 'es6',
sourceMap: true,
- plugins: [ nodeResolve(), convertCJS() ],
+ plugins: [
+ nodeResolve( {
+ jsnext: true,
+ main: false
+ } ),
+ convertCJS()
+ ],
banner: `/*! ${packageInfo.name} v${packageInfo.version} | (c) ${new Date().getFullYear()} ${packageInfo.author.name} | ${packageInfo.license} license (see LICENSE) */`,
dest: `dist/${packageInfo.name}.js`
};
|
[ci skip] Updated configuration for rollup-plugin-node-resolve.
|
BEMQuery_bemquery-core
|
train
|
f025bac7ea9194eda58c4d135abe99c158519932
|
diff --git a/mollie/api/objects/refund.py b/mollie/api/objects/refund.py
index <HASH>..<HASH> 100644
--- a/mollie/api/objects/refund.py
+++ b/mollie/api/objects/refund.py
@@ -7,21 +7,17 @@ class Refund(Base):
STATUS_PROCESSING = 'processing'
STATUS_REFUNDED = 'refunded'
- @property
def is_queued(self):
- return self['status'] == self.STATUS_QUEUED
+ return self.status == self.STATUS_QUEUED
- @property
def is_pending(self):
- return self['status'] == self.STATUS_PENDING
+ return self.status == self.STATUS_PENDING
- @property
def is_processing(self):
- return self['status'] == self.STATUS_PROCESSING
+ return self.status == self.STATUS_PROCESSING
- @property
def is_refunded(self):
- return self['status'] == self.STATUS_REFUNDED
+ return self.status == self.STATUS_REFUNDED
@property
def resource(self):
diff --git a/tests/test_refunds.py b/tests/test_refunds.py
index <HASH>..<HASH> 100644
--- a/tests/test_refunds.py
+++ b/tests/test_refunds.py
@@ -1,3 +1,5 @@
+from mollie.api.objects.list import List
+from mollie.api.objects.refund import Refund
PAYMENT_ID = 'tr_7UhSN1zuXS'
REFUND_ID = 're_4qqhO89gsT'
@@ -7,12 +9,12 @@ def test_list_all_refunds(client, response):
response.get('https://api.mollie.com/v2/refunds', 'refunds_multiple')
refunds = client.refunds.all()
assert refunds.count == 1
- assert refunds.__class__.__name__ == 'List'
+ assert isinstance(refunds, List)
iterated = 0
iterated_refund_ids = []
for refund in refunds:
- assert refund.__class__.__name__ == 'Refund'
+ assert isinstance(refund, Refund)
iterated += 1
assert refund.id is not None
iterated_refund_ids.append(refund.id)
@@ -27,12 +29,12 @@ def test_list_all_refunds_of_payment(client, response):
payment = client.payments.get(PAYMENT_ID)
refunds = client.payments.refunds(payment).all()
assert refunds.count == 1
- assert refunds.__class__.__name__ == 'List'
+ assert isinstance(refunds, List)
iterated = 0
iterated_refund_ids = []
for refund in refunds:
- assert refund.__class__.__name__ == 'Refund'
+ assert isinstance(refund, Refund)
iterated += 1
assert refund.id is not None
iterated_refund_ids.append(refund.id)
@@ -70,7 +72,7 @@ def test_create_refund(client, response):
}
refund = client.payments.refund(payment, data)
assert refund.id == REFUND_ID
- assert refund.__class__.__name__ == 'Refund'
+ assert isinstance(refund, Refund)
def test_cancel_refund(client, response):
@@ -83,4 +85,4 @@ def test_cancel_refund(client, response):
refund = client.payments.refunds(payment).get(REFUND_ID)
canceled_refund = refund.cancel()
assert canceled_refund == {}
- assert canceled_refund.__class__.__name__ == 'Refund'
+ assert isinstance(refund, Refund)
|
update tests and change properties into a function
|
mollie_mollie-api-python
|
train
|
97e5126ca0f48460459e9e9ccfbd92e9ab2cd9d5
|
diff --git a/prow/pod-utils/decorate/podspec.go b/prow/pod-utils/decorate/podspec.go
index <HASH>..<HASH> 100644
--- a/prow/pod-utils/decorate/podspec.go
+++ b/prow/pod-utils/decorate/podspec.go
@@ -107,7 +107,7 @@ func LabelsAndAnnotationsForSpec(spec prowapi.ProwJobSpec, extraLabels, extraAnn
maybeTruncated := value
if len(value) > validation.LabelValueMaxLength {
// TODO(fejta): consider truncating middle rather than end.
- maybeTruncated = strings.TrimRight(value[:validation.LabelValueMaxLength], ".-")
+ maybeTruncated = strings.TrimRight(value[:validation.LabelValueMaxLength], "._-")
log.WithFields(logrus.Fields{
"key": key,
"value": value,
|
Prow job decoration: also trim underscore after truncating
Label ending with underscore is not valid either, also truncating it
|
kubernetes_test-infra
|
train
|
3fa346e744d968900169865b57f39f7ba2883b27
|
diff --git a/pkg/kubectl/cmd/replace.go b/pkg/kubectl/cmd/replace.go
index <HASH>..<HASH> 100644
--- a/pkg/kubectl/cmd/replace.go
+++ b/pkg/kubectl/cmd/replace.go
@@ -35,6 +35,7 @@ import (
"k8s.io/kubernetes/pkg/kubectl"
"k8s.io/kubernetes/pkg/kubectl/cmd/templates"
cmdutil "k8s.io/kubernetes/pkg/kubectl/cmd/util"
+ "k8s.io/kubernetes/pkg/kubectl/scheme"
"k8s.io/kubernetes/pkg/kubectl/util/i18n"
"k8s.io/kubernetes/pkg/kubectl/validation"
)
@@ -218,7 +219,7 @@ func (o *ReplaceOptions) Run() error {
return err
}
- if err := kubectl.CreateOrUpdateAnnotation(o.createAnnotation, info.Object, cmdutil.InternalVersionJSONEncoder()); err != nil {
+ if err := kubectl.CreateOrUpdateAnnotation(o.createAnnotation, info.Object, scheme.DefaultJSONEncoder()); err != nil {
return cmdutil.AddSourceToErr("replacing", info.Source, err)
}
@@ -309,7 +310,7 @@ func (o *ReplaceOptions) forceReplace() error {
return err
}
- if err := kubectl.CreateOrUpdateAnnotation(o.createAnnotation, info.Object, cmdutil.InternalVersionJSONEncoder()); err != nil {
+ if err := kubectl.CreateOrUpdateAnnotation(o.createAnnotation, info.Object, scheme.DefaultJSONEncoder()); err != nil {
return err
}
|
Replace internal version Encoder with external version Encoder
|
kubernetes_kubernetes
|
train
|
da733f1c93743c51313ab164176200eadcced8e5
|
diff --git a/scrubadub/filth/base.py b/scrubadub/filth/base.py
index <HASH>..<HASH> 100644
--- a/scrubadub/filth/base.py
+++ b/scrubadub/filth/base.py
@@ -54,6 +54,11 @@ class Filth(object):
self.replacement_string = replacement_string # type: Optional[str]
self.locale = locale # type: Optional[str]
+ if self.beg >= self.end:
+ raise ValueError(
+ f"Creating invalid filth (self.beg >= self.end): {self}"
+ )
+
@property
def placeholder(self) -> str:
return self.type.upper()
diff --git a/scrubadub/scrubbers.py b/scrubadub/scrubbers.py
index <HASH>..<HASH> 100644
--- a/scrubadub/scrubbers.py
+++ b/scrubadub/scrubbers.py
@@ -289,17 +289,21 @@ class Scrubber(object):
self, text: str, filth_list: Sequence[Filth], document_name: Optional[str], **kwargs
) -> str:
filth_list = [filth for filth in filth_list if filth.document_name == document_name]
+ if len(filth_list) == 0:
+ return text
+
filth_list = self._sort_filths(filth_list) # TODO: expensive sort may not be needed
+ filth = None # type: Optional[Filth]
clean_chunks = []
- filth = Filth()
for next_filth in filth_list:
- clean_chunks.append(text[filth.end:next_filth.beg])
+ clean_chunks.append(text[(0 if filth is None else filth.end):next_filth.beg])
if next_filth.replacement_string is not None:
clean_chunks.append(next_filth.replacement_string)
else:
clean_chunks.append(next_filth.replace_with(**kwargs))
filth = next_filth
- clean_chunks.append(text[filth.end:])
+ if filth is not None:
+ clean_chunks.append(text[filth.end:])
return u''.join(clean_chunks)
def _post_process_filth_list(self, filth_list: Sequence[Filth]) -> Sequence[Filth]:
diff --git a/tests/test_filth.py b/tests/test_filth.py
index <HASH>..<HASH> 100644
--- a/tests/test_filth.py
+++ b/tests/test_filth.py
@@ -8,7 +8,7 @@ class FilthTestCase(unittest.TestCase):
def test_disallowed_replace_with(self):
"""replace_with should fail gracefully"""
- filth = Filth()
+ filth = Filth(beg=0, end=3, text='asd')
with self.assertRaises(InvalidReplaceWith):
filth.replace_with('surrogate')
with self.assertRaises(InvalidReplaceWith):
|
ensure that filth can not be of zero size
|
datascopeanalytics_scrubadub
|
train
|
f2b1b348965cd141a5be648074a675fdff6ca729
|
diff --git a/src/Providers/Composer/Composer.php b/src/Providers/Composer/Composer.php
index <HASH>..<HASH> 100644
--- a/src/Providers/Composer/Composer.php
+++ b/src/Providers/Composer/Composer.php
@@ -96,13 +96,8 @@ final class Composer implements ComposerContract
{
$process = new Process($cmd, $cwd);
- if ('\\' !== DIRECTORY_SEPARATOR && file_exists('/dev/tty') && is_readable('/dev/tty')) {
- try {
- $process->setTty(true);
- } catch (Throwable $e) {
- // Ignore this error, as it just indicates there is no working TTY available
- // and the rude check above failed to detect that.
- }
+ if ($process->isTty()) {
+ $process->setTty(true);
}
try {
|
refactor: uses process isTty to check if tty is available
|
laravel-zero_framework
|
train
|
35569e2f62de47fdea674e06100dac7b24ad23da
|
diff --git a/parsedatetime/pdt_locales.py b/parsedatetime/pdt_locales.py
index <HASH>..<HASH> 100644
--- a/parsedatetime/pdt_locales.py
+++ b/parsedatetime/pdt_locales.py
@@ -125,6 +125,7 @@ class pdtLocale_base(object):
# to fill in any value to be replace - the current date/time will
# already have been populated by the method buildSources
self.re_sources = { 'noon': { 'hr': 12, 'mn': 0, 'sec': 0 },
+ 'afternoon': { 'hr': 13, 'mn': 0, 'sec': 0 },
'lunch': { 'hr': 12, 'mn': 0, 'sec': 0 },
'morning': { 'hr': 6, 'mn': 0, 'sec': 0 },
'breakfast': { 'hr': 8, 'mn': 0, 'sec': 0 },
@@ -623,4 +624,4 @@ class pdtLocale_nl(pdtLocale_base):
self.re_sources['nacht'] = { 'hr': 21, 'mn': 0, 'sec': 0 }
self.re_sources['nachts'] = { 'hr': 21, 'mn': 0, 'sec': 0 }
self.re_sources['vanavond'] = { 'hr': 21, 'mn': 0, 'sec': 0 }
- self.re_sources['vannacht'] = { 'hr': 21, 'mn': 0, 'sec': 0 }
\ No newline at end of file
+ self.re_sources['vannacht'] = { 'hr': 21, 'mn': 0, 'sec': 0 }
diff --git a/parsedatetime/tests/TestSimpleDateTimes.py b/parsedatetime/tests/TestSimpleDateTimes.py
index <HASH>..<HASH> 100644
--- a/parsedatetime/tests/TestSimpleDateTimes.py
+++ b/parsedatetime/tests/TestSimpleDateTimes.py
@@ -216,6 +216,10 @@ class test(unittest.TestCase):
self.assertTrue(_compareResults(self.cal.parse('lunch', start), (target, 2)))
+ target = datetime.datetime(self.yr, self.mth, self.dy, 13, 0, 0).timetuple()
+
+ self.assertTrue(_compareResults(self.cal.parse('afternoon', start), (target, 2)))
+
target = datetime.datetime(self.yr, self.mth, self.dy, 18, 0, 0).timetuple()
self.assertTrue(_compareResults(self.cal.parse('evening', start), (target, 2)))
|
re_sources is lacking of "afternoon" which causes bug
|
bear_parsedatetime
|
train
|
5b0e7c61189fd8b81f461063ab9cc0bba43c6baf
|
diff --git a/internal/acctest/acctest.go b/internal/acctest/acctest.go
index <HASH>..<HASH> 100644
--- a/internal/acctest/acctest.go
+++ b/internal/acctest/acctest.go
@@ -1209,11 +1209,12 @@ provider "aws" {
}
const testAccProviderConfigBase = `
-data "aws_partition" "provider_test" {}
+data "aws_region" "provider_test" {}
-# Required to initialize the provider
-data "aws_arn" "test" {
- arn = "arn:${data.aws_partition.provider_test.partition}:s3:::test"
+# Required to initialize the provider.
+data "aws_service" "provider_test" {
+ region = data.aws_region.provider_test.name
+ service_id = "s3"
}
`
diff --git a/internal/acctest/provider_test.go b/internal/acctest/provider_test.go
index <HASH>..<HASH> 100644
--- a/internal/acctest/provider_test.go
+++ b/internal/acctest/provider_test.go
@@ -912,11 +912,12 @@ data "aws_caller_identity" "current" {}
` //lintignore:AT004
const testAccProviderConfigBase = `
-data "aws_partition" "provider_test" {}
+data "aws_region" "provider_test" {}
-# Required to initialize the provider
-data "aws_arn" "test" {
- arn = "arn:${data.aws_partition.provider_test.partition}:s3:::test"
+# Required to initialize the provider.
+data "aws_service" "provider_test" {
+ region = data.aws_region.provider_test.name
+ service_id = "s3"
}
`
|
Avoid use of the 'aws_arn' data source in provider acceptance tests.
|
terraform-providers_terraform-provider-aws
|
train
|
2718f55db7941a89b8006b95b6138535bb5b27c3
|
diff --git a/lib/svtplay_dl/service/tv4play.py b/lib/svtplay_dl/service/tv4play.py
index <HASH>..<HASH> 100644
--- a/lib/svtplay_dl/service/tv4play.py
+++ b/lib/svtplay_dl/service/tv4play.py
@@ -31,8 +31,7 @@ class Tv4play(Service, OpenGraphThumbMixin):
end_time_stamp.isoformat())
self.config.set("live", True)
- self.options.hls_time_stamp = True
- streams = hlsparse(self.config, self.http.request("get", url), url, output=self.output)
+ streams = hlsparse(self.config, self.http.request("get", url), url, output=self.output, hls_time_stamp=True)
for n in list(streams.keys()):
yield streams[n]
return
|
tv4play: send hls_time_stamp as a argument to fetchers
|
spaam_svtplay-dl
|
train
|
a1e7bd84090672fcb84222b3e452275e878bc232
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -6,7 +6,7 @@ package_version = '0.2.3'
setup(
name=package_name,
packages=find_packages(exclude=["*.tests", "*.tests.*", "tests.*", "tests"]),
- install_requires=["sphinx_rtd_theme"],
+ install_requires=["sphinx_rtd_theme", "sphinx<4"],
version=package_version,
description='A class representing a file system directory, that deletes on '
'garbage collect.',
|
PEEK-<I> Updated sphinx version
|
Synerty_pytmpdir
|
train
|
7d36e805460e0d5f35c57db259dd017c0e5c2653
|
diff --git a/lib/beaker-aws/version.rb b/lib/beaker-aws/version.rb
index <HASH>..<HASH> 100644
--- a/lib/beaker-aws/version.rb
+++ b/lib/beaker-aws/version.rb
@@ -1,3 +1,3 @@
module BeakerAws
- VERSION = '0.7.0'
+ VERSION = '0.8.0'
end
|
(GEM) update beaker-aws version to <I>
|
puppetlabs_beaker-aws
|
train
|
d7efc237961e2ecca61c95856614bdb959861432
|
diff --git a/org.eclipse.xtext.tests/src/org/eclipse/xtext/nodemodel/util/NodeModelUtilsTest.java b/org.eclipse.xtext.tests/src/org/eclipse/xtext/nodemodel/util/NodeModelUtilsTest.java
index <HASH>..<HASH> 100644
--- a/org.eclipse.xtext.tests/src/org/eclipse/xtext/nodemodel/util/NodeModelUtilsTest.java
+++ b/org.eclipse.xtext.tests/src/org/eclipse/xtext/nodemodel/util/NodeModelUtilsTest.java
@@ -54,6 +54,9 @@ public class NodeModelUtilsTest extends AbstractXtextTests {
nodes = NodeModelUtils.findNodesForFeature(declaration, XtextPackage.eINSTANCE.getGeneratedMetamodel_Name());
assertEquals(1, nodes.size());
assertEquals("foo", nodes.get(0).getText().trim());
+
+ nodes = NodeModelUtils.findNodesForFeature(declaration, null);
+ assertEquals(0, nodes.size());
}
@Test public void testFindNodesForFeature_MultipleFeature() throws Exception {
diff --git a/org.eclipse.xtext/src/org/eclipse/xtext/nodemodel/util/NodeModelUtils.java b/org.eclipse.xtext/src/org/eclipse/xtext/nodemodel/util/NodeModelUtils.java
index <HASH>..<HASH> 100644
--- a/org.eclipse.xtext/src/org/eclipse/xtext/nodemodel/util/NodeModelUtils.java
+++ b/org.eclipse.xtext/src/org/eclipse/xtext/nodemodel/util/NodeModelUtils.java
@@ -164,7 +164,7 @@ public class NodeModelUtils extends InternalNodeModelUtils {
/* @NonNull */
public static List<INode> findNodesForFeature(EObject semanticObject, EStructuralFeature structuralFeature) {
ICompositeNode node = findActualNodeFor(semanticObject);
- if (node != null) {
+ if (node != null && structuralFeature != null) {
return findNodesForFeature(semanticObject, node, structuralFeature);
}
return Collections.emptyList();
|
[<I>] Add null guard
Test call findNodesForFeature with null feature
|
eclipse_xtext-core
|
train
|
2998060caa24072e8b8658bc06c995f8cbf48b82
|
diff --git a/src/components/app.js b/src/components/app.js
index <HASH>..<HASH> 100644
--- a/src/components/app.js
+++ b/src/components/app.js
@@ -91,8 +91,13 @@ class App extends Component {
const page = pages[store.modal.page] || {};
const pageLinkHandler = () => this.handlePage(page.link);
- if (!store.gotrue) {
- return <SiteURLForm onSiteURL={this.handleSiteURL} />;
+ if (process.env.NODE_ENV === "development" && store.siteURL === null) {
+ return (
+ <SiteURLForm
+ devMode={store.siteURL != null}
+ onSiteURL={store.siteURL ? this.clearSiteURL : this.handleSiteURL}
+ />
+ )
}
if (!store.settings) {
return;
@@ -127,7 +132,14 @@ class App extends Component {
{page.link_text}
</button>
)}
- <SiteURLForm devMode="true" onSiteURL={this.clearSiteURL} />
+ {process.env.NODE_ENV === "development" ? (
+ <SiteURLForm
+ devMode={store.siteURL != null}
+ onSiteURL={store.siteURL ? this.clearSiteURL : this.handleSiteURL}
+ />
+ ) : (
+ <div />
+ )}
</div>
);
}
diff --git a/src/netlify-identity.js b/src/netlify-identity.js
index <HASH>..<HASH> 100644
--- a/src/netlify-identity.js
+++ b/src/netlify-identity.js
@@ -84,6 +84,7 @@ function instantiateGotrue(APIUrl) {
parts.push("/")
}
parts.push(".netlify/identity")
+ store.setSiteURL(siteURL)
return new GoTrue({ APIUrl: parts.join(""), setCookie: !isLocal })
}
if (isLocal) {
|
fix: ensure dev settings for url is only visible on dev
|
netlify_netlify-identity-widget
|
train
|
1529258bbf14f83bba13e7dc4b1055c4dbbd94dd
|
diff --git a/law/task/base.py b/law/task/base.py
index <HASH>..<HASH> 100644
--- a/law/task/base.py
+++ b/law/task/base.py
@@ -171,6 +171,8 @@ class BaseTask(luigi.Task):
arg = "--{}".format(name.replace("_", "-"))
if isinstance(param, (luigi.IntParameter, luigi.FloatParameter)):
args.extend([arg, str(val)])
+ elif isinstance(param, luigi.BoolParameter):
+ args.extend([arg, "{}".format(val)])
else:
args.extend([arg, "\"{}\"".format(val)])
|
[task/base] Fix serialization of bools in cli_args.
|
riga_law
|
train
|
e068f9cf9785c9b14dbfcecbe86ae9696838eb4d
|
diff --git a/lib/fluent/plugin/out_elasticsearch.rb b/lib/fluent/plugin/out_elasticsearch.rb
index <HASH>..<HASH> 100644
--- a/lib/fluent/plugin/out_elasticsearch.rb
+++ b/lib/fluent/plugin/out_elasticsearch.rb
@@ -307,10 +307,6 @@ EOC
end
end
- if @buffer_config.flush_thread_count < 2
- log.warn "To prevent events traffic jam, you should specify 2 or more 'flush_thread_count'."
- end
-
# Consider missing the prefix of "$." in nested key specifiers.
@id_key = convert_compat_id_key(@id_key) if @id_key
@parent_key = convert_compat_id_key(@parent_key) if @parent_key
|
Remove verbose warning log
Users' Elasticsearch cluster differs its capacities.
This issue should be fixed user cluster side.
In Fluentd side, cluster outage is not managed....
|
uken_fluent-plugin-elasticsearch
|
train
|
abf5823fe00f011c82f49998c3e89a841a9afc4e
|
diff --git a/changes.txt b/changes.txt
index <HASH>..<HASH> 100755
--- a/changes.txt
+++ b/changes.txt
@@ -1,5 +1,8 @@
CHANGES
+2013-08-01
+- Second param to \Elastica\Search.php:count($query = '', $fullResult = false) added. If second param is set to true, full ResultSet is returned including facets.
+
2013-07-16
- Plugin geocluster-facet support added
diff --git a/lib/Elastica/Search.php b/lib/Elastica/Search.php
index <HASH>..<HASH> 100644
--- a/lib/Elastica/Search.php
+++ b/lib/Elastica/Search.php
@@ -430,9 +430,10 @@ class Search
/**
*
* @param mixed $query
- * @return int
+ * @param $fullResult (default = false) By default only the total hit count is returned. If set to true, the full ResultSet including facets is returned.
+ * @return int|ResultSet
*/
- public function count($query = '')
+ public function count($query = '', $fullResult = false)
{
$this->setOptionsAndQuery(null, $query);
@@ -446,8 +447,8 @@ class Search
array(self::OPTION_SEARCH_TYPE => self::OPTION_SEARCH_TYPE_COUNT)
);
$resultSet = new ResultSet($response, $query);
-
- return $resultSet->getTotalHits();
+
+ return $fullResult?$resultSet:$resultSet->getTotalHits();
}
/**
diff --git a/test/lib/Elastica/Test/SearchTest.php b/test/lib/Elastica/Test/SearchTest.php
index <HASH>..<HASH> 100644
--- a/test/lib/Elastica/Test/SearchTest.php
+++ b/test/lib/Elastica/Test/SearchTest.php
@@ -462,4 +462,26 @@ class SearchTest extends BaseTest
$source = $resultSet->current()->getSource();
$this->assertEquals('bunny', $source['username']);
}
+
+ public function testCount() {
+ $index = $this->_createIndex('eeee');
+ $search = new Search($index->getClient());
+ $type = $index->getType('test');
+
+ $doc = new Document(1, array('id' => 1, 'username' => 'ruflin'));
+
+ $type->addDocument($doc);
+ $index->refresh();
+
+ $search->addIndex($index);
+ $search->addType($type);
+
+ $result1 = $search->count(new \Elastica\Query\MatchAll());
+ $this->assertEquals(1, $result1);
+
+
+ $result2 = $search->count(new \Elastica\Query\MatchAll(), true);
+ $this->assertInstanceOf('\Elastica\ResultSet', $result2);
+ $this->assertEquals(1, $result2->getTotalHits());
+ }
}
|
Second param to \Elastica\Search.php:count added to return full result
|
ruflin_Elastica
|
train
|
81e5652ee448a8a98b3153fe0d65093397ef3621
|
diff --git a/aikif/dataTools/cls_datatable.py b/aikif/dataTools/cls_datatable.py
index <HASH>..<HASH> 100644
--- a/aikif/dataTools/cls_datatable.py
+++ b/aikif/dataTools/cls_datatable.py
@@ -3,6 +3,8 @@
#from cls_dataset import DataSet
import math
+import collections
+from collections import Counter
class DataTable(object):
"""
@@ -89,7 +91,19 @@ class DataTable(object):
else:
print ("TODO ")
return -44 # yes this will crash - fix this
-
+
+ def count_unique_values(self, colNum, colText, topN_values=10):
+ res = []
+ cols = collections.Counter()
+ for row in self.arr:
+ cols[row[colNum]] += 1
+ #print (colText, self.Dict2String(cols.most_common()[0:topN_values]))
+ res.append( colText + '=' + self.Dict2String(cols.most_common()[0:topN_values]))
+ print(colText + ' = ' , cols.most_common()[0:topN_values])
+ #return self.dict2list(cols.most_common()[0:topN_values])
+
+ return res
+
def select_where(self, where_col_list, where_value_list, col_name=''):
"""
selects rows from the array where col_list == val_list
@@ -139,7 +153,13 @@ class DataTable(object):
return result
+ def Dict2String(self, d):
+ res = ","
+ for k, v in d: # .iteritems():
+ res = res + k + str(v) + ','
+ return res
+ def dict2list(self, dct, keylist): return [dct[i] for i in keylist]
def update_where(self, col, value, where_col_list, where_value_list):
"""
diff --git a/aikif/mapper.py b/aikif/mapper.py
index <HASH>..<HASH> 100644
--- a/aikif/mapper.py
+++ b/aikif/mapper.py
@@ -127,9 +127,13 @@ class Mapper:
for row_num, col in enumerate(headers):
if col != '':
vals = l_dataset.get_distinct_values_from_cols([col])
- print(vals)
+ #print(vals)
l_map.append('column:count:distinct:' + str(row_num) + '=' + str(len(vals[0])) )
-
+ col_vals = l_dataset.count_unique_values(row_num, col, 4)
+ print('col_vals=', col_vals)
+ for val_num, v in enumerate(col_vals):
+ l_map.append('column:values:' + col + ':' + str(val_num) + '=' + v )
+ #l_map.append('column:values:top5:' + str(row_num) + '=' + col_vals)
return l_map
def create_map_from_file(self, data_filename):
@@ -151,10 +155,20 @@ class Mapper:
f.write('filename:source=' + data_filename + '\n')
f.write('filename:rule=' + op_filename + '\n\n')
for row in l_map:
- f.write(row + '\n')
+ print('ROW = ' , row)
+ if type(row) is str:
+ f.write(row + '\n')
+ else:
+ for num, v in enumerate(row):
+ f.write(v)
+def List2String(l):
+ res = ""
+ for v in l:
+ res = res + v
+ return res
class MapRule:
diff --git a/data/raw/sample-filelist-for-AIKIF.csv.rule b/data/raw/sample-filelist-for-AIKIF.csv.rule
index <HASH>..<HASH> 100644
--- a/data/raw/sample-filelist-for-AIKIF.csv.rule
+++ b/data/raw/sample-filelist-for-AIKIF.csv.rule
@@ -8,7 +8,12 @@ column:name:2=path
column:name:3=size
column:name:4=date
column:count:distinct:0=183
+column:values:fullFilename:0=fullFilename=,T:\user\dev\src\python\AI\.git\hooks\post-receive.sample1,P:\events\Christmas2013\20131225_075940.jpg1,P:\events\Christmas2013\20131225_163743.jpg1,P:\events\Christmas2013\20131225_130915.jpg1,
column:count:distinct:1=182
+column:values:name:0=name=,Thumbs.db2,nounList.txt1,20131225_161305.jpg1,20131225_084300.jpg1,
column:count:distinct:2=8
+column:values:path:0=path=,P:\events\Christmas2013131,T:\user\dev\src\python\AI\data22,T:\user\dev\src\python\AI\.git\hooks11,T:\user\dev\src\python\AI\AI9,
column:count:distinct:3=182
+column:values:size:0=size=,02,35402711,23829091,31866761,
column:count:distinct:4=162
+column:values:date:0=date=,2013-09-03 22:33:2711,2014-03-16 14:55:443,2014-03-16 14:55:472,2014-03-16 14:55:432,
|
create map from file gets info on distinct values - not finished
|
acutesoftware_AIKIF
|
train
|
4ec7cdc2f7741265556c90f9a0e7f0355d47ec59
|
diff --git a/lib/jimson/client.rb b/lib/jimson/client.rb
index <HASH>..<HASH> 100644
--- a/lib/jimson/client.rb
+++ b/lib/jimson/client.rb
@@ -12,10 +12,12 @@ module Jimson
rand(10**12)
end
- def initialize(url)
+ def initialize(url, opts={})
@url = url
URI.parse(@url) # for the sake of validating the url
@batch = []
+ @opts = opts
+ @opts[:content_type] = 'application/json'
end
def process_call(sym, args)
@@ -41,7 +43,7 @@ module Jimson
'params' => args,
'id' => self.class.make_id
})
- resp = RestClient.post(@url, post_data, :content_type => 'application/json')
+ resp = RestClient.post(@url, post_data, @opts)
if resp.nil? || resp.body.nil? || resp.body.empty?
raise Client::Error::InvalidResponse.new
end
@@ -51,7 +53,7 @@ module Jimson
def send_batch_request(batch)
post_data = MultiJson.encode(batch)
- resp = RestClient.post(@url, post_data, :content_type => 'application/json')
+ resp = RestClient.post(@url, post_data, @opts)
if resp.nil? || resp.body.nil? || resp.body.empty?
raise Client::Error::InvalidResponse.new
end
@@ -152,8 +154,8 @@ module Jimson
helper.send_batch
end
- def initialize(url)
- @helper = ClientHelper.new(url)
+ def initialize(url, opts={})
+ @helper = ClientHelper.new(url, opts)
end
def method_missing(sym, *args, &block)
diff --git a/spec/client_spec.rb b/spec/client_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/client_spec.rb
+++ b/spec/client_spec.rb
@@ -65,6 +65,14 @@ module Jimson
client = Client.new(SPEC_URL)
client.foo(1,2,3).should == 42
end
+
+ it "sends a valid JSON-RPC request with custom options" do
+ response = MultiJson.encode(BOILERPLATE.merge({'result' => 42}))
+ RestClient.should_receive(:post).with(SPEC_URL, @expected, {:content_type => 'application/json', :timeout => 10000}).and_return(@resp_mock)
+ @resp_mock.should_receive(:body).at_least(:once).and_return(response)
+ client = Client.new(SPEC_URL, :timeout => 10000)
+ client.foo(1,2,3).should == 42
+ end
end
end
|
Allow options to be given to RestClient.
|
chriskite_jimson
|
train
|
9197a91fd797ba95a39045bc75100529eea30ddc
|
diff --git a/src/fastener.js b/src/fastener.js
index <HASH>..<HASH> 100644
--- a/src/fastener.js
+++ b/src/fastener.js
@@ -83,12 +83,10 @@ export const queryMove = R.curry((move, b, f, z) =>
const bwd = (move, z) => {
switch (move) {
- case downHead: return up
- case downLast: return up
case left: return right
case right: return left
case up: return downTo(keyOf(z))
- default: throw new Error(`Cannot invert ${move}`)
+ default: return up
}
}
|
Generalize bwd based on closed world assumption.
|
polytypic_fastener
|
train
|
1a9121b3e65d04b05242ba82b5b56751667c5437
|
diff --git a/lib/wed/wed.js b/lib/wed/wed.js
index <HASH>..<HASH> 100644
--- a/lib/wed/wed.js
+++ b/lib/wed/wed.js
@@ -89,18 +89,54 @@ Editor.prototype.init = log.wrap(function (widget, options) {
this.options = options;
- // $gui_root and root represent the document root in the HTML elements
- // displayed. The top level element of the XML document being
- // edited will be the single child of $gui_root/root.
- this.$gui_root = $("<div class='wed-document'/>");
+ // This structure will wrap around the document to be edited.
+ var $framework = $('\
+<div class="row">\
+ <div id="sidebar" class="col-lg-2 col-md-2 col-sm-2"/>\
+ <div id="wed-frame" class="col-lg-10 col-md-10 col-sm-10">\
+ <div class="row">\
+ <div class="progress">\
+ <span></span>\
+ <div id="validation-progress" class="progress-bar" style="width: 0%"/>\
+ </div>\
+ </div>\
+ <div class="row">\
+ <input class="wed-comp-field" type="text"></input>\
+ <div class="wed-alert-layer"><div class="wed-alert-layer-contents"/></div>\
+ <div class="wed-document"><span class="root-here"/></div>\
+ </div>\
+ </div>\
+</div>');
- this.$widget.wrapInner(this.$gui_root);
- // jQuery does not update this.$gui_root to reflect its position in the
- // DOM tree.
- this.$gui_root = $(this.widget.childNodes[0]);
+ //
+ // Grab all the references we need while $framework does not yet contain
+ // the document to be edited. (Faster!)
+ //
+ // $gui_root represents the document root in the HTML elements
+ // displayed. The top level element of the XML document being
+ // edited will be the single child of $gui_root.
+ this.$gui_root = $framework.find('.wed-document');
this.gui_root = this.$gui_root.get(0);
+ this.$sidebar = $framework.find("#sidebar");
+
+ this.$validation_progress = $framework.find("#validation-progress");
+ this.$validation_message = this.$validation_progress.prev('span');
+
+ this._$input_field = $framework.find(".wed-comp-field");
+
+ this.alert_layer = $framework.find(".wed-alert-layer")[0];
+ this.alert_layer_contents = this.alert_layer.childNodes[0];
+
+ // Insert the framework and put the document in its proper place.
+ var $root_placeholder = $framework.find(".root-here");
+ if (widget.firstChild)
+ $root_placeholder.replaceWith(widget.firstChild);
+ else
+ $root_placeholder.remove();
+ this.$widget.append($framework);
+
// $data_root is the document we are editing, $gui_root will become
// decorated with all kinds of HTML elements so we keep the two
// separate.
@@ -128,23 +164,6 @@ Editor.prototype.init = log.wrap(function (widget, options) {
log.error("wed cannot save data due " +
"to the absence of a save_url option");
- this.$gui_root.wrap('\
-<div class="row">\
-<div id="wed-frame" class="col-lg-10 col-md-10 col-sm-10">\
-<div class="row"/></div></div>');
- this.$sidebar = $('<div id="sidebar" class="col-lg-2 col-md-2 col-sm-2"/>');
- this.$widget.find('.row').first().prepend(this.$sidebar);
- // Needed by Validator
- this.$gui_root.parent().before("\
-<div class='row'>\
-<div class='progress'>\
-<span></span>\
-<div id='validation-progress' \
-class='progress-bar' \
-style='width: 0%'></div></div></div>");
-
- this.$validation_progress = this.$widget.find("#validation-progress");
- this.$validation_message = this.$validation_progress.prev('span');
// We duplicate data-parent on the toggles and on the collapsible
// elements due to a bug in Bootstrap 3.0.0 RC2. See
@@ -190,18 +209,9 @@ class="panel-collapse collapse">\
this._current_dropdown = undefined;
this._$fake_caret = $("<span class='_wed_caret' contenteditable='false'> </span>");
- this._$input_field = $("<input class='wed-comp-field' type='text'></input>");
this._fake_caret = undefined;
this._refreshing_caret = 0;
- this.$gui_root.before(this._$input_field);
-
- this.alert_layer = $("\
-<div class='wed-alert-layer'><div class='wed-alert-layer-contents'>\
-</div></div>").get(0);
- this.$gui_root.before(this.alert_layer);
- this.alert_layer_contents = this.alert_layer.childNodes[0];
-
this._namespace_modal = this.makeModal();
this._namespace_modal.setTitle("Assign names for namespaces");
this._namespace_modal.addOkCancel();
|
Combined into a single $framework the piecemeal changes to the DOM tree that would create the structure around the document.
|
mangalam-research_wed
|
train
|
eda9895f9c32cc4ab1b788231911c390f601ed45
|
diff --git a/core/maths/mathgen/src/main/java/org/arakhne/afc/math/MathUtil.java b/core/maths/mathgen/src/main/java/org/arakhne/afc/math/MathUtil.java
index <HASH>..<HASH> 100644
--- a/core/maths/mathgen/src/main/java/org/arakhne/afc/math/MathUtil.java
+++ b/core/maths/mathgen/src/main/java/org/arakhne/afc/math/MathUtil.java
@@ -60,9 +60,9 @@ public final class MathUtil {
* @return the sign of the argument
*/
@Pure
- @Inline(value = "($1 == 0.) ? 0 : (($1 < -0.) ? -1 : 1)")
+ @Inline(value = "($1 == 0. || Double.isNaN($1)) ? 0 : (($1 < -0.) ? -1 : 1)")
public static int sign(double value) {
- return (value == 0.) ? 0 : ((value < -0.) ? -1 : 1);
+ return (value == 0. || Double.isNaN(value)) ? 0 : ((value < -0.) ? -1 : 1);
}
|
[mathgen] Add the support of NaN to MathUtil.sign().
|
gallandarakhneorg_afc
|
train
|
6c263f615070d15543a2e697b55bd6b06c39373d
|
diff --git a/keyring/backends/macOS/__init__.py b/keyring/backends/macOS/__init__.py
index <HASH>..<HASH> 100644
--- a/keyring/backends/macOS/__init__.py
+++ b/keyring/backends/macOS/__init__.py
@@ -1,5 +1,6 @@
import platform
import os
+import warnings
from ...backend import KeyringBackend
from ...errors import PasswordSetError
@@ -68,4 +69,9 @@ class Keyring(KeyringBackend):
)
def with_keychain(self, keychain):
+ warnings.warn(
+ "macOS.Keyring.with_keychain is deprecated. Use with_properties instead.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
return self.with_properties(keychain=keychain)
|
Deprecate macOS.Keyring.with_keychain, superseded by with_properties.
|
jaraco_keyring
|
train
|
738959b7482be92365bac5b1ae6379784ce268d5
|
diff --git a/addon/components/frost-action-bar.js b/addon/components/frost-action-bar.js
index <HASH>..<HASH> 100644
--- a/addon/components/frost-action-bar.js
+++ b/addon/components/frost-action-bar.js
@@ -236,15 +236,11 @@ export default Component.extend({
result = control
}
- result.classNames = ''
-
- if (result.disabled) {
- result.classNames += 'disabled'
- }
-
- if (!result.isVisible) {
- result.classNames += ' invisible'
- }
+ // set classNames
+ result.classNames = [
+ result.disabled ? 'disabled' : '',
+ !result.isVisible ? 'invisible' : ''
+ ].join(' ')
return result
},
|
cleanup setting classNames for moreActions and improve docs more
|
ciena-frost_ember-frost-object-browser
|
train
|
8cd0e618671a87441f32689c495a7a77b173f10e
|
diff --git a/src/leaflet-panel-layers.js b/src/leaflet-panel-layers.js
index <HASH>..<HASH> 100755
--- a/src/leaflet-panel-layers.js
+++ b/src/leaflet-panel-layers.js
@@ -50,16 +50,21 @@ L.Control.PanelLayers = L.Control.Layers.extend({
addBaseLayer: function (layer, name, group) {
layer.name = name || layer.name || '';
this._addLayer(layer, false, group);
- this._update();
+ this._updateLayers();
return this;
},
addOverlay: function (layer, name, group) {
layer.name = name || layer.name || '';
this._addLayer(layer, true, group);
- this._update();
+ this._updateLayers();
return this;
},
+
+ _updateLayers: function () {
+ this._groups = {};
+ this._update();
+ },
_instanceLayer: function(layerDef) {
if(layerDef instanceof L.Class)
|
Fixed the rendering when there are groups and a layer is added
When a layer is added after the initial creation, any groups vanish. The `_update()` function clears out the html of the base and overlay divs, and then recreates all the items using `_addItem`. However the '_groups' hash still contains the previous html element, so new groups do not get created. This change clears out the `_groups` hash before calling `_update`.
|
stefanocudini_leaflet-panel-layers
|
train
|
ce24514e7e50b1e2cb75f56087c95e643c31418d
|
diff --git a/resource_openstack_networking_network_v2.go b/resource_openstack_networking_network_v2.go
index <HASH>..<HASH> 100644
--- a/resource_openstack_networking_network_v2.go
+++ b/resource_openstack_networking_network_v2.go
@@ -244,6 +244,11 @@ func waitForNetworkDelete(networkingClient *gophercloud.ServiceClient, networkId
log.Printf("[DEBUG] Successfully deleted OpenStack Network %s", networkId)
return n, "DELETED", nil
}
+ if errCode, ok := err.(gophercloud.ErrUnexpectedResponseCode); ok {
+ if errCode.Actual == 409 {
+ return n, "ACTIVE", nil
+ }
+ }
return n, "ACTIVE", err
}
diff --git a/resource_openstack_networking_subnet_v2.go b/resource_openstack_networking_subnet_v2.go
index <HASH>..<HASH> 100644
--- a/resource_openstack_networking_subnet_v2.go
+++ b/resource_openstack_networking_subnet_v2.go
@@ -385,6 +385,11 @@ func waitForSubnetDelete(networkingClient *gophercloud.ServiceClient, subnetId s
log.Printf("[DEBUG] Successfully deleted OpenStack Subnet %s", subnetId)
return s, "DELETED", nil
}
+ if errCode, ok := err.(gophercloud.ErrUnexpectedResponseCode); ok {
+ if errCode.Actual == 409 {
+ return s, "ACTIVE", nil
+ }
+ }
return s, "ACTIVE", err
}
|
provider/openstack: gophercloud migration: Account for subnets and networks still in use
|
terraform-providers_terraform-provider-openstack
|
train
|
9f03f1ff14c7f1b1d3461b2b4a6893f0c6ba992b
|
diff --git a/core/coreapi/unixfs.go b/core/coreapi/unixfs.go
index <HASH>..<HASH> 100644
--- a/core/coreapi/unixfs.go
+++ b/core/coreapi/unixfs.go
@@ -217,13 +217,13 @@ func (api *UnixfsAPI) Ls(ctx context.Context, p path.Path, opts ...options.Unixf
}
func (api *UnixfsAPI) processLink(ctx context.Context, linkres ft.LinkResult, settings *options.UnixfsLsSettings) coreiface.DirEntry {
+ if linkres.Err != nil {
+ return coreiface.DirEntry{Err: linkres.Err}
+ }
+
lnk := coreiface.DirEntry{
Name: linkres.Link.Name,
Cid: linkres.Link.Cid,
- Err: linkres.Err,
- }
- if lnk.Err != nil {
- return lnk
}
switch lnk.Cid.Type() {
|
fix(unixfs): check for errors before dereferencing the link
If there's an error, the link will be nil, and this will panic.
|
ipfs_go-ipfs
|
train
|
8f7e10e3d6ee157b6937454333343f864aa8d02a
|
diff --git a/pypsa/components.py b/pypsa/components.py
index <HASH>..<HASH> 100644
--- a/pypsa/components.py
+++ b/pypsa/components.py
@@ -57,8 +57,9 @@ from .io import (export_to_csv_folder, import_from_csv_folder,
import_series_from_dataframe, import_from_pandapower_net)
from .pf import (network_lpf, sub_network_lpf, network_pf,
- sub_network_pf, find_bus_controls, find_slack_bus, calculate_Y,
- calculate_PTDF, calculate_B_H, calculate_dependent_values)
+ sub_network_pf, find_bus_controls, find_slack_bus, find_cycles,
+ calculate_Y, calculate_PTDF, calculate_B_H,
+ calculate_dependent_values)
from .contingency import (calculate_BODF, network_lpf_contingency,
network_sclopf)
@@ -831,6 +832,9 @@ class Network(Basic):
for c in self.iterate_components(self.passive_branch_components):
c.df["sub_network"] = c.df.bus0.map(self.buses["sub_network"])
+ for sub in self.sub_networks.obj:
+ find_cycles(sub)
+
def iterate_components(self, components=None, skip_empty=True):
if components is None:
components = self.all_components
|
components: find cycles when determining network topology
|
PyPSA_PyPSA
|
train
|
8f8cc7f5b81a2b301a3841b4bdceb1001d2b70d8
|
diff --git a/structr-ui/src/main/resources/structr/js/model.js b/structr-ui/src/main/resources/structr/js/model.js
index <HASH>..<HASH> 100644
--- a/structr-ui/src/main/resources/structr/js/model.js
+++ b/structr-ui/src/main/resources/structr/js/model.js
@@ -755,7 +755,7 @@ StructrElement.prototype.exists = function() {
var obj = this;
var hasChildren = obj.childrenIds && obj.childrenIds.length;
- var isComponent = obj.syncedNodes.length;
+ var isComponent = obj.syncedNodes && obj.syncedNodes.length;
var isMasterComponent = (isComponent && hasChildren);
|
avoid error when obj.synchedNodes is undefined
|
structr_structr
|
train
|
0880383c823cd9d0ae5b60ddef1ed60b1f348878
|
diff --git a/auth/jwt/middleware.go b/auth/jwt/middleware.go
index <HASH>..<HASH> 100644
--- a/auth/jwt/middleware.go
+++ b/auth/jwt/middleware.go
@@ -9,11 +9,13 @@ import (
"github.com/go-kit/kit/endpoint"
)
+type key string
+
const (
// JWTTokenContextKey holds the key used to store a JWT Token in the context
- JWTTokenContextKey = "JWTToken"
+ JWTTokenContextKey key = "JWTToken"
// JWTClaimsContxtKey holds the key used to store the JWT Claims in the context
- JWTClaimsContextKey = "JWTClaims"
+ JWTClaimsContextKey key = "JWTClaims"
)
var (
|
Don't export the type of the context keys to avoid collisions
|
go-kit_kit
|
train
|
33632505a0b85fe657072d2be452db472dbc84e1
|
diff --git a/src/Illuminate/Collections/Arr.php b/src/Illuminate/Collections/Arr.php
index <HASH>..<HASH> 100644
--- a/src/Illuminate/Collections/Arr.php
+++ b/src/Illuminate/Collections/Arr.php
@@ -142,7 +142,7 @@ class Arr
* Get all of the given array except for a specified array of keys.
*
* @param array $array
- * @param array|string $keys
+ * @param array|string|int|float $keys
* @return array
*/
public static function except($array, $keys)
@@ -256,7 +256,7 @@ class Arr
* Remove one or many array items from a given array using "dot" notation.
*
* @param array $array
- * @param array|string $keys
+ * @param array|string|int|float $keys
* @return void
*/
public static function forget(&$array, $keys)
diff --git a/tests/Support/SupportArrTest.php b/tests/Support/SupportArrTest.php
index <HASH>..<HASH> 100644
--- a/tests/Support/SupportArrTest.php
+++ b/tests/Support/SupportArrTest.php
@@ -149,6 +149,10 @@ class SupportArrTest extends TestCase
$this->assertEquals(['name' => 'taylor'], Arr::except($array, 'framework'));
$this->assertEquals(['name' => 'taylor', 'framework' => ['name' => 'Laravel']], Arr::except($array, 'framework.language'));
$this->assertEquals(['framework' => ['language' => 'PHP']], Arr::except($array, ['name', 'framework.name']));
+
+ $array = [1 => 'hAz', 2 => [5 => 'foo', 12 => 'baz']];
+ $this->assertEquals([1 => 'hAz'], Arr::except($array, 2));
+ $this->assertEquals([1 => 'hAz', 2 => [12 => 'baz']], Arr::except($array, 2.5));
}
public function testExists()
@@ -947,6 +951,14 @@ class SupportArrTest extends TestCase
$array = ['emails' => ['joe@example.com' => ['name' => 'Joe'], 'jane@localhost' => ['name' => 'Jane']]];
Arr::forget($array, ['emails.joe@example.com', 'emails.jane@localhost']);
$this->assertEquals(['emails' => ['joe@example.com' => ['name' => 'Joe']]], $array);
+
+ $array = ['name' => 'hAz', '1' => 'test', 2 => 'bAz'];
+ Arr::forget($array, 1);
+ $this->assertEquals(['name' => 'hAz', 2 => 'bAz'], $array);
+
+ $array = [2 => [1 =>'products', 3 => 'users']];
+ Arr::forget($array, 2.3);
+ $this->assertEquals([2 => [1 =>'products']], $array);
}
public function testWrap()
|
improve doctype of forget/except method of Arr class (#<I>)
|
laravel_framework
|
train
|
8dee21b80e3bc09f797b0bbb5bf6803fac159051
|
diff --git a/codeTemplates/tests/bootstrap.php b/codeTemplates/tests/bootstrap.php
index <HASH>..<HASH> 100644
--- a/codeTemplates/tests/bootstrap.php
+++ b/codeTemplates/tests/bootstrap.php
@@ -5,10 +5,15 @@ use EdmondsCommerce\DoctrineStaticMeta\ConfigInterface;
use EdmondsCommerce\DoctrineStaticMeta\Schema\Database;
use EdmondsCommerce\DoctrineStaticMeta\SimpleEnv;
-SimpleEnv::setEnv(Config::getProjectRootDirectory() . '/.env');
-$server = $_SERVER;
-$server[ConfigInterface::paramDbName] .= '_test';
-$config = new Config($server);
-$database = new Database($config);
-$database->drop(true);
-$database->create(true);
+call_user_func(
+ function () {
+ SimpleEnv::setEnv(Config::getProjectRootDirectory() . '/.env');
+ $server = $_SERVER;
+ $server[ConfigInterface::paramDbName] .= '_test';
+ $config = new Config($server);
+ (new Database($config))
+ ->drop(true)
+ ->create(true)
+ ->close();
+ }
+);
|
stricter sure enforcing
|
edmondscommerce_doctrine-static-meta
|
train
|
263238c1aeb57d8f2f3a2ffb85019d8392468d1f
|
diff --git a/lib/synapse/config_generator/haproxy.rb b/lib/synapse/config_generator/haproxy.rb
index <HASH>..<HASH> 100644
--- a/lib/synapse/config_generator/haproxy.rb
+++ b/lib/synapse/config_generator/haproxy.rb
@@ -1342,13 +1342,16 @@ class Synapse::ConfigGenerator
return true
end
+ # capture2e runs a shell command and captures both stdout/stderr streams.
+ # It returns the combined streams (res) and the exit code (exit_code).
+ # See: https://docs.ruby-lang.org/en/2.0.0/Open3.html#method-i-capture2e.
res, exit_code = Open3.capture2e(opts['check_command'])
success = exit_code.success?
unless success
- log.error "synapse: invalid generated HAProxy config (checked via #{opts['check_command']}): #{res};\nexited with #{exit_code.exitstatus}"
+ log.error "synapse: invalid generated HAProxy config (checked via #{opts['check_command']}): exited with #{exit_code.exitstatus}: #{res}"
end
- statsd_increment("synapse.haproxy.check_config", ["success:#{success}"])
+ statsd_increment("synapse.haproxy.check_config", ["status:#{success}"])
log.info "synapse: checked HAProxy config located at #{opts['candidate_config_file_path']}; status: #{success}"
return success
|
Clarify capture2e usage
In addition, the log outputted by the HAProxy config check is also
condensed into one line for simpler searching.
|
airbnb_synapse
|
train
|
16037201f7348b88cd7b41f19361faf1194a035c
|
diff --git a/options.go b/options.go
index <HASH>..<HASH> 100644
--- a/options.go
+++ b/options.go
@@ -14,16 +14,25 @@ type OnRetryFunc func(n uint, err error)
type DelayTypeFunc func(n uint, config *config) time.Duration
type config struct {
- attempts uint
- delay time.Duration
- onRetry OnRetryFunc
- retryIf RetryIfFunc
- delayType DelayTypeFunc
+ attempts uint
+ delay time.Duration
+ onRetry OnRetryFunc
+ retryIf RetryIfFunc
+ delayType DelayTypeFunc
+ lastErrorOnly bool
}
// Option represents an option for retry.
type Option func(*config)
+// return the direct last error that came from the retried function
+// default is false (return wrapped error with everything
+func LastErrorOnly(lastErrorOnly bool) Option {
+ return func(c *config) {
+ c.lastErrorOnly = lastErrorOnly
+ }
+}
+
// Attempts set count of retry
// default is 10
func Attempts(attempts uint) Option {
diff --git a/retry.go b/retry.go
index <HASH>..<HASH> 100644
--- a/retry.go
+++ b/retry.go
@@ -83,6 +83,7 @@ func Do(retryableFunc RetryableFunc, opts ...Option) error {
onRetry: func(n uint, err error) {},
retryIf: func(err error) bool { return true },
delayType: BackOffDelay,
+ lastErrorOnly: false,
}
//apply opts
@@ -117,6 +118,9 @@ func Do(retryableFunc RetryableFunc, opts ...Option) error {
n++
}
+ if config.lastErrorOnly {
+ return errorLog.LastError()
+ }
return errorLog
}
@@ -153,3 +157,15 @@ func lenWithoutNil(e Error) (count int) {
func (e Error) WrappedErrors() []error {
return e
}
+
+func (e Error) LastError() error {
+ var lastErr error
+ for _, err := range e.WrappedErrors() {
+ if err != nil{
+ lastErr = err
+ } else {
+ return lastErr
+ }
+ }
+ return lastErr
+}
\ No newline at end of file
diff --git a/retry_test.go b/retry_test.go
index <HASH>..<HASH> 100644
--- a/retry_test.go
+++ b/retry_test.go
@@ -6,6 +6,7 @@ import (
"time"
"github.com/stretchr/testify/assert"
+ "fmt"
)
func TestDoAllFailed(t *testing.T) {
@@ -92,3 +93,17 @@ func TestFixedSleep(t *testing.T) {
assert.Error(t, err)
assert.True(t, dur < 500*time.Millisecond, "3 times default retry is shorter then 500ms")
}
+
+func TestLastErrorOnly(t *testing.T) {
+ var retrySum uint
+ err := Do(
+ func() error { return errors.New(fmt.Sprintf("%d", retrySum)) },
+ OnRetry(func(n uint, err error) { retrySum += 1 }),
+ Delay(time.Nanosecond),
+ LastErrorOnly(true),
+ )
+ if err.Error() != "9"{
+ fmt.Println(err.Error())
+ t.Fail()
+ }
+}
\ No newline at end of file
|
add last error option and implementation including tests
|
avast_retry-go
|
train
|
fa94df3cd162b39b878c641a0b2aeee635589faf
|
diff --git a/cgutils/commands/top.py b/cgutils/commands/top.py
index <HASH>..<HASH> 100644
--- a/cgutils/commands/top.py
+++ b/cgutils/commands/top.py
@@ -69,10 +69,10 @@ class CGTopStats:
self.delta['cpu'] = 0
self.delta['time'] = 0
- def _get_skelton_stats(self, cg):
+ def _get_skelton_stats(self, name, n_procs):
return {
- 'name': cg.fullname,
- 'n_procs': cg.n_procs,
+ 'name': name,
+ 'n_procs': n_procs,
'cpu.user': 0.0,
'cpu.system': 0.0,
'bio.read': 0.0,
@@ -86,7 +86,7 @@ class CGTopStats:
cgroup_stats = []
for name, cgroup_list in self.cgroups.iteritems():
cpu = mem = bio = None
- proc_exists = False
+ pids = []
for _cgroup in cgroup_list:
subsys_name = _cgroup.subsystem.NAME
if subsys_name == 'cpuacct':
@@ -95,14 +95,15 @@ class CGTopStats:
mem = _cgroup
elif subsys_name == 'blkio':
bio = _cgroup
- else: pass
- if _cgroup.n_procs > 0:
- proc_exists = True
- if self.options.hide_empty and not proc_exists:
+ _cgroup.update_pids()
+ pids += _cgroup.pids
+
+ n_procs = len(set(pids))
+ if self.options.hide_empty and n_procs == 0:
continue
active = False
- stats = self._get_skelton_stats(_cgroup)
+ stats = self._get_skelton_stats(_cgroup.fullname, n_procs)
if cpu:
def percent(delta):
|
Fix top on enumerating the number of processes
Original code unexpectedly used only the number of processes
in a blkio cgroup. Fixed code gets the number of processes
by enumerating processes of all cgroups and calculating a
sum of sets of them.
|
peo3_cgroup-utils
|
train
|
d6168cfed15b3a34464ebe2248aa80075e64b184
|
diff --git a/bin/wsdump.py b/bin/wsdump.py
index <HASH>..<HASH> 100755
--- a/bin/wsdump.py
+++ b/bin/wsdump.py
@@ -59,7 +59,7 @@ class InteractiveConsole(code.InteractiveConsole):
def write(self, data):
sys.stdout.write("\033[2K\033[E")
# sys.stdout.write("\n")
- sys.stdout.write("\033[34m" + data + "\033[39m")
+ sys.stdout.write("\033[34m< " + data + "\033[39m")
sys.stdout.write("\n> ")
sys.stdout.flush()
@@ -117,9 +117,9 @@ def main():
opcode, data = recv()
msg = None
if not args.verbose and opcode in OPCODE_DATA:
- msg = "< %s" % data
+ msg = data
elif args.verbose:
- msg = "< %s: %s" % (websocket.ABNF.OPCODE_MAP.get(opcode), data)
+ msg = "%s: %s" % (websocket.ABNF.OPCODE_MAP.get(opcode), data)
if msg is not None:
console.write(msg)
|
console decoration of wsdump.py inside InteractiveConsole
|
websocket-client_websocket-client
|
train
|
46fcf5fb67d6dca667176de2a095108f3865d61a
|
diff --git a/lib/eventsListeners.js b/lib/eventsListeners.js
index <HASH>..<HASH> 100644
--- a/lib/eventsListeners.js
+++ b/lib/eventsListeners.js
@@ -87,12 +87,13 @@ function throwError(files) {
function violationsListener(options, violations) {
var failOnErrors = _.isObject(options) ? _.get(options, 'errors', false) : options,
failOnWarnings = _.isObject(options) ? _.get(options, 'warnings', false) : options,
- fatalErrors = _.result(violations, 'fatalErrors', false),
- errors = _.result(violations, 'errors', false),
- warnings = _.result(violations, 'warnings', false),
+ fatalErrors = _.result(violations, 'fatalErrors', []),
+ errors = _.result(violations, 'errors', []),
+ warnings = _.result(violations, 'warnings', []),
hasFatalErrors = fatalErrors.length > 0,
hasErrors = errors.length > 0,
- hasWarnings = warnings.length > 0;
+ hasWarnings = warnings.length > 0,
+ fail = hasFatalErrors || (failOnErrors && hasErrors) || (failOnWarnings && hasWarnings);
if (hasFatalErrors) {
logError('fatalError', fatalErrors);
@@ -106,7 +107,7 @@ function violationsListener(options, violations) {
logError('warn', warnings);
}
- if (hasFatalErrors || (failOnErrors && hasErrors) || (failOnWarnings && hasWarnings)) {
+ if (fail) {
var files = _.uniq([].concat(fatalErrors, errors, warnings));
throwError(files);
|
fix(violationsListener): now uses `[]` instead of `false` for getting `violations`
|
Fitbit_webpack-cluster
|
train
|
cd159f7caf469554ec4088f6965ca053faaf6e13
|
diff --git a/salt/modules/file.py b/salt/modules/file.py
index <HASH>..<HASH> 100644
--- a/salt/modules/file.py
+++ b/salt/modules/file.py
@@ -1892,7 +1892,13 @@ def get_selinux_context(path):
salt '*' file.get_selinux_context /etc/hosts
'''
out = __salt__['cmd.run']('ls -Z {0}'.format(path))
- return out.split(' ')[4]
+
+ try:
+ ret = re.search('\w+:\w+:\w+:\w+', out).group(0)
+ except AttributeError:
+ ret = "No selinux context information is available for {0}".format(path)
+
+ return ret
def set_selinux_context(path,
|
Modified the parsing the output from ls -lZ to use regex instead of split() in order to resolve issue <I>
|
saltstack_salt
|
train
|
ce972283c81ce789f0eafd4207876823b6904f63
|
diff --git a/spec/lib/radiant/resource_responses_spec.rb b/spec/lib/radiant/resource_responses_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/lib/radiant/resource_responses_spec.rb
+++ b/spec/lib/radiant/resource_responses_spec.rb
@@ -1,5 +1,3 @@
-require File.dirname(__FILE__) + "/../../spec_helper"
-
describe "Radiant::ResourceResponses" do
before :each do
@klass = Class.new(ApplicationController)
|
Removed the unclear spec_helper require since it's required by the .rspec file.
|
radiant_radiant
|
train
|
856b6dacbc7265bcf08aaaeabea2ad6dea855507
|
diff --git a/CHANGELOG b/CHANGELOG
index <HASH>..<HASH> 100644
--- a/CHANGELOG
+++ b/CHANGELOG
@@ -1,6 +1,15 @@
CHANGELOG
---------
+changes since "cdk preview 20011201"
+------------------------------------
+* splitted libraries from CDK source code
+* fixed some javadoc issues
+* added Aelfred2 SAX2 parser
+* added FileConvertor application
+* added constructors that take Writer/Reader for several IO classes
+* fixed reading of CML output of XDrawChem
+
changes since "cdk preview 20011013"
------------------------------------
* More classes make use of new LoggingTool
diff --git a/org/openscience/cdk/io/CMLReader.java b/org/openscience/cdk/io/CMLReader.java
index <HASH>..<HASH> 100644
--- a/org/openscience/cdk/io/CMLReader.java
+++ b/org/openscience/cdk/io/CMLReader.java
@@ -58,15 +58,33 @@ public class CMLReader implements CDKConstants, ChemObjectReader {
* @param input Reader type input
*/
public CMLReader(Reader input) {
- logger = new org.openscience.cdk.tools.LoggingTool(
- this.getClass().getName());
- try {
- parser = new org.apache.xerces.parsers.SAXParser();
- this.input = input;
- } catch (Exception e) {
- logger.error("CMLReader: You found a serious bug! Please report it!");
- System.exit(1);
- }
+ logger = new org.openscience.cdk.tools.LoggingTool(this.getClass().getName());
+
+ boolean success = false;
+ if (!success) {
+ try {
+ parser = new org.apache.xerces.parsers.SAXParser();
+ logger.info("Using Xerces XML parser.");
+ success = true;
+ } catch (Exception e) {
+ logger.warn("Could not instantiate Xerces XML reader!");
+ }
+ }
+ // Xerces is prefered. Aelfred2 seems to ignore the entity handler. Removal of the
+ // DocType line will make Aelfred2 work properly.
+ if (!success) {
+ try {
+ parser = new gnu.xml.aelfred2.XmlReader();
+ logger.info("Using Aelfred2 XML parser.");
+ success = true;
+ } catch (Exception e) {
+ logger.warn("Could not instantiate Aelfred2 XML reader!");
+ }
+ }
+ if (!success) {
+ logger.error("Could not instantiate any XML parser!");
+ }
+ this.input = input;
}
diff --git a/org/openscience/cdk/io/cml/CMLResolver.java b/org/openscience/cdk/io/cml/CMLResolver.java
index <HASH>..<HASH> 100644
--- a/org/openscience/cdk/io/cml/CMLResolver.java
+++ b/org/openscience/cdk/io/cml/CMLResolver.java
@@ -31,14 +31,21 @@ import java.io.*;
public class CMLResolver implements EntityResolver {
+ private org.openscience.cdk.tools.LoggingTool logger;
+
+ public CMLResolver() {
+ logger = new org.openscience.cdk.tools.LoggingTool(this.getClass().getName());
+ }
+
public InputSource resolveEntity (String publicId, String systemId) {
- System.err.println("CMLResolver: resolving " + publicId + ", " + systemId);
+ logger.warn("CMLResolver: resolving " + publicId + ", " + systemId);
systemId = systemId.toLowerCase();
- if ((systemId.indexOf("cml-1999-05-15.dtd") != -1) || (systemId.indexOf("cml.dtd") != -1)) {
- return getCMLType( "org/openscience/cdk/io/cml/data/cml.dtd" );
- } else {
- return null;
- }
+ if ((systemId.indexOf("cml-1999-05-15.dtd") != -1) || (systemId.indexOf("cml.dtd") != -1)) {
+ return getCMLType( "org/openscience/cdk/io/cml/data/cml.dtd" );
+ } else {
+ logger.warn("Could not resolve " + systemId);
+ return null;
+ }
}
private InputSource getCMLType( String type ) {
diff --git a/org/openscience/cdk/io/cml/Convention.java b/org/openscience/cdk/io/cml/Convention.java
index <HASH>..<HASH> 100644
--- a/org/openscience/cdk/io/cml/Convention.java
+++ b/org/openscience/cdk/io/cml/Convention.java
@@ -161,11 +161,20 @@ public class Convention implements ConventionInterface {
break;
case BOND :
for (int i = 0; i < atts.getLength(); i++) {
- logger.debug("B2 " + atts.getQName(i) + "=" + atts.getValue(i));
- if (atts.getQName(i).equals("id")) {
- bondid.addElement(atts.getValue(i));
- logger.debug("B3 " + bondid);
- }
+ logger.debug("B2 " + atts.getQName(i) + "=" + atts.getValue(i));
+ if (atts.getQName(i).equals("id")) {
+ bondid.addElement(atts.getValue(i));
+ logger.debug("B3 " + bondid);
+ } else if (atts.getQName(i).equals("atomRefs")) {
+ // expect only two references
+ try {
+ StringTokenizer st = new StringTokenizer(atts.getValue(i));
+ bondARef1.addElement((String)st.nextElement());
+ bondARef2.addElement((String)st.nextElement());
+ } catch (Exception e) {
+ logger.error("Error in CML file: " + e.toString());
+ }
+ }
}
stereoGiven = false;
curRef = 0;
|
Fixed reading of CML written with XDrawChem (<URL>
|
cdk_cdk
|
train
|
22bca3b06ac6a6af5a45b5b07be225ea802cf00a
|
diff --git a/core/profiles/src/main/java/org/mobicents/slee/container/deployment/profile/jpa/ConcreteProfileEntityGenerator.java b/core/profiles/src/main/java/org/mobicents/slee/container/deployment/profile/jpa/ConcreteProfileEntityGenerator.java
index <HASH>..<HASH> 100644
--- a/core/profiles/src/main/java/org/mobicents/slee/container/deployment/profile/jpa/ConcreteProfileEntityGenerator.java
+++ b/core/profiles/src/main/java/org/mobicents/slee/container/deployment/profile/jpa/ConcreteProfileEntityGenerator.java
@@ -201,8 +201,14 @@ public class ConcreteProfileEntityGenerator {
if (profileAttribute.isUnique()) {
// just collect uniqueConstraints attributtes
uniqueConstraints.add(Introspector.decapitalize(pojoCmpAccessorSufix));
- }
- ClassGeneratorUtils.addAnnotation(Column.class.getName(), new LinkedHashMap<String, Object>(), ctMethod);
+ }
+
+ //String , primitive types , Array , Date will not be modified , only serialized data
+ LinkedHashMap<String,Object> map = new LinkedHashMap<String, Object>();
+ if (!returnType.isPrimitive() && ! returnType.getName().equals("java.lang.String"))
+ map.put("length", 512);
+
+ ClassGeneratorUtils.addAnnotation(Column.class.getName(), map, ctMethod);
}
// add usual setter
ctMethod = CtNewMethod.setter( "set" + pojoCmpAccessorSufix, genField );
@@ -311,6 +317,7 @@ public class ConcreteProfileEntityGenerator {
CtMethod getSerializableMethod = CtNewMethod.make(getSerializableMethodSrc, concreteArrayValueClass);
LinkedHashMap<String,Object> map = new LinkedHashMap<String, Object>();
map.put("name", "serializable");
+ map.put("length", 512);
//if (unique)map.put("unique", true);
ClassGeneratorUtils.addAnnotation(Column.class.getName(), map, getSerializableMethod);
concreteArrayValueClass.addMethod(getSerializableMethod);
diff --git a/core/profiles/src/main/java/org/mobicents/slee/container/deployment/profile/jpa/JPAProfileTable.java b/core/profiles/src/main/java/org/mobicents/slee/container/deployment/profile/jpa/JPAProfileTable.java
index <HASH>..<HASH> 100644
--- a/core/profiles/src/main/java/org/mobicents/slee/container/deployment/profile/jpa/JPAProfileTable.java
+++ b/core/profiles/src/main/java/org/mobicents/slee/container/deployment/profile/jpa/JPAProfileTable.java
@@ -26,6 +26,7 @@ import java.io.Serializable;
import javax.persistence.Entity;
import javax.persistence.Id;
+import javax.persistence.Column;
import javax.persistence.NamedQueries;
import javax.persistence.NamedQuery;
import javax.persistence.Table;
@@ -63,6 +64,7 @@ public class JPAProfileTable implements Serializable {
@Id
private String profileTableName;
+ @Column(length=512)
private ProfileSpecificationID profileSpecId;
private JPAProfileTable() {
|
Fixing "Profiles are not working with Mysql Database"
Update Issue <I>
Status: Fixed
Committed provided patch, with a change to size of <I>.
git-svn-id: <URL>
|
RestComm_jain-slee
|
train
|
bfd1e0cc908194ee159d3a5f4ce0375ac4798f43
|
diff --git a/examples/comment/reply_to_media_comments.py b/examples/comment/reply_to_media_comments.py
index <HASH>..<HASH> 100644
--- a/examples/comment/reply_to_media_comments.py
+++ b/examples/comment/reply_to_media_comments.py
@@ -54,8 +54,10 @@ for comment in tqdm(comments):
commenter = comment['user']['username']
text = comment['text']
# if using python3 change to:
- # bot.logger.info("Checking comment `{text}` from `{commenter}`".format(text=text,commenter=commenter))
- bot.logger.info("Checking comment from `{commenter}`".format(commenter=commenter))
+ if sys.version_info[0] < 3:
+ bot.logger.info(unicode("Checking comment `{text}` from `{commenter}`".format(text=text, commenter=commenter), 'utf-8'))
+ else:
+ bot.logger.info("Checking comment `{text}` from `{commenter}`".format(text=text,commenter=commenter))
# to save time, because you can't reply to yourself
if str(user_id) == bot.user_id:
bot.logger.error("You can't reply to yourself")
|
fixed utf-8 errors
fixed utf-8 errors for print comment['text'] in python versions < 3
|
instagrambot_instabot
|
train
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.