hash stringlengths 40 40 | diff stringlengths 131 26.7k | message stringlengths 7 694 | project stringlengths 5 67 | split stringclasses 1 value | diff_languages stringlengths 2 24 |
|---|---|---|---|---|---|
70bac0ac01c81fa40f4bd0970bd061b6b2f36a1e | diff --git a/src/Symfony/Bundle/AsseticBundle/DependencyInjection/Compiler/CheckClosureFilterPass.php b/src/Symfony/Bundle/AsseticBundle/DependencyInjection/Compiler/CheckClosureFilterPass.php
index <HASH>..<HASH> 100644
--- a/src/Symfony/Bundle/AsseticBundle/DependencyInjection/Compiler/CheckClosureFilterPass.php
+++ b/src/Symfony/Bundle/AsseticBundle/DependencyInjection/Compiler/CheckClosureFilterPass.php
@@ -23,7 +23,9 @@ class CheckClosureFilterPass implements CompilerPassInterface
{
public function process(ContainerBuilder $container)
{
- if ($container->hasDefinition('assetic.filter.closure.jar') && $container->hasParameter('assetic.filter.closure.jar')) {
+ if ($container->hasDefinition('assetic.filter.closure.jar')
+ && $container->hasParameter('assetic.filter.closure.jar')
+ && $container->getParameterBag()->resolveValue($container->getParameter('assetic.filter.closure.jar'))) {
$container->removeDefinition('assetic.filter.closure.api');
} elseif ($container->hasDefinition('assetic.filter.closure.api')) {
$container->removeDefinition('assetic.filter.closure.jar'); | [AsseticBundle] fixed previous PR | symfony_symfony | train | php |
159a761ad9ec61803a3f61e4b656e9dc515dcba3 | diff --git a/lib/couchrest/mixins/design_doc.rb b/lib/couchrest/mixins/design_doc.rb
index <HASH>..<HASH> 100644
--- a/lib/couchrest/mixins/design_doc.rb
+++ b/lib/couchrest/mixins/design_doc.rb
@@ -16,7 +16,6 @@ module CouchRest
def design_doc_slug
return design_doc_slug_cache if (design_doc_slug_cache && design_doc_fresh)
funcs = []
- self.design_doc ||= Design.new(default_design_doc)
design_doc['views'].each do |name, view|
funcs << "#{name}/#{view['map']}#{view['reduce']}"
end
diff --git a/lib/couchrest/mixins/document_queries.rb b/lib/couchrest/mixins/document_queries.rb
index <HASH>..<HASH> 100644
--- a/lib/couchrest/mixins/document_queries.rb
+++ b/lib/couchrest/mixins/document_queries.rb
@@ -12,7 +12,6 @@ module CouchRest
# name of the current class. Take the standard set of
# CouchRest::Database#view options.
def all(opts = {}, &block)
- self.design_doc ||= Design.new(default_design_doc)
unless design_doc_fresh
refresh_design_doc
end | Tidy up initialisations of design_doc which are no longer needed | couchrest_couchrest_model | train | rb,rb |
c0cf46e543ccf7585974e6aa73769079cf8fef7f | diff --git a/vprof/ui/flame_graph.js b/vprof/ui/flame_graph.js
index <HASH>..<HASH> 100644
--- a/vprof/ui/flame_graph.js
+++ b/vprof/ui/flame_graph.js
@@ -264,7 +264,7 @@ class FlameGraph {
*/
static getTruncatedNodeName_(d, rectLength) {
let fullname = FlameGraph.getNodeName_(d);
- let maxSymbols = rectLength / 10; // ~ 10 pixels per character.
+ let maxSymbols = rectLength / 7;
return maxSymbols <= 3 ? '' : common.shortenString(
fullname, maxSymbols, false);
} | Fix short names in flame graph view. | nvdv_vprof | train | js |
b83e3460017259211e47d44bcbc1d991c2ec2025 | diff --git a/src/components/_gapminder/timeslider/timeslider.js b/src/components/_gapminder/timeslider/timeslider.js
index <HASH>..<HASH> 100644
--- a/src/components/_gapminder/timeslider/timeslider.js
+++ b/src/components/_gapminder/timeslider/timeslider.js
@@ -141,7 +141,7 @@
.x(this.xScale)
.extent([0, 0])
.on("brush", function () {
- utils.throttle(brushed.bind(this), 30);
+ utils.throttle(brushed.bind(this), 50);
})
.on("brushend", function () {
brushedEnd.call(this);
@@ -367,13 +367,13 @@
*/
_setTime: function (time) {
//update state
- var _this = this,
- frameRate = 50;
+ var _this = this;
+ // frameRate = 50;
//avoid updating more than once in "frameRate"
- var now = new Date();
- if (this._updTime != null && now - this._updTime < frameRate) return;
- this._updTime = now;
+ //var now = new Date();
+ //if (this._updTime != null && now - this._updTime < frameRate) return;
+ //this._updTime = now;
_this.model.time.value = time;
}, | Leave only one throttle on dragging. Why do we need two? | vizabi_vizabi | train | js |
538d527e4a6b133af09adb46b01d20d7a06c59f5 | diff --git a/packages/react-scripts/scripts/utils/createJestConfig.js b/packages/react-scripts/scripts/utils/createJestConfig.js
index <HASH>..<HASH> 100644
--- a/packages/react-scripts/scripts/utils/createJestConfig.js
+++ b/packages/react-scripts/scripts/utils/createJestConfig.js
@@ -87,6 +87,7 @@ module.exports = (resolve, rootDir, isEjecting) => {
'resetModules',
'restoreMocks',
'snapshotSerializers',
+ 'testMatch',
'transform',
'transformIgnorePatterns',
'watchPathIgnorePatterns', | feat(react-scripts): allow testMatch for jest config (#<I>)
resolves #<I> | facebook_create-react-app | train | js |
6f6a2181e158c50c69778cf08ec3c3609a259362 | diff --git a/query/src/main/java/io/keen/client/java/RequestUrlBuilder.java b/query/src/main/java/io/keen/client/java/RequestUrlBuilder.java
index <HASH>..<HASH> 100644
--- a/query/src/main/java/io/keen/client/java/RequestUrlBuilder.java
+++ b/query/src/main/java/io/keen/client/java/RequestUrlBuilder.java
@@ -64,7 +64,7 @@ class RequestUrlBuilder {
URL getDatasetsUrl(String projectId, String datasetName, boolean fetchResults, Map<String, ?> queryParams) throws KeenQueryClientException {
try {
- StringBuilder url = createBaseUrl(projectId);
+ StringBuilder url = createBaseDatasetsUrl(projectId);
if (datasetName != null) {
appendDatasetName(url, datasetName);
@@ -85,7 +85,7 @@ class RequestUrlBuilder {
}
}
- private StringBuilder createBaseUrl(String projectId) {
+ private StringBuilder createBaseDatasetsUrl(String projectId) {
return new StringBuilder(String.format(Locale.US,
"%s/%s/projects/%s/%s",
this.baseUrl, | Renamed builder's method | keenlabs_KeenClient-Java | train | java |
5984d4ee2b62f7a3af36b4d44c02000ae6234348 | diff --git a/spring-boot-project/spring-boot/src/main/java/org/springframework/boot/context/properties/ConfigurationProperties.java b/spring-boot-project/spring-boot/src/main/java/org/springframework/boot/context/properties/ConfigurationProperties.java
index <HASH>..<HASH> 100644
--- a/spring-boot-project/spring-boot/src/main/java/org/springframework/boot/context/properties/ConfigurationProperties.java
+++ b/spring-boot-project/spring-boot/src/main/java/org/springframework/boot/context/properties/ConfigurationProperties.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2012-2019 the original author or authors.
+ * Copyright 2012-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -23,6 +23,7 @@ import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import org.springframework.core.annotation.AliasFor;
+import org.springframework.stereotype.Indexed;
/**
* Annotation for externalized configuration. Add this to a class definition or a
@@ -46,6 +47,7 @@ import org.springframework.core.annotation.AliasFor;
@Target({ ElementType.TYPE, ElementType.METHOD })
@Retention(RetentionPolicy.RUNTIME)
@Documented
+@Indexed
public @interface ConfigurationProperties {
/** | Index @ConfigurationProperties so they can be found by scanning
Previously, @ConfigurationProperties was not annotated with @Indexed.
This meant that @ConfigurationPropertiesScan would not be able to
find them when the underlying
ClassPathScanningCandidateComponentProvider is using a
CandidateComponentsIndex.
This commit annotated @ConfigurationProperties with @Indexed so that
they can be found by index-based scanning.
Fixes gh-<I> | spring-projects_spring-boot | train | java |
a6142fd5fc99ec433d3a0ab8fbe3838e33778229 | diff --git a/Form/Type/FilterType.php b/Form/Type/FilterType.php
index <HASH>..<HASH> 100644
--- a/Form/Type/FilterType.php
+++ b/Form/Type/FilterType.php
@@ -200,9 +200,9 @@ class FilterType extends AbstractFormType
? $this->formRegistry->getTypeGuesser()->guessType($meta->getTranslationClass(), $property)
: $this->formRegistry->getTypeGuesser()->guessType($meta->getEntityClass(), $property);
- $fieldOptions = [
+ $fieldOptions = array_merge([
'required' => false,
- ];
+ ], $fieldOptions);
if (null === $fieldType) {
$fieldType = $guess->getType(); | Do not overwrite filter field options. | DarvinStudio_DarvinAdminBundle | train | php |
ce3986de1e0404d296cf0fee7ae141adb6d04ce2 | diff --git a/core/src/elements/ons-pull-hook.spec.js b/core/src/elements/ons-pull-hook.spec.js
index <HASH>..<HASH> 100644
--- a/core/src/elements/ons-pull-hook.spec.js
+++ b/core/src/elements/ons-pull-hook.spec.js
@@ -71,7 +71,7 @@ describe('OnsPullHookElement', () => {
});
});
-/* describe('#_onDrag()', () => {
+ describe('#_onDrag()', () => {
it('does nothing if disabled', () => {
const spy = chai.spy.on(pullHook, '_translateTo');
@@ -121,7 +121,7 @@ describe('OnsPullHookElement', () => {
expect(spy).to.have.been.called.once;
});
- });*/
+ });
describe('#_onDragStart()', () => {
it('does nothing if the pull hook is disabled', () => { | fix(ons-pull-hook): uncomment tests | OnsenUI_OnsenUI | train | js |
9c2b0248ce87afae4eb751dbb02ee95c5683ba25 | diff --git a/app/models/account.rb b/app/models/account.rb
index <HASH>..<HASH> 100644
--- a/app/models/account.rb
+++ b/app/models/account.rb
@@ -44,15 +44,17 @@ class Account < ActiveRecord::Base
# Tagging
# =======
- acts_as_taggable
- attr_accessible :tag_list
+ if defined? ActsAsTaggable
+ acts_as_taggable
+ attr_accessible :tag_list
- def self.default_tags
- ['invoice:debit', 'invoice:earnings', 'invoice:credit', 'invoice:costs', 'vat:credit', 'vat:debit']
- end
+ def self.default_tags
+ ['invoice:debit', 'invoice:earnings', 'invoice:credit', 'invoice:costs', 'vat:credit', 'vat:debit']
+ end
- def self.tag_collection
- (default_tags + Account.tag_counts.pluck(:name)).uniq
+ def self.tag_collection
+ (default_tags + Account.tag_counts.pluck(:name)).uniq
+ end
end
# Holder | Make acts_as_taggable support optional for Account model. | huerlisi_has_accounts | train | rb |
708d25f3837ac6930bbbb8bea3e9016a0c4e40cd | diff --git a/bcbio/ngsalign/novoalign.py b/bcbio/ngsalign/novoalign.py
index <HASH>..<HASH> 100644
--- a/bcbio/ngsalign/novoalign.py
+++ b/bcbio/ngsalign/novoalign.py
@@ -89,7 +89,7 @@ def _novoalign_args_from_config(config, need_quality=True):
qual_flags = ["-F", "ILMFQ" if qual_format == "illumina" else "STDFQ"]
else:
qual_flags = []
- multi_mappers = config["algorithm"].get("multiple_mappers", True)
+ multi_mappers = config["algorithm"].get("multiple_mappers")
if multi_mappers is True:
multi_flag = "Random"
elif isinstance(multi_mappers, basestring): | Use default novoalign treatment of multiple mapping reads unless otherwise configured | bcbio_bcbio-nextgen | train | py |
182f5646a95422514611ab13ada909897f18a4d1 | diff --git a/releaf-core/app/assets/javascripts/releaf/include/field.type_richtext.js b/releaf-core/app/assets/javascripts/releaf/include/field.type_richtext.js
index <HASH>..<HASH> 100644
--- a/releaf-core/app/assets/javascripts/releaf/include/field.type_richtext.js
+++ b/releaf-core/app/assets/javascripts/releaf/include/field.type_richtext.js
@@ -84,9 +84,17 @@ jQuery(function()
textarea.attr( 'id', 'richtext_' + String((new Date()).getTime()).replace(/\D/gi,'') );
}
- if (textarea.data('attachment-upload-url'))
+ if (textarea.data('attachment-upload-url') || textarea.data('attachment-browse-url'))
{
- config.filebrowserUploadUrl = textarea.data('attachment-upload-url');
+ if (textarea.data('attachment-browse-url'))
+ {
+ config.filebrowserBrowseUrl = textarea.data('attachment-browse-url');
+ }
+
+ if (textarea.data('attachment-upload-url'))
+ {
+ config.filebrowserUploadUrl = textarea.data('attachment-upload-url');
+ }
}
else
{ | Add support for passing file browser url to CKEditor | cubesystems_releaf | train | js |
f93ae2ad9d00251e72e22088c69d4e9e853b811e | diff --git a/lib/thinking_sphinx/sphinxql.rb b/lib/thinking_sphinx/sphinxql.rb
index <HASH>..<HASH> 100644
--- a/lib/thinking_sphinx/sphinxql.rb
+++ b/lib/thinking_sphinx/sphinxql.rb
@@ -13,5 +13,5 @@ module ThinkingSphinx::SphinxQL
self.count = '@count'
end
- self.variables!
+ self.functions!
end | Use Sphinx <I>.x functions instead of variables by default.
This can be changed back to the old behaviour by putting this call in an initialiser: ThinkingSphinx::SphinxQL.variables! | pat_thinking-sphinx | train | rb |
405c150c1d5393af2232526f77f39985d37b11fc | diff --git a/distutils/dist.py b/distutils/dist.py
index <HASH>..<HASH> 100644
--- a/distutils/dist.py
+++ b/distutils/dist.py
@@ -168,10 +168,6 @@ Common commands: (see '--help-commands' for more)
# for the setup script to override command classes
self.cmdclass = {}
- # Make sure 'commands' is defined, so dist.run_commands can run
- # It might be overwritten by parse_command_line()
- self.commands = []
-
# 'command_packages' is a list of packages in which commands
# are searched for. The factory for command 'foo' is expected
# to be named 'foo' in the module 'foo' in one of the packages | Remove early initialization of 'commands'; unneeded. | pypa_setuptools | train | py |
9aa30e7f26e116bc8c2175414152aee516d8e009 | diff --git a/blueforge/__init__.py b/blueforge/__init__.py
index <HASH>..<HASH> 100644
--- a/blueforge/__init__.py
+++ b/blueforge/__init__.py
@@ -1,4 +1,4 @@
import os
-__version__ = '0.0.102'
+__version__ = '0.0.103'
BLUEFORGE_ROOT = os.path.dirname(os.path.abspath(__file__)) | [CB-<I>] Added the facebook messenger API for blueforge. | BlueHack-Core_blueforge | train | py |
17a8793ec9f3e64643f9b37bfc962080fe267456 | diff --git a/spec/hashie/clash_spec.rb b/spec/hashie/clash_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/hashie/clash_spec.rb
+++ b/spec/hashie/clash_spec.rb
@@ -42,8 +42,7 @@ describe Hashie::Clash do
it 'should be able to replace all of its own keys with #replace' do
@c.foo(:bar).hello(:world)
- @c.replace(:baz => 123, :hgi => 123)
- @c.keys.sort.should == [:baz, :hgi]
+ @c.replace(:baz => 123, :hgi => 123).should == {:baz => 123, :hgi => 123}
@c.should == {:baz => 123, :hgi => 123}
@c[:foo].should be_nil
@c[:hello].should be_nil | Fix failing test on Ruby <I>.x | intridea_hashie | train | rb |
4cd72b91bd024e6be9abb30aee345c0a90a6561b | diff --git a/airflow/models.py b/airflow/models.py
index <HASH>..<HASH> 100755
--- a/airflow/models.py
+++ b/airflow/models.py
@@ -3905,10 +3905,15 @@ class DAG(BaseDag, LoggingMixin):
upstream and downstream neighbours based on the flag passed.
"""
+ # deep-copying self.task_dict takes a long time, and we don't want all
+ # the tasks anyway, so we copy the tasks manually later
+ task_dict = self.task_dict
+ self.task_dict = {}
dag = copy.deepcopy(self)
+ self.task_dict = task_dict
regex_match = [
- t for t in dag.tasks if re.findall(task_regex, t.task_id)]
+ t for t in self.tasks if re.findall(task_regex, t.task_id)]
also_include = []
for t in regex_match:
if include_downstream:
@@ -3917,7 +3922,9 @@ class DAG(BaseDag, LoggingMixin):
also_include += t.get_flat_relatives(upstream=True)
# Compiling the unique list of tasks that made the cut
- dag.task_dict = {t.task_id: t for t in regex_match + also_include}
+ # Make sure to not recursively deepcopy the dag while copying the task
+ dag.task_dict = {t.task_id: copy.deepcopy(t, {id(t.dag): t.dag})
+ for t in regex_match + also_include}
for t in dag.tasks:
# Removing upstream/downstream references to tasks that did not
# made the cut | [AIRFLOW-<I>] speed up dag.sub_dag(...)
previous version created the subdag by copying
over all the tasks, and
then filtering them down. it's a lot faster if we
only copy over the
tasks we need
Closes #<I> from abdul-stripe/faster-subdag | apache_airflow | train | py |
0cd060de6490014c1eab75b80eab4071851d234c | diff --git a/helpers.js b/helpers.js
index <HASH>..<HASH> 100644
--- a/helpers.js
+++ b/helpers.js
@@ -1,6 +1,19 @@
+var jade = require('jade');
+var mixin = require('utils-merge');
var stormpath = require('stormpath');
+module.exports.render = function(view, res, options) {
+ options = options || {};
+ mixin(options, res.locals);
+
+ jade.renderFile(view, options, function(err, html) {
+ if (err) throw err;
+ res.send(html);
+ });
+};
+
+
module.exports.getUser = function(req, res, callback) {
if (req.session && req.session.user) {
req.app.get('stormpathClient').getAccount(req.session.user.href, { expand: 'customData' }, function(err, account) { | Adding a render helper function.
This is necessary because this stormpath package is going to be rendering views
using Jade -- but the developer might not necessarily be using jade themselves
-- so we must *force* manual jade rendering as to not mess up the developer's
code. | stormpath_express-stormpath | train | js |
bbbf00e350378fe85b743ddb0a060cd0231dfb2e | diff --git a/runner_test.go b/runner_test.go
index <HASH>..<HASH> 100644
--- a/runner_test.go
+++ b/runner_test.go
@@ -170,7 +170,7 @@ func TestRunnerParallelWorkers(t *testing.T) {
result := testRunner.runWorkers()
- if result.Time <= 0 {
+ if result.Time < 0 {
t.Errorf("[%d] expected result time to be positive number but got %s", specIdx, result.Time)
} | Time measurement on windows might not be fine enought | leanovate_gopter | train | go |
04b42ef567af9b9457f18a520ea154ea8c0067e7 | diff --git a/src/Utility/NotificationManager.php b/src/Utility/NotificationManager.php
index <HASH>..<HASH> 100644
--- a/src/Utility/NotificationManager.php
+++ b/src/Utility/NotificationManager.php
@@ -88,7 +88,7 @@ class NotificationManager
$data = array_merge($_data, $data);
foreach ((array)$data['recipientLists'] as $recipientList) {
- (array)$list = $this->getRecipientList($recipientList);
+ $list = (array)$this->getRecipientList($recipientList);
$data['users'] = array_merge($data['users'], $list);
} | Bugfix: Unsupported operand types | bakkerij_notifier | train | php |
f9e6c5cb95d065a6b285166b68a385056d8298d2 | diff --git a/mungers/submit-queue_test.go b/mungers/submit-queue_test.go
index <HASH>..<HASH> 100644
--- a/mungers/submit-queue_test.go
+++ b/mungers/submit-queue_test.go
@@ -1019,6 +1019,7 @@ func TestSubmitQueue(t *testing.T) {
}
}()
+ // TODO: concurrent map read/write bug here!
if sq.prStatus[issueNumStr].Reason == test.reason {
done <- true
return | Add TODO re occasional test failure | kubernetes_test-infra | train | go |
c3d7213ac6832f0c6cee176776a6ebdbd4689e67 | diff --git a/jira/client.py b/jira/client.py
index <HASH>..<HASH> 100755
--- a/jira/client.py
+++ b/jira/client.py
@@ -595,6 +595,11 @@ class JIRA(object):
:param fields: comma-separated string of issue fields to include in the results
:param expand: extra information to fetch inside each resource
"""
+
+ # this allows us to pass Issue objects to issue()
+ if type(id) == Issue:
+ return id
+
issue = Issue(self._options, self._session)
params = {} | allowing to pass Issue() instances instead of id/keys to issue() | pycontribs_jira | train | py |
8dae8a307615a90c1e1a5e0a25707f9367367277 | diff --git a/tasks/RebuildStaticCacheTask.php b/tasks/RebuildStaticCacheTask.php
index <HASH>..<HASH> 100644
--- a/tasks/RebuildStaticCacheTask.php
+++ b/tasks/RebuildStaticCacheTask.php
@@ -87,7 +87,7 @@ class RebuildStaticCacheTask extends Controller {
$count = isset($_GET['count']) ? $_GET['count'] : sizeof($urls);
if(($start + $count) > sizeof($urls)) $count = sizeof($urls) - $start;
- $urls = array_slice($urls, $start, $count);
+ $mappedUrls = array_slice($mappedUrls, $start, $count);
if($removeAll && !isset($_GET['urls']) && $start == 0 && file_exists("../cache")) {
echo "Removing stale cache files... \n"; | BUGFIX Fixed limiting of URLs on RebuildStaticCacheTask | silverstripe_silverstripe-siteconfig | train | php |
aedd1555d343f394781d9b74514dda5bccbe40f4 | diff --git a/builder/digitalocean/wait.go b/builder/digitalocean/wait.go
index <HASH>..<HASH> 100644
--- a/builder/digitalocean/wait.go
+++ b/builder/digitalocean/wait.go
@@ -9,6 +9,9 @@ import (
// waitForState simply blocks until the droplet is in
// a state we expect, while eventually timing out.
func waitForDropletState(desiredState string, dropletId uint, client *DigitalOceanClient, timeout time.Duration) error {
+ done := make(chan struct{})
+ defer close(done)
+
result := make(chan error, 1)
go func() {
attempts := 0
@@ -29,6 +32,15 @@ func waitForDropletState(desiredState string, dropletId uint, client *DigitalOce
// Wait 3 seconds in between
time.Sleep(3 * time.Second)
+
+ // Verify we shouldn't exit
+ select {
+ case <-done:
+ // We finished, so just exit the goroutine
+ return
+ default:
+ // Keep going
+ }
}
}() | builder/digitalocean: properly cleanup goroutines for status checking | hashicorp_packer | train | go |
e95891ec0115a6be8fe7855e1f1381726c06b919 | diff --git a/datapackage_pipelines/status/backend_filesystem.py b/datapackage_pipelines/status/backend_filesystem.py
index <HASH>..<HASH> 100644
--- a/datapackage_pipelines/status/backend_filesystem.py
+++ b/datapackage_pipelines/status/backend_filesystem.py
@@ -48,12 +48,15 @@ class FilesystemBackend(object):
self.del_status(p)
def all_pipeline_ids(self):
- all_ids = sorted(os.listdir(self.base_dir))
- all_ids = [
- codecs.decode(_id.encode('utf8'), 'base64').decode('utf8')
- for _id in all_ids
- ]
- return all_ids
+ # Decoding encoded identifiers
+ dec_ids = []
+ enc_ids = sorted(os.listdir(self.base_dir))
+ for enc_id in enc_ids:
+ dec_id = codecs.decode(enc_id.encode('utf8'), 'base64').decode('utf8')
+ if dec_id.startswith('PipelineStatus:'):
+ dec_id = dec_id.replace('PipelineStatus:', '')
+ dec_ids.append(dec_id)
+ return dec_ids
def all_statuses(self):
return [self.get_status(_id) | Fixed decoding of encoded identifiers (#<I>) | frictionlessdata_datapackage-pipelines | train | py |
1b8a274b393c3f71a0cc8bcb9af1cd99d8696c5a | diff --git a/src/loader.js b/src/loader.js
index <HASH>..<HASH> 100644
--- a/src/loader.js
+++ b/src/loader.js
@@ -23,7 +23,6 @@ export class Loader {
*/
programId: PublicKey;
-
/**
* Amount of program data placed in each load Transaction
*/
diff --git a/src/transaction.js b/src/transaction.js
index <HASH>..<HASH> 100644
--- a/src/transaction.js
+++ b/src/transaction.js
@@ -20,6 +20,11 @@ export type TransactionSignature = string;
export type TransactionId = string;
/**
+ * Maximum over-the-wire size of a Transaction
+ */
+export const PACKET_DATA_SIZE = 512;
+
+/**
* List of TransactionInstruction object fields that may be initialized at construction
*
* @typedef {Object} TransactionInstructionCtorFields
@@ -290,8 +295,8 @@ export class Transaction {
});
signData.copy(wireTransaction, 8 + signatures.length * 64);
invariant(
- wireTransaction.length < 512,
- `${wireTransaction.length}, ${signatures.length}`,
+ wireTransaction.length <= PACKET_DATA_SIZE,
+ `Transaction too large: ${wireTransaction.length} > ${PACKET_DATA_SIZE}`,
);
return wireTransaction;
} | refactor: add constant for max Transaction size | solana-labs_solana-web3.js | train | js,js |
ad5ead328ad6f2ed4d209716bb332547c74df7ca | diff --git a/src/sass.api.js b/src/sass.api.js
index <HASH>..<HASH> 100644
--- a/src/sass.api.js
+++ b/src/sass.api.js
@@ -41,7 +41,7 @@ var Sass = {
}
Object.keys(options).forEach(function(key) {
- var _type = this._optionTypes[key];
+ var _type = Sass._optionTypes[key];
// no need to import crap
if (!_type) {
@@ -49,8 +49,8 @@ var Sass = {
}
// force expected data type
- this._options[key] = _type(options[key]);
- }, this);
+ Sass._options[key] = _type(options[key]);
+ });
callback && callback();
},
@@ -60,7 +60,7 @@ var Sass = {
throw new Error('importer callback must either be a function or null');
}
- this._importer = importerCallback;
+ Sass._importer = importerCallback;
callback && callback();
},
@@ -262,7 +262,7 @@ var Sass = {
return option.type;
})),
// arguments for invocation
- [text, Sass._path, Number(Boolean(this._importer))].concat(options.map(function(option) {
+ [text, Sass._path, Number(Boolean(Sass._importer))].concat(options.map(function(option) {
return Sass._options[option.key];
})),
// we're not expecting synchronous return value | feature(api): consistently use `Sass` over `this` | medialize_sass.js | train | js |
153e8d31eeb5c6a5ad4a09edde38f7915031980b | diff --git a/insights/specs/sos_archive.py b/insights/specs/sos_archive.py
index <HASH>..<HASH> 100644
--- a/insights/specs/sos_archive.py
+++ b/insights/specs/sos_archive.py
@@ -220,6 +220,7 @@ class SosSpecs(Specs):
sssd_logs = glob_file("var/log/sssd/*.log")
samba_logs = glob_file("var/log/samba/log.*")
ssh_foreman_config = simple_file("/usr/share/foreman/.ssh/ssh_config")
+ subscription_manager_id = simple_file("/sos_commands/subscription_manager/subscription-manager_identity")
subscription_manager_list_consumed = first_file([
'sos_commands/yum/subscription-manager_list_--consumed',
'sos_commands/subscription_manager/subscription-manager_list_--consumed', | Added spec for subscription_manager_in sos_archive.py (#<I>)
The Spec for subscription_manager_id added in sos_archive.py to parse the associated file:
- sos_commands/subscription_manager/subscription-manager_identity. | RedHatInsights_insights-core | train | py |
044c53bcad2e28f90d406f7a1ccfb1aca23073e7 | diff --git a/src/transaction.js b/src/transaction.js
index <HASH>..<HASH> 100644
--- a/src/transaction.js
+++ b/src/transaction.js
@@ -93,7 +93,10 @@ Transaction.fromHex = function(hex) {
* Note that this method does not sign the created input.
*/
Transaction.prototype.addInput = function(hash, index, sequence, script) {
- if (sequence === undefined) sequence = Transaction.DEFAULT_SEQUENCE
+ if (sequence === undefined || sequence === null) {
+ sequence = Transaction.DEFAULT_SEQUENCE
+ }
+
script = script || Script.EMPTY
if (typeof hash === 'string') { | transaction.addInput checks if sequence is NULL | BitGo_bitgo-utxo-lib | train | js |
1aa163b4c9c69b8de0cbbd7686f6a075a9e99c62 | diff --git a/lahja/misc.py b/lahja/misc.py
index <HASH>..<HASH> 100644
--- a/lahja/misc.py
+++ b/lahja/misc.py
@@ -29,10 +29,9 @@ class BroadcastConfig:
class BaseEvent:
- def __init__(self) -> None:
- self._origin = ''
- self._id: Optional[str] = None
- self._config: Optional[BroadcastConfig] = None
+ _origin = ''
+ _id: Optional[str] = None
+ _config: Optional[BroadcastConfig] = None
def broadcast_config(self) -> BroadcastConfig:
return BroadcastConfig( | Get rid of __init__ in BaseEvent | ethereum_lahja | train | py |
4e20522afdf899f0916ffb140030a42bd69c7c23 | diff --git a/command/fix.go b/command/fix.go
index <HASH>..<HASH> 100644
--- a/command/fix.go
+++ b/command/fix.go
@@ -127,6 +127,7 @@ Fixes that are run:
pp-vagrant-override Replaces old-style provider overrides for the Vagrant
post-processor to new-style as of Packer 0.5.0.
virtualbox-rename Updates "virtualbox" builders to "virtualbox-iso"
+ vmware-rename Updates "vmware" builders to "vmware-iso"
Options: | Fixes #<I>: Added vmware-rename to fix command help text | hashicorp_packer | train | go |
4d680c49a1fc6511571f4e475db4536a3a278023 | diff --git a/kintama/helpers.rb b/kintama/helpers.rb
index <HASH>..<HASH> 100644
--- a/kintama/helpers.rb
+++ b/kintama/helpers.rb
@@ -21,6 +21,7 @@ def spawn_all_processes
end
def kill_all_processes
+ sleep $for_a_moment
@pids.each do |pid|
Process.kill :TERM, pid
Process.wait pid | Sometimes the test goes so fast that we need a breather before shutting down | JScott_robot_sweatshop | train | rb |
85487d221f7d8207100f40e6b376b03638965997 | diff --git a/src/loader/loader.js b/src/loader/loader.js
index <HASH>..<HASH> 100644
--- a/src/loader/loader.js
+++ b/src/loader/loader.js
@@ -370,16 +370,16 @@
switch (res.type) {
case "binary":
// reuse the preloadImage fn
- preloadBinary.apply(this, [res, onload, onerror]);
+ preloadBinary.call(this, res, onload, onerror);
return 1;
case "image":
// reuse the preloadImage fn
- preloadImage.apply(this, [res, onload, onerror]);
+ preloadImage.call(this, res, onload, onerror);
return 1;
case "tmx":
- preloadXML.apply(this, [res, true, onload, onerror]);
+ preloadXML.call(this, res, true, onload, onerror);
// increase the resourceCount by 1
// allowing to add the loading of level in the
// levelDirector as part of the loading progress | use Function.call instead of Function.apply (note this does not fix or add anything) | melonjs_melonJS | train | js |
da000e5b782c462a1eb8a6279cbea651616fb561 | diff --git a/chef/lib/plugins/cloudstack.rb b/chef/lib/plugins/cloudstack.rb
index <HASH>..<HASH> 100644
--- a/chef/lib/plugins/cloudstack.rb
+++ b/chef/lib/plugins/cloudstack.rb
@@ -21,5 +21,3 @@
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
require_plugin "cloud"
-
-Ohai::Log.warn("deprecated in favor of cloud plugin")
diff --git a/chef/lib/plugins/ec2.rb b/chef/lib/plugins/ec2.rb
index <HASH>..<HASH> 100644
--- a/chef/lib/plugins/ec2.rb
+++ b/chef/lib/plugins/ec2.rb
@@ -21,5 +21,3 @@
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
require_plugin "cloud"
-
-Ohai::Log.warn("deprecated in favor of cloud plugin")
diff --git a/chef/lib/plugins/rackspace.rb b/chef/lib/plugins/rackspace.rb
index <HASH>..<HASH> 100644
--- a/chef/lib/plugins/rackspace.rb
+++ b/chef/lib/plugins/rackspace.rb
@@ -21,5 +21,3 @@
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
require_plugin "cloud"
-
-Ohai::Log.warn("deprecated in favor of cloud plugin") | removed warnings from ohai cloud plugin overrides because they cause a "Unexpected output from execution" message to appear in logs | rightscale_right_link | train | rb,rb,rb |
4e9816094f5613dabc8c72ae67ac502cc1571a1a | diff --git a/vsphere/internal/virtualdevice/virtual_machine_disk_subresource.go b/vsphere/internal/virtualdevice/virtual_machine_disk_subresource.go
index <HASH>..<HASH> 100644
--- a/vsphere/internal/virtualdevice/virtual_machine_disk_subresource.go
+++ b/vsphere/internal/virtualdevice/virtual_machine_disk_subresource.go
@@ -136,6 +136,7 @@ func DiskSubresourceSchema() map[string]*schema.Schema {
"unit_number": {
Type: schema.TypeInt,
Optional: true,
+ Default: 0,
Description: "The unique device number for this disk. This number determines where on the SCSI bus this device will be attached.",
ValidateFunc: validation.IntBetween(0, 59),
}, | r/virtual_machine: Add default for disk unit number
This fixes a diff mismatch on tainting of a resource that may possibly
come up during ForceNew as well. The annoying part of this issue is that
the "diffs didn't match during apply" error fires during the middle of
the operation, after the resource gets deleted, but before it's
re-created. | terraform-providers_terraform-provider-vsphere | train | go |
9d5fc95d7c7c2e8d40bf5c8fed45b5eadbe4e6d3 | diff --git a/packages/swagger2openapi/index.js b/packages/swagger2openapi/index.js
index <HASH>..<HASH> 100644
--- a/packages/swagger2openapi/index.js
+++ b/packages/swagger2openapi/index.js
@@ -3,6 +3,7 @@
var fs = require('fs');
var url = require('url');
+var pathlib = require('path');
var co = require('co');
var maybe = require('call-me-maybe');
@@ -10,7 +11,7 @@ var fetch = require('node-fetch');
var yaml = require('js-yaml');
var common = require('./common.js');
-var statusCodes = require('./statusCodes.json');
+var statusCodes = require(pathlib.join(__dirname,'statusCodes.json'));
// TODO split out into params, security etc
// TODO handle specification-extensions with plugins? | Make statusCodes.json findable when loaded as module | Mermade_oas-kit | train | js |
9b15df57640a1a66b747fbbe5eeff1b2408b7fb4 | diff --git a/dimod/reference/composites/fixedvariable.py b/dimod/reference/composites/fixedvariable.py
index <HASH>..<HASH> 100644
--- a/dimod/reference/composites/fixedvariable.py
+++ b/dimod/reference/composites/fixedvariable.py
@@ -52,7 +52,7 @@ class FixedVariableComposite(ComposedSampler):
>>> quadratic = {(1,4):-0.6}
>>> response = sampler.sample_ising(linear,quadratic,fixed_variables={1:-1})
>>> print(response.first)
- Sample(sample={4: -1, 1: -1}, energy=1.2000000000000002, num_occurrences=1)
+ Sample(sample={1: -1, 4: -1}, energy=1.2000000000000002, num_occurrences=1)
""" | satisfying doctests for FixedVariableComposite | dwavesystems_dimod | train | py |
44fb5b5674fd4e6cbf0fe999bd64ecc3d0a20350 | diff --git a/src/system/modules/metamodels/MetaModels/DcGeneral/Data/Driver.php b/src/system/modules/metamodels/MetaModels/DcGeneral/Data/Driver.php
index <HASH>..<HASH> 100644
--- a/src/system/modules/metamodels/MetaModels/DcGeneral/Data/Driver.php
+++ b/src/system/modules/metamodels/MetaModels/DcGeneral/Data/Driver.php
@@ -378,7 +378,7 @@ class Driver implements MultiLanguageDataProviderInterface
case 'IN':
// Rewrite the IN operation to a rephrased term: "(x=a) OR (x=b) OR ...".
$arrSubRules = array();
- foreach ($arrFilter['value'] as $varValue)
+ foreach ($arrFilter['values'] as $varValue)
{
$arrSubRules[] = array(
'property' => $arrFilter['property'], | Fix filter for "IN" operation. | MetaModels_core | train | php |
322f81b9885d04fda4dd344763841053b1c39d35 | diff --git a/src/runners/flow-node.js b/src/runners/flow-node.js
index <HASH>..<HASH> 100644
--- a/src/runners/flow-node.js
+++ b/src/runners/flow-node.js
@@ -8,7 +8,7 @@ import Options from '../models/options/Options'
import SwaggerParser from '../parsers/swagger/Parser'
import RAMLParser from '../parsers/raml/Parser'
import PostmanParser from '../parsers/postman/Parser'
-import CurlParser from '../parsers/curl/Parser'
+import CurlParser from '../parsers/cURL/Parser'
import InternalParser from '../parsers/internal/Parser'
import SwaggerSerializer from '../serializers/swagger/Serializer' | fixed an casing issue with flow-node.js | luckymarmot_API-Flow | train | js |
47d9911d771802b8ca72085effdb307da19f6504 | diff --git a/resources/assets/js/media.js b/resources/assets/js/media.js
index <HASH>..<HASH> 100644
--- a/resources/assets/js/media.js
+++ b/resources/assets/js/media.js
@@ -17,6 +17,20 @@ module.exports = function(){
},
imgIcon: function(path){
return 'background-size: cover; background-image: url("' + path + '"); background-repeat:no-repeat; background-position:center center;display:inline-block; width:100%; height:100%;';
+ },
+ dateFilter: function(date){
+ if(!date){
+ return null;
+ }
+ var date = new Date(date * 1000);
+
+ var month = "0" + (date.getMonth() + 1);
+ var minutes = "0" + date.getMinutes();
+ var seconds = "0" + date.getSeconds();
+
+ var dateForamted = date.getFullYear() + '-' + month.substr(-2) + '-' + date.getDate() + ' ' + date.getHours() + ':' + minutes.substr(-2) + ':' + seconds.substr(-2);
+
+ return dateForamted;
}
}
}); | Update media.js
Add date filter method in media js | the-control-group_voyager | train | js |
b6f7172725c03f0876ae929f870f8e00b87a3f88 | diff --git a/audiomate/corpus/io/tuda.py b/audiomate/corpus/io/tuda.py
index <HASH>..<HASH> 100644
--- a/audiomate/corpus/io/tuda.py
+++ b/audiomate/corpus/io/tuda.py
@@ -103,7 +103,7 @@ BAD_FILES = {
'test': [
# INVALID AUDIO
'2015-02-04-12-36-32', '2015-02-10-13-45-07', '2015-01-27-14-37-33',
- '2015-02-10-14-18-26'
+ '2015-02-10-14-18-26', '2015-02-09-15-07-19',
]
} | Add invalid file to tuda reader | ynop_audiomate | train | py |
09a5983f105ef53c818d806ed5b0618603672e88 | diff --git a/src/components/autoplay.js b/src/components/autoplay.js
index <HASH>..<HASH> 100644
--- a/src/components/autoplay.js
+++ b/src/components/autoplay.js
@@ -62,12 +62,16 @@ export default function (Glide, Components, Events) {
* @return {Void}
*/
bind () {
- Binder.on('mouseover', Components.Html.root, () => {
+ Binder.on('mouseenter', Components.Html.root, () => {
this.stop()
+
+ Events.emit('autoplay.over')
})
- Binder.on('mouseout', Components.Html.root, () => {
+ Binder.on('mouseleave', Components.Html.root, () => {
this.start()
+
+ Events.emit('autoplay.out')
})
}, | feat(autoplay): Emit events for hoverpause (#<I>) | glidejs_glide | train | js |
7b668237bc1dec5d895d29955a05045a4f63e7f5 | diff --git a/cmd/torrent-pick/main.go b/cmd/torrent-pick/main.go
index <HASH>..<HASH> 100644
--- a/cmd/torrent-pick/main.go
+++ b/cmd/torrent-pick/main.go
@@ -163,6 +163,7 @@ func main() {
continue
}
srcReader := file.NewReader()
+ defer srcReader.Close()
io.Copy(dstWriter, srcReader)
return
} | cmd/torrent-pick: Close file Reader | anacrolix_torrent | train | go |
14fb27507df85460098cd47f4fdee8c18514e3da | diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -34,7 +34,7 @@ history = open('CHANGES.rst').read()
tests_require = [
'check-manifest>=0.25',
'coverage>=4.0',
- 'invenio-accounts>=1.0.0a13',
+ 'invenio-accounts>=1.0.0a15',
'invenio-access>=1.0.0a9',
'isort>=4.2.2',
'psycopg2>=2.6.1',
@@ -72,10 +72,10 @@ install_requires = [
'elasticsearch-dsl>=2.0.0',
'invenio-assets>=1.0.0b2',
'invenio-db[versioning]>=1.0.0b1',
- 'invenio-files-rest>=1.0.0a9',
+ 'invenio-files-rest>=1.0.0a12',
'invenio-indexer>=1.0.0a6',
'invenio-jsonschemas>=1.0.0a3',
- 'invenio-oauth2server>=1.0.0a9',
+ 'invenio-oauth2server>=1.0.0a10',
'invenio-records-files>=1.0.0a6',
'invenio-records-rest>=1.0.0a16',
'invenio-records-ui>=1.0.0a7', | global: compatibility fix with Flask-WTF>=<I> | inveniosoftware_invenio-deposit | train | py |
88692e1cb18341613a708468493a9319007fdd5b | diff --git a/geojson.js b/geojson.js
index <HASH>..<HASH> 100644
--- a/geojson.js
+++ b/geojson.js
@@ -126,6 +126,18 @@
}
}
+ if(params.extra) { addExtra(properties, params.extra); }
+
+ return properties;
+ }
+
+ function addExtra(properties, extra) {
+ for(var key in extra){
+ if(extra.hasOwnProperty(key)) {
+ properties[key] = extra[key];
+ }
+ }
+
return properties;
} | Added 'extra' parameter to include arbitrary feature properties. Closes #5 | caseycesari_GeoJSON.js | train | js |
20fdcbd3b2174a44b6e5c6910c8fd14a4ddcb626 | diff --git a/versions.js b/versions.js
index <HASH>..<HASH> 100755
--- a/versions.js
+++ b/versions.js
@@ -1,9 +1,9 @@
#!/usr/bin/env node
"use strict";
-const {basename, dirname, join} = require("path");
-const {platform} = require("os");
const {readFile, writeFile, truncate, stat, realpath} = require("fs").promises;
+const {basename, dirname, join, relative} = require("path");
+const {platform} = require("os");
const execa = require("execa");
const fastGlob = require("fast-glob");
const findUp = require("find-up");
@@ -395,7 +395,7 @@ async function main() {
const commitMsg = commitMsgs.join("\n\n") + (changelog ? `\n\n${changelog}` : ``);
if (args["explicit-add"]) {
- await run(["git", "add", ...files]);
+ await run(["git", "add", ...files.map(file => relative(__dirname, file))]);
await run(["git", "commit", "-F", "-"], {input: commitMsg});
} else {
await run(["git", "commit", "-a", "-F", "-"], {input: commitMsg}); | pass relative paths to 'git add' | silverwind_ver | train | js |
2cba8fac2abb7fa2005077e254e6a31d053d93cf | diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -45,6 +45,7 @@ install_requires = [
'fedmsg',
'python-fedora',
'python-dateutil',
+ 'pytz'
]
tests_require = [
'nose', | buildsys.py needs pytz | fedora-infra_fedmsg_meta_fedora_infrastructure | train | py |
abf920af853cb070df914620e6e0ca5ff7b8d60a | diff --git a/src/Exceptions/BatchException.php b/src/Exceptions/BatchException.php
index <HASH>..<HASH> 100644
--- a/src/Exceptions/BatchException.php
+++ b/src/Exceptions/BatchException.php
@@ -25,7 +25,7 @@ class BatchException extends RestException
*/
public function __construct($responses, $message = null, $code = null, $previous = null)
{
- parent::__construct(Response::HTTP_INTERNAL_SERVER_ERROR, $message,
+ parent::__construct(Response::HTTP_BAD_REQUEST, $message,
$code ?: ErrorCodes::BATCH_ERROR, $previous, $responses);
} | DP-<I> Conflicting HTTP Status Codes
Batch Error response changed to <I> | dreamfactorysoftware_df-core | train | php |
8868ae79af1ff41d83a264e078a34efa4c3251ad | diff --git a/lib/quartz_flow/server.rb b/lib/quartz_flow/server.rb
index <HASH>..<HASH> 100644
--- a/lib/quartz_flow/server.rb
+++ b/lib/quartz_flow/server.rb
@@ -74,7 +74,11 @@ class Server < Sinatra::Base
LogConfigurator.configLevels
peerClient = QuartzTorrent::PeerClient.new(settings.basedir)
peerClient.port = settings.torrent_port
- peerClient.start
+ begin
+ peerClient.start
+ rescue Errno::EADDRINUSE
+ raise "Starting torrent peer failed because listening on port #{settings.torrent_port} failed: " + $!.message
+ end
# Initialize Datamapper
#DataMapper::Logger.new($stdout, :debug) | Better error message when bittorrent port is in use | jeffwilliams_quartz-flow | train | rb |
754ba630291a162e1bbc3cf97a1f21594e4520a5 | diff --git a/Core/src/Artax/Core/Handlers/Termination.php b/Core/src/Artax/Core/Handlers/Termination.php
index <HASH>..<HASH> 100644
--- a/Core/src/Artax/Core/Handlers/Termination.php
+++ b/Core/src/Artax/Core/Handlers/Termination.php
@@ -66,6 +66,12 @@ class Termination implements TerminationInterface
protected $debug;
/**
+ * An event mediator instance
+ * @var Mediator
+ */
+ protected $mediator;
+
+ /**
* Specify debug output flag and register exception/shutdown handlers
*
* @param bool $debug A boolean debug output flag | Termination Handler: added missing protected property declaration | amphp_artax | train | php |
c7de3bb8934db988917271d1ec732a212f94e0d2 | diff --git a/deployutils/__init__.py b/deployutils/__init__.py
index <HASH>..<HASH> 100644
--- a/deployutils/__init__.py
+++ b/deployutils/__init__.py
@@ -27,7 +27,7 @@ Function to load site and credentials config files
"""
import os, re, sys
-__version__ = '0.2.3-dev'
+__version__ = '0.2.3'
def locate_config(confname, app_name, prefix='etc', verbose=False):
"""
diff --git a/deployutils/settings.py b/deployutils/settings.py
index <HASH>..<HASH> 100644
--- a/deployutils/settings.py
+++ b/deployutils/settings.py
@@ -48,7 +48,7 @@ _SETTINGS = {
settings.BASE_DIR + '/htdocs/'),
'INSTALLED_TEMPLATES_ROOT': getattr(settings,
'DEPLOYUTILS_INSTALLED_TEMPLATES_ROOT',
- settings.TEMPLATE_DIRS[0]),
+ settings.TEMPLATE_DIRS[0] if len(settings.TEMPLATE_DIRS) > 0 else None),
'RESOURCES_REMOTE_LOCATION': getattr(settings,
'DEPLOYUTILS_RESOURCES_REMOTE_LOCATION', None),
'DENY_NO_SESSION': getattr(settings, 'DEPLOYUTILS_DENY_NO_SESSION', False), | release <I> to pypi | djaodjin_djaodjin-deployutils | train | py,py |
3e5f7132a84867cc2e4a4a46bfdbba24678366eb | diff --git a/backbone.js b/backbone.js
index <HASH>..<HASH> 100644
--- a/backbone.js
+++ b/backbone.js
@@ -447,7 +447,7 @@
if (options.comparator) this.comparator = options.comparator;
this._reset();
this.initialize.apply(this, arguments);
- if (models) this.reset(models, {silent: true});
+ if (models) this.reset(models, {silent: true, parse: options.parse});
};
// Define the Collection's inheritable methods. | Fixes #<I>, pass through options.parse when creating a new collection. | jashkenas_backbone | train | js |
3c726b6c9d15a3c586c0b1bd5ed99dc8d60e7355 | diff --git a/spec/page_text_receiver_spec.rb b/spec/page_text_receiver_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/page_text_receiver_spec.rb
+++ b/spec/page_text_receiver_spec.rb
@@ -33,4 +33,14 @@ describe PDF::Reader::PageTextReceiver do
@receiver.content.should eql("James Healy")
end
+
+ it "should return merged text content from the regular page and a Form XObjects" do
+ @reader = PDF::Reader.new(pdf_spec_file("form_xobject_more"))
+ @page = @reader.page(1)
+ @receiver = PDF::Reader::PageTextReceiver.new
+
+ @page.walk(@receiver)
+
+ @receiver.content.should eql("James Healy\nSome regular content")
+ end
end | an extra spec for PageTextReceiver | yob_pdf-reader | train | rb |
e9c25cba5a642e203db91f57a4fcdbccb6d63b03 | diff --git a/lib/mvcli/middleware.rb b/lib/mvcli/middleware.rb
index <HASH>..<HASH> 100644
--- a/lib/mvcli/middleware.rb
+++ b/lib/mvcli/middleware.rb
@@ -4,8 +4,16 @@ module MVCLI
@apps = []
end
- def call(command)
- invoke command, 0
+ def call(command, apps = @apps)
+ app, *rest = apps
+ if app
+ app.call(command) do |yielded|
+ yielded ||= command
+ call yielded, rest
+ end
+ else
+ return 0
+ end
end
def [](idx)
@@ -20,19 +28,5 @@ module MVCLI
@apps << app
end
- private
-
- def invoke(command, index)
- if app = @apps[index]
- app.call(command) do |c|
- if @apps[index + 1]
- c ||= command
- invoke c, index + 1
- end
- end
- else
- return 0
- end
- end
end
end | refactor middleware recursive style | cowboyd_mvcli | train | rb |
0c617a8339d2225166e0103dba21bded6e61e414 | diff --git a/lib/train/transports/ssh.rb b/lib/train/transports/ssh.rb
index <HASH>..<HASH> 100644
--- a/lib/train/transports/ssh.rb
+++ b/lib/train/transports/ssh.rb
@@ -104,10 +104,12 @@ module Train::Transports
if key == :keys && options[:key_files].nil? && !host_cfg[:keys].nil? && options[:password].nil?
options[:key_files] = host_cfg[key]
elsif options[key].nil?
- # Give precedence to config file when ssh_config_file options is set to true or to the path of the config file.
- # This is required as there are default values set for some of the opitons and we unable to
- # identify whether the values are set from the cli option or those are default so either we should give
- # precedence to config file or otherwise we need to check each options default values and then set the value for that option.
+ # Precedence is given to the option set by the user manually.
+ # And only assigning value to the option from the ssh config file when it is not set by the user
+ # in the option. When the option has a default value for e.g. option "user" has the "root" as the default
+ # value, then the default value will be used even though the value for "user" is present in the ssh
+ # config file. That is because the precedence is to the options set manually, and we don't have
+ # any way to differentiate between the value set by the user or is it the default.
options[key] = host_cfg[key]
end
end | Updated comment with example so to be clear in the documentation of the functionality | inspec_train | train | rb |
9a07025006f49c7e78317bcbcedf1d396477f352 | diff --git a/standard/test/kg/apc/jmeter/PluginsCMDWorkerTest.java b/standard/test/kg/apc/jmeter/PluginsCMDWorkerTest.java
index <HASH>..<HASH> 100644
--- a/standard/test/kg/apc/jmeter/PluginsCMDWorkerTest.java
+++ b/standard/test/kg/apc/jmeter/PluginsCMDWorkerTest.java
@@ -114,7 +114,7 @@ public class PluginsCMDWorkerTest {
assertEquals(expResult, result);
System.out.println(csvfile.length());
System.out.println(pngfile.length());
- assertTrue(73 == csvfile.length() || 305 == csvfile.length()); // win/linux diff
+ assertTrue(73 == csvfile.length() || 295 == csvfile.length() || 305 == csvfile.length()); // win/linux diff
assertTrue(16000 < pngfile.length()); // win/linux different
} | OS Specific Unit test nightmare... should realy be avoided | undera_jmeter-plugins | train | java |
19e8a0c97080f67d770ecc9ec09244aa093667f8 | diff --git a/apps/notifications/src/panel/indices-to-html/index.js b/apps/notifications/src/panel/indices-to-html/index.js
index <HASH>..<HASH> 100644
--- a/apps/notifications/src/panel/indices-to-html/index.js
+++ b/apps/notifications/src/panel/indices-to-html/index.js
@@ -84,6 +84,7 @@ function render_range( new_sub_text, new_sub_range, range_info, range_data, opti
case 'em':
case 'sub':
case 'sup':
+ case 'del':
case 's':
case 'ol':
case 'ul': | Notifications - adding support for strikethrough formatting (#<I>) | Automattic_wp-calypso | train | js |
a6ba7c04c31828b630127983c7ed2e44b27daf63 | diff --git a/gooey/code_prep.py b/gooey/code_prep.py
index <HASH>..<HASH> 100644
--- a/gooey/code_prep.py
+++ b/gooey/code_prep.py
@@ -37,7 +37,7 @@ def drop_imports(code):
def split_line(line):
# splits an assignment statement into varname and command strings
# in: "parser = ArgumentParser(description='Example Argparse Program')"
- # out: "parser", "= parser = ArgumentParser(description='Example Argparse Program"
+ # out: "parser", "ArgumentParser(description='Example Argparse Program"
# take/dropwhile used to avoid splitting on multiple '=' signs
not_equal_sign = lambda x: x != '='
varname = ''.join(takewhile(not_equal_sign, line)).strip() | Minor fix to a code comment. | chriskiehl_Gooey | train | py |
adcb989c6c621c41f434e97b600e973004447bf3 | diff --git a/packages/browser/rollup.config.js b/packages/browser/rollup.config.js
index <HASH>..<HASH> 100644
--- a/packages/browser/rollup.config.js
+++ b/packages/browser/rollup.config.js
@@ -16,7 +16,7 @@ const terserInstance = terser({
// I listed all of them here just for the clarity sake, as they are all used in the frames manipulation process.
reserved: ['captureException', 'captureMessage', 'sentryWrapped'],
properties: {
- regex: /^_/,
+ regex: /^_[^_]/,
},
},
}); | Stop mangling double underscore properties (#<I>) | getsentry_sentry-javascript | train | js |
0be417a0b48fe2d4d920f29402fdd74fed6e03f2 | diff --git a/anyconfig/api.py b/anyconfig/api.py
index <HASH>..<HASH> 100644
--- a/anyconfig/api.py
+++ b/anyconfig/api.py
@@ -216,9 +216,6 @@ def multi_load(paths, ac_parser=None, ac_template=False, ac_context=None,
if ac_merge not in MERGE_STRATEGIES:
raise ValueError("Invalid merge strategy: " + ac_merge)
- if not paths:
- return container()
-
schema = format_checker = None
if ac_schema is not None:
kwargs["ac_schema"] = None # Avoid infinit loop | refactor: do not check given paths is empty as the check is not needed actually in api.multi_load | ssato_python-anyconfig | train | py |
70a35613c6c1fcbb2216ac9cd65d20f67364782a | diff --git a/sparkle/acq/players.py b/sparkle/acq/players.py
index <HASH>..<HASH> 100644
--- a/sparkle/acq/players.py
+++ b/sparkle/acq/players.py
@@ -38,6 +38,7 @@ class AbstractPlayerBase(object):
self.stim_changed = False
+ self.attenuator = None
self.connect_attenuator(False)
self.trigger_src = None #"PCI-6259/port0/line1"
@@ -169,6 +170,9 @@ class AbstractPlayerBase(object):
self.attenuator = pa5
else:
+ # if there is an attenuator, make sure it is set to 0 before disconnecting
+ if self.attenuator:
+ self.attenuator.setAtten(0)
self.attenuator = None
return self.attenuator | attenuator set to 0 before disconnect | portfors-lab_sparkle | train | py |
0b8f15ce35467d8da4e3f3b3817f6ce90ad33e24 | diff --git a/aospy/utils.py b/aospy/utils.py
index <HASH>..<HASH> 100644
--- a/aospy/utils.py
+++ b/aospy/utils.py
@@ -145,6 +145,7 @@ def phalf_from_pfull(pfull, val_toa=0, val_sfc=0):
phalf[1:-1] = 0.5*(pfull[:-1] + pfull[1:])
return phalf
+
def pfull_from_sigma(bk, pk, ps, pfull_coord):
return pfull_from_phalf(phalf_from_sigma(bk, pk, ps), pfull_coord)
@@ -162,11 +163,7 @@ def dp_from_sigma(bk, pk, ps, pfull_coord):
def weight_by_delta(integrand, delta):
- """Multiply an xray.DataArray by some weights.
-
- In the xray world coordinates are aligned automatically (and newaxes are
- added if needed).
- """
+ """Multiply an xray.DataArray by some weights."""
return integrand*delta | Make more concise weight_by_delta docstring | spencerahill_aospy | train | py |
85413ebb0af49c465aa38ae4318214e6fe3c105d | diff --git a/src/flaskext/assets.py b/src/flaskext/assets.py
index <HASH>..<HASH> 100644
--- a/src/flaskext/assets.py
+++ b/src/flaskext/assets.py
@@ -102,6 +102,8 @@ class Environment(BaseEnvironment):
def absurl(self, fragment):
if self.config.get('url') is not None:
+ # If a manual base url is configured, skip any
+ # blueprint-based auto-generation.
return super(Environment, self).absurl(fragment)
else:
try:
@@ -110,9 +112,10 @@ class Environment(BaseEnvironment):
except (ValueError):
filename = fragment
query = ''
+
if hasattr(self.app, 'blueprints'):
try:
- blueprint, name = filename.split('/', 1)
+ blueprint, name = filename.split('/', 1)
self.app.blueprints[blueprint] # generates keyerror if no module
endpoint = '%s.static' % blueprint
filename = name
@@ -121,7 +124,7 @@ class Environment(BaseEnvironment):
else:
# Module support for Flask < 0.7
try:
- module, name = filename.split('/', 1)
+ module, name = filename.split('/', 1)
self.app.modules[module] # generates keyerror if no module
endpoint = '%s.static' % module
filename = name | Fix indentation, added a comment for clarification. | miracle2k_flask-assets | train | py |
e1bc8be1b789711b6e257f70eb2cba6319e3aeba | diff --git a/rails_event_store_active_record/spec/legacy_event_repository_spec.rb b/rails_event_store_active_record/spec/legacy_event_repository_spec.rb
index <HASH>..<HASH> 100644
--- a/rails_event_store_active_record/spec/legacy_event_repository_spec.rb
+++ b/rails_event_store_active_record/spec/legacy_event_repository_spec.rb
@@ -24,7 +24,7 @@ module RailsEventStoreActiveRecord
end
end
- let(:test_race_conditions_auto) { !ENV['DATABASE_URL'].include?("sqlite") }
+ let(:test_race_conditions_auto) { false }
let(:test_race_conditions_any) { !ENV['DATABASE_URL'].include?("sqlite") }
it_behaves_like :event_repository, LegacyEventRepository | Skip :auto strategy testing since legacy schema doesn't have Integer version & creating it doesn't make sense. | RailsEventStore_rails_event_store | train | rb |
7a86b10dae0f5dfb4da9e64f4ed0aa0714c69568 | diff --git a/cumulusci/core/flowrunner.py b/cumulusci/core/flowrunner.py
index <HASH>..<HASH> 100644
--- a/cumulusci/core/flowrunner.py
+++ b/cumulusci/core/flowrunner.py
@@ -415,7 +415,7 @@ class FlowCoordinator(object):
for step in self.steps:
self._run_step(step)
flow_name = f"'{self.name}' " if self.name else ""
- self.logger.info(f"Completed flow {flow_name} on org {org_config.name}successfully!")
+ self.logger.info(f"Completed flow {flow_name}on org {org_config.name} successfully!")
finally:
self.callbacks.post_flow(self) | Correct flow_name and org_config.name space | SFDO-Tooling_CumulusCI | train | py |
bbe6eb7fb8f7326a2ce4058650f1a16a08c8873a | diff --git a/fatbotslim/log.py b/fatbotslim/log.py
index <HASH>..<HASH> 100644
--- a/fatbotslim/log.py
+++ b/fatbotslim/log.py
@@ -19,15 +19,16 @@
import logging
-def create_logger(name):
+def create_logger(name, level='INFO'):
formatter = logging.Formatter(
- '%(asctime)s [%(name)s] %(levelname)s - %(message)s',
+ '%(levelname)s - %(asctime)s [%(name)s] %(message)s',
'%Y-%m-%d %H:%M:%S'
)
+ if not isinstance(logging.getLevelName(level), int):
+ level = 'INFO'
handler = logging.StreamHandler()
- handler.setLevel(logging.DEBUG)
handler.setFormatter(formatter)
logger = logging.getLogger(name)
- logger.setLevel(logging.DEBUG)
+ logger.setLevel(level)
logger.addHandler(handler)
return logger | allow to pass desired log level to the logger factory function | mdeous_fatbotslim | train | py |
d4daf356ddfa3de04bd34057194aba2da28f3ddc | diff --git a/app/models/agents/telegram_agent.rb b/app/models/agents/telegram_agent.rb
index <HASH>..<HASH> 100644
--- a/app/models/agents/telegram_agent.rb
+++ b/app/models/agents/telegram_agent.rb
@@ -20,7 +20,7 @@ module Agents
1. Obtain an `auth_token` by [creating a new bot](https://telegram.me/botfather).
2. Send a private message to your bot by visiting https://telegram.me/YourHuginnBot
- 3. Obtain your private `chat_id` from the recently started conversation by visiting https://api.telegram.org/bot<auth_token>/getUpdates
+ 3. Obtain your private `chat_id` from the recently started conversation by visiting https://api.telegram.org/bot`<auth_token>`/getUpdates
MD
def default_options | escape the otherwise omitted auth_token within the getUpdates address | huginn_huginn | train | rb |
f8a112f08e9349432cfa6bfc2da5d4275aceb11a | diff --git a/build/post-compile.js b/build/post-compile.js
index <HASH>..<HASH> 100644
--- a/build/post-compile.js
+++ b/build/post-compile.js
@@ -4,4 +4,4 @@ var print = require("sys").print,
src = require("fs").readFileSync(process.argv[2], "utf8");
// Previously done in sed but reimplemented here due to portability issues
-print(src.replace(/^(\s*\*\/)(.+)/m, "$1\n$2;"));
+print(src.replace(/^(\s*\*\/)(.+)/m, "$1\n$2;").replace(/([^;]*)$/, "$1;")); | Ensure that an endline is in place at the end of the minified jQuery file. | jquery_jquery | train | js |
6975452f11c8c1d556294c1d61ab46cc6a6a0cbb | diff --git a/svg/charts/graph.py b/svg/charts/graph.py
index <HASH>..<HASH> 100644
--- a/svg/charts/graph.py
+++ b/svg/charts/graph.py
@@ -10,11 +10,7 @@ The base module for `svg.charts` classes.
from operator import itemgetter
from itertools import islice
import functools
-try:
- import collections.abc
-except Exception:
- import collections
- collections.abc = collections
+import collections.abc
try:
import importlib.resources as importlib_resources
except ImportError: | Remove compatibility import; python <I> has the indicated module. | jaraco_svg.charts | train | py |
613c148ea0c621439f331676ab161fbff1991868 | diff --git a/store/secret/secret.go b/store/secret/secret.go
index <HASH>..<HASH> 100644
--- a/store/secret/secret.go
+++ b/store/secret/secret.go
@@ -120,6 +120,15 @@ func (s *Secret) Body() string {
return s.body
}
+// Data returns the data of a secret. Unless the body was valid YAML, it returns
+// an map
+func (s *Secret) Data() map[string]interface{} {
+ s.Lock()
+ defer s.Unlock()
+
+ return s.data
+}
+
// SetBody sets a new body possibly erasing an decoded YAML map
func (s *Secret) SetBody(b string) error {
s.Lock() | Add Data() method to retrieve all secret data (#<I>) | gopasspw_gopass | train | go |
9af73d53676ec904fa866f87d6bc9a2354b575da | diff --git a/lib/linked_list.js b/lib/linked_list.js
index <HASH>..<HASH> 100644
--- a/lib/linked_list.js
+++ b/lib/linked_list.js
@@ -22,11 +22,22 @@ LinkedList.prototype.append = function(node) {
LinkedList.prototype.insertAfter = function(node, newNode) {
newNode.prev = node;
- newNode.next = node.next;
- if (node.next !== null) {
- node.next.prev = newNode;
+ if (node) {
+ newNode.next = node.next;
+ if (node.next !== null) {
+ node.next.prev = newNode;
+ }
+ node.next = newNode;
+ if (node == this.last) {
+ this.last = newNode;
+ }
+ }
+ else {
+ // Insert after null implies inserting at position 0
+ newNode.next = this.first;
+ this.first.prev = newNode;
+ this.first = newNode;
}
- node.next = newNode;
this.length++;
}; | support inserting at 0, fix bug where last is not the last node if we use insert instead of append | quilljs_quill | train | js |
4b508c1db921a271349bc73f064f20c03c22c0db | diff --git a/parsl/executors/high_throughput/interchange.py b/parsl/executors/high_throughput/interchange.py
index <HASH>..<HASH> 100644
--- a/parsl/executors/high_throughput/interchange.py
+++ b/parsl/executors/high_throughput/interchange.py
@@ -361,8 +361,8 @@ class Interchange(object):
msg['reg_time'] = datetime.datetime.strptime(msg['reg_time'], "%Y-%m-%d %H:%M:%S")
reg_flag = True
except Exception:
- logger.warning("[MAIN] Got a non-json registration message from manager:{}".format(
- manager))
+ logger.warning("[MAIN] Got Exception reading registration message from manager:{}".format(
+ manager), exc_info=True)
logger.debug("[MAIN] Message :\n{}\n".format(message[0]))
# By default we set up to ignore bad nodes/registration messages. | fixing an exception message in interchange (#<I>) | Parsl_parsl | train | py |
cfdc284abee59057d6c6f7bc751d5648d8f8688e | diff --git a/tags_unit_test.go b/tags_unit_test.go
index <HASH>..<HASH> 100644
--- a/tags_unit_test.go
+++ b/tags_unit_test.go
@@ -47,6 +47,7 @@ func TestLookupImage(t *testing.T) {
}
defer os.RemoveAll(tmp)
store := mkTestTagStore(tmp, t)
+ defer store.graph.driver.Cleanup()
if img, err := store.LookupImage(testImageName); err != nil {
t.Fatal(err) | tags test: cleanup driver
If not we leak a devicemapper pool | moby_moby | train | go |
366fb4718e7bad951328391a0439b6d07852b8fe | diff --git a/__init__.py b/__init__.py
index <HASH>..<HASH> 100644
--- a/__init__.py
+++ b/__init__.py
@@ -0,0 +1,11 @@
+"""distutils
+
+The main package for the Python Module Distribtion Utilities. Normally
+used from a setup script as
+
+ from distutils.core import setup
+
+ setup (...)
+"""
+
+__rcsid__ = "$Id$" | Added docstring and RCS id (apparently some Windows tar extractors
ignore zero-byte files: grr...). | pypa_setuptools | train | py |
8dca90777de13a6756a5a3c5361aa9ff71efbb7a | diff --git a/lib/tickly/parser.rb b/lib/tickly/parser.rb
index <HASH>..<HASH> 100644
--- a/lib/tickly/parser.rb
+++ b/lib/tickly/parser.rb
@@ -41,7 +41,6 @@ module Tickly
private
- LAST_CHAR = -1..-1 # If we were 1.9 only we could use -1
TERMINATORS = ["\n", ";"]
ESC = 92.chr # Backslash (\)
QUOTES = %w( " ' )
@@ -150,9 +149,9 @@ module Tickly
c = io.read_one_char
if c.nil?
raise Error, "The IO ran out before the end of a literal string"
- elsif c == stop_quote && buf[LAST_CHAR] != ESC
+ elsif c == stop_quote && last_char(buf) != ESC
return buf
- elsif buf[LAST_CHAR] == ESC # Eat out the escape char
+ elsif last_char(buf) == ESC # Eat out the escape char
buf = buf[0..-2] # Trim the escape character at the end of the buffer
buf << c
else
@@ -163,5 +162,8 @@ module Tickly
return buf
end
+ def last_char(str)
+ RUBY_VERSION < '1.9' ? str[-1].chr : str[-1]
+ end
end
end
\ No newline at end of file | Using the char index with Fixnum#chr is faster on <I> than using a range | julik_tickly | train | rb |
fd2521826c35c0257734372846a28284d5afdb92 | diff --git a/lxd/cluster/heartbeat.go b/lxd/cluster/heartbeat.go
index <HASH>..<HASH> 100644
--- a/lxd/cluster/heartbeat.go
+++ b/lxd/cluster/heartbeat.go
@@ -213,7 +213,12 @@ func (g *Gateway) heartbeat(ctx context.Context, initialHeartbeat bool) {
}
raftNodes, err := g.currentRaftNodes()
- if err == ErrNotLeader {
+ if err != nil {
+ if errors.Cause(err) == ErrNotLeader {
+ return
+ }
+
+ logger.Error("Failed to get current raft members", log.Ctx{"err": err})
return
} | lxd/cluster/heartbeat: Keep error handling from g.currentRaftNodes together
Als if getting nodes fails for an unknown reason then this is logged as an error. | lxc_lxd | train | go |
8a9ca8f574f3748f48370a7274c113345894735e | diff --git a/src/reverse.js b/src/reverse.js
index <HASH>..<HASH> 100644
--- a/src/reverse.js
+++ b/src/reverse.js
@@ -54,7 +54,7 @@ function findFeature(geocoder, latitude, longitude, callback) {
function formatResult(rows) {
const row = rows[0]
- if (row === undefined) {
+ if (!row || row === undefined) {
return {}
} else {
return formatLocation(row) | Add bug fix for reverse lookup in rural/remote areas | lucaspiller_offline-geocoder | train | js |
8580bd6ce6cc73ceee912d386b50f4ba56f6394f | diff --git a/lib/twilio-ruby/rest/client.rb b/lib/twilio-ruby/rest/client.rb
index <HASH>..<HASH> 100644
--- a/lib/twilio-ruby/rest/client.rb
+++ b/lib/twilio-ruby/rest/client.rb
@@ -45,6 +45,8 @@ module Twilio
include Twilio::Util
include Twilio::REST::Utils
+ API_VERSION = '2010-04-01'
+
HTTP_HEADERS = {
'Accept' => 'application/json',
'Accept-Charset' => 'utf-8',
@@ -194,7 +196,7 @@ module Twilio
##
# Set up +account+ and +accounts+ attributes.
def set_up_subresources # :doc:
- @accounts = Twilio::REST::Accounts.new '/2010-04-01/Accounts', self
+ @accounts = Twilio::REST::Accounts.new "/#{API_VERSION}/Accounts", self
@account = @accounts.get @account_sid
end | pull the api version out into a variable | twilio_twilio-ruby | train | rb |
37dccebe4a80dd95c42e8a6cf88e16b1c75e420f | diff --git a/gruntfile.js b/gruntfile.js
index <HASH>..<HASH> 100644
--- a/gruntfile.js
+++ b/gruntfile.js
@@ -16,20 +16,20 @@ module.exports = function(grunt) {
/* Firefox */
{
browserName: "firefox",
- version: "54.0",
+ version: "61.0",
platform: "Windows 10"
},
/* Chrome */
{
browserName: "chrome",
platform: "Windows 10",
- version: "59.0"
+ version: "68.0"
},
/* Safari */
{
browserName: "safari",
- platform: "macOS 10.12",
- version: "10.0"
+ platform: "macOS 10.13",
+ version: "11.1"
}];
grunt.initConfig({ | Update browsers tested in to most-used versions
Safari <I> is no longer available on Sauce Labs, so was causing test failures.
Edge has not been upgraded, as Sauce Labs has a problem running the
tests on later versions. | Ortham_jed-gettext-parser | train | js |
deeaf077d783c32aba41fde8200792f002962951 | diff --git a/plan/physical_plan_builder.go b/plan/physical_plan_builder.go
index <HASH>..<HASH> 100644
--- a/plan/physical_plan_builder.go
+++ b/plan/physical_plan_builder.go
@@ -328,6 +328,9 @@ func addPlanToResponse(parent PhysicalPlan, info *physicalPlanInfo) *physicalPla
// enforceProperty creates a *physicalPlanInfo that satisfies the required property by adding
// sort or limit as the parent of the given physical plan.
func enforceProperty(prop *requiredProperty, info *physicalPlanInfo) *physicalPlanInfo {
+ if info.p == nil {
+ return info
+ }
if len(prop.props) != 0 {
items := make([]*ByItems, 0, len(prop.props))
for _, col := range prop.props {
diff --git a/plan/plan_test.go b/plan/plan_test.go
index <HASH>..<HASH> 100644
--- a/plan/plan_test.go
+++ b/plan/plan_test.go
@@ -511,6 +511,10 @@ func (s *testPlanSuite) TestCBO(c *C) {
best: "Index(t.c_d_e)[[<nil>,+inf]]->StreamAgg->Projection",
},
{
+ sql: "select count(*) from t group by e order by d limit 1",
+ best: "Table(t)->HashAgg->Projection->Sort + Limit(1) + Offset(0)->Trim",
+ },
+ {
sql: "select count(*) from t group by a",
best: "Table(t)->StreamAgg->Projection",
}, | plan: fix a bug about doing aggregation plan. (#<I>) | pingcap_tidb | train | go,go |
5d202a2b9902082e9a44a2e0f5834e941b2c36dc | diff --git a/maven-plugin/src/main/java/hudson/maven/reporters/MavenFingerprinter.java b/maven-plugin/src/main/java/hudson/maven/reporters/MavenFingerprinter.java
index <HASH>..<HASH> 100644
--- a/maven-plugin/src/main/java/hudson/maven/reporters/MavenFingerprinter.java
+++ b/maven-plugin/src/main/java/hudson/maven/reporters/MavenFingerprinter.java
@@ -81,7 +81,15 @@ public class MavenFingerprinter extends MavenReporter {
* Mojos perform different dependency resolution, so we need to check this for each mojo.
*/
public boolean postExecute(MavenBuildProxy build, MavenProject pom, MojoInfo mojo, BuildListener listener, Throwable error) throws InterruptedException, IOException {
- record(pom.getArtifacts(),used);
+ MavenProject parent = pom.getParent();
+ while (parent != null) {
+ // Parent Artifact contains no acual file, so we resolve against the local repository
+ Artifact parentArtifact = parent.getProjectBuildingRequest().getLocalRepository().find(parent.getArtifact());
+ record(parentArtifact, used);
+ parent = parent.getParent();
+ }
+
+ record(pom.getArtifacts(),used);
record(pom.getArtifact(),produced);
record(pom.getAttachedArtifacts(),produced);
record(pom.getGroupId(),pom.getFile(),produced); | [JENKINS-<I>] record fingerprints of ancestor poms | jenkinsci_jenkins | train | java |
11645be43d2845da65a4fbafde4cfa95780280c0 | diff --git a/tensorflow_probability/python/positive_semidefinite_kernels/internal/util_test.py b/tensorflow_probability/python/positive_semidefinite_kernels/internal/util_test.py
index <HASH>..<HASH> 100644
--- a/tensorflow_probability/python/positive_semidefinite_kernels/internal/util_test.py
+++ b/tensorflow_probability/python/positive_semidefinite_kernels/internal/util_test.py
@@ -136,5 +136,11 @@ class UtilTest(tf.test.TestCase):
self.evaluate(tf.gradients(safe_sqrt, xs)[0]),
rtol=1e-10)
+ def testSqrtWithFiniteGradsWithDynamicShape(self):
+ x = tf.placeholder_with_default([1.], shape=[None])
+ self.assertAllEqual(
+ self.evaluate(tf.gradients(tf.sqrt(x), x)),
+ self.evaluate(tf.gradients(util.sqrt_with_finite_grads(x), x)))
+
if __name__ == '__main__':
tf.test.main() | Add unit test for `sqrt_with_finite_grads` input with dynamic shape | tensorflow_probability | train | py |
a526ef9eae83c74b31303874df2f2d144dcc91d1 | diff --git a/test/shared/tests/app/integration/non_authenticated_test.rb b/test/shared/tests/app/integration/non_authenticated_test.rb
index <HASH>..<HASH> 100644
--- a/test/shared/tests/app/integration/non_authenticated_test.rb
+++ b/test/shared/tests/app/integration/non_authenticated_test.rb
@@ -17,7 +17,7 @@ module App
end
teardown do
- DummyApp.restore_all
+ DummyApp.restore_all true
end
should "behave as expected" do | Restoring the dummy app after running non_authenticated_test.rb | archan937_rich_cms | train | rb |
839e7217b1b948da554d0ab55da4dcceaecdedaa | diff --git a/generator/model.go b/generator/model.go
index <HASH>..<HASH> 100644
--- a/generator/model.go
+++ b/generator/model.go
@@ -13,7 +13,20 @@ import (
"github.com/go-swagger/go-swagger/swag"
)
-// GenerateDefinition generates a model file for a schema defintion
+// GenerateDefinition generates a model file for a schema defintion.
+//
+// defintion of primitive => type alias/name
+// defintion of array => type alias/name
+// definition of map => type alias/name
+// definition of object with properties => struct
+// definition of ref => type alias/name
+// object with only additional properties => map[string]T
+// object with additional properties and properties => custom serializer
+// schema with schema array in items => tuple (struct with properties, custom serializer)
+// schema with all of => struct
+// * all of schema with ref => embedded value
+// * all of schema with properties => properties are included in struct
+// * adding an all of schema with just "x-isnullable": true turns the schema into a pointer
func GenerateDefinition(modelNames []string, includeModel, includeValidator bool, opts GenOpts) error {
// Load the spec
specPath, specDoc, err := loadSpec(opts.Spec) | adds comment about type mapping from schema to go | go-swagger_go-swagger | train | go |
4aa74cc7f892564a66f16212717a43b77e2cd0dc | diff --git a/lib/Page.js b/lib/Page.js
index <HASH>..<HASH> 100644
--- a/lib/Page.js
+++ b/lib/Page.js
@@ -59,11 +59,8 @@ class Page extends EventEmitter {
this._frameManager = frameManager;
this._networkManager = networkManager;
this._screenDPI = screenDPI;
- this._extraHeaders = {};
/** @type {!Map<string, function>} */
this._inPageCallbacks = new Map();
- /** @type {?function(!InterceptedRequest)} */
- this._requestInterceptor = null;
/** @type {?Promise<number>} */
this._rootNodeIdPromise = null;
@@ -264,7 +261,7 @@ class Page extends EventEmitter {
* @return {!Promise<boolean>}
*/
navigate(url, options) {
- return new Navigator(this._client, options).navigate(url, this._extraHeaders.referer);
+ return new Navigator(this._client, options).navigate(url, this._networkManager.httpHeaders().referer);
}
/** | cleanup Page.js from network leftovers after a<I>a<I>d | GoogleChrome_puppeteer | train | js |
c9d1822cb84938042aafe070a529360ae8d1c696 | diff --git a/tohu/generators.py b/tohu/generators.py
index <HASH>..<HASH> 100644
--- a/tohu/generators.py
+++ b/tohu/generators.py
@@ -472,14 +472,15 @@ class HashDigest(CharString):
raise ValueError(f"Length must be an even number if as_bytes=True (got: length={length})")
chars = "0123456789ABCDEF"
self.length = length
- self._maybe_convert_type = bytes.fromhex if as_bytes else _identity
+ self.as_bytes = as_bytes
+ self._maybe_convert_type = bytes.fromhex if self.as_bytes else _identity
super().__init__(length=length, chars=chars)
def __next__(self):
return self._maybe_convert_type(super().__next__())
def _spawn(self):
- return HashDigest(length=self.length)
+ return HashDigest(length=self.length, as_bytes=self.as_bytes)
class GeolocationPair(TupleGenerator): | Propagate value of as_bytes when spawning generator | maxalbert_tohu | train | py |
e475a68d4110bf761a906c076153ae04cb36a4eb | diff --git a/tests/DispatcherTest.php b/tests/DispatcherTest.php
index <HASH>..<HASH> 100644
--- a/tests/DispatcherTest.php
+++ b/tests/DispatcherTest.php
@@ -166,6 +166,17 @@ namespace {
}
}
+ public function testExceptMethod()
+ {
+ $methods = ['put', 'patch'];
+
+ $this->dispatcher->except($methods, '/test', [$this, 'staticRouteAction']);
+
+ foreach (array_diff($this->methods, $methods) as $method) {
+ $this->assertTrue($this->dispatcher->dispatch($method, '/test'));
+ }
+ }
+
public function testDinamicRoutePattern()
{
$this->dispatcher->get('/{test:[0-9]+}', [$this, 'dinamicRouteAction']); | Added code coverage for dispatcher's except method | codeburnerframework_router | train | php |
69af8b8b2b888f49735ad88d5b8b32afaf6e0167 | diff --git a/tests/fixtures/TeamCityFormatter.php b/tests/fixtures/TeamCityFormatter.php
index <HASH>..<HASH> 100644
--- a/tests/fixtures/TeamCityFormatter.php
+++ b/tests/fixtures/TeamCityFormatter.php
@@ -43,7 +43,7 @@ class TeamCityFormatter extends JsonFormatter
/**
* @inheritDoc
*/
- public function format(array $record)
+ public function format(array $record): string
{
if (isset($record['context']['responseBody']) &&
isset($record['context']['responseHeaders']['content-type']) && | test(TC): fix TC formatter | commercetools_commercetools-php-sdk | train | php |
d2badea5aae571b329959c4d2b352db03f7b5202 | diff --git a/pysoundfile.py b/pysoundfile.py
index <HASH>..<HASH> 100644
--- a/pysoundfile.py
+++ b/pysoundfile.py
@@ -758,7 +758,7 @@ class SoundFile(object):
# returned as numpy array.
from warnings import warn
warn('indexing has been deprecated and will be removed in the future',
- DeprecationWarning)
+ Warning)
second_frame = None
if isinstance(frame, tuple):
if len(frame) > 2:
@@ -781,7 +781,7 @@ class SoundFile(object):
# Both open slice bounds and negative values are allowed.
from warnings import warn
warn('indexing has been deprecated and will be removed in the future',
- DeprecationWarning)
+ Warning)
start, stop = self._get_slice_bounds(frame)
if stop - start != len(data):
raise IndexError( | elevated DeprecationWarning to plain Warning
since DeprecationWarnings are by default not visible to users, and this
is a user-visible feature. | bastibe_SoundFile | train | py |
5e6f1838c9ac0b4e89d45bf3aaa589697ebb825b | diff --git a/src/test/java/io/dropwizard/bundles/assets/AssetServletTest.java b/src/test/java/io/dropwizard/bundles/assets/AssetServletTest.java
index <HASH>..<HASH> 100644
--- a/src/test/java/io/dropwizard/bundles/assets/AssetServletTest.java
+++ b/src/test/java/io/dropwizard/bundles/assets/AssetServletTest.java
@@ -217,7 +217,7 @@ public class AssetServletTest {
assertThat(firstEtag)
.isEqualTo("\"174a6dd7325e64c609eab14ab1d30b86\"");
assertThat(secondEtag)
- .isEqualTo("\"26ae56a90cd78c6720c544707d22110b\"");
+ .isEqualTo("\"7a13c3f9f2be8379b5a2fb77a85e1d10\"");
}
@Test | Changed second ETag to make failing test assignsDifferentETagsForDifferentFiles (AssetServletTest) pass | dropwizard-bundles_dropwizard-configurable-assets-bundle | train | java |
eb7c663a446ab49629f10b787ebd8aa7df3d7013 | diff --git a/blockstack/lib/nameset/virtualchain_hooks.py b/blockstack/lib/nameset/virtualchain_hooks.py
index <HASH>..<HASH> 100644
--- a/blockstack/lib/nameset/virtualchain_hooks.py
+++ b/blockstack/lib/nameset/virtualchain_hooks.py
@@ -373,6 +373,7 @@ def db_check( block_id, new_ops, op, op_data, txid, vtxindex, checked_ops, db_st
os.abort()
else:
+ db_state.log_reject(block_id, vtxindex, op, op_data, do_print=False)
accept = False
return accept
@@ -495,6 +496,14 @@ def db_save( block_height, consensus_hash, ops_hash, accepted_ops, virtualchain_
log.fatal("Failed to vest accounts at {}+1".format(block_height))
os.abort()
+ # record all rejected transactions
+ try:
+ db_state.store_rejected(block_height)
+ except Exception as e:
+ log.exception(e)
+ log.fatal("Failed to log rejected txs")
+ os.abort()
+
try:
# flush the database
db_state.commit_finished(block_height) | each block, store the rejected transactions we found | blockstack_blockstack-core | train | py |
c44834735992439ee00fd374ed7e913093471ccd | diff --git a/src/get_vanguard_au_prices.py b/src/get_vanguard_au_prices.py
index <HASH>..<HASH> 100644
--- a/src/get_vanguard_au_prices.py
+++ b/src/get_vanguard_au_prices.py
@@ -3,14 +3,14 @@
The script downloads fund prices from Vanguard Australia site.
Retail Funds
Vanguard Diversified Bond Index Fund VAN0101AU 8123
+Vanguard International Shares Index Fund (Hedged) VAN0107AU 8146
Vanguard Australian Property Securities Index Fund VAN0012AU 8147
Vanguard Australian Shares High Yield Fund VAN0017AU 8148
-Vanguard International Shares Index Fund (Hedged) VAN0107AU 8146
"""
import requests
import json
-user_funds = ["8123", "8147", "8148", "8146"]
+user_funds = ["8123", "8146", "8148", "8147"]
def get_json_prices():
""" | reordering the funds alphabetically | MisterY_gnucash-portfolio | train | py |
e0d80a5a3678313748ee59b7dae5b6b8247352d8 | diff --git a/angr/path.py b/angr/path.py
index <HASH>..<HASH> 100644
--- a/angr/path.py
+++ b/angr/path.py
@@ -183,6 +183,10 @@ class Path(object):
def _make_sim_run(self):
self._run = self._project.sim_run(self.state, stmt_whitelist=self.stmt_whitelist, last_stmt=self.last_stmt, jumpkind=self.jumpkind)
+ def make_sim_run_with_size(self, size):
+ self._run = self._project.sim_run(self.state, stmt_whitelist=self.stmt_whitelist, last_stmt=self.last_stmt,
+ jumpkind=self.jumpkind, max_size=size)
+
@property
def next_run(self):
if self._run is None: | Improvement in Path: support making a next_run with specific sizes. | angr_angr | train | py |
f9cefedf0fb01f524d34d714a86c40cb73d5340b | diff --git a/lib/reality/zapwhite.rb b/lib/reality/zapwhite.rb
index <HASH>..<HASH> 100644
--- a/lib/reality/zapwhite.rb
+++ b/lib/reality/zapwhite.rb
@@ -238,6 +238,7 @@ module Reality
attributes.text_rule('*.xsd')
attributes.text_rule('*.xsl')
attributes.text_rule('*.wsdl')
+ attributes.text_rule('*.toml')
# Build system defaults
attributes.text_rule('buildfile') | Add support for toml configuration files | realityforge_zapwhite | train | rb |
5f892ddc11d6478b58592b5833c6dddaafbdd7d6 | diff --git a/lib/gscraper/search/result.rb b/lib/gscraper/search/result.rb
index <HASH>..<HASH> 100644
--- a/lib/gscraper/search/result.rb
+++ b/lib/gscraper/search/result.rb
@@ -39,6 +39,14 @@ module GScraper
end
#
+ # Fetches the page of the result. If a _block_ is given it will be
+ # passed the page.
+ #
+ def page(&block)
+ get_page(@url,&block)
+ end
+
+ #
# Create a new Query for results that are similar to the Result. If
# a _block_ is given, it will be passed the newly created Query
# object. | * Added the Result#page method. | postmodern_gscraper | train | rb |
080bcf2ac5edbc6bffff4b847bb4e4c830a1f408 | diff --git a/planner/core/rule_inject_extra_projection.go b/planner/core/rule_inject_extra_projection.go
index <HASH>..<HASH> 100644
--- a/planner/core/rule_inject_extra_projection.go
+++ b/planner/core/rule_inject_extra_projection.go
@@ -66,9 +66,10 @@ func wrapCastForAggFuncs(sctx sessionctx.Context, aggFuncs []*aggregation.AggFun
}
}
-// InjectProjBelowAgg injects a ProjOperator below AggOperator. If all the args
-// of `aggFuncs`, and all the item of `groupByItems` are columns or constants,
-// we do not need to build the `proj`.
+// InjectProjBelowAgg injects a ProjOperator below AggOperator. So that All
+// scalar functions in aggregation may speed up by vectorized evaluation in
+// the `proj`. If all the args of `aggFuncs`, and all the item of `groupByItems`
+// are columns or constants, we do not need to build the `proj`.
func InjectProjBelowAgg(aggPlan PhysicalPlan, aggFuncs []*aggregation.AggFuncDesc, groupByItems []expression.Expression) PhysicalPlan {
hasScalarFunc := false | planner: add commemt to explain why we need InjectProjBelowAgg (#<I>) | pingcap_tidb | train | go |
f09658f66b403ace0c8236585172cb7a691cc698 | diff --git a/lib/chef/chef_fs/file_system/multiplexed_dir.rb b/lib/chef/chef_fs/file_system/multiplexed_dir.rb
index <HASH>..<HASH> 100644
--- a/lib/chef/chef_fs/file_system/multiplexed_dir.rb
+++ b/lib/chef/chef_fs/file_system/multiplexed_dir.rb
@@ -41,7 +41,7 @@ class Chef
child_entry = dir.child(name)
if child_entry.exists?
if result
- Chef::Log.debug("Child with name '#{child_entry.name}' found in multiple directories: #{result.parent.path_for_printing} and #{child_entry.parent.path_for_printing}") unless seen[child.name].path_for_printing == child.path_for_printing
+ Chef::Log.debug("Child with name '#{child_entry.name}' found in multiple directories: #{result.parent.path_for_printing} and #{child_entry.parent.path_for_printing}")
else
result = child_entry
end | Fix for #<I> - Remove bad variable reference
A previous change introduced what appears to be a copy/paste bug. A guard condition from the children
method was used in the make_child_entry incorrectly.
This removes the reference to the non-existent variable `seen` | chef_chef | train | rb |
f7f91fe7e39224c0bbf6e0f26620ba6a5b1479ef | diff --git a/themes/RedMallee/Menu/SubSubHorizontal/Component.js b/themes/RedMallee/Menu/SubSubHorizontal/Component.js
index <HASH>..<HASH> 100644
--- a/themes/RedMallee/Menu/SubSubHorizontal/Component.js
+++ b/themes/RedMallee/Menu/SubSubHorizontal/Component.js
@@ -1,4 +1,4 @@
-Kwf.onElementReady('.redMalleeMenuSubHorizontal', function(el) {
+Kwf.onElementReady('.redMalleeMenuSubSubHorizontal', function(el) {
var menu = el.child('.menu').dom;
if (menu.scrollWidth - $(menu).innerWidth() == 0) return false;
$(menu).before('<a class="arrowLeft"></a>').before('<a class="arrowRight"></a>'); | use right class for subSubMenuHorizontal scrolling animation on ipad | koala-framework_koala-framework | train | js |
69bedbba3e4fe79d0350dd63f16809c79b472433 | diff --git a/java/client/test/org/openqa/selenium/ProxySettingTest.java b/java/client/test/org/openqa/selenium/ProxySettingTest.java
index <HASH>..<HASH> 100644
--- a/java/client/test/org/openqa/selenium/ProxySettingTest.java
+++ b/java/client/test/org/openqa/selenium/ProxySettingTest.java
@@ -235,6 +235,7 @@ public class ProxySettingTest extends JUnit4TestBase {
public void handleHttpRequest(
HttpRequest request, HttpResponse response, HttpControl control) {
response.charset(Charsets.UTF_8)
+ .header(HttpHeaders.CONTENT_TYPE, "text/html")
.content(responseHtml)
.end();
} | Set the content type in proxy settings test.
Without this, the htmlunit driver believes we've returned a page of text,
and then can't find elements. | SeleniumHQ_selenium | train | java |
278bd4509ab1cd25eab214ae5ed63473c0148af1 | diff --git a/test/integration/containerized/test_cli_containerized.py b/test/integration/containerized/test_cli_containerized.py
index <HASH>..<HASH> 100644
--- a/test/integration/containerized/test_cli_containerized.py
+++ b/test/integration/containerized/test_cli_containerized.py
@@ -6,12 +6,14 @@ import pytest
HERE = os.path.abspath(os.path.dirname(__file__))
+@pytest.mark.serial
def test_module_run(cli, container_runtime_available):
if not container_runtime_available:
pytest.skip('container runtime(s) not available')
cli(['-m', 'ping','--hosts', 'localhost', 'run', os.path.join(HERE, 'priv_data')])
+@pytest.mark.serial
def test_playbook_run(cli, container_runtime_available):
if not container_runtime_available:
pytest.skip('container runtime(s) not available') | run containerized cli tests serially | ansible_ansible-runner | train | py |
Subsets and Splits
Java Commits in Train Set
Queries for all entries where the diff_languages column is 'java', providing a filtered dataset but without deeper analysis.
Java Commits Test Data
Returns a subset of 5000 entries from the dataset where the programming language difference is Java, providing basic filtering for exploration.
Java Commits Sample
Retrieves the first 1,000 records where the 'diff_languages' column is 'java', providing limited insight into the specific data entries.
Java Commits Validation Sample
Retrieves a sample of entries from the validation dataset where the diff language is Java, providing limited insight into specific Java-related data points.
Java Commits in Validation
This query retrieves a limited sample of entries from the validation dataset where the programming language difference is Java, providing basic filtering with minimal insight.
Java Commits Sample
This query retrieves a sample of 100 records where the 'diff_languages' is 'java', providing basic filtering but limited analytical value.
Java Commits Sample
Retrieves 100 samples where the language difference is Java, providing basic filtering but minimal analytical value.
Java Commits Sample
Retrieves 10 samples where the diff_languages column is 'java', providing basic examples of data entries with this specific language.
Java Commits Validation Sample
Retrieves 1,000 records where the diff language is marked as Java, providing a snapshot of that specific subset, though limited to raw data.
Java Commits Sample
This query retrieves 1000 random samples from the dataset where the programming language is Java, offering limited insight beyond raw data.