hash stringlengths 40 40 | diff stringlengths 131 26.7k | message stringlengths 7 694 | project stringlengths 5 67 | split stringclasses 1 value | diff_languages stringlengths 2 24 |
|---|---|---|---|---|---|
27e1d1f5614200a3dad66a899e0dbc3552688352 | diff --git a/src/Models/BaseEmailServiceConfigModel.php b/src/Models/BaseEmailServiceConfigModel.php
index <HASH>..<HASH> 100644
--- a/src/Models/BaseEmailServiceConfigModel.php
+++ b/src/Models/BaseEmailServiceConfigModel.php
@@ -43,6 +43,9 @@ class BaseEmailServiceConfigModel extends BaseServiceConfigModel
throw new BadRequestException('Web service parameters must be an array.');
}
EmailServiceParameterConfig::setConfig($id, $params);
+ unset($config['parameters']);
}
+
+ parent::setConfig($id, $config);
}
}
\ No newline at end of file | DF-<I> #resolve #comment Fixed SMTP as well other could based email service config not saving issue | dreamfactorysoftware_df-core | train | php |
63847d5e43eef0b93549accb58487fe3fad2d741 | diff --git a/apiserver/facades/client/highavailability/highavailability.go b/apiserver/facades/client/highavailability/highavailability.go
index <HASH>..<HASH> 100644
--- a/apiserver/facades/client/highavailability/highavailability.go
+++ b/apiserver/facades/client/highavailability/highavailability.go
@@ -232,6 +232,14 @@ func validatePlacementForSpaces(st *state.State, spaces *[]string, placement []s
for _, v := range placement {
p, err := instance.ParsePlacement(v)
if err != nil {
+ if err == instance.ErrPlacementScopeMissing {
+ // Where an unscoped placement is not parsed as a machine ID,
+ // such as for a MaaS node name, just allow it through.
+ // TODO (manadart 2018-03-27): Possible work at the provider
+ // level to accommodate placement and space constraints during
+ // instance pre-check may be entertained in the future.
+ continue
+ }
return errors.Annotate(err, "parsing placement")
}
if p.Directive == "" { | Allows unscoped placement directives to proceed without parsing error or space validation. | juju_juju | train | go |
285a9a88846fefa8eb9a0d0433e5a3f7efa0ce75 | diff --git a/xero/basemanager.py b/xero/basemanager.py
index <HASH>..<HASH> 100644
--- a/xero/basemanager.py
+++ b/xero/basemanager.py
@@ -326,6 +326,7 @@ class BaseManager(object):
elif parts[1] in ["isnull"]:
sign = '=' if value else '!'
return '%s%s=null' % (parts[0], sign)
+ field = field.replace('_', '.')
return fmt % (
field,
get_filter_params(key, value) | Fix incorrect field parsing with filters | freakboy3742_pyxero | train | py |
415449d88064f803e2bae1826bfaad15ec058ffa | diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100755
--- a/setup.py
+++ b/setup.py
@@ -21,7 +21,7 @@ class tests(Command):
def run(self):
import subprocess
import sys
- for t in glob.glob('unitest*.py'):
+ for t in glob.glob('unitest.py'):
ret = subprocess.call([sys.executable, t]) != 0
if ret != 0:
raise SystemExit(ret) | API tests + requests in the setup.py script | nicolargo_glances | train | py |
2d1b06b4f41f9f4eff6cf4de3b74620300a191e5 | diff --git a/src/language/lexer.js b/src/language/lexer.js
index <HASH>..<HASH> 100644
--- a/src/language/lexer.js
+++ b/src/language/lexer.js
@@ -187,9 +187,9 @@ function printCharCode(code) {
/**
* Gets the next token from the source starting at the given position.
*
- * This skips over whitespace and comments until it finds the next lexable
- * token, then lexes punctuators immediately or calls the appropriate helper
- * function for more complicated tokens.
+ * This skips over whitespace until it finds the next lexable token, then lexes
+ * punctuators immediately or calls the appropriate helper function for more
+ * complicated tokens.
*/
function readToken(lexer: Lexer<*>, prev: Token): Token {
const source = lexer.source;
@@ -362,8 +362,7 @@ function unexpectedCharacterMessage(code) {
/**
* Reads from body starting at startPosition until it finds a non-whitespace
- * or commented character, then returns the position of that character for
- * lexing.
+ * character, then returns the position of that character for lexing.
*/
function positionAfterWhitespace(
body: string, | Fix comments to reflex current behaviour (#<I>)
Lexer: Fix comments to reflex current behaviour | graphql_graphql-js | train | js |
3170f3fa2b1bca3b7faeefc615f10a5491e20c08 | diff --git a/pykube/http.py b/pykube/http.py
index <HASH>..<HASH> 100644
--- a/pykube/http.py
+++ b/pykube/http.py
@@ -6,7 +6,7 @@ import yaml
from .exceptions import KubernetesError
-class APIClient(object):
+class HTTPClient(object):
def __init__(self, kubeconfig_path, cluster_name, user_name, url=None, namespace="default", version="v1"):
self.kubeconfig_path = kubeconfig_path | renamed APIClient | kelproject_pykube | train | py |
08dabe0d6b82701c48022e5d7a758d29bfaea154 | diff --git a/tests.py b/tests.py
index <HASH>..<HASH> 100644
--- a/tests.py
+++ b/tests.py
@@ -1826,6 +1826,9 @@ class FieldTypeTests(BasePeeweeTestCase):
self.assertSQLEqual(null_lookup.sql(), ('SELECT * FROM nullmodel WHERE char_field IS NULL', []))
self.assertEqual(list(null_lookup), [nm])
+
+ null_lookup = NullModel.select().where(~Q(char_field__is=None))
+ self.assertSQLEqual(null_lookup.sql(), ('SELECT * FROM nullmodel WHERE NOT char_field IS NULL', []))
non_null_lookup = NullModel.select().where(char_field='')
self.assertSQLEqual(non_null_lookup.sql(), ('SELECT * FROM nullmodel WHERE char_field = ?', [''])) | Showing how to do a not null lookup | coleifer_peewee | train | py |
652c99b471ebb2d2624b7605cb0292d8c21ae9a9 | diff --git a/manticore/platforms/linux.py b/manticore/platforms/linux.py
index <HASH>..<HASH> 100644
--- a/manticore/platforms/linux.py
+++ b/manticore/platforms/linux.py
@@ -1753,9 +1753,9 @@ class Linux(Platform):
self.sched()
self.running.remove(procid)
#self.procs[procid] = None
- logger.debug("EXIT_GROUP PROC_%02d %s", procid, error_code)
+ logger.debug("EXIT_GROUP PROC_%02d %s", procid, ctypes.c_int32(error_code).value)
if len(self.running) == 0 :
- raise TerminateState("Program finished with exit status: %r" % error_code, testcase=True)
+ raise TerminateState("Program finished with exit status: %r" % ctypes.c_int32(error_code).value, testcase=True)
return error_code
def sys_ptrace(self, request, pid, addr, data): | Manticore prints linux ret code as uint instead of int (#<I>)
* Fixing raise issue #<I>
* syncing git
* Fix Bug #<I>
* syncing
* removed all binaries
* missed one file | trailofbits_manticore | train | py |
843e825cbe706da61ee4b30e795a31edd3e791e0 | diff --git a/lib/chef/knife/container_docker_build.rb b/lib/chef/knife/container_docker_build.rb
index <HASH>..<HASH> 100644
--- a/lib/chef/knife/container_docker_build.rb
+++ b/lib/chef/knife/container_docker_build.rb
@@ -190,7 +190,12 @@ class Chef
# Run a shell command from the Docker Context directory
#
def run_command(cmd)
- shell_out(cmd, cwd: docker_context)
+ Open3.popen2e(cmd, chdir: docker_context) do |stdin, stdout_err, wait_thr|
+ while line = stdout_err.gets
+ puts line
+ end
+ wait_thr.value.to_i
+ end
end
# | in order to show the output of commands that are run I am switching run_command to use Open3. | chef-boneyard_knife-container | train | rb |
34e1d645d3598bff8ee50985df72c5d2d18396c5 | diff --git a/lib/resource/index.js b/lib/resource/index.js
index <HASH>..<HASH> 100644
--- a/lib/resource/index.js
+++ b/lib/resource/index.js
@@ -350,7 +350,7 @@ Resource = new Class({
, paginator
;
- objects = objects || [];
+ objects = objects || '[]';
objects = this.sort( JSON.parse( objects ) );
debug( 'paging' );
paginator = new this.options.paginator({ | make sure default data from _get_list is a json string.
can't parse an object | node-tastypie_tastypie | train | js |
29a84d0253ef2fee3d0ef1698cb87e8c8905798b | diff --git a/pymagicc/io.py b/pymagicc/io.py
index <HASH>..<HASH> 100644
--- a/pymagicc/io.py
+++ b/pymagicc/io.py
@@ -963,7 +963,8 @@ class _TempOceanLayersOutReader(_Reader):
class _BinData(object):
def __init__(self, filepath):
# read the entire file into memory
- self.data = open(filepath, "rb").read()
+ with open(filepath, "rb") as fh:
+ self.data = fh.read()
self.data = memoryview(self.data)
self.pos = 0 | Ensure that the open file is explicitly closed once read | openclimatedata_pymagicc | train | py |
f6e22772246c4e88e02afba3a491b1055643f950 | diff --git a/collatex-pythonport/ClusterShell/RangeSet.py b/collatex-pythonport/ClusterShell/RangeSet.py
index <HASH>..<HASH> 100644
--- a/collatex-pythonport/ClusterShell/RangeSet.py
+++ b/collatex-pythonport/ClusterShell/RangeSet.py
@@ -30,6 +30,8 @@
# The fact that you are presently reading this means that you have had
# knowledge of the CeCILL-C license and that you accept its terms.
+# This class has been modified to make it Python 3 compliant by Ronald Haentjens Dekker
+
"""
Cluster range set module.
@@ -537,7 +539,13 @@ class RangeSet(set):
(I.e. all elements that are in both sets.)
"""
- return self._wrap_set_op(set.intersection, other)
+ #NOTE: This is a work around
+ # Python 3 return as the result of set.intersection a new set instance.
+ # Python 2 however returns as a the result a ClusterShell.RangeSet.RangeSet instance.
+ # ORIGINAL CODE: return self._wrap_set_op(set.intersection, other)
+ copy = self.copy()
+ copy.intersection_update(other)
+ return copy
def __xor__(self, other):
"""Return the symmetric difference of two RangeSets as a new RangeSet. | Implemented workaround to make intersection method of RangeSet class
work correctly with Python 3. | interedition_collatex | train | py |
d1327c6b0fc4a02afb84f5ca5fe915f206adedb1 | diff --git a/chisel/app.py b/chisel/app.py
index <HASH>..<HASH> 100644
--- a/chisel/app.py
+++ b/chisel/app.py
@@ -196,13 +196,15 @@ class Context(object):
self.log = logging.getLoggerClass()('')
self.log.setLevel(self.app.log_level)
wsgi_errors = environ.get('wsgi.errors')
- if wsgi_errors is not None:
+ if wsgi_errors is None:
+ handler = logging.NullHandler()
+ else:
handler = logging.StreamHandler(wsgi_errors)
- if hasattr(self.app.log_format, '__call__'):
- handler.setFormatter(self.app.log_format(self))
- else:
- handler.setFormatter(logging.Formatter(self.app.log_format))
- self.log.addHandler(handler)
+ if hasattr(self.app.log_format, '__call__'):
+ handler.setFormatter(self.app.log_format(self))
+ else:
+ handler.setFormatter(logging.Formatter(self.app.log_format))
+ self.log.addHandler(handler)
def start_response(self, status, headers):
return self._start_response(status, list(itertools.chain(headers, self.headers))) | Add null handler when wsgi.errors is None | craigahobbs_chisel | train | py |
4243e1c97c3e3086633d6837798442d52f00e049 | diff --git a/lib/rester/client.rb b/lib/rester/client.rb
index <HASH>..<HASH> 100644
--- a/lib/rester/client.rb
+++ b/lib/rester/client.rb
@@ -96,11 +96,10 @@ module Rester
end
@_requester.on_close do
- logger.info("circuit closed")
+ _log_with_correlation_id(:info, "circuit closed for #{_producer_name}")
end
else
@_requester = proc { |*args| _request(*args) }
- _log_with_correlation_id(:info, "circuit closed for #{_producer_name}")
end
end | [#<I>] Fix merge mistake | payout_rester | train | rb |
9f798ef2c3001627c598a5ca9cb18ad3be06af7a | diff --git a/src/hdnode.js b/src/hdnode.js
index <HASH>..<HASH> 100644
--- a/src/hdnode.js
+++ b/src/hdnode.js
@@ -243,6 +243,7 @@ HDNode.prototype.derive = function(index) {
}
// Private parent key -> private child key
+ var hd
if (this.privKey) {
// ki = parse256(IL) + kpar (mod n)
var ki = pIL.add(this.privKey.D).mod(ecparams.getN()) | HDWallet: adds missing hd declaration
Only a problem if "use strict" is enforced | BitGo_bitgo-utxo-lib | train | js |
e8ed9b73f3ddab8608097655255bb7f9fbecd918 | diff --git a/examples/cert_checker.py b/examples/cert_checker.py
index <HASH>..<HASH> 100644
--- a/examples/cert_checker.py
+++ b/examples/cert_checker.py
@@ -47,13 +47,13 @@ if __name__ == '__main__':
# Check if the certificate subject has any spoofed domains
subject = row['certificate.subject']
- domain = subject[3:] # Just chopping off the 'CN=' part
if any([domain in subject for domain in spoofed_domains]):
print('\n<<< Suspicious Certificate Found >>>')
pprint(row)
# Make a Virus Total query with the spoofed domain (just for fun)
- results = vtq.query_url(domain)
+ query_domain = subject[3:] # Just chopping off the 'CN=' part
+ results = vtq.query_url(query_domain)
if results.get('positives', 0) >= 2: # At least two hits
print('\n<<< Virus Total Query >>>')
pprint(results) | cleaning up some goofy code, thanks <URL> | SuperCowPowers_bat | train | py |
42ac1879326168b806cb49ee5780cfd23f34fca3 | diff --git a/peer.go b/peer.go
index <HASH>..<HASH> 100644
--- a/peer.go
+++ b/peer.go
@@ -114,8 +114,6 @@ type peer struct {
started int32
disconnect int32
- cfg *Config
-
// The following fields are only meant to be used *atomically*
bytesReceived uint64
bytesSent uint64
@@ -130,6 +128,8 @@ type peer struct {
// our last ping message. To be used atomically.
pingLastSend int64
+ cfg *Config
+
connReq *connmgr.ConnReq
conn net.Conn | peer: fix struct alignment
Integers used atomically MUST be aligned properly, otherwise the
sync library will crash on purpose. Therefore non-aligned struct
members must come later. | lightningnetwork_lnd | train | go |
46d121f5a388993311e69c7796810bcb86378e8e | diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -9,7 +9,7 @@ except ImportError:
else:
if not os.path.exists(swagger_ui.STATIC_DIR):
swagger_ui.setup_ui('2.2.10')
- swagger_ui.setup_ui('3.14.1')
+ swagger_ui.setup_ui('3.17.2')
with open(os.path.join(os.path.abspath(os.path.dirname(
diff --git a/swagger_ui.py b/swagger_ui.py
index <HASH>..<HASH> 100755
--- a/swagger_ui.py
+++ b/swagger_ui.py
@@ -22,6 +22,7 @@ TEMPLATES_DIR = os.path.join(DIR, PACKAGE, 'templates', 'swagger-ui')
PREFIX = '{{static_prefix}}'
REPLACE_STRINGS = [
('http://petstore.swagger.io/v2/swagger.json', '{{url}}'),
+ ('https://petstore.swagger.io/v2/swagger.json', '{{url}}'),
('href="images', 'href="' + PREFIX + 'images'),
('src="images', 'src="' + PREFIX + 'images'),
("href='css", "href='" + PREFIX + 'css'), | bump swagger-ui to <I> | aamalev_aiohttp_apiset | train | py,py |
9e8577e7569ff9a29af49964d6942945fb2e8530 | diff --git a/app/Module/SlideShowModule.php b/app/Module/SlideShowModule.php
index <HASH>..<HASH> 100644
--- a/app/Module/SlideShowModule.php
+++ b/app/Module/SlideShowModule.php
@@ -27,6 +27,7 @@ use Illuminate\Database\Query\JoinClause;
use Illuminate\Support\Str;
use Psr\Http\Message\ServerRequestInterface;
use function app;
+use function in_array;
/**
* Class SlideShowModule
@@ -87,6 +88,11 @@ class SlideShowModule extends AbstractModule implements ModuleBlockInterface
$media_types = array_filter($media_types);
+ // The type "other" includes media without a type.
+ if (in_array('other', $media_types, true)) {
+ $media_types[] = '';
+ }
+
// We can apply the filters using SQL
// Do not use "ORDER BY RAND()" - it is very slow on large tables. Use PHP::array_rand() instead.
$all_media = DB::table('media') | Media type 'other' can include media without a type - #<I> | fisharebest_webtrees | train | php |
819927b8b3f53209727eb606aaf060b57e96f85b | diff --git a/bundles/org.eclipse.orion.client.ui/web/orion/projectCommands.js b/bundles/org.eclipse.orion.client.ui/web/orion/projectCommands.js
index <HASH>..<HASH> 100644
--- a/bundles/org.eclipse.orion.client.ui/web/orion/projectCommands.js
+++ b/bundles/org.eclipse.orion.client.ui/web/orion/projectCommands.js
@@ -659,6 +659,8 @@ define(['require', 'i18n!orion/navigate/nls/messages', 'orion/webui/littlelib',
data.oldParams = params;
commandService.collectParameters(data);
} else {
+ item.status = {CheckState: true};
+ sharedLaunchConfigurationDispatcher.dispatchEvent({type: "changeState", newValue: item });
errorHandler(error);
}
}); | [no bug] restart app progress does not stop if an error occured | eclipse_orion.client | train | js |
d4cf55bbbc303fda510f5e06e229504564261c96 | diff --git a/tests/integration/test.changes.js b/tests/integration/test.changes.js
index <HASH>..<HASH> 100644
--- a/tests/integration/test.changes.js
+++ b/tests/integration/test.changes.js
@@ -859,6 +859,11 @@ adapters.forEach(function (adapter) {
// Note for the following test that CouchDB's implementation of /_changes
// with `descending=true` ignores any `since` parameter.
it('Descending changes', function (done) {
+ // _changes in CouchDB 2.0 does not guarantee order
+ // so skip this test
+ if (testUtils.isCouchMaster()) {
+ return done();
+ }
var db = new PouchDB(dbs.name);
db.post({_id: '0', test: 'ing'}, function (err, res) {
db.post({_id: '1', test: 'ing'}, function (err, res) { | (#<I>) - Fix "descending changes" / CouchDB <I>
_changes in CouchDB does not guarantee order so skip the test which
asserts this when testing against CouchDB master. | pouchdb_pouchdb | train | js |
7f7faaf75374cc16369ca4920bd6fb6a1debc626 | diff --git a/plugins/org.eclipse.xtext/src/org/eclipse/xtext/linking/lazy/LazyLinkingResource.java b/plugins/org.eclipse.xtext/src/org/eclipse/xtext/linking/lazy/LazyLinkingResource.java
index <HASH>..<HASH> 100644
--- a/plugins/org.eclipse.xtext/src/org/eclipse/xtext/linking/lazy/LazyLinkingResource.java
+++ b/plugins/org.eclipse.xtext/src/org/eclipse/xtext/linking/lazy/LazyLinkingResource.java
@@ -118,7 +118,9 @@ public class LazyLinkingResource extends XtextResource {
}
protected void resolveLazyCrossReference(InternalEObject source, EStructuralFeature crossRef) {
- if (crossRef.isDerived())
+ if (crossRef.isDerived()
+ || (crossRef instanceof EReference && !((EReference)crossRef).isResolveProxies())
+ || crossRef.isTransient())
return;
if (crossRef.isMany()) {
@SuppressWarnings("unchecked") | [core] added guard for lazy crossref proxy resolution. (see #<I>) | eclipse_xtext-core | train | java |
f73537b1b0293df280fbfb38e33d051819539837 | diff --git a/src/Go/Core/AspectContainer.php b/src/Go/Core/AspectContainer.php
index <HASH>..<HASH> 100644
--- a/src/Go/Core/AspectContainer.php
+++ b/src/Go/Core/AspectContainer.php
@@ -102,9 +102,9 @@ class AspectContainer extends Container
$this->share('aspect.pointcut.lexer', function () {
return new PointcutLexer();
});
- $this->share('aspect.pointcut.parser', function () {
+ $this->share('aspect.pointcut.parser', function ($container) {
return new Parser(
- new PointcutGrammar($this),
+ new PointcutGrammar($container),
// Include production parse table for parser
include __DIR__ . '/../Aop/Pointcut/PointcutParseTable.php'
); | Fix BC with PHP<I>, resolves #<I> | goaop_framework | train | php |
4e08a99fa0ca8b8579cd72f418c84a70393dbac3 | diff --git a/salt/modules/sysmod.py b/salt/modules/sysmod.py
index <HASH>..<HASH> 100644
--- a/salt/modules/sysmod.py
+++ b/salt/modules/sysmod.py
@@ -572,7 +572,7 @@ def list_state_functions(*args, **kwargs): # pylint: disable=unused-argument
for func in fnmatch.filter(st_.states, module):
names.add(func)
else:
- # "sys" should just match sys, without also matching sysctl
+ # "sys" should just match sys without also matching sysctl
module = module + '.'
for func in st_.states:
if func.startswith(module): | standardize comment text about "sys" vs "sysctl" matching | saltstack_salt | train | py |
f1c0358ee752252e4910eeb256d146aeee28ba5d | diff --git a/actionpack/lib/action_dispatch/routing/polymorphic_routes.rb b/actionpack/lib/action_dispatch/routing/polymorphic_routes.rb
index <HASH>..<HASH> 100644
--- a/actionpack/lib/action_dispatch/routing/polymorphic_routes.rb
+++ b/actionpack/lib/action_dispatch/routing/polymorphic_routes.rb
@@ -101,10 +101,12 @@ module ActionDispatch
# polymorphic_url(Comment) # same as comments_url()
#
def polymorphic_url(record_or_hash_or_array, options = {})
+ recipient = self
+
if record_or_hash_or_array.kind_of?(Array)
record_or_hash_or_array = record_or_hash_or_array.compact
if record_or_hash_or_array.first.is_a?(ActionDispatch::Routing::RoutesProxy)
- proxy = record_or_hash_or_array.shift
+ recipient = record_or_hash_or_array.shift
end
record_or_hash_or_array = record_or_hash_or_array[0] if record_or_hash_or_array.size == 1
end
@@ -139,7 +141,7 @@ module ActionDispatch
args.collect! { |a| convert_to_model(a) }
- (proxy || self).send(named_route, *args)
+ recipient.send(named_route, *args)
end
# Returns the path component of a URL for the given record. It uses | eliminate conditional when sending the named route method | rails_rails | train | rb |
846e16b2ca8e7815ff6e82891000c3f5df256865 | diff --git a/src/main/java/reactor/ipc/netty/resources/DefaultLoopResources.java b/src/main/java/reactor/ipc/netty/resources/DefaultLoopResources.java
index <HASH>..<HASH> 100644
--- a/src/main/java/reactor/ipc/netty/resources/DefaultLoopResources.java
+++ b/src/main/java/reactor/ipc/netty/resources/DefaultLoopResources.java
@@ -190,7 +190,7 @@ final class DefaultLoopResources extends AtomicLong implements LoopResources {
if (null == eventLoopGroup) {
EventLoopGroup newEventLoopGroup = LoopResources.colocate(cacheNioServerLoops());
if (!clientLoops.compareAndSet(null, newEventLoopGroup)) {
- newEventLoopGroup.shutdownGracefully();
+ // Do not shutdown newEventLoopGroup as this will shutdown the server loops
}
eventLoopGroup = cacheNioClientLoops();
}
@@ -246,7 +246,7 @@ final class DefaultLoopResources extends AtomicLong implements LoopResources {
if (null == eventLoopGroup) {
EventLoopGroup newEventLoopGroup = LoopResources.colocate(cacheNativeServerLoops());
if (!cacheNativeClientLoops.compareAndSet(null, newEventLoopGroup)) {
- newEventLoopGroup.shutdownGracefully();
+ // Do not shutdown newEventLoopGroup as this will shutdown the server loops
}
eventLoopGroup = cacheNativeClientLoops();
} | When creating client loop resources do not invoke shutdown on the ColocatedEventLoopGroup
When creating client loop resources do not invoke shutdown on the
ColocatedEventLoopGroup because this will invoke shutdown on the
server loop resource. | reactor_reactor-netty | train | java |
a9f6329ba17dcb1d8d90137ba3d452a11d14a1f3 | diff --git a/src/14.Expression.polar.js b/src/14.Expression.polar.js
index <HASH>..<HASH> 100644
--- a/src/14.Expression.polar.js
+++ b/src/14.Expression.polar.js
@@ -2,14 +2,15 @@ Expression.prototype.polar = function() {
var ri = this.realimag();
var two = new Expression.Integer(2);
return Expression.List.ComplexPolar([
- ri[0]['^'](two)['+'](ri[1]['^'](two)),
+ Global.sqrt.default(ri[0]['^'](two)['+'](ri[1]['^'](two))),
Global.atan2.default(Expression.Vector([ri[1], ri[0]]))
]);
};
Expression.prototype.abs = function() {
console.warn('SLOW?');
var ri = this.realimag();
- return ri[0]['*'](ri[0])['+'](ri[1]['*'](ri[1]));
+ var two = new Expression.Integer(2);
+ return Global.sqrt.default(ri[0]['^'](two)['+'](ri[1]['^'](two)));
};
Expression.prototype.arg = function() {
console.warn('Slow?'); | Fix absolute value of cartesian (was previously x*x+y*y, but should be Sqrt[x^2+y^2]) | aantthony_javascript-cas | train | js |
39a6baf380dc4c5b29e18764874a811c9e293c0b | diff --git a/src/main/java/org/jboss/netty/channel/socket/nio/NioSocketChannelConfig.java b/src/main/java/org/jboss/netty/channel/socket/nio/NioSocketChannelConfig.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/jboss/netty/channel/socket/nio/NioSocketChannelConfig.java
+++ b/src/main/java/org/jboss/netty/channel/socket/nio/NioSocketChannelConfig.java
@@ -144,5 +144,6 @@ public interface NioSocketChannelConfig extends SocketChannelConfig {
* will be called with the new predictor. The default factory is
* <tt>{@link AdaptiveReceiveBufferSizePredictorFactory}(64, 1024, 65536)</tt>.
*/
- void setReceiveBufferSizePredictorFactory(ReceiveBufferSizePredictorFactory predictorFactory);
+ void setReceiveBufferSizePredictorFactory(
+ ReceiveBufferSizePredictorFactory predictorFactory);
} | fixing formatting for NioSocketChannelConfig, which I had incorrectly merged earlier. | netty_netty | train | java |
0259f208fe813a4ca15d5687468c114852cf28f2 | diff --git a/lib/tessel/deployment/rust.js b/lib/tessel/deployment/rust.js
index <HASH>..<HASH> 100644
--- a/lib/tessel/deployment/rust.js
+++ b/lib/tessel/deployment/rust.js
@@ -28,7 +28,7 @@ var exportables = {
var cargoToml = toml.parse(fs.readFileSync(path.join(pushdir, 'Cargo.toml'), 'utf8'));
if (cargoToml.package) {
- basename = path.basename(program);
+ basename = pushdir;
program = cargoToml.package.name;
} | Fixes undefined variable when running t2 run Cargo.toml | tessel_t2-cli | train | js |
cd6808f52a62da7221ef1f005700e52a78c4b377 | diff --git a/ploy/__init__.py b/ploy/__init__.py
index <HASH>..<HASH> 100644
--- a/ploy/__init__.py
+++ b/ploy/__init__.py
@@ -525,8 +525,10 @@ class Controller(object):
self.configfile = args.configfile
if args.debug:
logging.root.setLevel(logging.DEBUG)
- args.func(sub_argv, args.func.__doc__)
- self.instances.close_connections()
+ try:
+ args.func(sub_argv, args.func.__doc__)
+ finally:
+ self.instances.close_connections()
def ploy(configpath=None, configname=None, progname=None): # pragma: no cover | Always close connections even after an exception. | ployground_ploy | train | py |
ee15cd4c873c6eade4f158b4bfa7f856c0845cfb | diff --git a/packages/core/logger/src/index.js b/packages/core/logger/src/index.js
index <HASH>..<HASH> 100644
--- a/packages/core/logger/src/index.js
+++ b/packages/core/logger/src/index.js
@@ -95,8 +95,11 @@ Logger.prototype.writeToFile = function (_txt) {
if (!this.logFile) {
return;
}
+
+ let origin = "[" + ((new Error().stack).split("at ")[3]).trim() + "]";
+
const formattedDate = [`[${date.format(new Date(), DATE_FORMAT)}]`]; // adds a timestamp to the logs in the logFile
- fs.appendFileSync(this.logFile, "\n" + formattedDate.concat(Array.from(arguments)).join(' '));
+ fs.appendFileSync(this.logFile, "\n" + formattedDate.concat(origin, Array.from(arguments)).join(' '));
};
Logger.prototype.error = function () { | feature (@embark/core): show origin of each log in the logfile logs (#<I>) | embark-framework_embark | train | js |
d2bc6ce188d8cd2073baa57cfb7100aa5f77b9a1 | diff --git a/tests/acceptance/manager/PaymentsCest.php b/tests/acceptance/manager/PaymentsCest.php
index <HASH>..<HASH> 100644
--- a/tests/acceptance/manager/PaymentsCest.php
+++ b/tests/acceptance/manager/PaymentsCest.php
@@ -104,7 +104,7 @@ class PaymentsCest
$page->setBillTotalSum(-$chargesSum);
$I->pressButton('Save');
-// $this->billId = $page->seeActionSuccess();
+ $this->billId = $page->seeActionSuccess();
}
/**
@@ -113,7 +113,7 @@ class PaymentsCest
* @param Manager $I
* @throws \Codeception\Exception\ModuleException
*/
- protected function ensureICanUpdateBill(Manager $I): void
+ public function ensureICanUpdateBill(Manager $I): void
{
$indexPage = new IndexPage($I);
$updatePage = new Update($I);
@@ -139,7 +139,7 @@ class PaymentsCest
* @param Manager $I
* @throws \Exception
*/
- protected function ensureBillWasSuccessfullyUpdated (Manager $I): void
+ public function ensureBillWasSuccessfullyUpdated (Manager $I): void
{
$indexPage = new IndexPage($I);
$updatePage = new Update($I); | discard last changes (#<I>) | hiqdev_hipanel-module-finance | train | php |
4019c99e99e595d5990d6f4610b8219199b56c48 | diff --git a/lib/instrumentation/index.js b/lib/instrumentation/index.js
index <HASH>..<HASH> 100644
--- a/lib/instrumentation/index.js
+++ b/lib/instrumentation/index.js
@@ -364,7 +364,7 @@ Instrumentation.prototype.addEndedSpan = function (span) {
this._encodeAndSendSpan(span.getBufferedSpan())
}
- if (span.getParentSpan().ended || !span.isCompressionEligible()) {
+ if (!span.isCompressionEligible() || span.getParentSpan().ended) {
const buffered = span.getBufferedSpan()
if (buffered) {
this._encodeAndSendSpan(buffered)
diff --git a/test/agent.test.js b/test/agent.test.js
index <HASH>..<HASH> 100644
--- a/test/agent.test.js
+++ b/test/agent.test.js
@@ -440,6 +440,7 @@ test('#startSpan()', function (t) {
t.notEqual(span._context.traceparent.id, '00f067aa0ba902b7')
t.strictEqual(span._context.traceparent.parentId, '00f067aa0ba902b7')
t.strictEqual(span._context.traceparent.flags, '01')
+ span.end()
agent.destroy()
t.end()
}) | fix: span compression handling could crash on a span without a set parent span (#<I>)
Creating a span with a manual `{ childOf: 'some-traceparent-string' }` results
in a span without a parent `Span`. An attempt to compress it on .end()
needs to handle that. | elastic_apm-agent-nodejs | train | js,js |
a4042f34d307e49fe71c167c558788044d9d60b9 | diff --git a/src/Behat/Behat/Annotation/Annotation.php b/src/Behat/Behat/Annotation/Annotation.php
index <HASH>..<HASH> 100644
--- a/src/Behat/Behat/Annotation/Annotation.php
+++ b/src/Behat/Behat/Annotation/Annotation.php
@@ -166,7 +166,7 @@ abstract class Annotation implements AnnotationInterface
*/
public function __sleep() {
$serializable = array();
- foreach ( $this as $paramName => $paramValue ) {
+ foreach ($this as $paramName => $paramValue) {
if (!is_string($paramValue) && !is_array($paramValue) && is_callable($paramValue)) {
continue;
} | Update Annotation.php
Removed extra spaces. | Behat_Behat | train | php |
d220be8468f090f42159616aee4b7d734499fea3 | diff --git a/_pytest/terminal.py b/_pytest/terminal.py
index <HASH>..<HASH> 100644
--- a/_pytest/terminal.py
+++ b/_pytest/terminal.py
@@ -7,6 +7,7 @@ import pluggy
import py
import sys
import time
+import platform
def pytest_addoption(parser):
@@ -274,7 +275,7 @@ class TerminalReporter:
if not self.showheader:
return
self.write_sep("=", "test session starts", bold=True)
- verinfo = ".".join(map(str, sys.version_info[:3]))
+ verinfo = platform.python_version()
msg = "platform %s -- Python %s" % (sys.platform, verinfo)
if hasattr(sys, 'pypy_version_info'):
verinfo = ".".join(map(str, sys.pypy_version_info[:3])) | Use platform.python_version() to show Python version number
This results in something like "<I>b2" for non-final releases
while still being "<I>" for final releases. | vmalloc_dessert | train | py |
fffa4d209899853217e38c341e02878bd512f0ce | diff --git a/edtf/__init__.py b/edtf/__init__.py
index <HASH>..<HASH> 100644
--- a/edtf/__init__.py
+++ b/edtf/__init__.py
@@ -1,5 +1,3 @@
-__version__ = "0.9.2"
-
from edtf_date import EDTFDate
from edtf import EDTF
-from edtf_interval import EDTFInterval
\ No newline at end of file
+from edtf_interval import EDTFInterval
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -23,11 +23,10 @@ from os.path import join, dirname
import setuptools
-from edtf import __version__
setuptools.setup(
name='edtf',
- version=__version__,
+ version='0.9.2',
url='https://github.com/ixc/python-edtf',
author='Greg Turner',
author_email='greg@interaction.net.au',
@@ -52,4 +51,4 @@ setuptools.setup(
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
],
-)
\ No newline at end of file
+) | Remove version from '__init__.py' to stop importing packages before they are installed. | ixc_python-edtf | train | py,py |
7c46cbd3fbf4b1ee2186c62d398d16281eef9a2e | diff --git a/tests/extensions/test_environment.py b/tests/extensions/test_environment.py
index <HASH>..<HASH> 100644
--- a/tests/extensions/test_environment.py
+++ b/tests/extensions/test_environment.py
@@ -11,6 +11,14 @@ def test_environment_with_inline_default():
assert res == {'a': 1, 'file_path': 'my_file.local.cfg'}
+def test_environment_with_nested_inline_default():
+ jstr = '{"a": 1, "file_path": "my_file.{{env::ENVIRONMENT:={{env::DEFAULT}}.cfg}}"}'
+ with modified_environ('ENVIRONMENT', DEFAULT='the_default'):
+ res = DictMentor().bind(Environment()).load_yaml(jstr)
+
+ assert res == {'a': 1, 'file_path': 'my_file.the_default.cfg'}
+
+
def test_environment_with_multiple_patterns():
jstr = '{"a": 1, "file_path": "{{env::A}}-{{env::B}}-{{env::C}}"}'
with modified_environ(A='aval', B='bval', C='cval'): | Adds test case with nested environment default | HazardDede_dictmentor | train | py |
f05c986f3238f62ebe2750791d3e4cdc0c97c55d | diff --git a/src/main/java/eu/project/ttc/engines/cleaner/TermProperty.java b/src/main/java/eu/project/ttc/engines/cleaner/TermProperty.java
index <HASH>..<HASH> 100644
--- a/src/main/java/eu/project/ttc/engines/cleaner/TermProperty.java
+++ b/src/main/java/eu/project/ttc/engines/cleaner/TermProperty.java
@@ -51,7 +51,7 @@ public enum TermProperty {
WR_LOG_Z_SCORE("wrLogZScore", "zscore", true, Double.class),
FREQUENCY("frequency", "f", false, Integer.class),
PILOT("pilot", "pilot", false, String.class),
- LEMMA("lemma", "lemma", false, String.class),
+ LEMMA("lemma", "lm", false, String.class),
GROUPING_KEY("groupingKey", "gkey", false, String.class),
PATTERN("pattern", "p", false, String.class),
SPOTTING_RULE("spottingRule", "rule", false, String.class), | Renamed some properties to match documentation | termsuite_termsuite-core | train | java |
1c2e527f0f7ee18fe740bdd4538cf88711ceab1d | diff --git a/lib/messaging/stream_name.rb b/lib/messaging/stream_name.rb
index <HASH>..<HASH> 100644
--- a/lib/messaging/stream_name.rb
+++ b/lib/messaging/stream_name.rb
@@ -2,8 +2,6 @@ module Messaging
module StreamName
extend self
- include EventSource::StreamName
-
def self.included(cls)
cls.extend Macro
end | The EventSource::Stream name module is not mixed-in | eventide-project_messaging | train | rb |
6c8b43da948311bef1ffe83f379696f0734d4696 | diff --git a/unyt/array.py b/unyt/array.py
index <HASH>..<HASH> 100644
--- a/unyt/array.py
+++ b/unyt/array.py
@@ -1711,8 +1711,6 @@ class unyt_array(np.ndarray):
return type(self)(np.copy(np.asarray(self)), self.units)
def __array_finalize__(self, obj):
- if obj is None and hasattr(self, 'units'):
- return
self.units = getattr(obj, 'units', NULL_UNIT)
def __pos__(self): | get rid of unncessary condition in __array_finalize__ | yt-project_unyt | train | py |
726bcb6b25daa20cdc0536c132d4b8f83377ab4b | diff --git a/examples/association_loader.rb b/examples/association_loader.rb
index <HASH>..<HASH> 100644
--- a/examples/association_loader.rb
+++ b/examples/association_loader.rb
@@ -36,7 +36,7 @@ class AssociationLoader < GraphQL::Batch::Loader
end
def preload_association(records)
- ::ActiveRecord::Associations::Preloader.new.preload(records, @association_name)
+ ::ActiveRecord::Associations::Preloader.new(records: records, associations: @association_name).call
end
def read_association(record) | Fix AssociationLoader Rails 7 Deprecation Warning
Currently running this example code with rails 7 yields the following warning:
```
DEPRECATION WARNING: `preload` is deprecated and will be removed in Rails <I>. Call `Preloader.new(kwargs).call` instead.
```
Updating the preloader code to use kwargs and call silences the warning. | Shopify_graphql-batch | train | rb |
552edfda118e4bad1ee1b1f87223b9f974cb0956 | diff --git a/visidata/cmdlog.py b/visidata/cmdlog.py
index <HASH>..<HASH> 100644
--- a/visidata/cmdlog.py
+++ b/visidata/cmdlog.py
@@ -23,10 +23,10 @@ option('cmdlog_histfile', '', 'file to autorecord each cmdlog action to')
vd.activeCommand = None
def open_vd(p):
- return CommandLog(p.name, source=p)
+ return CommandLog(p.name, source=p, precious=True)
def open_vdj(p):
- return CommandLogJsonl(p.name, source=p)
+ return CommandLogJsonl(p.name, source=p, precious=True)
VisiData.save_vd = VisiData.save_tsv
VisiData.save_vdj = VisiData.save_jsonl | [sheets_all-] make opened .vd/.vdj precious
CommandLogs generated by Shift+D are not precious, but loaded data
should be. Since open_vd creates a Sheet using CommandLog(), precious needs to
be set to True. | saulpw_visidata | train | py |
fe871ba8891ec200ed2dc5ce6c9a32f34f0cdde9 | diff --git a/test/client/spec-widget.js b/test/client/spec-widget.js
index <HASH>..<HASH> 100644
--- a/test/client/spec-widget.js
+++ b/test/client/spec-widget.js
@@ -143,7 +143,7 @@ describe('widget' , function() {
label: 'Bar'
});
-
+ expect(widget.el.innerHTML.trim()).to.equal('Bar');
expect(widget.el.parentNode).to.equal(parentNode);
expect(widget.el !== oldEl).to.equal(true); | Updated test related to re-render | marko-js_marko-widgets | train | js |
986c55e6f96d13180290ac1216c4c1c594cbb100 | diff --git a/src/environment.js b/src/environment.js
index <HASH>..<HASH> 100644
--- a/src/environment.js
+++ b/src/environment.js
@@ -17,15 +17,19 @@ var loops = [];
* @param {number} [height=300] - viewport height
*/
export function init(canvas, width=300, height=300) {
- scene = scene || new THREE.Scene();
- scene.fog = new THREE.Fog(0x000000, 4, 7);
- renderer = renderer || new THREE.WebGLRenderer({
- canvas: canvas || undefined,
- antialias: true
- });
+ if (!scene) {
+ scene = new THREE.Scene();
+ scene.fog = new THREE.Fog(0x000000, 4, 7);
+ }
- setSize(width, height);
+ if (!renderer) {
+ renderer = new THREE.WebGLRenderer({
+ canvas: canvas || undefined,
+ antialias: true
+ });
+ }
+ setSize(width, height);
startRenderLoop();
} | environment.js refactoring | Galiaf47_lib3d | train | js |
a06b981730a2900b68e2091de61998b9423fb676 | diff --git a/djangodblog/manager.py b/djangodblog/manager.py
index <HASH>..<HASH> 100644
--- a/djangodblog/manager.py
+++ b/djangodblog/manager.py
@@ -42,8 +42,17 @@ Note: You will need to create the tables by hand if you use this option.
assert(not getattr(settings, 'DBLOG_DATABASE', None) or django.VERSION < (1, 2), 'The `DBLOG_DATABASE` setting requires Django < 1.2')
class DBLogManager(models.Manager):
+ def _get_settings(self):
+ options = getattr(settings, 'DBLOG_DATABASE', None)
+ if options:
+ if 'DATABASE_PORT' not in options:
+ options['DATABASE_PORT'] = ''
+ if 'DATABASE_OPTIONS' not in options:
+ options['DATABASE_OPTIONS'] = {}
+ return options
+
def get_query_set(self):
- db_options = getattr(settings, 'DBLOG_DATABASE', None)
+ db_options = self._get_settings()
if not db_options:
return super(DBLogManager, self).get_query_set()
@@ -73,7 +82,7 @@ class DBLogManager(models.Manager):
return connection
def _insert(self, values, return_id=False, raw_values=False):
- db_options = getattr(settings, 'DBLOG_DATABASE', None)
+ db_options = self._get_settings()
if not db_options:
return super(DBLogManager, self)._insert(values, return_id, raw_values) | Handle defaults for PORT/OPTIONS if they're not set | elastic_apm-agent-python | train | py |
f88a3434553c2ba328fbc2bcc7e0c100df441441 | diff --git a/codec-http/src/test/java/io/netty/handler/codec/rtsp/RtspDecoderTest.java b/codec-http/src/test/java/io/netty/handler/codec/rtsp/RtspDecoderTest.java
index <HASH>..<HASH> 100644
--- a/codec-http/src/test/java/io/netty/handler/codec/rtsp/RtspDecoderTest.java
+++ b/codec-http/src/test/java/io/netty/handler/codec/rtsp/RtspDecoderTest.java
@@ -65,7 +65,6 @@ public class RtspDecoderTest {
((FullHttpRequest) res1).release();
HttpObject res2 = ch.readInbound();
- System.out.println(res2);
assertNotNull(res2);
assertTrue(res2 instanceof FullHttpResponse);
((FullHttpResponse) res2).release(); | Remove System.out.println(...) in test (#<I>)
Motivation:
We did had some System.out.println(...) call in a test which seems to be some left-over from debugging.
Modifications:
Remove System.out.println(...)
Result:
Code cleanup | netty_netty | train | java |
e78184e749e1a6989a70fd15d5f2ce55591fe101 | diff --git a/lib/fs_utils/file_list.js b/lib/fs_utils/file_list.js
index <HASH>..<HASH> 100644
--- a/lib/fs_utils/file_list.js
+++ b/lib/fs_utils/file_list.js
@@ -40,6 +40,7 @@ class FileList extends EventEmitter {
this.files = new Map();
this.staticFiles = new Map();
this.assets = [];
+ this.reading = new Map();
this.compiling = new Set();
this.copying = new Set();
this.compiled = new Set();
@@ -86,7 +87,7 @@ class FileList extends EventEmitter {
}
get hasPendingFiles() {
- return !!(this.compiling.size || this.copying.size);
+ return !!(this.reading.size || this.compiling.size || this.copying.size);
}
resetTimer() {
@@ -211,8 +212,13 @@ class FileList extends EventEmitter {
}
debug(`Reading ${path}`);
+ // TODO: What if it's in reading already?
+ const readDate = Date.now();
+ this.reading.set(path, readDate);
readFileAndCache(path).then(() => {
- if (this.disposed) return;
+ const cachedDate = this.reading.get(path);
+ if (this.disposed || !cachedDate || cachedDate > readDate) return;
+ this.reading.delete(path);
// .json files from node_modules should always be compiled
if (!isAsset && !ignored && (compiler && compiler.length) || deppack.isNpmJSON(path)) {
const sourceFile = this.find(path) || | FileList: track files which are being read. Closes gh-<I>. | brunch_brunch | train | js |
bd913af5d32a4c435a5cc637e735471ea73eb8c2 | diff --git a/src/preferences/PreferencesDialogs.js b/src/preferences/PreferencesDialogs.js
index <HASH>..<HASH> 100644
--- a/src/preferences/PreferencesDialogs.js
+++ b/src/preferences/PreferencesDialogs.js
@@ -55,7 +55,7 @@ define(function (require, exports, module) {
var obj = PathUtils.parseUrl(url);
if (!obj) {
- result = Strings.BASEURL_ERROR_UNKOWN_ERROR;
+ result = Strings.BASEURL_ERROR_UNKNOWN_ERROR;
} else if (obj.href.search(/^(http|https):\/\//i) !== 0) {
result = StringUtils.format(Strings.BASEURL_ERROR_INVALID_PROTOCOL, obj.href.substring(0, obj.href.indexOf("//")));
} else if (obj.search !== "") { | UNKOWN -> UNKNOWN | adobe_brackets | train | js |
9bba48da9f06cbb09e3cf654f919fa7d134ddff7 | diff --git a/Neos.Utility.Files/Classes/Files.php b/Neos.Utility.Files/Classes/Files.php
index <HASH>..<HASH> 100644
--- a/Neos.Utility.Files/Classes/Files.php
+++ b/Neos.Utility.Files/Classes/Files.php
@@ -180,7 +180,7 @@ abstract class Files
public static function removeEmptyDirectoriesOnPath(string $path, string $basePath = null)
{
if ($basePath !== null) {
- $basePath = trim($basePath, '/');
+ $basePath = rtrim($basePath, '/');
if (strpos($path, $basePath) !== 0) {
throw new FilesException(sprintf('Could not remove empty directories on path because the given base path "%s" is not a parent path of "%s".', $basePath, $path), 1323962907);
} | BUGFIX: Do not remove leading slashes from base path | neos_flow-development-collection | train | php |
f44ca8ec76efdb72138450929c14587755ed0424 | diff --git a/lib/endpoint.js b/lib/endpoint.js
index <HASH>..<HASH> 100644
--- a/lib/endpoint.js
+++ b/lib/endpoint.js
@@ -1708,10 +1708,12 @@ function setOrExport(which, endpoint, param, value, callback) {
callback = value ;
}
- const __x = (callback) => {
+ const __x = async(callback) => {
+ const p = [];
for (const [key, value] of Object.entries(obj)) {
- endpoint.execute(which, `${key}=${value}`);
+ p.push(endpoint.execute(which, `${key}=${value}`));
}
+ await Promise.all(p);
callback(null);
} ; | fix regression bug: setting multiple channel variables was returning early | davehorton_drachtio-fsmrf | train | js |
a387644dc841da1057e238550fd5bb76edec5c17 | diff --git a/cassandra/connection.py b/cassandra/connection.py
index <HASH>..<HASH> 100644
--- a/cassandra/connection.py
+++ b/cassandra/connection.py
@@ -443,13 +443,10 @@ class Connection(object):
raise ProtocolError(msg)
def set_keyspace(self, keyspace):
- if not keyspace:
+ if not keyspace or keyspace == self.keyspace:
return
with self.lock:
- if keyspace == self.keyspace:
- return
-
query = 'USE "%s"' % (keyspace,)
try:
result = self.wait_for_response( | Move current keyspace check out of Connection lock | datastax_python-driver | train | py |
46137e718439926c9226dbebb4bf2dc9fc581266 | diff --git a/aiodns/__init__.py b/aiodns/__init__.py
index <HASH>..<HASH> 100644
--- a/aiodns/__init__.py
+++ b/aiodns/__init__.py
@@ -1,5 +1,8 @@
-import asyncio
+try:
+ import asyncio
+except ImportError:
+ import trollius as asyncio
import pycares
from . import error
diff --git a/tests.py b/tests.py
index <HASH>..<HASH> 100755
--- a/tests.py
+++ b/tests.py
@@ -1,6 +1,9 @@
#!/usr/bin/env python
-import asyncio
+try:
+ import asyncio
+except ImportError:
+ import trollius as asyncio
import unittest
import aiodns | Adapt to new Trollius version | saghul_aiodns | train | py,py |
a17adbaa345eb2cda4723a3d26ce62e55bdeff5f | diff --git a/spec/rmagick/image_list/mosaic_spec.rb b/spec/rmagick/image_list/mosaic_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/rmagick/image_list/mosaic_spec.rb
+++ b/spec/rmagick/image_list/mosaic_spec.rb
@@ -9,7 +9,6 @@ RSpec.describe Magick::ImageList, "#mosaic" do
it "raises an error when images is not set" do
image_list = described_class.new
- image_list = image_list.copy
image_list.instance_variable_set("@images", nil)
expect { image_list.mosaic }.to raise_error(Magick::ImageMagickError) | RSpec: remove unecessary copy (#<I>)
The copy here doesn't appear to serve any purpose. | rmagick_rmagick | train | rb |
b237eeeeb61e275b7e2108816f85ea76c48951a8 | diff --git a/pes.js b/pes.js
index <HASH>..<HASH> 100644
--- a/pes.js
+++ b/pes.js
@@ -96,7 +96,7 @@ var PES = {
return this.baseRead();
} catch (e) {
this.binary.seek(pos);
- this.binary._bitShift = 0;
+ this.binary.view.alignBy();
}
}
}), { | Alignment done via exposed jDataView method. | RReverser_mpegts | train | js |
fe8d252b26b4c11929f34e8360c1302c903537c6 | diff --git a/lib/Condorcet/Condorcet.php b/lib/Condorcet/Condorcet.php
index <HASH>..<HASH> 100644
--- a/lib/Condorcet/Condorcet.php
+++ b/lib/Condorcet/Condorcet.php
@@ -736,14 +736,14 @@ class Condorcet
}
}
+ $vote_r['tag'][0] = $this->_nextVoteTag++ ;
+
// Vote identifiant
if ($tag !== null)
{
- $vote_r['tag'] = $this->tagsConvert($tag) ;
+ $vote_r['tag'] = array_merge($vote_r['tag'], $this->tagsConvert($tag)) ;
}
- $vote_r['tag'][] = $this->_nextVoteTag++ ;
-
// Register
$this->_Votes[] = $vote_r ; | Vote ID by tag will ever be first tag entry | julien-boudry_Condorcet | train | php |
0099c8a4289cf9cb2560f87a05ab03e36c6d307f | diff --git a/cake/console/libs/tasks/view.php b/cake/console/libs/tasks/view.php
index <HASH>..<HASH> 100644
--- a/cake/console/libs/tasks/view.php
+++ b/cake/console/libs/tasks/view.php
@@ -159,6 +159,7 @@ class ViewTask extends Shell {
**/
function all() {
$actions = $this->scaffoldActions;
+ $this->Controller->interactive = false;
$tables = $this->Controller->listAll($this->connection, false);
$this->interactive = false;
foreach ($tables as $table) {
@@ -167,9 +168,7 @@ class ViewTask extends Shell {
$this->controllerPath = Inflector::underscore($this->controllerName);
if (App::import('Model', $model)) {
$vars = $this->__loadController();
- if ($vars) {
- $this->bakeActions($actions, $vars);
- }
+ $this->bakeActions($actions, $vars);
}
}
} | Removing more if() blocks
Silencing Controller task in all() | cakephp_cakephp | train | php |
1da88c488814e5904185f947e0e6018da861d24d | diff --git a/src/AbstractArrayBackedDaftObject.php b/src/AbstractArrayBackedDaftObject.php
index <HASH>..<HASH> 100644
--- a/src/AbstractArrayBackedDaftObject.php
+++ b/src/AbstractArrayBackedDaftObject.php
@@ -294,12 +294,8 @@ abstract class AbstractArrayBackedDaftObject extends AbstractDaftObject implemen
*/
$propVal = $array[$prop];
- if (isset($jsonDef[$prop])) {
- $jsonType = $jsonDef[$prop];
-
- if (false === is_array($propVal)) {
- static::ThrowBecauseArrayJsonTypeNotValid($jsonType, $prop);
- }
+ if (isset($jsonDef[$prop]) && false === is_array($propVal)) {
+ static::ThrowBecauseArrayJsonTypeNotValid($jsonDef[$prop], $prop);
}
return $propVal; | merging if conditions & removing redundant variable assignments | SignpostMarv_daft-object | train | php |
9159aafd183307d959f9eae1e31ffeee29ab4b69 | diff --git a/escpos/printer.py b/escpos/printer.py
index <HASH>..<HASH> 100644
--- a/escpos/printer.py
+++ b/escpos/printer.py
@@ -167,7 +167,7 @@ class Network(Escpos):
:param msg: arbitrary code to be printed
"""
- self.device.send(msg)
+ self.device.sendall(msg)
def __del__(self):
""" Close TCP connection """ | IMPROVE use sendall instead of send in network-printer | python-escpos_python-escpos | train | py |
6ea2c1e5795accd62081154041f737b876d05851 | diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -375,7 +375,7 @@ function pages(options, callback) {
// The new syntax for aposArea() requires a more convincing fake page!
// Populate slug and permissions correctly
req.extras[item] = page ? page : { slug: item };
- if (!page) {
+ if (!page && req.user && req.user.permissions.admin) {
req.extras[item]._edit = true;
}
return callback(null); | fixed a bug where the edit controls for `global` areas and singletons appeared before an admin logged in (if global did not yet exist). You could not actually edit it (not a security flaw). | apostrophecms-legacy_apostrophe-pages | train | js |
b96054d2722a2d05f6e615a98adcf7eb1eedb24c | diff --git a/ontquery/__init__.py b/ontquery/__init__.py
index <HASH>..<HASH> 100644
--- a/ontquery/__init__.py
+++ b/ontquery/__init__.py
@@ -964,6 +964,7 @@ class InterLexRemote(OntService): # note to self
def __init__(self, *args, host='uri.interlex.org', port='', **kwargs):
self.host = host
self.port = port
+ self._not_ok_cache = set()
import rdflib # FIXME
self.Graph = rdflib.Graph
@@ -1025,8 +1026,12 @@ class InterLexRemote(OntService): # note to self
else:
return None
+ if url in self._not_ok_cache:
+ return None
+
resp = get(url)
if not resp.ok:
+ self._not_ok_cache.add(url)
return None
ttl = resp.content
g = self.Graph().parse(data=ttl, format='turtle') | interlex remote added not ok cache | tgbugs_ontquery | train | py |
b11c100f829cf7bc0b2dae21c05e6b469447f5a4 | diff --git a/tests/calculators/hazard/classical/post_processing_test.py b/tests/calculators/hazard/classical/post_processing_test.py
index <HASH>..<HASH> 100644
--- a/tests/calculators/hazard/classical/post_processing_test.py
+++ b/tests/calculators/hazard/classical/post_processing_test.py
@@ -410,7 +410,6 @@ def _curve_db(location_nr, level_nr, curves_per_location, sigma):
class HazardMapsTestCase(unittest.TestCase):
- pass
def test_compute_hazard_map(self):
aaae = numpy.testing.assert_array_almost_equal | tests/calcs/hazard/classical/post_processing_test:
Removed a superfluous `pass` from a test case class. | gem_oq-engine | train | py |
091a55a6d3428ddccf473c8ff6f1a13e58e27204 | diff --git a/src/FrameReflower/Text.php b/src/FrameReflower/Text.php
index <HASH>..<HASH> 100644
--- a/src/FrameReflower/Text.php
+++ b/src/FrameReflower/Text.php
@@ -109,7 +109,9 @@ class Text extends AbstractFrameReflower
}
// split the text into words
- $words = preg_split('/([\s-]+)/u', $text, -1, PREG_SPLIT_DELIM_CAPTURE);
+ // regex splits on everything that's a separator (^\S double negative), excluding nbsp (\xa0), plus dashes
+ //TODO: this misses narrow nbsp (http://www.fileformat.info/info/unicode/char/202f/index.htm)
+ $words = preg_split('/([^\S\xA0]+|-+)/u', $text, -1, PREG_SPLIT_DELIM_CAPTURE);
$wc = count($words);
// Determine the split point | Exclude nbsp from word boundary detection
When we added the Unicode flag to the text splitting regex it appears that the \s escape sequence was modified to include the full set of Unicode space separators. This set includes the no-break space (nbsp) character. With this update we're now using a double negative to select the same set of characters, minus the nbsp character.
Addresses #<I> | dompdf_dompdf | train | php |
c83100285e27fc6989cfcdb443e5377301c1ff03 | diff --git a/CrashReport/src/org/acra/ErrorReporter.java b/CrashReport/src/org/acra/ErrorReporter.java
index <HASH>..<HASH> 100644
--- a/CrashReport/src/org/acra/ErrorReporter.java
+++ b/CrashReport/src/org/acra/ErrorReporter.java
@@ -339,6 +339,7 @@ public class ErrorReporter implements Thread.UncaughtExceptionHandler {
*/
private void saveCrashReportFile() {
try {
+ Log.d(LOG_TAG, "Writing crash report file.");
Random generator = new Random();
int random = generator.nextInt(99999);
String FileName = "stack-" + random + ".stacktrace";
@@ -405,11 +406,13 @@ public class ErrorReporter implements Thread.UncaughtExceptionHandler {
input.close();
}
+
+ sendCrashReport(context, previousCrashReport);
+
// DELETE FILES !!!!
File curFile = new File(mCrashProperties.get(FILE_PATH_KEY)
+ "/" + curString);
curFile.delete();
- sendCrashReport(context, previousCrashReport);
}
}
} catch (Exception e) { | Delete report file only after succesful sending... | ACRA_acra | train | java |
477156bc144f7a778a38423c010feae71ba5e4e4 | diff --git a/src/Product/PriceSnippetRenderer.php b/src/Product/PriceSnippetRenderer.php
index <HASH>..<HASH> 100644
--- a/src/Product/PriceSnippetRenderer.php
+++ b/src/Product/PriceSnippetRenderer.php
@@ -60,14 +60,14 @@ class PriceSnippetRenderer implements SnippetRenderer
*/
public function render(Product $product)
{
- return $this->renderProductPriceInContexts($product);
+ return $this->renderProductPrice($product);
}
/**
* @param Product $product
* @return SnippetList
*/
- private function renderProductPriceInContexts(Product $product)
+ private function renderProductPrice(Product $product)
{
return new SnippetList(...$this->getPriceSnippets($product));
}
@@ -104,6 +104,7 @@ class PriceSnippetRenderer implements SnippetRenderer
$key = $this->getSnippetKeyForCountry($product, $country);
$amount = $product->getFirstValueOfAttribute($this->priceAttributeCode);
$price = new Price($amount);
+ // todo: apply tax here
return Snippet::create($key, $price->getAmount());
} | Issue #<I>: Refactor method name to be more descriptive in the overall flow | lizards-and-pumpkins_catalog | train | php |
d0062b209228d7a622218a291e51371ee89316ab | diff --git a/packages/openneuro-client/src/datasets.js b/packages/openneuro-client/src/datasets.js
index <HASH>..<HASH> 100644
--- a/packages/openneuro-client/src/datasets.js
+++ b/packages/openneuro-client/src/datasets.js
@@ -15,6 +15,7 @@ export const getDataset = gql`
email
}
draft {
+ id
modified
files {
id | fix bug in apollo draft query | OpenNeuroOrg_openneuro | train | js |
e71fdc2722c00912f89be22c7fe5b9383c4c550d | diff --git a/jodd-mail/src/main/java/jodd/mail/SendMailSession.java b/jodd-mail/src/main/java/jodd/mail/SendMailSession.java
index <HASH>..<HASH> 100644
--- a/jodd-mail/src/main/java/jodd/mail/SendMailSession.java
+++ b/jodd-mail/src/main/java/jodd/mail/SendMailSession.java
@@ -80,6 +80,13 @@ public class SendMailSession {
}
/**
+ * Returns {@code true} if mail session is still connected.
+ */
+ public boolean isConnected() {
+ return mailTransport.isConnected();
+ }
+
+ /**
* Prepares message and sends it.
* Returns Message ID of sent email.
*/ | Added isConnected() method (closes #<I>) | oblac_jodd | train | java |
517d22c5d890107e6345ce51f664f22d02fdb1d8 | diff --git a/handlers/session_channel_handler_windows2016.go b/handlers/session_channel_handler_windows2016.go
index <HASH>..<HASH> 100644
--- a/handlers/session_channel_handler_windows2016.go
+++ b/handlers/session_channel_handler_windows2016.go
@@ -329,7 +329,7 @@ func (sess *session) handleSubsystemRequest(request *ssh.Request) {
}
lagerWriter := helpers.NewLagerWriter(logger.Session("sftp-server"))
- sftpServer, err := sftp.NewServer(sess.channel, sess.channel, sftp.WithDebug(lagerWriter))
+ sftpServer, err := sftp.NewServer(sess.channel, sftp.WithDebug(lagerWriter))
if err != nil {
logger.Error("sftp-new-server-failed", err)
if request.WantReply { | bump to latest sftp server for windows channel handler | cloudfoundry_diego-ssh | train | go |
6b21678027362bd58e18448b685eb2f5c426daf7 | diff --git a/MultipleFileUpload/MultipleFileUpload.php b/MultipleFileUpload/MultipleFileUpload.php
index <HASH>..<HASH> 100644
--- a/MultipleFileUpload/MultipleFileUpload.php
+++ b/MultipleFileUpload/MultipleFileUpload.php
@@ -241,10 +241,6 @@ class MultipleFileUpload extends Forms\Controls\UploadControl {
* @param string $label Label
*/
public function __construct($label = NULL, $maxSelectedFiles = 25) {
- // Monitorování
- $this->monitor('Nette\Forms\Form');
- //$this->monitor('Nette\Application\Presenter');
-
parent::__construct($label);
if (!self::$handleUploadsCalled) {
@@ -257,16 +253,6 @@ class MultipleFileUpload extends Forms\Controls\UploadControl {
$this->simUploadThreads = 5;
}
- /**
- * Monitoring
- * @param mixed $component
- */
- protected function attached($component) {
- if ($component instanceof Nette\Application\UI\Form) {
- $component->getElementPrototype()->enctype = 'multipart/form-data';
- $component->getElementPrototype()->method = 'post';
- }
- }
/**
* Generates control | remove redundant monitoring (already in UploadControl) | jkuchar_MultipleFileUpload | train | php |
846c4043d09f1c280d27712e9ea4aebb5576c0e3 | diff --git a/spec/bitbucket_rest_api/repos/pull_request_spec.rb b/spec/bitbucket_rest_api/repos/pull_request_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/bitbucket_rest_api/repos/pull_request_spec.rb
+++ b/spec/bitbucket_rest_api/repos/pull_request_spec.rb
@@ -85,20 +85,20 @@ describe BitBucket::Repos::PullRequest do
},
close_source_branch: true
}
+ end
+ it 'makes a POST request to create a new pull request' do
expect(subject).to receive(:request).with(
:post,
'/2.0/repositories/mock_user/mock_repo/pullrequests',
@params
)
- end
- it 'makes a POST request to create a new pull request' do
subject.create('mock_user', 'mock_repo', @params)
end
- xit 'validates presence of required params' do
- # expect do
+ it 'validates presence of required params' do
+ expect do
subject.create(
'mock_user',
'mock_repo',
@@ -124,7 +124,7 @@ describe BitBucket::Repos::PullRequest do
close_source_branch: true
}
)
- # end.to(raise_error())
+ end.to raise_error
end
end | Fix skipped test in pull_request_spec | bitbucket-rest-api_bitbucket | train | rb |
526f891890f03c1b80e59af3b32710e365e56594 | diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -27,7 +27,7 @@ def extras_require():
def main():
setup(
name='straitlets',
- version='0.2.4',
+ version='0.2.5',
description="Serializable IPython Traitlets",
author="Quantopian Team",
author_email="opensource@quantopian.com", | BLD: Bump PyPI version | quantopian_serializable-traitlets | train | py |
b2c85c8fc5d35a95004ec32fb5378b559242b4ff | diff --git a/slick.grid.js b/slick.grid.js
index <HASH>..<HASH> 100644
--- a/slick.grid.js
+++ b/slick.grid.js
@@ -1507,6 +1507,10 @@ if (typeof Slick === "undefined") {
ensureCellNodesInRowsCache(row);
for (var columnIdx in cacheEntry.cellNodesByColumnIdx) {
+ if (!cacheEntry.cellNodesByColumnIdx.hasOwnProperty(columnIdx)) {
+ continue;
+ }
+
columnIdx = columnIdx | 0;
var m = columns[columnIdx],
d = getDataItem(row), | Fix #<I> - check hasOwnProperty() in for (... in ...) loops over arrays. | coatue-oss_slickgrid2 | train | js |
32919afaad9984ef3e7d2ea9d12729d972ef34a4 | diff --git a/Bundle/BlogBundle/Manager/ArticleManager.php b/Bundle/BlogBundle/Manager/ArticleManager.php
index <HASH>..<HASH> 100644
--- a/Bundle/BlogBundle/Manager/ArticleManager.php
+++ b/Bundle/BlogBundle/Manager/ArticleManager.php
@@ -77,6 +77,7 @@ class ArticleManager
//Transform VBP into BP
$this->virtualToBusinessPageTransformer->transform($page);
$page->setParent($article->getBlog());
+ $page->setStatus($article->getStatus());
$this->entityManager->persist($page);
$this->entityManager->flush(); | give an Article BusinessPage the draft article's status | Victoire_victoire | train | php |
7e070ce17d83814f8f1ebf3ff138a1a344b1adca | diff --git a/src/Factory.php b/src/Factory.php
index <HASH>..<HASH> 100644
--- a/src/Factory.php
+++ b/src/Factory.php
@@ -3,7 +3,6 @@
namespace Phlib\Logger;
use Psr\Log\LoggerInterface;
-use Psr\Log\LogLevel;
/**
* Class Factory
@@ -11,6 +10,9 @@ use Psr\Log\LogLevel;
*/
class Factory
{
+ /**
+ * @var array
+ */
private $decorators = [
'level' => '\Phlib\Logger\Decorator\LevelFilter'
]; | Tidy up imports and docblocks | phlib_logger | train | php |
295bf413e57fb0b4da5a73319236403eba493734 | diff --git a/activesupport/lib/active_support/json/encoding.rb b/activesupport/lib/active_support/json/encoding.rb
index <HASH>..<HASH> 100644
--- a/activesupport/lib/active_support/json/encoding.rb
+++ b/activesupport/lib/active_support/json/encoding.rb
@@ -2,6 +2,7 @@ require 'active_support/core_ext/object/to_json'
require 'active_support/core_ext/module/delegation'
require 'active_support/deprecation'
require 'active_support/json/variable'
+require 'active_support/ordered_hash'
require 'bigdecimal'
require 'active_support/core_ext/big_decimal/conversions' # for #to_s | add missing require for ordered_hash dependency | rails_rails | train | rb |
0c2aa9406ae958960498c0f8120587bf6563d12b | diff --git a/tests/Console/Command/IndexCreateOrUpdateMappingCommandTest.php b/tests/Console/Command/IndexCreateOrUpdateMappingCommandTest.php
index <HASH>..<HASH> 100644
--- a/tests/Console/Command/IndexCreateOrUpdateMappingCommandTest.php
+++ b/tests/Console/Command/IndexCreateOrUpdateMappingCommandTest.php
@@ -20,6 +20,10 @@ final class IndexCreateOrUpdateMappingCommandTest extends TestCase
$mock->shouldReceive('exists')
->once()
->andReturn(true);
+
+ $mock->shouldReceive('get')
+ ->once()
+ ->andReturn('{}');
});
$this->mock(Client::class, function (MockInterface $mock) {
@@ -108,6 +112,10 @@ final class IndexCreateOrUpdateMappingCommandTest extends TestCase
$mock->shouldReceive('exists')
->once()
->andReturn(true);
+
+ $mock->shouldReceive('get')
+ ->once()
+ ->andReturn('{}');
});
$this->mock(Client::class, function (MockInterface $mock) { | fixes tests for load json mapping file | cviebrock_laravel-elasticsearch | train | php |
ca605f74fe11543709e0dd691648fed52a4cd77a | diff --git a/tests/MetarDecoderTest.php b/tests/MetarDecoderTest.php
index <HASH>..<HASH> 100644
--- a/tests/MetarDecoderTest.php
+++ b/tests/MetarDecoderTest.php
@@ -182,7 +182,8 @@ class MetarDecoderTest extends \PHPUnit_Framework_TestCase
$d = $this->decoder->parseNotStrict($metar);
$this->assertFalse($d->isValid());
$this->assertEquals(1, count($d->getDecodingExceptions()));
- $error = $d->getDecodingExceptions()[0];
+ $errors = $d->getDecodingExceptions();
+ $error = $errors[0];
$this->assertEquals('CloudChunkDecoder', $error->getChunkDecoder());
$this->assertNull($d->getClouds());
} | Ensure compatibility for PHP<I> | SafranCassiopee_php-metar-decoder | train | php |
17c192605bb58980f6dd1890f5fd04c98607544d | diff --git a/vent/core/rq_worker/watch.py b/vent/core/rq_worker/watch.py
index <HASH>..<HASH> 100644
--- a/vent/core/rq_worker/watch.py
+++ b/vent/core/rq_worker/watch.py
@@ -83,11 +83,11 @@ def gpu_queue(options):
# check for vent usage/processes running
if (dedicated and
dev not in usage['vent_usage']['mem_mb'] and
- mem_needed <= usage[dev]['global_memory'] and
- not usage[dev]['processes']):
+ mem_needed <= usage[int(dev)]['global_memory'] and
+ not usage[int(dev)]['processes']):
device = dev
# check for ram constraints
- elif mem_needed <= (usage[dev]['global_memory'] - ram_used):
+ elif mem_needed <= (usage[int(dev)]['global_memory'] - ram_used):
device = dev
# TODO make this sleep incremental up to a point, potentially kill | dict key is an int not a string :( | CyberReboot_vent | train | py |
821f230c098b083b966a8b16a458fbb7b1de61f5 | diff --git a/chorus/src/main/java/org/chorusbdd/chorus/handlers/processes/ProcessesHandler.java b/chorus/src/main/java/org/chorusbdd/chorus/handlers/processes/ProcessesHandler.java
index <HASH>..<HASH> 100755
--- a/chorus/src/main/java/org/chorusbdd/chorus/handlers/processes/ProcessesHandler.java
+++ b/chorus/src/main/java/org/chorusbdd/chorus/handlers/processes/ProcessesHandler.java
@@ -65,7 +65,7 @@ public class ProcessesHandler {
private ProcessManager processManager = ProcessManager.getInstance();
- @Initialize
+ @Initialize(scope= Scope.FEATURE)
public void setup() {
processManager.setFeatureDetails(featureDir, featureFile, featureToken);
processConfigTemplates = loadProcessConfig(); | Change scoping of initialize / load configs to feature initialization, to match past behaviour | Chorus-bdd_Chorus | train | java |
11f90fd2957cf5a2b3d5c3421f28b910e58b3fd9 | diff --git a/test/rdoc_test.rb b/test/rdoc_test.rb
index <HASH>..<HASH> 100644
--- a/test/rdoc_test.rb
+++ b/test/rdoc_test.rb
@@ -16,13 +16,13 @@ class RdocTest < Test::Unit::TestCase
it 'renders inline rdoc strings' do
rdoc_app { rdoc '= Hiya' }
assert ok?
- assert_body /<h1[^>]*>Hiya<\/h1>/
+ assert_body /<h1[^>]*>Hiya(<span><a href=\"#label-Hiya\">¶<\/a> <a href=\"#documentation\">↑<\/a><\/span>)?<\/h1>/
end
it 'renders .rdoc files in views path' do
rdoc_app { rdoc :hello }
assert ok?
- assert_body /<h1[^>]*>Hello From RDoc<\/h1>/
+ assert_body /<h1[^>]*>Hello From RDoc(<span><a href=\"#label-Hello\+From\+RDoc\">¶<\/a> <a href=\"#documentation\">↑<\/a><\/span>)?<\/h1>/
end
it "raises error if template not found" do | Fix test failures with RDoc 4 broken by commit rdoc/rdoc@7f<I>d | sinatra_sinatra | train | rb |
6565defa92c67e15e4e133f0aff0d27612a79531 | diff --git a/test/modules/mock.js b/test/modules/mock.js
index <HASH>..<HASH> 100644
--- a/test/modules/mock.js
+++ b/test/modules/mock.js
@@ -1,7 +1,8 @@
/**
* Defines phantomas global API mock
*/
-var noop = function() {};
+var assert = require('assert'),
+ noop = function() {};
var phantomas = function() {
this.emitter = new (require('events').EventEmitter)();
@@ -133,7 +134,7 @@ function assertMetric(name, value) {
value = value || 1;
return function(phantomas) {
- phantomas.hasValue(name, value);
+ assert.strictEqual(value, phantomas.getMetric(name));
};
}
@@ -149,12 +150,10 @@ module.exports = {
getContext: function(moduleName, topic, metricsCheck) {
var phantomas = initModule(moduleName),
- context;
+ context = {};
- context = {
- topic: function() {
- return topic(phantomas);
- }
+ context.topic = function() {
+ return topic(phantomas);
};
Object.keys(metricsCheck || {}).forEach(function(name) { | Unit tests: fix assertMetric() | macbre_phantomas | train | js |
1a3c53563b88330228e496bdfe931e731300aba9 | diff --git a/base/src/main/java/uk/ac/ebi/atlas/controllers/ResourceNotFoundException.java b/base/src/main/java/uk/ac/ebi/atlas/controllers/ResourceNotFoundException.java
index <HASH>..<HASH> 100644
--- a/base/src/main/java/uk/ac/ebi/atlas/controllers/ResourceNotFoundException.java
+++ b/base/src/main/java/uk/ac/ebi/atlas/controllers/ResourceNotFoundException.java
@@ -3,9 +3,9 @@ package uk.ac.ebi.atlas.controllers;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.ResponseStatus;
+//TODO Make this a subclass of MissingResourceException to include fields about the resource class and key
@ResponseStatus(value = HttpStatus.NOT_FOUND)
public final class ResourceNotFoundException extends RuntimeException {
-
public ResourceNotFoundException(Exception exception){
super(exception);
} | Add a TODO note for an enhancement of ResourceNotFoundException
(cherry picked from commit a<I>f0) | ebi-gene-expression-group_atlas | train | java |
1e3edc43e0e373bc711101894bddbba8f0e4291e | diff --git a/static/scripts/partialNavigation.js b/static/scripts/partialNavigation.js
index <HASH>..<HASH> 100644
--- a/static/scripts/partialNavigation.js
+++ b/static/scripts/partialNavigation.js
@@ -39,7 +39,8 @@
if (res) res = (href.indexOf('.js.html') === -1)
&& (href.indexOf('http://') === -1)
&& (href.indexOf('https://') === -1)
- && (href.charAt(0) !== '/');
+ && (href.charAt(0) !== '/')
+ && (href.charAt(0) !== '.');
return res;
}
var needUpdateURL = false; | fix partial navigation in case of relative links | UnityBaseJS_ub-jsdoc | train | js |
f0f13cd9398014683c460945e2e7eff618252be8 | diff --git a/spec/unit/synchronization_spec.rb b/spec/unit/synchronization_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/unit/synchronization_spec.rb
+++ b/spec/unit/synchronization_spec.rb
@@ -49,7 +49,7 @@ module WebsocketRails
context "when dispatching user events" do
before do
- @event = Event.new(:channel_event, :user_id => :username, :data => 'hello channel one')
+ @event = Event.new(:channel_event, :user_id => "username", :data => 'hello channel one')
end
context "and the user is not connected to this server" do
@@ -61,11 +61,11 @@ module WebsocketRails
context "and the user is connected to this server" do
before do
@connection = double('Connection')
- WebsocketRails.users[:username] = @connection
+ WebsocketRails.users["username"] = @connection
end
it "triggers the event on the correct user" do
- WebsocketRails.users[:username].should_receive(:trigger).with @event
+ WebsocketRails.users["username"].should_receive(:trigger).with @event
subject.trigger_incoming @event
end
end | Type cast username to string in Synchronization spec. | websocket-rails_websocket-rails | train | rb |
3b059e6a64912a8333aa05da982d8d7ccc382314 | diff --git a/lib/rango/templates/helpers.rb b/lib/rango/templates/helpers.rb
index <HASH>..<HASH> 100644
--- a/lib/rango/templates/helpers.rb
+++ b/lib/rango/templates/helpers.rb
@@ -79,7 +79,7 @@ module Rango
raise ArgumentError, "Block has to have a name!" if name.nil?
raise ArgumentError, "You have to provide value or block, not both of them!" if value && block
value = self.template.scope.capture(&block) if value.nil? && block
- self.template.blocks[name] = "#{self.template.blocks[name]}\n#{value}" if value
+ self.template.blocks[name] = value if value
return self.template.blocks[name]
end | You have to can block(:name) in your extend/enhance so you can wrap block content in a tag.
Example:
extend_block(:content) do
#wrapper= block(:content) | botanicus_rango | train | rb |
dc767e5a921eeda8c5de2a7e725bb7e7fd1c2b89 | diff --git a/spec/bitbucket_rest_api/repos_spec.rb b/spec/bitbucket_rest_api/repos_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/bitbucket_rest_api/repos_spec.rb
+++ b/spec/bitbucket_rest_api/repos_spec.rb
@@ -119,11 +119,19 @@ describe BitBucket::Repos do
'/1.0/repositories/mock_username/mock_repo/tags/',
{},
{}
- )
+ ).and_return(['tag1', 'tag2' ,'tag3'])
end
- it 'should send a GET request for the tags belonging to the given repo' do
- repo.tags('mock_username', 'mock_repo')
+ context 'without a block' do
+ it 'should send a GET request for the tags belonging to the given repo' do
+ repo.tags('mock_username', 'mock_repo')
+ end
+ end
+
+ context 'with a block' do
+ it 'should send a GET request for the tags belonging to the given repo' do
+ repo.tags('mock_username', 'mock_repo') { |tag| tag }
+ end
end
end | Test .tags with a block in repos_spec | bitbucket-rest-api_bitbucket | train | rb |
f5a6ec70a5d2ef94d345cc8406a79d4f86269a17 | diff --git a/lib/pdk/config.rb b/lib/pdk/config.rb
index <HASH>..<HASH> 100644
--- a/lib/pdk/config.rb
+++ b/lib/pdk/config.rb
@@ -8,6 +8,7 @@ module PDK
class Config
autoload :JSON, 'pdk/config/json'
autoload :JSONSchemaNamespace, 'pdk/config/json_schema_namespace'
+ autoload :JSONSchemaSetting, 'pdk/config/json_schema_setting'
autoload :LoadError, 'pdk/config/errors'
autoload :Namespace, 'pdk/config/namespace'
autoload :Setting, 'pdk/config/setting'
diff --git a/lib/pdk/config/json_schema_setting.rb b/lib/pdk/config/json_schema_setting.rb
index <HASH>..<HASH> 100644
--- a/lib/pdk/config/json_schema_setting.rb
+++ b/lib/pdk/config/json_schema_setting.rb
@@ -1,4 +1,4 @@
-require 'pdk/config/json_schema_namespace'
+require 'pdk'
module PDK
class Config | (maint) Ensure pdk/config/json_schema_setting works standalone | puppetlabs_pdk | train | rb,rb |
c0e9448a6f7ca4f8488bb23ae21ff917579b610e | diff --git a/ipuz/__init__.py b/ipuz/__init__.py
index <HASH>..<HASH> 100644
--- a/ipuz/__init__.py
+++ b/ipuz/__init__.py
@@ -110,7 +110,7 @@ def validate_clueplacement(field_name, field_data):
def validate_answer(field_name, field_data):
- if type(field_data) not in [str, unicode]:
+ if type(field_data) not in [str, unicode] or field_data == "":
raise IPUZException("Invalid answer value found")
diff --git a/tests/test_ipuz.py b/tests/test_ipuz.py
index <HASH>..<HASH> 100644
--- a/tests/test_ipuz.py
+++ b/tests/test_ipuz.py
@@ -342,6 +342,10 @@ class IPUZAnswerTestCase(IPUZSampleCrosswordTestCase):
self.puzzle["answer"] = 3
self.validate("Invalid answer value found")
+ def test_answer_is_non_empty_string(self):
+ self.puzzle["answer"] = ""
+ self.validate("Invalid answer value found")
+
def test_answers_not_a_list(self):
self.puzzle["answers"] = 3
self.validate("Invalid answers value found") | Ensure answer value is not an empty string | svisser_ipuz | train | py,py |
9417870f0beee985e3104fafe6e3deb9a9bd704d | diff --git a/restclients/models/bridge.py b/restclients/models/bridge.py
index <HASH>..<HASH> 100644
--- a/restclients/models/bridge.py
+++ b/restclients/models/bridge.py
@@ -37,7 +37,7 @@ class BridgeCustomField(models.Model):
class BridgeUser(models.Model):
- bridge_id = models.IntegerField(default=0)
+ bridge_id = models.CharField(max_length=16, null=True, default=None)
uwnetid = models.CharField(max_length=128)
first_name = models.CharField(max_length=128)
full_name = models.CharField(max_length=128) | bridge_id is a string. | uw-it-aca_uw-restclients | train | py |
67837dc6de728d32a1a7bff65894aa1c6380171b | diff --git a/manifest.php b/manifest.php
index <HASH>..<HASH> 100755
--- a/manifest.php
+++ b/manifest.php
@@ -28,7 +28,7 @@ return array(
'name' => 'taoQtiItem',
'label' => 'QTI item model',
'license' => 'GPL-2.0',
- 'version' => '10.3.3',
+ 'version' => '10.4.0',
'author' => 'Open Assessment Technologies',
'requires' => array(
'taoItems' => '>=4.2.4',
diff --git a/scripts/update/Updater.php b/scripts/update/Updater.php
index <HASH>..<HASH> 100644
--- a/scripts/update/Updater.php
+++ b/scripts/update/Updater.php
@@ -439,6 +439,6 @@ class Updater extends \common_ext_ExtensionUpdater
$this->setVersion('10.0.0');
}
- $this->skip('10.0.0', '10.3.3');
+ $this->skip('10.0.0', '10.4.0');
}
} | Bump to version <I> | oat-sa_extension-tao-itemqti | train | php,php |
d354be91f9cc7754a9718e312292b1672d45c757 | diff --git a/librosa/__init__.py b/librosa/__init__.py
index <HASH>..<HASH> 100644
--- a/librosa/__init__.py
+++ b/librosa/__init__.py
@@ -10,6 +10,27 @@ Includes constants, core utility functions, etc
import numpy, scipy
import beat, framegenerator, _chroma, _mfcc, tf_agc
+import audioread
+
+def load(path, mono=True, frame_size=1024):
+ '''
+ Load an audio file into a single, long time series
+
+ Input:
+ path: path to the input file
+ mono: convert to mono? | Default: True
+ frame_size: buffer size | Default: 1024 samples
+ Output:
+ y: the time series
+ sr: the sampling rate
+ '''
+
+ with audioread.audio_open(path) as f:
+ sr = f.samplerate
+ y = numpy.concatenate([frame for frame in framegenerator.audioread_timeseries(f, frame_size)], axis=0)
+ pass
+
+ return (y, sr)
def pad(w, d_pad, v=0.0, center=True):
''' | added a wrapper to audioread | librosa_librosa | train | py |
1b922d258b8aa1fc6f95b9f3e0d6ec422910730b | diff --git a/lib/provider/ticket.rb b/lib/provider/ticket.rb
index <HASH>..<HASH> 100644
--- a/lib/provider/ticket.rb
+++ b/lib/provider/ticket.rb
@@ -16,7 +16,8 @@ module TicketMaster::Provider
:backlog => object.backlog,
:wip => object.wip,
:created_at => object.created_at,
- :updated_at => object.updated_at}
+ :updated_at => object.updated_at,
+ :project_slug => object.project_slug}
else
hash = object
end | added project_slug field to tickets initialization | hybridgroup_taskmapper-kanbanpad | train | rb |
bc240b8825b1c830318e8f3345c5300a947d89f8 | diff --git a/lib/coveralls/configuration.rb b/lib/coveralls/configuration.rb
index <HASH>..<HASH> 100644
--- a/lib/coveralls/configuration.rb
+++ b/lib/coveralls/configuration.rb
@@ -143,8 +143,7 @@ module Coveralls
}
# Branch
- branch = `git branch`.split("\n").delete_if { |i| i[0] != "*" }
- hash[:branch] = [branch].flatten.first.gsub("* ","")
+ hash[:branch] = `git rev-parse --abbrev-ref HEAD`
# Remotes
remotes = nil | Improve method for determining current git branch | lemurheavy_coveralls-ruby | train | rb |
1ed207686d2a5e4759501012e6ddc89d9d09e7d4 | diff --git a/cmd2/cmd2.py b/cmd2/cmd2.py
index <HASH>..<HASH> 100644
--- a/cmd2/cmd2.py
+++ b/cmd2/cmd2.py
@@ -1685,9 +1685,9 @@ class Cmd(cmd.Cmd):
if py_bridge_call:
# Stop saving command's stdout before command finalization hooks run
self.stdout.pause_storage = True
- except KeyboardInterrupt as e:
+ except KeyboardInterrupt as ex:
if raise_keyboard_interrupt:
- raise e
+ raise ex
except (Cmd2ArgparseError, EmptyStatement):
# Don't do anything, but do allow command finalization hooks to run
pass
@@ -3255,10 +3255,8 @@ class Cmd(cmd.Cmd):
# noinspection PyBroadException
try:
interp.runcode(py_code_to_run)
- except KeyboardInterrupt as e:
- raise e
except BaseException:
- # We don't care about any other exceptions that happened in the Python code
+ # We don't care about any exceptions that happened in the Python code
pass
# Otherwise we will open an interactive Python shell | Since runcode() catches most KeyboardInterrupts, just ignore any that make their way up to our code.
This is more consistent than raising the rare few that we see. | python-cmd2_cmd2 | train | py |
bfba0a4a5339f5c462f4b51cbf72968687aeb60f | diff --git a/src/main/java/liquibase/ext/ora/truncate/TruncateStatement.java b/src/main/java/liquibase/ext/ora/truncate/TruncateStatement.java
index <HASH>..<HASH> 100644
--- a/src/main/java/liquibase/ext/ora/truncate/TruncateStatement.java
+++ b/src/main/java/liquibase/ext/ora/truncate/TruncateStatement.java
@@ -27,8 +27,8 @@ public class TruncateStatement extends AbstractSqlStatement {
return clusterName;
}
- public boolean purgeMaterializedViewLog() {
- return purgeMaterializedViewLog != null ? purgeMaterializedViewLog.booleanValue() : false;
+ public Boolean purgeMaterializedViewLog() {
+ return purgeMaterializedViewLog;
}
public TruncateStatement setPurgeMaterializedViewLog(Boolean purgeMaterializedViewLog) {
@@ -36,8 +36,8 @@ public class TruncateStatement extends AbstractSqlStatement {
return this;
}
- public boolean reuseStorage() {
- return reuseStorage != null ? reuseStorage.booleanValue() : false;
+ public Boolean reuseStorage() {
+ return reuseStorage;
}
public TruncateStatement setReuseStorage(Boolean reuseStorage) { | Modified boolean get methods to be able to return null. | liquibase_liquibase-oracle | train | java |
63dbfc6228bb9e31969b5344cec95b5f31f466a4 | diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -27,7 +27,8 @@ with open(metadata['__file__'], 'r') as f:
BASE_DIR = os.path.join(os.path.expanduser("~"), ".indy")
-tests_require = ['attrs==19.1.0', 'pytest==3.3.1', 'pytest-xdist==1.22.1', 'python3-indy==1.11.1-dev-1343', 'pytest-asyncio==0.8.0']
+tests_require = ['attrs==19.1.0', 'pytest==3.3.1', 'pytest-xdist==1.22.1', 'pytest-forked==0.2',
+ 'python3-indy==1.11.1-dev-1343', 'pytest-asyncio==0.8.0']
setup(
name=metadata['__title__'], | INDY-<I>: fix pytest-forked version | hyperledger_indy-node | train | py |
fca36960fc0f6a7e5a79858d570a33bebb7156ff | diff --git a/underfs/s3/src/main/java/tachyon/underfs/s3/S3UnderFileSystem.java b/underfs/s3/src/main/java/tachyon/underfs/s3/S3UnderFileSystem.java
index <HASH>..<HASH> 100644
--- a/underfs/s3/src/main/java/tachyon/underfs/s3/S3UnderFileSystem.java
+++ b/underfs/s3/src/main/java/tachyon/underfs/s3/S3UnderFileSystem.java
@@ -180,6 +180,12 @@ public class S3UnderFileSystem extends UnderFileSystem {
@Override
public String[] list(String path) throws IOException {
// Non recursive list
+ if (!isFolder(path)) {
+ return null;
+ }
+ System.out.println("List with : " + path);
+ path = path.endsWith(PATH_SEPARATOR) ? path : path + PATH_SEPARATOR;
+ System.out.println("List after : " + path);
return listInternal(path, false);
}
@@ -387,6 +393,7 @@ public class S3UnderFileSystem extends UnderFileSystem {
* @throws IOException
*/
private boolean isFolder(String key) {
+ key = key.endsWith(PATH_SEPARATOR) ? key.substring(0, key.length() - 1) : key;
// Root is always a folder
if (isRoot(key)) {
return true; | Some additional parsing in list status for s3. | Alluxio_alluxio | train | java |
db808aabbacf1e4ea65745a4f91f99a67cb2ac9f | diff --git a/molgenis-ontology/src/main/resources/js/sorta-result-anonymous.js b/molgenis-ontology/src/main/resources/js/sorta-result-anonymous.js
index <HASH>..<HASH> 100644
--- a/molgenis-ontology/src/main/resources/js/sorta-result-anonymous.js
+++ b/molgenis-ontology/src/main/resources/js/sorta-result-anonymous.js
@@ -63,6 +63,12 @@
}
});
+ $(thresholdValue).keydown(function(e){
+ if(e.keyCode === 13){
+ $(inputGroupButton).click();
+ }
+ });
+
getMatchResults(function(matchedResults){
var perfectMatches = [];
var partialMatches = []; | after hitting the enter to update the threshold, the page is stopped from submitting the form therefore causing an error | molgenis_molgenis | train | js |
d2cdd641857af07a5439e8b8fb2e2b5110aa9566 | diff --git a/scss/functions/compass/helpers.py b/scss/functions/compass/helpers.py
index <HASH>..<HASH> 100644
--- a/scss/functions/compass/helpers.py
+++ b/scss/functions/compass/helpers.py
@@ -91,17 +91,15 @@ def reject(lst, *values):
@register('first-value-of')
-def first_value_of(lst):
- if isinstance(lst, QuotedStringValue):
- first = lst.value.split()[0]
- return type(lst)(first)
- elif isinstance(lst, List):
- if len(lst):
- return lst[0]
- else:
- return Null()
+def first_value_of(*args):
+ args = List.from_maybe_starargs(args)
+ if len(args) == 1 and isinstance(args[0], QuotedStringValue):
+ first = args[0].value.split()[0]
+ return type(args[0])(first)
+ elif len(args):
+ return args[0]
else:
- return lst
+ return Null()
@register('-compass-list') | first-value-of() fixed to maybe receive lists | Kronuz_pyScss | train | py |
ed73c2be0f33b946386e811e04e5460a93621cc5 | diff --git a/src/Ouzo/Goodies/Utilities/Clock.php b/src/Ouzo/Goodies/Utilities/Clock.php
index <HASH>..<HASH> 100644
--- a/src/Ouzo/Goodies/Utilities/Clock.php
+++ b/src/Ouzo/Goodies/Utilities/Clock.php
@@ -117,10 +117,8 @@ class Clock
private function _modify($interval)
{
- $freshDateTime = new DateTime();
- $freshDateTime->setTimestamp($this->dateTime->getTimestamp());
- $freshDateTime->modify($interval);
- return new Clock($freshDateTime);
+ $freshDateTime = clone $this->dateTime;
+ return new Clock($freshDateTime->modify($interval));
}
public function minusDays($days) | [Utilities] Fixed maintaining timeZone in Clock | letsdrink_ouzo | train | php |
8d468d4d408c76d9715bce84f46a6dee923e8b63 | diff --git a/test/test.js b/test/test.js
index <HASH>..<HASH> 100644
--- a/test/test.js
+++ b/test/test.js
@@ -1,4 +1,3 @@
-'use strict';
const chai = require('chai');
const expect = chai.expect;
const sinon = require('sinon'); | Remove 'use-strict' to be able to use const | coveo_pretty-typescript | train | js |
Subsets and Splits
Java Commits in Train Set
Selects every entry whose diff_languages column is 'java', producing a language-filtered view of the dataset without any further analysis.
Java Commits Test Data
Returns a subset of 5,000 entries whose diff_languages column is 'java', providing a basic filtered sample for exploration.
Java Commits Sample
Retrieves the first 1,000 records where the 'diff_languages' column is 'java', providing limited insight into the specific data entries.
Java Commits Validation Sample
Retrieves a sample of entries from the validation dataset where the diff languages are Java, providing limited insight into specific Java-related data points.
Java Commits in Validation
Retrieves a limited sample of validation-split entries whose diff_languages column is 'java', providing basic filtering with minimal further insight.
Java Commits Sample
This query retrieves a sample of 100 records where the 'diff_languages' is 'java', providing basic filtering but limited analytical value.
Java Commits Sample
Retrieves 100 samples where the language difference is Java, providing basic filtering but minimal analytical value.
Java Commits Sample
Retrieves 10 samples where the diff_languages column is 'java', providing basic examples of data entries with this specific language.
Java Commits Validation Sample
Retrieves 1,000 records whose diff_languages column is 'java', providing a raw-data snapshot of that subset.
Java Commits Sample
This query retrieves 1000 random samples from the dataset where the programming language is Java, offering limited insight beyond raw data.