hash stringlengths 40 40 | diff stringlengths 131 26.7k | message stringlengths 7 694 | project stringlengths 5 67 | split stringclasses 1 value | diff_languages stringlengths 2 24 |
|---|---|---|---|---|---|
766283a50ee2f96e7c313b7a02ea917cf4121ab1 | diff --git a/app/models/post.rb b/app/models/post.rb
index <HASH>..<HASH> 100644
--- a/app/models/post.rb
+++ b/app/models/post.rb
@@ -5,6 +5,10 @@ class Post < ActiveRecord::Base
has_and_belongs_to_many :categories
belongs_to :user
+ validates_presence_of :title
+ validates_presence_of :short_description
+ validates_presence_of :author
+
enum type: [:article, :video, :infographic, :promo]
enum job_phase: [:discovery, :find_the_job, :get_the_job, :on_the_job]
enum display: [:large, :medium, :small] | Added validation in Post model for :title, :short_description and :author | cortex-cms_cortex | train | rb |
5cce73f9a315143aa6703f51948e3aade7f55ba4 | diff --git a/holoviews/core/element.py b/holoviews/core/element.py
index <HASH>..<HASH> 100644
--- a/holoviews/core/element.py
+++ b/holoviews/core/element.py
@@ -294,8 +294,9 @@ class NdElement(Element, NdMapping):
else:
reduced = tuple(reduce_fn(self.dimension_values(vdim.name))
for vdim in self.value_dimensions)
- params = dict(group=self.group) if self.value != type(self).__name__ else {}
- reduced_table = self.__class__([((), reduced)], label=self.label,
+ reduced_dims = [d for d in self.key_dimensions if d.name not in reduce_map]
+ params = dict(group=self.group) if self.group != type(self).__name__ else {}
+ reduced_table = self.__class__([((), reduced)], label=self.label, key_dimensions=reduced_dims,
value_dimensions=self.value_dimensions, **params)
return reduced_table | Fix for 0D reduction on NdElements | pyviz_holoviews | train | py |
e74cbc0fecee84ace826a2b52ae5f6549b42b70d | diff --git a/concrete/src/User/Group/GroupSearchColumnSet.php b/concrete/src/User/Group/GroupSearchColumnSet.php
index <HASH>..<HASH> 100644
--- a/concrete/src/User/Group/GroupSearchColumnSet.php
+++ b/concrete/src/User/Group/GroupSearchColumnSet.php
@@ -8,7 +8,7 @@ class GroupSearchColumnSet extends \Concrete\Core\Search\Column\Set
{
public static function getGroupName($g)
{
- return '<a data-group-name="' . $g->getGroupDisplayName() . '" href="' . URL::to('/dashboard/users/groups', 'edit', $g->getGroupID()) . '" data-group-id="' . $g->getGroupID() . '" href="#">' . $g->getGroupDisplayName() . '</a>';
+ return '<a data-group-name="' . $g->getGroupDisplayName(false) . '" href="' . URL::to('/dashboard/users/groups', 'edit', $g->getGroupID()) . '" data-group-id="' . $g->getGroupID() . '" href="#">' . $g->getGroupDisplayName() . '</a>';
}
public function __construct() | Do not include html on ajax group search (#<I>)
Former-commit-id: a<I>dfd<I>bd6f<I>deada<I>dd3f1
Former-commit-id: <I>d6a<I>ec<I>dd9beb7a7adc3bcf1a<I>c<I> | concrete5_concrete5 | train | php |
a4be1e5e8b1eaa924d8fe53230783f74341812b3 | diff --git a/keyring/tests/backends/test_Windows.py b/keyring/tests/backends/test_Windows.py
index <HASH>..<HASH> 100644
--- a/keyring/tests/backends/test_Windows.py
+++ b/keyring/tests/backends/test_Windows.py
@@ -24,6 +24,7 @@ def is_winvault_supported():
and has_pywin32
)
+
@unittest.skipUnless(is_win32_crypto_supported(),
"Need Windows")
class Win32CryptoKeyringTestCase(FileKeyringTests, unittest.TestCase):
@@ -46,3 +47,19 @@ class WinVaultKeyringTestCase(BackendBasicTests, unittest.TestCase):
def init_keyring(self):
return keyring.backends.Windows.WinVaultKeyring()
+
+
+@unittest.skipUnless(keyring.backends.Windows.RegistryKeyring.viable,
+ "RegistryKeyring not viable")
+class RegistryKeyringTestCase(BackendBasicTests, unittest.TestCase):
+ def tearDown(self):
+ # clean up any credentials created
+ for cred in self.credentials_created:
+ try:
+ self.keyring.delete_password(*cred)
+ except (Exception,):
+ e = sys.exc_info()[1]
+ print >> sys.stderr, e
+
+ def init_keyring(self):
+ return keyring.backends.Windows.RegistryKeyring() | Add WindowsRegistryKeyring to the test suite. | jaraco_keyring | train | py |
956cb19c78d595fdf0eb2702d8e94acbfd3d0490 | diff --git a/salt/modules/schedule.py b/salt/modules/schedule.py
index <HASH>..<HASH> 100644
--- a/salt/modules/schedule.py
+++ b/salt/modules/schedule.py
@@ -127,7 +127,16 @@ def list_(show_all=False,
continue
if '_seconds' in schedule[job]:
- schedule[job]['seconds'] = schedule[job]['_seconds']
+ # if _seconds is greater than zero
+ # then include the original back in seconds.
+ # otherwise remove seconds from the listing as the
+ # original item didn't include it.
+ if schedule[job]['_seconds'] > 0:
+ schedule[job]['seconds'] = schedule[job]['_seconds']
+ else:
+ del schedule[job]['seconds']
+
+ # remove _seconds from the listing
del schedule[job]['_seconds']
if schedule: | when using splay, seconds was being included in listing for schedule items that didn't original include it. This PR fixes that scenario. | saltstack_salt | train | py |
f04f7aff1abf3a380c93888aae2b7f7859148931 | diff --git a/lando_messaging/clients.py b/lando_messaging/clients.py
index <HASH>..<HASH> 100644
--- a/lando_messaging/clients.py
+++ b/lando_messaging/clients.py
@@ -124,13 +124,6 @@ class LandoWorkerClient(object):
payload = StoreJobOutputPayload(credentials, job_details, output_directory, vm_instance_name)
self._send(JobCommands.STORE_JOB_OUTPUT, payload)
- def cancel_job(self, job_id):
- """
- Request that the worker cancel a currently running job.
- :param job_id: int: unique id for the job we want to cancel
- """
- self._send(JobCommands.CANCEL_JOB, job_id)
-
def _send(self, command, payload):
"""
Send a message over work queue to worker. | remove worker client cancel_job message
Currently lando terminates the VM so this method is not used.
Even if it was it may not be seen by the worker until it finishes running the job step. | Duke-GCB_lando-messaging | train | py |
af4da65ade273c6717bbbe86849719c431778e77 | diff --git a/logger.js b/logger.js
index <HASH>..<HASH> 100644
--- a/logger.js
+++ b/logger.js
@@ -147,7 +147,13 @@ logger._init_timestamps = function () {
});
if (_timestamps) {
- console.log = original_console_log.bind(console, new Date().toISOString());
+ console.log = function() {
+ var new_arguments = [new Date().toISOString()];
+ for (var key in arguments) {
+ new_arguments.push(arguments[key]);
+ }
+ original_console_log.apply(console, new_arguments);
+ }
}
else {
console.log = original_console_log; | fix adding timestamps to log messages
The current method of adding a log timestamp results in the startup time of
Haraka being added to every log message, which isn't ideal. Use a proxy function
which uses a new Date for every log message written to the console. | haraka_Haraka | train | js |
82759c073e299be8579f1d64158591a6a0b293f0 | diff --git a/lib/art-decomp/executable.rb b/lib/art-decomp/executable.rb
index <HASH>..<HASH> 100644
--- a/lib/art-decomp/executable.rb
+++ b/lib/art-decomp/executable.rb
@@ -39,13 +39,12 @@ module ArtDecomp class Executable
decs << dec
if dump_tables
Dir.mkdir File.join(@dir, i.to_s)
- File.open(File.join(@dir, i.to_s, 'g'), 'w') { |f| f << dec.g_table }
- File.open(File.join(@dir, i.to_s, 'h'), 'w') { |f| f << dec.h_table }
+ File.write_data File.join(@dir, i.to_s, 'g'), dec.g_table
+ File.write_data File.join(@dir, i.to_s, 'h'), dec.h_table
end
end
- filename = File.join @dir, 'decompositions'
- File.open(filename, 'w') { |f| f << Marshal.dump(decs) }
+ File.dump_object File.join(@dir, 'decompositions'), decs
end
end end | use File.dump_object and File.write_data in Executable#run | chastell_art-decomp | train | rb |
2bfb5f9f8b7636f1371082c2b60469f0eddffe46 | diff --git a/lib/resque/worker.rb b/lib/resque/worker.rb
index <HASH>..<HASH> 100644
--- a/lib/resque/worker.rb
+++ b/lib/resque/worker.rb
@@ -192,7 +192,9 @@ module Resque
report_failed_job(job,exception)
end
- do_exit_or_exit!
+ if will_fork?
+ run_at_exit_hooks ? exit : exit!
+ end
end
done_working
@@ -207,18 +209,10 @@ module Resque
unregister_worker
rescue Exception => exception
- log "Failed to start worker : #{exception.inspect}"
-
- unregister_worker(exception)
- end
+ unless exception.class == SystemExit && !@child && run_at_exit_hooks
+ log "Failed to start worker : #{exception.inspect}"
- def do_exit_or_exit!
- return unless will_fork?
- exit! unless run_at_exit_hooks
- begin
- exit
- rescue SystemExit
- nil
+ unregister_worker(exception)
end
end | Avoid unregister when exiting from children and run_at_exit_hooks == true | resque_resque | train | rb |
d247751c2ee9db59b7194cf5838040a65c4fb31d | diff --git a/lib/hanami/extensions/view/slice_configured_context.rb b/lib/hanami/extensions/view/slice_configured_context.rb
index <HASH>..<HASH> 100644
--- a/lib/hanami/extensions/view/slice_configured_context.rb
+++ b/lib/hanami/extensions/view/slice_configured_context.rb
@@ -55,15 +55,15 @@ module Hanami
end
def resolve_settings
- slice.app[:settings] if slice.app.key?(:settings)
+ slice["settings"] if slice.key?("settings")
end
def resolve_routes
- slice.app["routes"] if slice.app.key?("routes")
+ slice["routes"] if slice.key?("routes")
end
def resolve_assets
- slice.app[:assets] if slice.app.key?(:assets)
+ slice["assets"] if slice.key?("assets")
end
end
end | Use string keys instead of symbols
This is the convention we want to encourage | hanami_hanami | train | rb |
6ddfb40a4748bca16b008da41c884e21ca9c9730 | diff --git a/tests/test_curve.py b/tests/test_curve.py
index <HASH>..<HASH> 100644
--- a/tests/test_curve.py
+++ b/tests/test_curve.py
@@ -76,6 +76,13 @@ def test_block(well):
assert b.df.mean()[0] - 26.072967 < 0.001
+def test_despike(well):
+ """
+ Test despiker with even window and z != 2.
+ """
+ gr = well.data['GR']
+ assert gr.df.max()[0] - gr.despike(50, z=1).df.max()[0] - 91.83918 < 0.001
+
# define test data
data_num = np.linspace(1, 200, 20)
data_num_2d = np.array([data_num, np.linspace(400, 201, 20)]).T
diff --git a/welly/curve.py b/welly/curve.py
index <HASH>..<HASH> 100644
--- a/welly/curve.py
+++ b/welly/curve.py
@@ -279,8 +279,8 @@ class Curve(object):
def despike(self, window_length=33, samples=True, z=2):
"""
Args:
- window (int): window length in samples. Default 33 (or 5 m for
- most curves sampled at 0.1524 m intervals).
+ window_length (int): window length in samples. Default 33
+ (or 5 m for most curves sampled at 0.1524 m intervals).
samples (bool): window length is in samples. Use False for a window
length given in metres.
z (float): Z score | add despike test and update docstring | agile-geoscience_welly | train | py,py |
97e20d6431fe43a8f898abfb3ccd3b25060c4753 | diff --git a/classes/Boom/Auth/Auth.php b/classes/Boom/Auth/Auth.php
index <HASH>..<HASH> 100644
--- a/classes/Boom/Auth/Auth.php
+++ b/classes/Boom/Auth/Auth.php
@@ -54,7 +54,7 @@ class Auth
// Clear the autologin token from the database
$token = ORM::factory('User_Token', ['token' => $token]);
- if ($token->loaded() and $logout_all) {
+ if ($token->loaded()) {
// Delete all user tokens. This isn't the most elegant solution but does the job
$tokens = ORM::factory('User_Token')->where('user_id','=',$token->user_id)->find_all();
@@ -199,13 +199,13 @@ class Auth
return $this;
}
- public function complete_login($person)
+ public function complete_login(Person\Person $person)
{
// Store the person ID in the session data.
$this->session->set($this->sessionKey, $person->getId());
}
- public function force_login($person, $mark_as_forced = false)
+ public function force_login(Person\Person $person, $mark_as_forced = false)
{
$this->person = $person; | Added type hints for Person\Person to Auth\Auth | boomcms_boom-core | train | php |
6e53a1183970406aa002ebdc40fd2931f8974e0a | diff --git a/validator/tests/unit/test_validator_registry.py b/validator/tests/unit/test_validator_registry.py
index <HASH>..<HASH> 100644
--- a/validator/tests/unit/test_validator_registry.py
+++ b/validator/tests/unit/test_validator_registry.py
@@ -76,9 +76,7 @@ class TestValidatorRegistryTransaction(unittest.TestCase):
transaction = ValidatorRegistryTransaction.register_validator(
name, validator_id, signup_info)
transaction.sign_object(key2)
- try:
+ with self.assertRaises(InvalidTransactionError):
transaction.check_valid(store)
transaction.apply(store)
- self.fail("Bad: Verified an invalid transaction")
- except InvalidTransactionError:
- pass
+ self.fail("Failure: Verified an invalid transaction") | fixup: add assertRaises to negative unit test | hyperledger_sawtooth-core | train | py |
9e34162a09c8cd9a49ce8a8db6f498a784aba9fc | diff --git a/matterbridge.go b/matterbridge.go
index <HASH>..<HASH> 100644
--- a/matterbridge.go
+++ b/matterbridge.go
@@ -34,7 +34,6 @@ func main() {
fmt.Printf("version: %s %s\n", version, githash)
return
}
- flag.Parse()
if *flagDebug {
log.Info("Enabling debug")
log.SetLevel(log.DebugLevel) | remove second flag.Parse() (#<I>)
flag.Parse() is already being called on line <I> <URL> | 42wim_matterbridge | train | go |
003d4c4d451262c2b6122ee7eb1599dab95ee159 | diff --git a/lib/Schema.js b/lib/Schema.js
index <HASH>..<HASH> 100644
--- a/lib/Schema.js
+++ b/lib/Schema.js
@@ -56,6 +56,16 @@ Schema.prototype.fields = function (key1, key2, key3) {
return subset;
};
+/**
+ * Calls the given function with the Schema as first argument and the given config (optionally). Plugins can be used
+ * to hook into class methods by overriding them.
+ *
+ * You may call this function multiple times with the same plugin, the plugin will only be applied once.
+ *
+ * @param {Function} plugin
+ * @param {Object=} config
+ * @returns {Function}
+ */
Schema.use = function (plugin, config) {
this._plugins = this._plugins || []; | Added docs to Schema.use() | peerigon_alamid-schema | train | js |
49f6f391fe4687dd204ccf2b737c57e4407caf79 | diff --git a/Treant.js b/Treant.js
index <HASH>..<HASH> 100644
--- a/Treant.js
+++ b/Treant.js
@@ -40,8 +40,7 @@
extend: function() {
if ( $ ) {
- arguments.unshift( true );
- arguments.unshift( {} );
+ Array.prototype.unshift.apply( arguments, [true, {}] );
return $.extend.apply( $, arguments );
}
else { | Fix for jQuery usage of extend() | fperucic_treant-js | train | js |
a0d12786905b8ea84462f2cd93d486a4344e7fda | diff --git a/src/maidenhair/functions.py b/src/maidenhair/functions.py
index <HASH>..<HASH> 100644
--- a/src/maidenhair/functions.py
+++ b/src/maidenhair/functions.py
@@ -132,6 +132,9 @@ def load(pathname, using=None, unite=False, basecolumn=0,
dataset += loader.glob(_pathname,
using=using, parser=parser,
unite=unite, basecolumn=basecolumn,
+ with_filename=with_filename,
+ recursive=recursive,
+ natsort=natsort,
**kwargs)
if relative:
from maidenhair.filters import relative | Fix `maidenhair.load` issue
`with_filename` and `recursive` and `natsort` arguments were not passed
to `loader.glab` function. | lambdalisue_maidenhair | train | py |
1e4fa3dab17abfcf2104eb9dbc686d59a0353ee2 | diff --git a/lib/faraday/error.rb b/lib/faraday/error.rb
index <HASH>..<HASH> 100644
--- a/lib/faraday/error.rb
+++ b/lib/faraday/error.rb
@@ -1,6 +1,7 @@
module Faraday
module Error
- class ConnectionFailed < StandardError; end
- class ResourceNotFound < StandardError; end
+ class ClientError < StandardError; end
+ class ConnectionFailed < ClientError; end
+ class ResourceNotFound < ClientError; end
end
end | Faraday::Error::ClientError is a common base class for all faraday errors | lostisland_faraday | train | rb |
0c56e27f04d32f5b4206681d05f9b3481576492f | diff --git a/gulpfile.js b/gulpfile.js
index <HASH>..<HASH> 100644
--- a/gulpfile.js
+++ b/gulpfile.js
@@ -535,7 +535,7 @@ gulp.task('build:demo', () => {
});
gulp.task('build:demo:prod', () => {
- return execDemoCmd(`build --preserve-symlinks --prod --aot --build-optimizer --base-href /@angular-material-extensions/password-strength/ --deploy-url /@angular-material-extensions/password-strength/`, {cwd: `${config.demoDir}`});
+ return execDemoCmd(`build --preserve-symlinks --prod --aot --build-optimizer --base-href /password-strength/ --deploy-url /password-strength/`, {cwd: `${config.demoDir}`});
});
gulp.task('serve:demo-ssr', ['build:demo'], () => {
@@ -569,7 +569,7 @@ gulp.task('push:demo', () => {
});
gulp.task('deploy:demo', (cb) => {
- runSequence('build:demo', 'build:doc', 'push:demo', cb);
+ runSequence('build:demo:prod', 'build:doc', 'push:demo', cb);
}); | chore(package): fixed base href and deploy url params for the build demo prod task | angular-material-extensions_password-strength | train | js |
3b813a77c2af4ce98b024264c2ce5a8ce44dc804 | diff --git a/src/com/opencms/core/I_CmsConstants.java b/src/com/opencms/core/I_CmsConstants.java
index <HASH>..<HASH> 100755
--- a/src/com/opencms/core/I_CmsConstants.java
+++ b/src/com/opencms/core/I_CmsConstants.java
@@ -1,7 +1,7 @@
/*
* File : $Source: /alkacon/cvs/opencms/src/com/opencms/core/Attic/I_CmsConstants.java,v $
-* Date : $Date: 2002/02/14 14:24:07 $
-* Version: $Revision: 1.196 $
+* Date : $Date: 2002/04/05 06:37:10 $
+* Version: $Revision: 1.197 $
*
* This library is part of OpenCms -
* the Open Source Content Mananagement System
@@ -36,7 +36,7 @@ package com.opencms.core;
* @author Michael Emmerich
* @author Michaela Schleich
*
- * @version $Revision: 1.196 $ $Date: 2002/02/14 14:24:07 $
+ * @version $Revision: 1.197 $ $Date: 2002/04/05 06:37:10 $
*/
public interface I_CmsConstants {
@@ -248,6 +248,11 @@ public interface I_CmsConstants {
static final int C_USER_TYPE_WEBUSER = 1;
/**
+ * A user-type
+ */
+ static final int C_USER_TYPE_SYSTEMANDWEBUSER = 2;
+
+ /**
* Key for additional info address.
*/
final static String C_ADDITIONAL_INFO_ZIPCODE = "USER_ZIPCODE"; | Added constant for usertype system- and webuser. | alkacon_opencms-core | train | java |
7e19f939d6e922a4e3c5a3b81ff6a1bd3b8aaff2 | diff --git a/internetarchive/item.py b/internetarchive/item.py
index <HASH>..<HASH> 100644
--- a/internetarchive/item.py
+++ b/internetarchive/item.py
@@ -244,6 +244,7 @@ class Item(BaseItem):
'{0}.epub'.format(self.identifier),
'{0}.mobi'.format(self.identifier),
'{0}_daisy.zip'.format(self.identifier),
+ '{0}_archive_marc.xml'.format(self.identifier),
]
for f in otf_files:
item_files.append(dict(name=f, otf=True)) | allow client to download on-the-fly archive_marc.xml | jjjake_internetarchive | train | py |
50387d5be821b42456da17ff04b72ce9f88305f8 | diff --git a/src/main/java/net/fortuna/ical4j/model/component/VEvent.java b/src/main/java/net/fortuna/ical4j/model/component/VEvent.java
index <HASH>..<HASH> 100644
--- a/src/main/java/net/fortuna/ical4j/model/component/VEvent.java
+++ b/src/main/java/net/fortuna/ical4j/model/component/VEvent.java
@@ -1450,7 +1450,7 @@ public class VEvent extends CalendarComponent {
public final DtEnd getEndDate(final boolean deriveFromDuration) {
DtEnd dtEnd = (DtEnd) getProperty(Property.DTEND);
// No DTEND? No problem, we'll use the DURATION.
- if (dtEnd == null && deriveFromDuration) {
+ if (dtEnd == null && deriveFromDuration && getStartDate() != null) {
final DtStart dtStart = getStartDate();
final Duration vEventDuration;
if (getDuration() != null) { | follow up to #<I> - dtstart may be missing | ical4j_ical4j | train | java |
4f0b04beb13d1ed5ce438b2a3f5e1468cc39d976 | diff --git a/webview/__init__.py b/webview/__init__.py
index <HASH>..<HASH> 100755
--- a/webview/__init__.py
+++ b/webview/__init__.py
@@ -27,6 +27,9 @@ from webview.util import _token, base_uri, parse_file_type, escape_string, make_
from webview.window import Window
from .localization import localization as original_localization
+# For export
+from .http_server import Routing, StaticFiles, StaticResources # noqa
+
logger = logging.getLogger('pywebview')
handler = logging.StreamHandler()
formatter = logging.Formatter('[pywebview] %(message)s') | Expose the new serving stuff on webview | r0x0r_pywebview | train | py |
029a6277e3ccde497c04cc46bf17b3871014f49f | diff --git a/glue/ligolw/lsctables.py b/glue/ligolw/lsctables.py
index <HASH>..<HASH> 100644
--- a/glue/ligolw/lsctables.py
+++ b/glue/ligolw/lsctables.py
@@ -719,16 +719,6 @@ class ProcessParamsTable(LSCTableMulti):
if row.process_id == key:
row.program = value
- def __getitem__(self, key):
- """
- Return a sorted list of rows matching the process ID key.
- """
- params = LSCTableMulti.__getitem__(self, key)
- # sort by process ID, then parameter name (all rows should
- # be unique by this measure).
- params.sort(lambda a, b: cmp((a.process_id, a.param), (b.process_id, b.param)))
- return params
-
def makeReference(self, elem):
"""
Convert ilwd:char strings into object references. | Remove crufty __getitem__ method from ProcessParamsTable class. I don't think
it's breaking anything, but this isn't the way things are supposed to work
anymore (so anything broken by its removal needs fixing anyway). | gwastro_pycbc-glue | train | py |
58cd0f58959eede8a5488111fc60620f81f5eb7b | diff --git a/lib/barometer/utils/payload_request.rb b/lib/barometer/utils/payload_request.rb
index <HASH>..<HASH> 100644
--- a/lib/barometer/utils/payload_request.rb
+++ b/lib/barometer/utils/payload_request.rb
@@ -19,7 +19,7 @@ module Barometer
def parse_response(response)
using_around_filters(response) do
- XmlReader.parse(response, *api.unwrap_nodes)
+ reader.parse(response, *api.unwrap_nodes)
end
end
@@ -33,6 +33,14 @@ module Barometer
api.after_parse(output) if api.respond_to?(:after_parse)
output
end
+
+ def reader
+ json? ? JsonReader : XmlReader
+ end
+
+ def json?
+ api.respond_to?(:format) && api.format == :json
+ end
end
end
end | Payload request now supports xml or json api | attack_barometer | train | rb |
b7f3f8db4b4300cada5de1e01e43c1ba5e18a3c3 | diff --git a/rtv/content.py b/rtv/content.py
index <HASH>..<HASH> 100644
--- a/rtv/content.py
+++ b/rtv/content.py
@@ -140,8 +140,8 @@ class BaseContent(object):
elif reddit_link.match(data['url_full']):
data['url_type'] = 'x-post'
- data['url'] = 'x-post via {}'.format(strip_subreddit_url(
- data['url_full'])[3:])
+ data['url'] = 'self.{}'.format(strip_subreddit_url(
+ data['url_full'])[3:])
else:
data['url_type'] = 'external' | content.py: more standardization | michael-lazar_rtv | train | py |
15df9773179850e140a4fd3f8e4b954c011dc227 | diff --git a/pyphi/direction.py b/pyphi/direction.py
index <HASH>..<HASH> 100644
--- a/pyphi/direction.py
+++ b/pyphi/direction.py
@@ -36,9 +36,9 @@ class Direction(Enum):
def order(self, mechanism, purview):
"""Order the mechanism and purview in time.
- If the direction is ``CAUSE``, then the ``purview`` is at |t-1| and the
- ``mechanism`` is at time |t|. If the direction is ``EFFECT``, then the
- ``mechanism`` is at time |t| and the purview is at |t+1|.
+ If the direction is ``CAUSE``, then the purview is at |t-1| and the
+ mechanism is at time |t|. If the direction is ``EFFECT``, then the
+ mechanism is at time |t| and the purview is at |t+1|.
"""
if self is Direction.CAUSE:
return purview, mechanism | Fix style in `Direction.order()` docstring | wmayner_pyphi | train | py |
172672c44ee3d51140c3d0cc8e473a4d16432d76 | diff --git a/ui/src/admin/components/chronograf/OrganizationsTable.js b/ui/src/admin/components/chronograf/OrganizationsTable.js
index <HASH>..<HASH> 100644
--- a/ui/src/admin/components/chronograf/OrganizationsTable.js
+++ b/ui/src/admin/components/chronograf/OrganizationsTable.js
@@ -34,10 +34,10 @@ class OrganizationsTable extends Component {
const {organizations, onDeleteOrg, onRenameOrg} = this.props
const {isCreatingOrganization} = this.state
- const tableTitle =
- organizations.length === 1
- ? '1 Organizations'
- : `${organizations.length} Organizations`
+ const tableTitle = `${organizations.length} Organization${organizations.length ===
+ 1
+ ? ''
+ : 's'}`
return (
<div className="container-fluid"> | Fix plural vs singular for 1 organization in OrganizationsTable | influxdata_influxdb | train | js |
36b5b041785c14015b885adfea7d620fbdf93621 | diff --git a/src/PermissionChecker.php b/src/PermissionChecker.php
index <HASH>..<HASH> 100644
--- a/src/PermissionChecker.php
+++ b/src/PermissionChecker.php
@@ -2,16 +2,18 @@
namespace pff\modules;
use Minime\Annotations\Cache\ApcCache;
-use pff\IBeforeHook;
-use pff\IConfigurableModule;
-use pff\pffexception;
+use pff\Abs\AController;
+use pff\Abs\AModule;
+use pff\Iface\IBeforeHook;
+use pff\Iface\IConfigurableModule;
+use pff\Exception\PffException;
use Minime\Annotations\Reader;
use Minime\Annotations\Parser;
/**
* Manages Controller->action permissions
*/
-class PermissionChecker extends \pff\AModule implements IConfigurableModule, IBeforeHook{
+class PermissionChecker extends AModule implements IConfigurableModule, IBeforeHook{
private $userClass,
$sessionUserId,
@@ -26,7 +28,7 @@ class PermissionChecker extends \pff\AModule implements IConfigurableModule, IBe
private $classReflection;
/**
- * @var \pff\AController
+ * @var AController
*/
private $controller; | new autoloading for pff2-core | stonedz_pff2-permissions | train | php |
d9cb04901491388a55bacb80cee02164b694ac5c | diff --git a/src/edeposit/amqp/harvester/filters/aleph_filter.py b/src/edeposit/amqp/harvester/filters/aleph_filter.py
index <HASH>..<HASH> 100755
--- a/src/edeposit/amqp/harvester/filters/aleph_filter.py
+++ b/src/edeposit/amqp/harvester/filters/aleph_filter.py
@@ -68,14 +68,24 @@ def compare_names(first, second):
return 0
similarity_factor = 0
- for fitem, sitem in zipped:
- if fitem == sitem:
+ for fitem, _ in zipped:
+ if fitem in second:
similarity_factor += 1
return (float(similarity_factor) / len(zipped)) * 100
def filter_publication(publication):
+ """
+ Filter publications based at data from Aleph.
+
+ Args:
+ publication (obj): :class:`structures.Publication` instance.
+
+ Returns:
+ obj/None: None if the publication was found in Aleph or `publication` \
+ if not.
+ """
query = None
isbn_query = False
@@ -127,7 +137,7 @@ def filter_publication(publication):
# try to compare authors from `publication` and Aleph
for pub_author in pub_authors:
- if compare_names(author_str, pub_author):
+ if compare_names(author_str, pub_author) >= 50:
return None # book already in database
return publication # book is not in database | #8: Fixed few algorithm bugs, added docstrings. | edeposit_edeposit.amqp.harvester | train | py |
0f56f6e6dac5baf18d7ab3329e379a4875700f4b | diff --git a/search/engine/solr/lang/en/search_solr.php b/search/engine/solr/lang/en/search_solr.php
index <HASH>..<HASH> 100644
--- a/search/engine/solr/lang/en/search_solr.php
+++ b/search/engine/solr/lang/en/search_solr.php
@@ -28,7 +28,8 @@ $string['errorcreatingschema'] = 'Error creating the Solr schema: {$a}';
$string['errorvalidatingschema'] = 'Error validating Solr schema: field {$a->fieldname} does not exist. Please <a href="{$a->setupurl}">follow this link</a> to set up the required fields.';
$string['extensionerror'] = 'The Apache Solr PHP extension is not installed. Please check the documentation.';
$string['fileindexing'] = 'Enable file indexing';
-$string['fileindexing_help'] = 'If your Solr install supports it, this feature allows Moodle to send files to be indexed.';
+$string['fileindexing_help'] = 'If your Solr install supports it, this feature allows Moodle to send files to be indexed.<br/>
+You will need to reindex all site contents after enabling this option for all files to be added.';
$string['fileindexsettings'] = 'File indexing settings';
$string['maxindexfilekb'] = 'Maximum file size to index (kB)';
$string['maxindexfilekb_help'] = 'Files larger than this number of kilobytes will not be included in search indexing. If set to zero, files of any size will be indexed.'; | MDL-<I> Global Search: Enable file indexing message update.
If you have Global Search set up on a site without file
indexing enabled, then at a later date you enable file indexing
files associated with existing objects are not added.
The only way to have files for existing objects indexed is to
run a reindex of all content.
This patches updates the file indexing language string in the
solr search settings of Global Search to make this clear
to the site administrator. | moodle_moodle | train | php |
f72cd2dc18c0377ee94f17d9513b9c40b75b2521 | diff --git a/lib/rfd.rb b/lib/rfd.rb
index <HASH>..<HASH> 100644
--- a/lib/rfd.rb
+++ b/lib/rfd.rb
@@ -303,19 +303,19 @@ module Rfd
end
def cd(dir)
+ wclear
@row = nil
@dir = File.expand_path(dir.is_a?(Rfd::Item) ? dir.path : @dir ? File.join(@dir, dir) : dir)
end
def ls(page = nil)
- wclear
-
unless page
@items = Dir.foreach(@dir).map {|fn| Item.new dir: @dir, name: fn}.to_a
@total_pages = @items.size / maxy + 1
end
@current_page = page ? page : 0
+ FFI::NCurses.wmove @window, 0, 0
@displayed_items = @items[@current_page * maxy, maxy]
@displayed_items.each do |item|
FFI::NCurses.wattr_set @window, FFI::NCurses::A_NORMAL, item.color, nil
@@ -342,7 +342,8 @@ module Rfd
end
def switch_page(page)
- ls (@current_page = page)
+ wclear if page != @current_page
+ ls page
end
def draw_path_and_page_number | No need to wclear when lsing current page again | amatsuda_rfd | train | rb |
88df29c24023ee34628599bb24003e4e73cee56b | diff --git a/src/test/java/com/hp/autonomy/hod/client/api/queryprofile/QueryProfileSuiteITCase.java b/src/test/java/com/hp/autonomy/hod/client/api/queryprofile/QueryProfileSuiteITCase.java
index <HASH>..<HASH> 100644
--- a/src/test/java/com/hp/autonomy/hod/client/api/queryprofile/QueryProfileSuiteITCase.java
+++ b/src/test/java/com/hp/autonomy/hod/client/api/queryprofile/QueryProfileSuiteITCase.java
@@ -5,11 +5,9 @@
package com.hp.autonomy.hod.client.api.queryprofile;
-import org.junit.Ignore;
import org.junit.runner.RunWith;
import org.junit.runners.Suite;
-@Ignore // until we can get our own API key these tests will not be reliable because of interference from live query profiles
@RunWith(Suite.class)
@Suite.SuiteClasses({
CreateDeleteQueryProfileServiceSuiteChild.class, | Removed @Ignore from query profile integration tests so they can be fixed. [rev: none] | microfocus-idol_java-hod-client | train | java |
24514d1457526efea7b6833411d907b3da3ae211 | diff --git a/lib/Sabre/CalDAV/CalendarRootNode.php b/lib/Sabre/CalDAV/CalendarRootNode.php
index <HASH>..<HASH> 100644
--- a/lib/Sabre/CalDAV/CalendarRootNode.php
+++ b/lib/Sabre/CalDAV/CalendarRootNode.php
@@ -1,14 +1,14 @@
<?php
/**
- * Users collection
+ * Calendars collection
*
- * This object is responsible for generating a collection of users.
+ * This object is responsible for generating a collection of calendars.
*
* @package Sabre
* @subpackage CalDAV
* @copyright Copyright (C) 2007-2012 Rooftop Solutions. All rights reserved.
- * @author Evert Pot (http://www.rooftopsolutions.nl/)
+ * @author Evert Pot (http://www.rooftopsolutions.nl/)
* @license http://code.google.com/p/sabredav/wiki/License Modified BSD License
*/
class Sabre_CalDAV_CalendarRootNode extends Sabre_DAVACL_AbstractPrincipalCollection { | fixed a comment (I'm only <I>% sure if I'm correct) | sabre-io_dav | train | php |
aa4f40d7c6f676f3be6ae017d01fa8e02e73de8d | diff --git a/featurex/tests/test_diagnostics.py b/featurex/tests/test_diagnostics.py
index <HASH>..<HASH> 100644
--- a/featurex/tests/test_diagnostics.py
+++ b/featurex/tests/test_diagnostics.py
@@ -23,7 +23,7 @@ def test_correlation_matrix():
assert corr.shape == (df.shape[1], df.shape[1])
assert np.array_equal(np.diagonal(corr), ([1.0] * df.shape[1]))
assert np.isfinite(corr['a']['b'])
- assert corr['a']['b'] == corr['b']['a']
+ assert np.isclose(corr['a']['b'], corr['b']['a'], 1e-05)
def test_eigenvalues():
df = pd.DataFrame(np.random.randn(100, 2), columns=['a', 'b']) | small adjustment to diagnostic test to allow for some error | tyarkoni_pliers | train | py |
9de17e42e18f2d2d464458a52c9559cfc4eeeb00 | diff --git a/perceval/_version.py b/perceval/_version.py
index <HASH>..<HASH> 100644
--- a/perceval/_version.py
+++ b/perceval/_version.py
@@ -1,2 +1,2 @@
# Versions compliant with PEP 440 https://www.python.org/dev/peps/pep-0440
-__version__ = "0.9.13"
+__version__ = "0.9.14" | Update version number to <I> | chaoss_grimoirelab-perceval | train | py |
424b23033f2f9ef4b5907fedcf8f8f36861c5d5c | diff --git a/src/Rcm/Controller/PageSearchApiController.php b/src/Rcm/Controller/PageSearchApiController.php
index <HASH>..<HASH> 100644
--- a/src/Rcm/Controller/PageSearchApiController.php
+++ b/src/Rcm/Controller/PageSearchApiController.php
@@ -2,16 +2,21 @@
namespace Rcm\Controller;
+use Rcm\Plugin\BaseController;
use Zend\Mvc\Controller\AbstractRestfulController;
use Zend\View\Model\JsonModel;
use Zend\Http\Response;
use Rcm\Entity\Site;
use Rcm\Service\PageManager;
+use Rcm\Controller\PageCheckController;
-class PageSearchApiController extends AbstractRestfulController
+class PageSearchApiController extends PageManager
{
function siteTitleSearchAction()
{
+
+
+
$query = $this->getEvent()->getRouteMatch()->getParam('query');
$em = $this->getServiceLocator()->get('Doctrine\ORM\EntityManager');
$sm = $this->getServiceLocator()->get( | got dialog box to pop up for navigation. | reliv_Rcm | train | php |
aec76e6f5b634120a31531bd4988a174ceebfd27 | diff --git a/protos/compile.py b/protos/compile.py
index <HASH>..<HASH> 100755
--- a/protos/compile.py
+++ b/protos/compile.py
@@ -53,6 +53,7 @@ def main():
for fname in args.proto_file:
# Get the Java class name
(name, _) = os.path.splitext(fname)
+ name = os.path.basename(name)
if not name in SERVICES:
sys.stderr.write("Error: Service %s is not known. Known services: %s\n" % (name, SERVICES.keys()))
return RESULT['INVALID_SERVICE'] | Only use the basename for filename comparison | operasoftware_operaprestodriver | train | py |
8a986916da7b160833f06794cf4c52d8d5d6532b | diff --git a/app/code/community/Inchoo/SocialConnect/Controller/Abstract.php b/app/code/community/Inchoo/SocialConnect/Controller/Abstract.php
index <HASH>..<HASH> 100644
--- a/app/code/community/Inchoo/SocialConnect/Controller/Abstract.php
+++ b/app/code/community/Inchoo/SocialConnect/Controller/Abstract.php
@@ -69,7 +69,7 @@ abstract class Inchoo_SocialConnect_Controller_Abstract extends Mage_Core_Contro
if (!$session->getBeforeAuthUrl() || $session->getBeforeAuthUrl() == Mage::getBaseUrl()) {
// Set default URL to redirect customer to
- $session->setBeforeAuthUrl($this->_getCustomerSession()->getSocialConnectRedirect());
+ $session->setBeforeAuthUrl($session->getSocialConnectRedirect());
// Redirect customer to the last page visited after logging in
if ($session->isLoggedIn()) {
if (!Mage::getStoreConfigFlag( | Avoid getting customer session twice. This fixes #<I>. | Marko-M_Inchoo_SocialConnect | train | php |
a5c5d8d36c546b124a21fd8d7ca48f50b5459020 | diff --git a/src/View/Php.php b/src/View/Php.php
index <HASH>..<HASH> 100644
--- a/src/View/Php.php
+++ b/src/View/Php.php
@@ -67,6 +67,11 @@ class Php implements EngineInterface {
$file = locate_template( $view, false );
if ( ! $file ) {
+ // locate_template failed to find the view - try adding a .php extension
+ $file = locate_template( $view . '.php', false );
+ }
+
+ if ( ! $file ) {
// locate_template failed to find the view - test if a valid absolute path was passed
$file = $this->resolveFileFromFilesystem( $view );
} | fix support for views without mentioning the .php suffix | htmlburger_wpemerge | train | php |
cc6060b9dc006ed115f939390e590dd2c59dd1ca | diff --git a/lib/helpers.js b/lib/helpers.js
index <HASH>..<HASH> 100644
--- a/lib/helpers.js
+++ b/lib/helpers.js
@@ -4,6 +4,7 @@
Object.defineProperty(exports, "__esModule", {
value: true
});
+exports.FindCache = undefined;
exports.exec = exec;
exports.execNode = execNode;
exports.rangeFromLineNumber = rangeFromLineNumber;
@@ -33,7 +34,7 @@ function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { de
let XRegExp = null;
const EventsCache = new WeakMap();
-const FindCache = new Map();
+const FindCache = exports.FindCache = new Map();
// TODO: Remove this when electron upgrades node
const assign = Object.assign || function (target, source) { | :arrow_up: Upgrade dist files | steelbrain_atom-linter | train | js |
6799e960846e2c8fd07cf57d20807e76556bc167 | diff --git a/execution/errors/vm.go b/execution/errors/vm.go
index <HASH>..<HASH> 100644
--- a/execution/errors/vm.go
+++ b/execution/errors/vm.go
@@ -1,7 +1,6 @@
package errors
import (
- "bytes"
"fmt"
"github.com/hyperledger/burrow/crypto"
@@ -41,15 +40,5 @@ type CallError struct {
}
func (err CallError) Error() string {
- buf := new(bytes.Buffer)
- buf.WriteString("Call error: ")
- buf.WriteString(err.CodedError.Error())
- if len(err.NestedErrors) > 0 {
- buf.WriteString(", nested call errors:\n")
- for _, nestedErr := range err.NestedErrors {
- buf.WriteString(nestedErr.Error())
- buf.WriteByte('\n')
- }
- }
- return buf.String()
+ return fmt.Sprintf("Call error: %v (and %d nested sub-call errors)", err.CodedError, len(err.NestedErrors))
} | Reduce verbosity of CallError | hyperledger_burrow | train | go |
5ecd2719418e15b942dfad87c62dadd58552f256 | diff --git a/spec/agent_spec.rb b/spec/agent_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/agent_spec.rb
+++ b/spec/agent_spec.rb
@@ -368,7 +368,22 @@ describe Instrumental::Agent, "connection problems" do
wait
@agent.increment('reconnect_test', 1, 1234)
wait
- @agent.queue.pop(true).should == "increment reconnect_test 1 1234\n"
+ @agent.queue.pop(true).should include("increment reconnect_test 1 1234\n")
+ end
+
+ it "should not wait longer than EXIT_FLUSH_TIMEOUT seconds to exit a process" do
+ @server = TestServer.new
+ @agent = Instrumental::Agent.new('test_token', :collector => @server.host_and_port, :synchronous => false)
+ TCPSocket.stub!(:new) { |*args| sleep(5) && StringIO.new }
+ with_constants('Instrumental::Agent::EXIT_FLUSH_TIMEOUT' => 3) do
+ if (pid = fork { @agent.increment('foo', 1) })
+ tm = Time.now.to_f
+ Process.wait(pid)
+ diff = Time.now.to_f - tm
+ diff.should >= 3
+ diff.should < 5
+ end
+ end
end
end | add test for exit flush timeout guard | Instrumental_instrumental_agent-ruby | train | rb |
bbb0638afa428d239912311f9f514dc0917e0e37 | diff --git a/gwpy/plotter/frequencyseries.py b/gwpy/plotter/frequencyseries.py
index <HASH>..<HASH> 100644
--- a/gwpy/plotter/frequencyseries.py
+++ b/gwpy/plotter/frequencyseries.py
@@ -247,7 +247,11 @@ class FrequencySeriesAxes(Axes):
# fill in zeros
if isinstance(mesh.norm, colors.LogNorm):
cmap = mesh.get_cmap()
- cmap.set_bad(cmap.colors[0])
+ try:
+ # only listed colormaps have cmap.colors
+ cmap.set_bad(cmap.colors[0])
+ except AttributeError:
+ pass
return mesh | FrequencySeriesAxes.plot_variance: protect against attributeerror | gwpy_gwpy | train | py |
07fa52639086756b6b4e5bbf6026dca930a73025 | diff --git a/fastentity.go b/fastentity.go
index <HASH>..<HASH> 100644
--- a/fastentity.go
+++ b/fastentity.go
@@ -185,17 +185,6 @@ func find(rs []rune, groups []*group) map[string][][]rune {
return results
}
-type incr struct {
- sync.Mutex
- n int
-}
-
-func (i *incr) incr() {
- i.Lock()
- i.n++
- i.Unlock()
-}
-
var entityFileSuffix = ".entities.csv"
// FromDir creates a new Store by loading entity files from a given directory path. Any files
@@ -210,7 +199,11 @@ func FromDir(dir string) (*Store, error) {
s := New()
var wg sync.WaitGroup
- count := &incr{}
+ count := struct {
+ sync.Mutex
+ n int
+ }{}
+
errCh := make(chan error, len(files))
for _, stat := range files {
if strings.HasSuffix(stat.Name(), entityFileSuffix) {
@@ -229,7 +222,9 @@ func FromDir(dir string) (*Store, error) {
errCh <- fmt.Errorf("error reading from %v: %v\n", path, err)
return
}
- count.incr()
+ count.Lock()
+ count.n++
+ count.Unlock()
}(fmt.Sprintf("%s/%s", dir, stat.Name()), strings.TrimSuffix(stat.Name(), entityFileSuffix))
}
} | Remove incr type, use an anonymous struct instead | sajari_fastentity | train | go |
a0ca044cf411666197eb2aa7c19d8a579a3e8944 | diff --git a/lib/utils.py b/lib/utils.py
index <HASH>..<HASH> 100644
--- a/lib/utils.py
+++ b/lib/utils.py
@@ -120,17 +120,10 @@ def get_docker_ip_for_environment(node_details, environment_id):
def update_history(cloud_hero):
+ """
+ Send each command to the /history endpoint.
+ """
user_command = ' '.join(sys.argv)
timestamp = int(time.time())
- content = read_from_file(CLOUD_HERO_HISTORY, is_json=True)
-
- command_history = (user_command, timestamp)
- if not content:
- content = [command_history]
- else:
- content.append(command_history)
- if len(content) > 10:
- cloud_hero.send_history(content)
- content = None
-
- write_to_file(content, CLOUD_HERO_HISTORY, is_json=True)
+ command = (user_command, timestamp)
+ cloud_hero.send_history([command]) | Send one command at a time. | cloud-hero_hero-cli | train | py |
3aa6eb1c709ab6ddbf09b44b891014b780b82f07 | diff --git a/gulpfile.js b/gulpfile.js
index <HASH>..<HASH> 100644
--- a/gulpfile.js
+++ b/gulpfile.js
@@ -19,7 +19,6 @@ gulp.task('pressure', function() {
'./src/adapters/adapter.js',
'./src/adapters/adapter_force_touch.js',
'./src/adapters/adapter_3d_touch.js',
- './src/adapters/adapter_force_touch.js',
'./src/config.js',
'./src/helpers.js',
])
@@ -52,7 +51,6 @@ gulp.task('jquery-pressure', function() {
'./src/adapters/adapter.js',
'./src/adapters/adapter_force_touch.js',
'./src/adapters/adapter_3d_touch.js',
- './src/adapters/adapter_force_touch.js',
'./src/config.js',
'./src/helpers.js',
]) | Removed double loading force touch adapter | stuyam_pressure | train | js |
23af7ea302a654ec21114fef8ccc4eb0eb09b355 | diff --git a/src/Versioned.php b/src/Versioned.php
index <HASH>..<HASH> 100644
--- a/src/Versioned.php
+++ b/src/Versioned.php
@@ -2457,7 +2457,7 @@ SQL
}
$liveVersionNumber = static::get_versionnumber_by_stage($this->owner, Versioned::LIVE, $id);
- return $liveVersionNumber === $this->owner->Version;
+ return (int) $liveVersionNumber === (int) $this->owner->Version;
}
/**
@@ -2473,7 +2473,7 @@ SQL
}
$draftVersionNumber = static::get_versionnumber_by_stage($this->owner, Versioned::DRAFT, $id);
- return $draftVersionNumber === $this->owner->Version;
+ return (int) $draftVersionNumber === (int) $this->owner->Version;
}
/** | FIX Ensure consistent strict equality checks in version numbers | silverstripe_silverstripe-versioned | train | php |
692a94a52feeb90573caa563eb852eb2828618ce | diff --git a/java/run_all.py b/java/run_all.py
index <HASH>..<HASH> 100644
--- a/java/run_all.py
+++ b/java/run_all.py
@@ -66,7 +66,7 @@ except:
cleanup()
sys.exit('Failed to perform mvn')
-print os.path.realpath(.)
+print os.path.realpath('.')
# compile and run each example
failures = []
for f in listdir(os.path.realpath('src/main/java/com/basistech/rosette/examples')): | Updated java python script | rosette-api_java | train | py |
1817db31801e3edafbb5ac5c130114f72bc5da1f | diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -52,6 +52,7 @@ setuptools.setup(
"warehouse.accounts",
"warehouse.cache",
"warehouse.cache.origin",
+ "warehouse.classifiers",
"warehouse.cli",
"warehouse.cli.db",
"warehouse.i18n", | Ensure warehouse.classifiers gets installed. | pypa_warehouse | train | py |
67758a49bb8217d790e19979cfeed1a4dac68aa2 | diff --git a/cli/sawtooth_cli/identity.py b/cli/sawtooth_cli/identity.py
index <HASH>..<HASH> 100644
--- a/cli/sawtooth_cli/identity.py
+++ b/cli/sawtooth_cli/identity.py
@@ -332,7 +332,7 @@ def _do_identity_policy_list(args):
output = [policy.name]
for entry in policy.entries:
output.append(
- Policy.Type.Name(entry.type) + " " + entry.key)
+ Policy.EntryType.Name(entry.type) + " " + entry.key)
writer.writerow(output)
except csv.Error:
raise CliException('Error writing CSV')
@@ -341,7 +341,7 @@ def _do_identity_policy_list(args):
for policy in printable_policies:
value = "Entries: "
for entry in policy.entries:
- entry_string = Policy.Type.Name(entry.type) + " " \
+ entry_string = Policy.EntryType.Name(entry.type) + " " \
+ entry.key
value += entry_string + " "
output[policy.name] = value | Update Policy.Type to EntryType in identity cli
This change was left out during <I> release changes in identity.proto file.
This fixes identity cli app crashing for cmd options addressed in this commit. | hyperledger_sawtooth-core | train | py |
2cc7e88e20bff7ba6a95f3c804a3963ebe826a9d | diff --git a/src/main/java/com/couchbase/lite/internal/AttachmentInternal.java b/src/main/java/com/couchbase/lite/internal/AttachmentInternal.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/couchbase/lite/internal/AttachmentInternal.java
+++ b/src/main/java/com/couchbase/lite/internal/AttachmentInternal.java
@@ -101,7 +101,7 @@ public class AttachmentInternal {
((Boolean) attachInfo.get("stub")).booleanValue()) {
// This item is just a stub; validate and skip it
if (attachInfo.containsKey("revpos")) {
- int revPos = ((Integer) attachInfo.get("revpos")).intValue();
+ int revPos = ((Number) attachInfo.get("revpos")).intValue();
if (revPos <= 0) {
throw new CouchbaseLiteException(Status.BAD_ATTACHMENT);
} | Fix class cast exception when revpos is a Double
It seems that _revpos_ can sometimes be a Double and this causes an exception.
If we change the cast to a Number, it all works out well because `Number` also has the `intValue()` method.
We've been using a build with this fix in production in the last couple of months without any issues so I'm pretty sure it's safe. The only thing I'm unsure is why this exception happens in the first place. | couchbase_couchbase-lite-java-core | train | java |
fd03e8d2daf3a7d3587dae25d12f6fbabb71a832 | diff --git a/src/Scaffolding/Scaffolders/CRUD/Read.php b/src/Scaffolding/Scaffolders/CRUD/Read.php
index <HASH>..<HASH> 100644
--- a/src/Scaffolding/Scaffolders/CRUD/Read.php
+++ b/src/Scaffolding/Scaffolders/CRUD/Read.php
@@ -29,11 +29,6 @@ class Read extends ListQueryScaffolder implements OperationResolver, CRUDInterfa
const EXCLUDE = 'Exclude';
/**
- * @var DataObjectQueryFilter
- */
- protected $queryFilter;
-
- /**
* Read constructor.
*
* @param string $dataObjectClass | CRUD/Read to beware of QueryFilterAware | silverstripe_silverstripe-graphql | train | php |
7278de5cc55d10762cf492f9baf00a3c4cee8c19 | diff --git a/glances/plugins/glances_processlist.py b/glances/plugins/glances_processlist.py
index <HASH>..<HASH> 100644
--- a/glances/plugins/glances_processlist.py
+++ b/glances/plugins/glances_processlist.py
@@ -20,7 +20,6 @@
"""Process list plugin."""
import os
-import shlex
from datetime import timedelta
from glances.compat import iteritems
@@ -44,9 +43,8 @@ def convert_timedelta(delta):
def split_cmdline(cmdline):
"""Return path, cmd and arguments for a process cmdline."""
- cmdline_split = shlex.split(cmdline[0])
- path, cmd = os.path.split(cmdline_split[0])
- arguments = ' '.join(cmdline_split[1:])
+ path, cmd = os.path.split(cmdline[0])
+ arguments = ' '.join(cmdline[1:])
return path, cmd, arguments
@@ -325,7 +323,6 @@ class Plugin(GlancesPlugin):
# XXX: remove `cmdline != ['']` when we'll drop support for psutil<4.0.0
if cmdline and cmdline != ['']:
# !!! DEBUG
- logger.info(p['name'])
logger.info(cmdline)
logger.info(split_cmdline(cmdline))
# /!!! | Electron processes displayed wrong in process list (second round) #<I> | nicolargo_glances | train | py |
cbed2fbf8bb6a43f278629bc6017e5cbfc77686f | diff --git a/components/rake-support/share/rails-template.rb b/components/rake-support/share/rails-template.rb
index <HASH>..<HASH> 100644
--- a/components/rake-support/share/rails-template.rb
+++ b/components/rake-support/share/rails-template.rb
@@ -7,6 +7,7 @@ else
File.open('Gemfile', 'w') {|f| f << text.gsub(/^(gem 'sqlite3)/, '# \1') }
gem "activerecord-jdbc-adapter", "0.9.7", :require => "jdbc_adapter"
gem "jdbc-sqlite3"
+ gem "jruby-openssl"
gem "org.torquebox.rake-support", :require => 'torquebox-rails'
end | Include jruby-openssl in a typical apps list of deps. | torquebox_torquebox | train | rb |
a593ba8076ab634c31bfc72447958b7a9576007f | diff --git a/spyderlib/widgets/editor.py b/spyderlib/widgets/editor.py
index <HASH>..<HASH> 100644
--- a/spyderlib/widgets/editor.py
+++ b/spyderlib/widgets/editor.py
@@ -199,7 +199,6 @@ class FileInfo(QObject):
def cleanup_todo_results(self):
self.todo_results = []
- self.editor.cleanup_todo_list()
class EditorStack(QWidget): | Editor/disabling todo list markers/bugfix: removed missing method call | spyder-ide_spyder | train | py |
a85cb35b824ed5c109c7b385a2a8f167d1ced800 | diff --git a/spec/base_spec.rb b/spec/base_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/base_spec.rb
+++ b/spec/base_spec.rb
@@ -17,8 +17,8 @@ describe "Twitter::Base" do
end
it "should default to friends timeline" do
- @base.should_receive(:call).with("friends_timeline", :anything)
- @base.should_receive(:statuses).with(:anything)
+ @base.should_receive(:call).with("friends_timeline", {:auth=>true, :args=>{}, :since=>nil})
+ @base.should_receive(:statuses)
@base.timeline
end
diff --git a/spec/cli/helper_spec.rb b/spec/cli/helper_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/cli/helper_spec.rb
+++ b/spec/cli/helper_spec.rb
@@ -37,13 +37,13 @@ describe Twitter::CLI::Helpers do
specify "should properly format" do
stdout_for {
output_tweets(@collection)
- }.should match /with a few words[\w\W]*with a\./
+ }.should match(/with a few words[\w\W]*with a\./)
end
specify 'should format in reverse' do
stdout_for {
output_tweets(@collection, :reverse => true)
- }.should match /with a\.[\w\W]*with a few words/
+ }.should match(/with a\.[\w\W]*with a few words/)
end
end
end
\ No newline at end of file | Fixed some failing/warning specs. | sferik_twitter | train | rb,rb |
d225cd747d80a35f6656ad7a8d69a2e74df4452f | diff --git a/build/controllers/AppController.php b/build/controllers/AppController.php
index <HASH>..<HASH> 100644
--- a/build/controllers/AppController.php
+++ b/build/controllers/AppController.php
@@ -23,6 +23,20 @@ class AppController extends Controller
public $defaultAction = 'link';
/**
+ * Properly removes symlinked directory under Windows, MacOS and Linux
+ *
+ * @param string $file path to symlink
+ */
+ protected function unlink($file)
+ {
+ if (is_dir($file)) {
+ rmdir($file);
+ } else {
+ unlink($file);
+ }
+ }
+
+ /**
* This command runs the following shell commands in the dev repo root:
*
* - Run `composer update`
@@ -41,13 +55,13 @@ class AppController extends Controller
// cleanup
if (is_link($link = "$appDir/vendor/yiisoft/yii2")) {
$this->stdout("Removing symlink $link.\n");
- unlink($link);
+ $this->unlink($link);
}
$extensions = $this->findDirs("$appDir/vendor/yiisoft");
foreach($extensions as $ext) {
if (is_link($link = "$appDir/vendor/yiisoft/yii2-$ext")) {
$this->stdout("Removing symlink $link.\n");
- unlink($link);
+ $this->unlink($link);
}
} | Fixed build app/link to work on Windows | yiisoft_yii-core | train | php |
ec5071199c2a2af398d9f902618e73ddd35a257f | diff --git a/test_constructible.py b/test_constructible.py
index <HASH>..<HASH> 100644
--- a/test_constructible.py
+++ b/test_constructible.py
@@ -31,6 +31,11 @@ class TestCase(unittest.TestCase):
return subTest()
+ if not hasattr(unittest.TestCase, 'assertIsInstance'):
+ def assertIsInstance(self, obj, cls, msg=None):
+ self.assertTrue(isinstance(obj, cls), msg=msg)
+
+
class TestHelperFunctions(TestCase):
def test_isqrt(self):
''' test the isqrt function ''' | added assertIsInstance for testing on Python <I> | leovt_constructible | train | py |
03f969c503ea56d0192ad772c540bf805e28e9e9 | diff --git a/lib/generators/rspec/templates/decorator_spec.rb b/lib/generators/rspec/templates/decorator_spec.rb
index <HASH>..<HASH> 100644
--- a/lib/generators/rspec/templates/decorator_spec.rb
+++ b/lib/generators/rspec/templates/decorator_spec.rb
@@ -1,4 +1,4 @@
-require 'spec_helper'
+require 'rails_helper'
describe <%= class_name %>Decorator do
end | Updated template to use rails_helper instead of spec_helper so it works with Rails 4 | drapergem_draper | train | rb |
dbf9e1da65d8aaaf22f43e99a1d9fd0e8eabf238 | diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -6,7 +6,12 @@ function BitField(data){
if(!(this instanceof BitField)) {
return new BitField(data);
}
- if(typeof data === "number" || data === undefined){
+
+ if(arguments.length === 0){
+ data = 0;
+ }
+
+ if(typeof data === "number"){
if(data % 8 !== 0) data += 1 << 3;
data = new Container(data >> 3);
if(data.fill) data.fill(0); // clear node buffers of garbage | removed implicit cast of `undefined` to float | fb55_bitfield | train | js |
9bfe672292d43256d0784a7f55049bb2b2d3f39f | diff --git a/src/Illuminate/Events/Dispatcher.php b/src/Illuminate/Events/Dispatcher.php
index <HASH>..<HASH> 100755
--- a/src/Illuminate/Events/Dispatcher.php
+++ b/src/Illuminate/Events/Dispatcher.php
@@ -182,7 +182,13 @@ class Dispatcher implements DispatcherContract
if (is_array($events)) {
foreach ($events as $event => $listeners) {
- foreach ($listeners as $listener) {
+ foreach (Arr::wrap($listeners) as $listener) {
+ if (is_string($listener) && method_exists($subscriber, $listener)) {
+ $this->listen($event, [get_class($subscriber), $listener]);
+
+ continue;
+ }
+
$this->listen($event, $listener);
}
} | support shorter subscription syntax (#<I>) | laravel_framework | train | php |
806d7fd435c61dcd98fa65a57c3ad63c53043f0c | diff --git a/lib/parse/error.rb b/lib/parse/error.rb
index <HASH>..<HASH> 100644
--- a/lib/parse/error.rb
+++ b/lib/parse/error.rb
@@ -16,8 +16,10 @@ module Parse
def initialize(response)
@response = response
- @code = response["code"]
- @error = response["error"]
+ if response
+ @code = response["code"]
+ @error = response["error"]
+ end
super("#{@code}: #{@error}")
end | fix error handling when parsing response occasionally yields nil | adelevie_parse-ruby-client | train | rb |
882a779e221546c73036e2c77cc7a6370b9ecc6c | diff --git a/test/flash_test.py b/test/flash_test.py
index <HASH>..<HASH> 100644
--- a/test/flash_test.py
+++ b/test/flash_test.py
@@ -131,12 +131,17 @@ def flash_test(board_id):
memory_map = board.target.getMemoryMap()
ram_regions = [region for region in memory_map if region.type == 'ram']
ram_region = ram_regions[0]
- rom_region = memory_map.getBootMemory()
ram_start = ram_region.start
ram_size = ram_region.length
+
+ # Grab boot flash and any regions coming immediately after
+ rom_region = memory_map.getBootMemory()
rom_start = rom_region.start
rom_size = rom_region.length
+ for region in memory_map:
+ if region.isFlash and (region.start == rom_start + rom_size):
+ rom_size += region.length
target = board.target
link = board.link | Extend flash test to cover consecutive regions
When setting the rom start and size for the flash test include
flash regions coming immediately after the boot region. This allows
the flash algorithm to be more thoroughly tested. | mbedmicro_pyOCD | train | py |
cc75b1fef8a12ca3605c9d84c71812ed9396aafa | diff --git a/gatling-redis-java/src/main/java/io/gatling/javaapi/redis/RedisClientPool.java b/gatling-redis-java/src/main/java/io/gatling/javaapi/redis/RedisClientPool.java
index <HASH>..<HASH> 100644
--- a/gatling-redis-java/src/main/java/io/gatling/javaapi/redis/RedisClientPool.java
+++ b/gatling-redis-java/src/main/java/io/gatling/javaapi/redis/RedisClientPool.java
@@ -35,7 +35,7 @@ public class RedisClientPool {
private com.redis.RedisClientPool scalaInstance;
private synchronized void loadScalaInstance() {
- if (scalaInstance != null) {
+ if (scalaInstance == null) {
scalaInstance =
new com.redis.RedisClientPool(
host, | Fix Java RedisClientPool initialization, close #<I>
Motivation:
Java RedisClientPool is not properly initialized, causing a NPE.
Modification:
Fix lazy loading boolean logic. | gatling_gatling | train | java |
57ae2dbfb862bde31b79fbaef8e26073c1102669 | diff --git a/shutit_pexpect.py b/shutit_pexpect.py
index <HASH>..<HASH> 100644
--- a/shutit_pexpect.py
+++ b/shutit_pexpect.py
@@ -2382,6 +2382,8 @@ $'"""
shutit.log('File is larger than ~100K - this may take some time',level=logging.WARNING)
self.send(' ' + shutit_util.get_command('head') + ' -c -1 > ' + path + "." + random_id + " << 'END_" + random_id + """'\n""" + b64contents + '''\nEND_''' + random_id, echo=echo,loglevel=loglevel, timeout=99999)
self.send(' command cat ' + path + '.' + random_id + ' | base64 --decode > ' + path, echo=echo,loglevel=loglevel)
+ # Remove the file
+ self.send(' command rm -f ' + path + '.' + random_id,loglevel=loglevel)
else:
host_child = shutit.get_shutit_pexpect_session_from_id('host_child').pexpect_child
path = path.replace(' ', '\ ') | remove temp file when running send_file | ianmiell_shutit | train | py |
d2af2a1d4e8187989cdef0c95b5b238f48e976cb | diff --git a/bonecp/src/main/java/com/jolbox/bonecp/ConnectionPartition.java b/bonecp/src/main/java/com/jolbox/bonecp/ConnectionPartition.java
index <HASH>..<HASH> 100644
--- a/bonecp/src/main/java/com/jolbox/bonecp/ConnectionPartition.java
+++ b/bonecp/src/main/java/com/jolbox/bonecp/ConnectionPartition.java
@@ -161,7 +161,8 @@ public class ConnectionPartition implements Serializable{
try {
pool.getFinalizableRefs().remove(internalDBConnection);
if (internalDBConnection != null && !internalDBConnection.isClosed()){ // safety!
- logger.warn("BoneCP detected an unclosed connection and will now attempt to close it for you. " +
+ String poolName = pool.getConfig().getPoolName() != null ? "(in pool '"+pool.getConfig().getPoolName()+"') " : "";
+ logger.warn("BoneCP detected an unclosed connection "+poolName + "and will now attempt to close it for you. " +
"You should be closing this connection in your application - enable connectionWatch for additional debugging assistance.");
// if (!(internalDBConnection instanceof Proxy)){ // this is just a safety against finding EasyMock proxies at this point.
internalDBConnection.close(); | Add pool name to unclosed connection warning. | wwadge_bonecp | train | java |
732421dc9e1f54532356835f34d6bdddf8e0b973 | diff --git a/turbo/model.py b/turbo/model.py
index <HASH>..<HASH> 100644
--- a/turbo/model.py
+++ b/turbo/model.py
@@ -14,16 +14,7 @@ from turbo.log import model_log
from turbo.util import escape as _es
-class Record(dict):
- """a dict object to replace default mongodb record
-
- """
- def __init__(self, record, *args, **kwargs):
- super(Record, self).__init__(*args, **kwargs)
- self.update(record)
-
- def __getitem__(self, key, default=None):
- return super(Record, self).get(key, default)
+_record = defaultdict(lambda: None)
def convert_to_record(func):
@@ -36,9 +27,9 @@ def convert_to_record(func):
result = func(self, *args, **kwargs)
if result is not None:
if isinstance(result, dict):
- return Record(result)
+ return _record(result)
- return (Record(i) for i in result)
+ return (_record(i) for i in result)
return result
@@ -141,7 +132,6 @@ class MixinModel(object):
return import_object(ins_name, package_space)
@staticmethod
- @property
def default_record():
"""
generate one default record which return '' when key is empty | repleace model Record with defaultdict | wecatch_app-turbo | train | py |
35eec99d41777282785666b4190c97518c9bf458 | diff --git a/testing/test_capture.py b/testing/test_capture.py
index <HASH>..<HASH> 100644
--- a/testing/test_capture.py
+++ b/testing/test_capture.py
@@ -744,6 +744,7 @@ class TestFDCapture:
cap.done()
pytest.raises(AttributeError, cap.suspend)
+
@contextlib.contextmanager
def saved_fd(fd):
new_fd = os.dup(fd)
@@ -751,6 +752,7 @@ def saved_fd(fd):
yield
finally:
os.dup2(new_fd, fd)
+ os.close(new_fd)
class TestStdCapture: | stop leaking file descriptors
tripps --lsof on os x but not on linux. there's possibly a bug in the leak
detector (not investigated here)
--HG--
branch : stop_leaking_fds | vmalloc_dessert | train | py |
dce6ffa070f1ee764785e78b9e3754dea62108a5 | diff --git a/cluster/container_test.go b/cluster/container_test.go
index <HASH>..<HASH> 100644
--- a/cluster/container_test.go
+++ b/cluster/container_test.go
@@ -886,6 +886,7 @@ func TestAttachToContainer(t *testing.T) {
defer server1.Close()
server2 := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
called = true
+ w.Write([]byte{1, 0, 0, 0, 0, 0, 0, 18})
w.Write([]byte("something happened"))
}))
defer server2.Close()
@@ -901,7 +902,6 @@ func TestAttachToContainer(t *testing.T) {
OutputStream: &safe.Buffer{},
Logs: true,
Stdout: true,
- RawTerminal: true,
}
err = cluster.AttachToContainer(opts)
if err != nil { | cluster: adapt to api changes in go-dockerclient | tsuru_docker-cluster | train | go |
88afa949be3f7a3cf31272f50a0f005de845a504 | diff --git a/spec/functional/resource/user_spec.rb b/spec/functional/resource/user_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/functional/resource/user_spec.rb
+++ b/spec/functional/resource/user_spec.rb
@@ -54,6 +54,7 @@ describe Chef::Resource::User, metadata do
end
before do
+ pending "porting implementation for user provider in aix" if OHAI_SYSTEM[:platform] == 'aix'
# Silence shell_out live stream
Chef::Log.level = :warn
end | Mentioned the failing functional user resource tests as pending only for AIX. | chef_chef | train | rb |
28e5ace84a16965d6c5ed72c18057cf2143e225c | diff --git a/openpnm/models/misc/misc.py b/openpnm/models/misc/misc.py
index <HASH>..<HASH> 100644
--- a/openpnm/models/misc/misc.py
+++ b/openpnm/models/misc/misc.py
@@ -2,9 +2,9 @@ import numpy as np
import scipy.stats as spts
-def numpy_func(target, prop, func, **kwargs):
+def generic_function(target, prop, func, **kwargs):
r"""
- Runs an arbitrary Numpy function on the given data
+ Runs an arbitrary function on the given data
This allows users to place a customized Numpy calculation into the
automatated model regeneration pipeline.
@@ -284,10 +284,12 @@ def normal(target, seeds, scale, loc):
return value
-def generic(target, seeds, func):
+def generic_distribution(target, seeds, func):
r"""
Accepts an 'rv_frozen' object from the Scipy.stats submodule and returns
- values from the distribution for the given seeds using the ``ppf`` method.
+ values from the distribution for the given seeds
+
+ This uses the ``ppf`` method of the stats object
Parameters
---------- | rectifying generic_function with generic_distribution | PMEAL_OpenPNM | train | py |
1a7e7a93b261c32284f75719745b3b78f2eae766 | diff --git a/src/scaffolder-test.js b/src/scaffolder-test.js
index <HASH>..<HASH> 100644
--- a/src/scaffolder-test.js
+++ b/src/scaffolder-test.js
@@ -264,7 +264,7 @@ suite('javascript project scaffolder', () => {
async () => {
const {verificationCommand} = await scaffold(options);
- assert.equal(verificationCommand, 'npm test');
+ assert.equal(verificationCommand, 'npm run generate:* && npm test');
}
);
});
diff --git a/src/scaffolder.js b/src/scaffolder.js
index <HASH>..<HASH> 100644
--- a/src/scaffolder.js
+++ b/src/scaffolder.js
@@ -114,7 +114,7 @@ export async function scaffold(options) {
badges: buildBadgesDetails(contributors),
documentation: scaffoldDocumentation({projectTypeResults}),
vcsIgnore: buildVcsIgnoreLists(contributors),
- verificationCommand: 'npm test',
+ verificationCommand: 'npm run generate:* && npm test',
projectDetails: {...projectHomepage && {homepage: projectHomepage}},
nextSteps: contributors
.reduce((acc, contributor) => (contributor.nextSteps ? [...acc, ...contributor.nextSteps] : acc), []) | feat(generate): called generate scripts btwn scaffolding & verification | travi_javascript-scaffolder | train | js,js |
1f66f981449ce4d5676d5dfdf195da0d3b9b1470 | diff --git a/src/server/pfs/server/obj_block_api_server.go b/src/server/pfs/server/obj_block_api_server.go
index <HASH>..<HASH> 100644
--- a/src/server/pfs/server/obj_block_api_server.go
+++ b/src/server/pfs/server/obj_block_api_server.go
@@ -349,8 +349,8 @@ func (s *objBlockAPIServer) PutObjects(server pfsclient.ObjectAPI_PutObjectsServ
}
func (s *objBlockAPIServer) CreateObject(ctx context.Context, request *pfsclient.CreateObjectRequest) (response *types.Empty, retErr error) {
- func() { s.Log(nil, nil, nil, 0) }()
- defer func(start time.Time) { s.Log(nil, nil, retErr, time.Since(start)) }(time.Now())
+ func() { s.Log(request, nil, nil, 0) }()
+ defer func(start time.Time) { s.Log(request, nil, retErr, time.Since(start)) }(time.Now())
if err := s.writeProto(ctx, s.objectPath(request.Object), request.BlockRef); err != nil {
return nil, err
} | Include request in CreateObject logs. | pachyderm_pachyderm | train | go |
bdf1acf3722c38de70b3004bdf2024cfa65cde40 | diff --git a/liquibase-core/src/main/java/liquibase/changelog/DatabaseChangeLog.java b/liquibase-core/src/main/java/liquibase/changelog/DatabaseChangeLog.java
index <HASH>..<HASH> 100644
--- a/liquibase-core/src/main/java/liquibase/changelog/DatabaseChangeLog.java
+++ b/liquibase-core/src/main/java/liquibase/changelog/DatabaseChangeLog.java
@@ -25,6 +25,7 @@ import liquibase.precondition.Conditional;
import liquibase.precondition.core.PreconditionContainer;
import liquibase.resource.ResourceAccessor;
import liquibase.util.StreamUtil;
+import liquibase.util.StringUtils;
import liquibase.util.file.FilenameUtils;
import java.io.File;
@@ -484,7 +485,9 @@ public class DatabaseChangeLog implements Comparable<DatabaseChangeLog>, Conditi
}
}
} catch (UnknownChangelogFormatException e) {
- LogFactory.getInstance().getLog().warning("included file " + relativeBaseFileName + "/" + fileName + " is not a recognized file type");
+ if (StringUtils.trimToEmpty(fileName).matches("\\.\\w+$")) {
+ LogFactory.getInstance().getLog().warning("included file " + relativeBaseFileName + "/" + fileName + " is not a recognized file type");
+ }
return false;
}
PreconditionContainer preconditions = changeLog.getPreconditions(); | CORE-<I> Spurious warning with includeAll: file is not a recognized file type
Only log warning if the file looks like a file name. | liquibase_liquibase | train | java |
c4ee324110c4c40df17e2e814b43f2acdf5245df | diff --git a/src/MenuBar/index.js b/src/MenuBar/index.js
index <HASH>..<HASH> 100644
--- a/src/MenuBar/index.js
+++ b/src/MenuBar/index.js
@@ -39,6 +39,14 @@ export default class MenuBar extends React.Component {
const dataKeys = pickBy(topLevelItem, function(value, key) {
return startsWith(key, "data-");
});
+
+ // Support enhancers for top level items too
+ topLevelItem = enhancers.reduce((v, f) => f(v), topLevelItem);
+
+ if (topLevelItem.hidden) {
+ return null;
+ }
+
const button = (
<Button
{...dataKeys} //spread all data-* attributes
@@ -46,6 +54,7 @@ export default class MenuBar extends React.Component {
minimal
className="tg-menu-bar-item"
onClick={topLevelItem.onClick}
+ disabled={topLevelItem.disabled}
onMouseOver={
topLevelItem.submenu ? this.handleMouseOver(i) : noop
} | Support enhancers in top-level MenuBar items | TeselaGen_teselagen-react-components | train | js |
f0f47c94fe8cefe1eb48a8aef5df9a04e4da4622 | diff --git a/src/photini/__init__.py b/src/photini/__init__.py
index <HASH>..<HASH> 100644
--- a/src/photini/__init__.py
+++ b/src/photini/__init__.py
@@ -1,4 +1,4 @@
from __future__ import unicode_literals
__version__ = '2017.9.0'
-build = '913 (b390f24)'
+build = '914 (6fe00b9)'
diff --git a/src/photini/metadata.py b/src/photini/metadata.py
index <HASH>..<HASH> 100644
--- a/src/photini/metadata.py
+++ b/src/photini/metadata.py
@@ -1343,6 +1343,12 @@ class Metadata(object):
self.software = 'Photini editor v' + __version__
self.character_set = 'utf_8'
save_iptc = force_iptc or (self._if and self._if.has_iptc())
+ if self._sc:
+ # workaround for bug in exiv2 xmp timestamp altering
+ for name in ('date_digitised', 'date_modified', 'date_taken'):
+ for family, tag in self._primary_tags[name]:
+ self._sc.clear_value(tag)
+ self._sc.save(file_times)
for name in self._primary_tags:
value = getattr(self, name)
# write data to primary tags | Workaround for exiv2 bug in updating timestamps | jim-easterbrook_Photini | train | py,py |
b1d713106711405eacc6b251c56daee80a237dd4 | diff --git a/test/test_runner.rb b/test/test_runner.rb
index <HASH>..<HASH> 100644
--- a/test/test_runner.rb
+++ b/test/test_runner.rb
@@ -22,6 +22,7 @@
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
require 'rubygems'
+require 'logger'
require 'minitest/unit'
require 'minitest/autorun'
@@ -113,7 +114,13 @@ class PulpMiniTestRunner
}
if options[:logging] == "true"
- Runcible::Base.config[:logger] = 'stdout'
+ log = ::Logger.new(STDOUT)
+ log.level = Logger::DEBUG
+ Runcible::Base.config[:logging] = {
+ :logger => log,
+ :debug => true,
+ :stdout => true
+ }
end
if options[:auth_type] == "http" | Fixes logging=true option during testing based off the previous
changes to requiring the logger to be based off the Ruby standard
logger API. | Katello_runcible | train | rb |
ef9caad63e9d86c3bfe7efb4ff7b917d3327bc8c | diff --git a/activesupport/lib/active_support/time_with_zone.rb b/activesupport/lib/active_support/time_with_zone.rb
index <HASH>..<HASH> 100644
--- a/activesupport/lib/active_support/time_with_zone.rb
+++ b/activesupport/lib/active_support/time_with_zone.rb
@@ -246,6 +246,7 @@ module ActiveSupport
utc.future?
end
+ # Returns +true+ if +other+ is equal to current object.
def eql?(other)
other.eql?(utc)
end
@@ -329,6 +330,11 @@ module ActiveSupport
EOV
end
+ # Returns Array of parts of Time in sequence of
+ # [seconds, minutes, hours, day, month, year, weekday, yearday, dst?, zone].
+ #
+ # now = Time.zone.now # => Tue, 18 Aug 2015 02:29:27 UTC +00:00
+ # now.to_a # => [27, 29, 2, 18, 8, 2015, 2, 230, false, "UTC"]
def to_a
[time.sec, time.min, time.hour, time.day, time.mon, time.year, time.wday, time.yday, dst?, zone]
end
@@ -358,11 +364,14 @@ module ActiveSupport
utc.to_r
end
- # Return an instance of Time in the system timezone.
+ # Returns an instance of Time in the system timezone.
def to_time
utc.to_time
end
+ # Returns an instance of DateTime in the UTC timezone.
+ #
+ # Time.zone.now.to_datetime # => Tue, 18 Aug 2015 02:32:20 +0000
def to_datetime
utc.to_datetime.new_offset(Rational(utc_offset, 86_400))
end | Added docs for TimeWithZone [ci skip] | rails_rails | train | rb |
3032240dde2e76b4bfe1c11c05eeaa7252e1b14e | diff --git a/tests/test_config_cmd.py b/tests/test_config_cmd.py
index <HASH>..<HASH> 100644
--- a/tests/test_config_cmd.py
+++ b/tests/test_config_cmd.py
@@ -60,6 +60,24 @@ class ConfigTestCase(support.LoggingSilencer,
self.assertEquals(cmd.libraries, ['one'])
self.assertEquals(cmd.library_dirs, ['three', 'four'])
+ def test_clean(self):
+ # _clean removes files
+ tmp_dir = self.mkdtemp()
+ f1 = os.path.join(tmp_dir, 'one')
+ f2 = os.path.join(tmp_dir, 'two')
+
+ self.write_file(f1, 'xxx')
+ self.write_file(f2, 'xxx')
+
+ for f in (f1, f2):
+ self.assert_(os.path.exists(f))
+
+ pkg_dir, dist = self.create_dist()
+ cmd = config(dist)
+ cmd._clean(f1, f2)
+
+ for f in (f1, f2):
+ self.assert_(not os.path.exists(f))
def test_suite():
return unittest.makeSuite(ConfigTestCase) | Merged revisions <I> via svnmerge from
svn+ssh://<EMAIL>/python/trunk
........
r<I> | tarek.ziade | <I>-<I>-<I> <I>:<I>:<I> <I> (Sun, <I> Apr <I>) | 1 line
removed string usage and added a test for _clean
........ | pypa_setuptools | train | py |
778a38fae4fc2d7df0dfa0e799ecfc68de74e3d3 | diff --git a/empirical.py b/empirical.py
index <HASH>..<HASH> 100644
--- a/empirical.py
+++ b/empirical.py
@@ -495,6 +495,8 @@ class Patch:
n_pairs = np.sum(np.arange(len(plot_locs.keys())))
result = np.recarray((n_pairs,), dtype=[('plot-a','S32'),
('plot-b', 'S32'),
+ ('spp-a', int),
+ ('spp-b', int),
('dist', float),
('sorensen', float),
('jaccard', float)])
@@ -516,6 +518,10 @@ class Patch:
# Get similarity indices
spp_a = len(sad_dict[plota])
spp_b = len(sad_dict[plotb])
+
+ result[row]['spp-a'] = spp_a
+ result[row]['spp-b'] = spp_b
+
intersect = set(sad_dict[plota]).intersection(sad_dict[plotb])
union = set(sad_dict[plota]).union(sad_dict[plotb]) | Add columns to comm_sep output for spp num in each plot | jkitzes_macroeco | train | py |
f9f2f19a7bd7b0221317d15b8cd895a75f1a6ed6 | diff --git a/core/src/main/java/org/jboss/gwt/elemento/core/EventType.java b/core/src/main/java/org/jboss/gwt/elemento/core/EventType.java
index <HASH>..<HASH> 100644
--- a/core/src/main/java/org/jboss/gwt/elemento/core/EventType.java
+++ b/core/src/main/java/org/jboss/gwt/elemento/core/EventType.java
@@ -120,9 +120,9 @@ public class EventType<T extends Event, V extends EventTarget> {
// Value Change Events
public static final EventType<HashChangeEvent, Window> hashchange = of("hashchange");
- public static final EventType<InputEvent, Element> input = of("input");
+ public static final EventType<Event, Element> input = of("input");
public static final EventType<Event, Document> readystatechange = of("readystatechange");
- public static final EventType<InputEvent, Element> change = of("change");
+ public static final EventType<Event, Element> change = of("change");
// Uncategorized Events
public static final EventType<Event, Element> invalid = of("invalid"); | Fix "input" and "change" are of type Event
InputEvent events are fired only when a contentEditable=true element is modified. If you listen to "input" event, you might receive InputEvent types, but you can also receive just Event types so it must be of type Event or it will throw a ClassCastException. | hal_elemento | train | java |
0f51c12270b8c54e96a6ac2c17e51b62a46286e5 | diff --git a/src/Eris/Random/MersenneTwister.php b/src/Eris/Random/MersenneTwister.php
index <HASH>..<HASH> 100644
--- a/src/Eris/Random/MersenneTwister.php
+++ b/src/Eris/Random/MersenneTwister.php
@@ -1,6 +1,8 @@
<?php
namespace Eris\Random;
+use PHPUnit_Framework_SkippedTestError;
+
class MersenneTwister implements Source
{
private $seed;
@@ -24,6 +26,13 @@ class MersenneTwister implements Source
private $t = 15;
private $c = 0xefc60000;
private $l = 18;
+
+ public function __construct()
+ {
+ if (defined('HHVM_VERSION')) {
+ throw new PHPUnit_Framework_SkippedTestError("Pure PHP random implemnentation segfaults HHVM, so it's not available for this platform");
+ }
+ }
public function seed($seed)
{ | Skipping a test under HHVM to avoid segfaults | giorgiosironi_eris | train | php |
bdc5502875ef26ee57e43f983c12a9170576012c | diff --git a/lib/motion-table/console.rb b/lib/motion-table/console.rb
index <HASH>..<HASH> 100644
--- a/lib/motion-table/console.rb
+++ b/lib/motion-table/console.rb
@@ -9,7 +9,7 @@ module MotionTable
end
def log(log, withColor:color)
- puts color[0] + " " + NAME + log + " " + color[1]
+ puts "#{color[0]} #{NAME} #{log} #{color[1]}".to_s
end
end
end | Trying to add color-coding. | jamonholmgren_motion-table | train | rb |
7098c9083ccd08eb36cce5abcc99ad0506945ef0 | diff --git a/activerecord/lib/active_record/type/decimal.rb b/activerecord/lib/active_record/type/decimal.rb
index <HASH>..<HASH> 100644
--- a/activerecord/lib/active_record/type/decimal.rb
+++ b/activerecord/lib/active_record/type/decimal.rb
@@ -14,14 +14,16 @@ module ActiveRecord
private
def cast_value(value)
- if value.class == Rational
- BigDecimal.new(value, precision.to_i)
- elsif value.respond_to?(:to_d)
+ case value
+ when Numeric, String, Rational
+ BigDecimal(value, precision.to_i)
+ when proc { value.respond_to?(:to_d) }
value.to_d
else
- value.to_s.to_d
+ cast_value(value.to_s)
end
end
+
end
end
end
diff --git a/activerecord/test/cases/types_test.rb b/activerecord/test/cases/types_test.rb
index <HASH>..<HASH> 100644
--- a/activerecord/test/cases/types_test.rb
+++ b/activerecord/test/cases/types_test.rb
@@ -103,7 +103,7 @@ module ActiveRecord
end
def test_type_cast_rational_to_decimal_with_precision
- type = Type::Decimal.new(:precision => 2)
+ type = Type::Decimal.new(precision: 2)
assert_equal BigDecimal("0.33"), type.type_cast_from_user(Rational(1, 3))
end | Change class evaluation for Rationals in cast_value | rails_rails | train | rb,rb |
867f9129436e59f2b70f33a951beff0803d22203 | diff --git a/Events/CustomerPlugin.php b/Events/CustomerPlugin.php
index <HASH>..<HASH> 100644
--- a/Events/CustomerPlugin.php
+++ b/Events/CustomerPlugin.php
@@ -295,11 +295,10 @@ class CustomerPlugin
$storeId = 0,
$sendemailStoreId = null
) {
- exit('3');
if (!$storeId) {
$storeId = $this->getWebsiteStoreId($customer, $sendemailStoreId);
}
-
+ return $proceed($customer, $type, $backUrl, $storeId, $sendemailStoreId);
/*if (! $this->scopeConfig->getValue(
path_in_the_config_table, | fix(customer-plugin): quick fix MAGENTO-OPS | emartech_magento2-extension | train | php |
ca6154bd918ce8f50d93b47e3d774dd9926ee134 | diff --git a/salt/modules/status.py b/salt/modules/status.py
index <HASH>..<HASH> 100644
--- a/salt/modules/status.py
+++ b/salt/modules/status.py
@@ -467,6 +467,9 @@ def pid(sig):
salt '*' status.pid <sig>
'''
+ # Check whether the sig is already quoted (we check at the end in case they
+ # send a sig like `-E 'someregex'` to use egrep) and doesn't begin with a
+ # dash (again, like `-E someregex`). Quote sigs that qualify.
if (not sig.endswith('"') and not sig.endswith("'") and
not sig.startswith('-')):
sig = "'" + sig + "'" | Add documentation to tell why we chose to quote as we did | saltstack_salt | train | py |
14a2539d8e07387dc5d382b49ef63bf69419955b | diff --git a/spec/aruba/processes/spawn_process_spec.rb b/spec/aruba/processes/spawn_process_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/aruba/processes/spawn_process_spec.rb
+++ b/spec/aruba/processes/spawn_process_spec.rb
@@ -26,7 +26,7 @@ RSpec.describe Aruba::Processes::SpawnProcess do
end
describe '#stderr' do
- let(:command_line) { "ruby -e 'warn \"yo\"'" }
+ let(:command_line) { "sh -c \"echo 'yo' >&2\"" }
before do
process.start | Simplify command to run in processes/spawn_process_spec.rb | cucumber_aruba | train | rb |
b7a7a24261227249f24a0fbc1411856b78644ac2 | diff --git a/src/installer/__init__.py b/src/installer/__init__.py
index <HASH>..<HASH> 100644
--- a/src/installer/__init__.py
+++ b/src/installer/__init__.py
@@ -8,7 +8,7 @@ from ._pip import (
from .operations import sync, clean
from .synchronizer import Cleaner, Synchronizer
-__version__ = '0.1.1'
+__version__ = '0.1.2.dev0'
__all__ = [
"Cleaner", "clean", "EditableInstaller", "RequirementUninstaller", | Prebump to <I>.dev0 | sarugaku_installer | train | py |
343ee94d9a0a4edfbe60492409f22ae6c30340d0 | diff --git a/sonar-server/src/test/java/org/sonar/server/log/ws/LogsWebServiceMediumTest.java b/sonar-server/src/test/java/org/sonar/server/log/ws/LogsWebServiceMediumTest.java
index <HASH>..<HASH> 100644
--- a/sonar-server/src/test/java/org/sonar/server/log/ws/LogsWebServiceMediumTest.java
+++ b/sonar-server/src/test/java/org/sonar/server/log/ws/LogsWebServiceMediumTest.java
@@ -1,3 +1,22 @@
+/*
+ * SonarQube, open source software quality management tool.
+ * Copyright (C) 2008-2014 SonarSource
+ * mailto:contact AT sonarsource DOT com
+ *
+ * SonarQube is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * SonarQube is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this program; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ */
package org.sonar.server.log.ws;
import org.junit.After; | fix quality flaw (missing license header) | SonarSource_sonarqube | train | java |
050398753d4f37f340804cec7e15f2cb08309c15 | diff --git a/cartoframes/contrib/vector.py b/cartoframes/contrib/vector.py
index <HASH>..<HASH> 100644
--- a/cartoframes/contrib/vector.py
+++ b/cartoframes/contrib/vector.py
@@ -89,7 +89,9 @@ class QueryLayer(object):
def _set_interactivity(self, interactivity):
"""Adds interactivity syntax to the styling"""
- if isinstance(interactivity, list) or isinstance(interactivity, tuple):
+ if interactivity is None:
+ return
+ elif isinstance(interactivity, list) or isinstance(interactivity, tuple):
self.interactivity = 'click'
interactive_cols = '\n'.join(
'@{0}: ${0}'.format(col) for col in interactivity | adds catch for none-valued interactivity | CartoDB_cartoframes | train | py |
4ddc9a478637dc01e5c2afe00202608fd3f61aac | diff --git a/acceptance/tests/environment/use_environment_from_environmentpath.rb b/acceptance/tests/environment/use_environment_from_environmentpath.rb
index <HASH>..<HASH> 100644
--- a/acceptance/tests/environment/use_environment_from_environmentpath.rb
+++ b/acceptance/tests/environment/use_environment_from_environmentpath.rb
@@ -109,6 +109,9 @@ def run_with_environment(agent, environment, options = {})
"--server", master,
]
agent_config << '--environment' << environment if environment
+ # This to test how the agent behaves when using the directory environment
+ # loaders (which will not load an environment if it does not exist)
+ agent_config << "--environmentpath='$confdir/environments'" if agent != master
agent_config << {
'ENV' => { "FACTER_agent_file_location" => atmp },
} | (PUP-<I>) Test agent with directory environments enabled | puppetlabs_puppet | train | rb |
69cceed4aed504994e766768897d5215fa97da7f | diff --git a/types/container/config.go b/types/container/config.go
index <HASH>..<HASH> 100644
--- a/types/container/config.go
+++ b/types/container/config.go
@@ -36,7 +36,7 @@ type HealthConfig struct {
type Config struct {
Hostname string // Hostname
Domainname string // Domainname
- User string // User that will run the command(s) inside the container
+ User string // User that will run the command(s) inside the container, also support user:group
AttachStdin bool // Attach the standard input, makes possible user interaction
AttachStdout bool // Attach the standard output
AttachStderr bool // Attach the standard error | container: Config.User field supports "user:group" syntax | docker_engine-api | train | go |
f3f51e35b3404063a5a628622b599b1857a9fd46 | diff --git a/src/main/java/io/ebean/typequery/TQRootBean.java b/src/main/java/io/ebean/typequery/TQRootBean.java
index <HASH>..<HASH> 100644
--- a/src/main/java/io/ebean/typequery/TQRootBean.java
+++ b/src/main/java/io/ebean/typequery/TQRootBean.java
@@ -867,6 +867,13 @@ public abstract class TQRootBean<T, R> {
* raw("orderQty < shipQty")
*
* }</pre>
+ *
+ * <h4>Subquery example:</h4>
+ * <pre>{@code
+ *
+ * .raw("t0.customer_id in (select customer_id from customer_group where group_id = any(?::uuid[]))", groupIds)
+ *
+ * }</pre>
*/
public R raw(String rawExpression) {
peekExprList().raw(rawExpression);
@@ -909,6 +916,13 @@ public abstract class TQRootBean<T, R> {
* raw("add_days(orderDate, 10) < ?", someDate)
*
* }</pre>
+ *
+ * <h4>Subquery example:</h4>
+ * <pre>{@code
+ *
+ * .raw("t0.customer_id in (select customer_id from customer_group where group_id = any(?::uuid[]))", groupIds)
+ *
+ * }</pre>
*/
public R raw(String rawExpression, Object bindValue) {
peekExprList().raw(rawExpression, bindValue); | No effective change - javadoc improvement for raw() expression | ebean-orm_ebean-querybean | train | java |
f8983166d897a28ad72a9812271801fc197c327f | diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -22,7 +22,7 @@ setup(
classifiers = [],
install_requires = [
'ansimarkup>=1.3.0',
- 'better_exceptions_fork>=0.1.8.post1',
+ 'better_exceptions_fork>=0.1.8.post2',
'pendulum>=1.3.0',
],
) | Bump required "better_exceptions_fork" version to <I>.post2 | Delgan_loguru | train | py |
91f3da35fee661788e9416a147d5b82771118ce6 | diff --git a/ros_buildfarm/common.py b/ros_buildfarm/common.py
index <HASH>..<HASH> 100644
--- a/ros_buildfarm/common.py
+++ b/ros_buildfarm/common.py
@@ -31,7 +31,7 @@ class JobValidationError(Exception):
"""
def __init__(self, message):
- self.message = message
+ super(JobValidationError, self).__init__(message)
next_scope_id = 1 | Call super in JobValidationError to correcly print the error (#<I>)
Without this call, you get a meaningless JobValidationError. With this change it'll properly print the message when an exception occurs. | ros-infrastructure_ros_buildfarm | train | py |
8a9665f93f7d2776e87ff32926cd237cd41873c7 | diff --git a/src/AnimeDb/Bundle/AnimeDbBundle/Command/UpdateCommand.php b/src/AnimeDb/Bundle/AnimeDbBundle/Command/UpdateCommand.php
index <HASH>..<HASH> 100644
--- a/src/AnimeDb/Bundle/AnimeDbBundle/Command/UpdateCommand.php
+++ b/src/AnimeDb/Bundle/AnimeDbBundle/Command/UpdateCommand.php
@@ -199,7 +199,7 @@ class UpdateCommand extends ContainerAwareCommand
->ignoreUnreadableDirs()
->in($this->getContainer()->getParameter('kernel.root_dir').'/../src')
->in($this->getContainer()->getParameter('kernel.root_dir'))
- ->notPath('Resources/'.$this->getContainer()->getParameter('database_path'))
+ ->notPath('app/Resources')
->notPath('DoctrineMigrations');
$fs->remove($finder);
} catch (\Exception $e) {} | save all files in app/Resources | anime-db_anime-db | train | php |
1340820a05e674bb6313b03ccb877cd5d5c34f6d | diff --git a/janitor/policy.py b/janitor/policy.py
index <HASH>..<HASH> 100644
--- a/janitor/policy.py
+++ b/janitor/policy.py
@@ -116,6 +116,7 @@ class Policy(object):
self.log.error(
"Maid record path: %s does not exist" % maid_record)
raise ValueError("record path does not exist")
+ self.log.info("Recording aws traffic to: %s" % maid_record)
import placebo
pill = placebo.attach(session, maid_record)
pill.record() | note/log use of flight record mode | cloud-custodian_cloud-custodian | train | py |
ed052c5836354c8aca11491ed5da08c56c422f02 | diff --git a/src/main/java/water/H2O.java b/src/main/java/water/H2O.java
index <HASH>..<HASH> 100644
--- a/src/main/java/water/H2O.java
+++ b/src/main/java/water/H2O.java
@@ -1385,13 +1385,22 @@ public final class H2O {
}
static void initializePersistence() {
+ Log.POST(3001);
HdfsLoader.loadJars();
+ Log.POST(3002);
if( OPT_ARGS.aws_credentials != null ) {
try {
+ Log.POST(3003);
PersistS3.getClient();
- } catch( IllegalArgumentException e ) { Log.err(e); }
+ Log.POST(3004);
+ } catch( IllegalArgumentException e ) {
+ Log.POST(3005);
+ Log.err(e);
+ }
}
+ Log.POST(3006);
Persist.initialize();
+ Log.POST(3007);
}
static void initializeLicenseManager() { | Add a bunch of POST calls in initializePersistence. | h2oai_h2o-2 | train | java |
b65a65317659e2f4553d276b2367144f4ca1290a | diff --git a/modules/system/traits/AssetMaker.php b/modules/system/traits/AssetMaker.php
index <HASH>..<HASH> 100644
--- a/modules/system/traits/AssetMaker.php
+++ b/modules/system/traits/AssetMaker.php
@@ -29,6 +29,17 @@ trait AssetMaker
public $assetPath;
/**
+ * Disables the use, and subequent broadcast, of assets. This is useful
+ * to call during an AJAX request to speed things up. This method works
+ * by specifically targeting the hasAssetsDefined method.
+ * @return void
+ */
+ public function clearAssetDefinitions()
+ {
+ $this->assets = ['js'=>[], 'css'=>[], 'rss'=>[]];
+ }
+
+ /**
* Outputs <link> and <script> tags to load assets previously added with addJs and addCss method calls
* @param string $type Return an asset collection of a given type (css, rss, js) or null for all.
* @return string | Add ability to null asset broadcasting from AJAX | octobercms_october | train | php |
Subsets and Splits
Java Commits in Train Set
Retrieves all entries where the diff_languages column is 'java', providing a filtered dataset without deeper analysis.
Java Commits Test Data
Returns a subset of 5000 entries from the dataset where the programming language difference is Java, providing basic filtering for exploration.
Java Commits Sample
Retrieves the first 1,000 records where the 'diff_languages' column is 'java', providing limited insight into the specific data entries.
Java Commits Validation Sample
Retrieves a sample of entries from the validation dataset where the diff_languages column is 'java', providing limited insight into specific Java-related data points.
Java Commits in Validation
This query retrieves a limited sample of entries from the validation dataset where the diff_languages column is 'java', providing basic filtering with minimal insight.
Java Commits Sample
This query retrieves a sample of 100 records where the 'diff_languages' is 'java', providing basic filtering but limited analytical value.
Java Commits Sample
Retrieves 100 samples where the diff_languages column is 'java', providing basic filtering but minimal analytical value.
Java Commits Sample
Retrieves 10 samples where the diff_languages column is 'java', providing basic examples of data entries with this specific language.
Java Commits Validation Sample
Retrieves 1,000 records where the diff_languages column is 'java', providing a snapshot of that specific subset but limited to raw data.
Java Commits Sample
This query retrieves 1,000 random samples from the dataset where the diff_languages column is 'java', offering limited insight beyond raw data.