hash stringlengths 40 40 | diff stringlengths 131 26.7k | message stringlengths 7 694 | project stringlengths 5 67 | split stringclasses 1 value | diff_languages stringlengths 2 24 |
|---|---|---|---|---|---|
9cd67c44c8b8894da70a76b6206983122e5852f9 | diff --git a/openquake/commonlib/nrml.py b/openquake/commonlib/nrml.py
index <HASH>..<HASH> 100644
--- a/openquake/commonlib/nrml.py
+++ b/openquake/commonlib/nrml.py
@@ -136,7 +136,7 @@ def node_to_nrml(node, output=sys.stdout):
Convert a node into a NRML file. output must be a file
object open in write mode. If you want to perform a
consistency check, open it in read-write mode, then it will
- be read after creation and checked against the NRML schema.
+ be read after creation and validated.
:params node: a Node object
:params output: a file-like object in write or read-write mode
@@ -148,7 +148,7 @@ def node_to_nrml(node, output=sys.stdout):
node_to_xml(root, output)
if hasattr(output, 'mode') and '+' in output.mode: # read-write mode
output.seek(0)
- # node_from_nrml(output) # validate the written file
+ node_from_nrml(output) # validate the written file
if __name__ == '__main__': | I restore validation after writing in node_to_nrml | gem_oq-engine | train | py |
5c6396eefa7a183dfcff8a370afe72945d07fdbb | diff --git a/jstore.php b/jstore.php
index <HASH>..<HASH> 100644
--- a/jstore.php
+++ b/jstore.php
@@ -40,6 +40,9 @@ class jstore
public function admin($key){
$default = $this->get($key)->toArray();
+ foreach($default as $arraykey => $entry){
+ $default[$arraykey] = json_encode($entry);
+ }
include('admintemplate.php');
}
} | Pass defaults to to admin template already json encoded for direct adding to the schema | tjwlucas_phpjstore | train | php |
2c49d1a20c234b9947982d980aa3f45a988604ef | diff --git a/dpark/moosefs/__init__.py b/dpark/moosefs/__init__.py
index <HASH>..<HASH> 100644
--- a/dpark/moosefs/__init__.py
+++ b/dpark/moosefs/__init__.py
@@ -136,9 +136,6 @@ class File(object):
self.master = master
self.cscache = {}
- def __len__(self):
- return (self.length - 1) / CHUNKSIZE + 1
-
def get_chunk(self, i):
chunk = self.cscache.get(i)
if not chunk:
@@ -148,8 +145,9 @@ class File(object):
def locs(self, i=None):
if i is None:
+ n = (self.length - 1) / CHUNKSIZE + 1
return [[host for host, _ in self.get_chunk(i).addrs]
- for i in range(len(self))]
+ for i in range(n)]
return [host for host, _ in self.get_chunk(i).addrs] | bugfix: File is a iterator, should not have __len__ | douban_dpark | train | py |
725b3b5482e0cb9c73897ea0df598ac446ba46eb | diff --git a/oauth2/oauth2-client/src/main/java/org/springframework/security/oauth2/client/web/server/OAuth2AuthorizationCodeGrantWebFilter.java b/oauth2/oauth2-client/src/main/java/org/springframework/security/oauth2/client/web/server/OAuth2AuthorizationCodeGrantWebFilter.java
index <HASH>..<HASH> 100644
--- a/oauth2/oauth2-client/src/main/java/org/springframework/security/oauth2/client/web/server/OAuth2AuthorizationCodeGrantWebFilter.java
+++ b/oauth2/oauth2-client/src/main/java/org/springframework/security/oauth2/client/web/server/OAuth2AuthorizationCodeGrantWebFilter.java
@@ -124,7 +124,7 @@ public class OAuth2AuthorizationCodeGrantWebFilter implements WebFilter {
Assert.notNull(authorizedClientRepository, "authorizedClientRepository cannot be null");
this.authenticationManager = authenticationManager;
this.authorizedClientRepository = authorizedClientRepository;
- this.requiresAuthenticationMatcher = new PathPatternParserServerWebExchangeMatcher("/authorize/oauth2/code/{registrationId}");
+ this.requiresAuthenticationMatcher = new PathPatternParserServerWebExchangeMatcher("/{action}/oauth2/code/{registrationId}");
this.authenticationConverter = authenticationConverter;
this.authenticationSuccessHandler = new RedirectServerAuthenticationSuccessHandler();
this.authenticationFailureHandler = (webFilterExchange, exception) -> Mono.error(exception); | Fix OAuth2AuthorizationCodeGrantWebFilter works w/ /{action/
Issue: gh-<I> | spring-projects_spring-security | train | java |
31c16fb78f8367a3df34884199896e4bf0ae4c94 | diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -34,7 +34,7 @@ install_requires = [
'IDUtils~=0.0,>=0.2.4',
'autosemver~=0.0,>=0.5.1',
'dojson~=1.0,>=1.3.1',
- 'inspire-schemas~=41.0,>=41.0.0',
+ 'inspire-schemas~=42.0,>=42.0.0',
'langdetect~=1.0,>=1.0.7',
'pycountry~=17.0,>=17.5.4',
] | setup: bump inspire-schemas to version ~<I> | inspirehep_inspire-dojson | train | py |
5065ef12e0353f60d41eb03d4b1bf5274b5f80f7 | diff --git a/unitest-restful.py b/unitest-restful.py
index <HASH>..<HASH> 100755
--- a/unitest-restful.py
+++ b/unitest-restful.py
@@ -112,7 +112,7 @@ class TestGlances(unittest.TestCase):
req = requests.get("%s/%s/%s" % (URL, method, i))
self.assertTrue(req.ok)
self.assertIsInstance(req.json(), dict)
- print req.json()[i]
+ print(req.json()[i])
self.assertIsInstance(req.json()[i], numbers.Number)
def test_005_values(self): | Correct unitest Restfull JSON to be Python compatible | nicolargo_glances | train | py |
4b2b978291275ab243fc84a3ad68551e70f39837 | diff --git a/libcontainer/specconv/spec_linux.go b/libcontainer/specconv/spec_linux.go
index <HASH>..<HASH> 100644
--- a/libcontainer/specconv/spec_linux.go
+++ b/libcontainer/specconv/spec_linux.go
@@ -325,7 +325,7 @@ func createCgroupConfig(opts *CreateOpts) (*configs.Cgroup, error) {
// for e.g. "system.slice:docker:1234"
parts := strings.Split(myCgroupPath, ":")
if len(parts) != 3 {
- return nil, fmt.Errorf("expected cgroupsPath to be of format \"slice:prefix:name\" for systemd cgroups")
+ return nil, fmt.Errorf("expected cgroupsPath to be of format \"slice:prefix:name\" for systemd cgroups, got %q instead", myCgroupPath)
}
c.Parent = parts[0]
c.ScopePrefix = parts[1] | Add cgroup name to error message
More information should help troubleshoot an issue when this error occurs. | opencontainers_runc | train | go |
230849b0c70f13735455195dc2affb0b4b358038 | diff --git a/test/index.js b/test/index.js
index <HASH>..<HASH> 100644
--- a/test/index.js
+++ b/test/index.js
@@ -659,7 +659,7 @@ function testRenderOutput() {
}
function testDefaultRenderImpl() {
- var ht = new (Hogan.Template || HoganTemplate)();
+ var ht = new Hogan.Template();
is(ht.render() === '', true, 'default renderImpl returns an array.');
} | Adjust test to reflect new API consistency. | twitter_hogan.js | train | js |
49eed5915d361a4fb01dfe964a21d24b4697e6d5 | diff --git a/timepiece/views.py b/timepiece/views.py
index <HASH>..<HASH> 100644
--- a/timepiece/views.py
+++ b/timepiece/views.py
@@ -113,9 +113,11 @@ def view_entries(request):
user=request.user,
end_time__isnull=True,
)
- for entry in my_active_entries:
- active_hours = get_active_hours(entry)
- current_total += active_hours
+
+ if current_total:
+ current_total += sum([get_active_hours(entry) \
+ for entry in my_active_entries]) or 0
+
# temporarily disabled until the allocations represent accurate goals
# -TM 6/27
allocations = [] | refs #<I> - Updated total count loop | caktus_django-timepiece | train | py |
7db959a0541d3b48953805535035bff9cd7a901a | diff --git a/lib/puppet-lint/bin.rb b/lib/puppet-lint/bin.rb
index <HASH>..<HASH> 100644
--- a/lib/puppet-lint/bin.rb
+++ b/lib/puppet-lint/bin.rb
@@ -76,6 +76,13 @@ class PuppetLint::Bin
opts.separator ""
opts.separator " Disable checks:"
+ opts.on('--only-check CHECKS', 'Provide a comma seperated list of checks that should be run') do |check_list|
+ enable_checks = check_list.split(',').map { |check| check.to_sym }
+ (PuppetLint.configuration.checks - enable_checks).each do |check|
+ PuppetLint.configuration.send("disable_#{check}")
+ end
+ end
+
PuppetLint.configuration.checks.each do |check|
opts.on("--no-#{check}-check", "Skip the #{check} check") do
PuppetLint.configuration.send("disable_#{check}") | Allow specifying a list of checks that should run.
This reverses the default assumption of "everything enabled" | rodjek_puppet-lint | train | rb |
2141737a4fad49769ca130beb2c3136786f39ec1 | diff --git a/public/js/clients/chrome.js b/public/js/clients/chrome.js
index <HASH>..<HASH> 100644
--- a/public/js/clients/chrome.js
+++ b/public/js/clients/chrome.js
@@ -4,7 +4,7 @@ const { connect } = require("../lib/chrome-remote-debug-protocol");
const defer = require("../utils/defer");
const { Tab } = require("../types");
const { isEnabled, getValue } = require("../feature");
-const { networkRequest } = require("../utils/networkRequest");
+const networkRequest = require("../utils/networkRequest");
const { setupCommands, clientCommands } = require("./chrome/commands");
const { setupEvents, clientEvents, pageEvents } = require("./chrome/events"); | Change the require call in the chrome client file to resolve to the `networkRequest` function. | firefox-devtools_debugger | train | js |
d47a492e366f70a20bdfce8aee936196a3f8884f | diff --git a/nextTick.js b/nextTick.js
index <HASH>..<HASH> 100644
--- a/nextTick.js
+++ b/nextTick.js
@@ -8,7 +8,7 @@ module.exports = (function(global) {
function makeTicker(tickFn) {
return function nextTick(callback) {
- tickFn(callback)
+ tickFn(function() { callback() }) // Do not pass through arguments from setTimeout/requestAnimationFrame
}
}
}(this)) | do not pass through arguments from setTimeout/requestAnimationFrame in nextTick | marcuswestin_std.js | train | js |
3bd368a42fc39ba7f873349038bfd053baff79a9 | diff --git a/codebird.es7.js b/codebird.es7.js
index <HASH>..<HASH> 100644
--- a/codebird.es7.js
+++ b/codebird.es7.js
@@ -604,7 +604,7 @@
} else {
key = eval(evalStr + ".push([]);") - 1;
}
- evalStr += `[${key}']`;
+ evalStr += `[${key}]`;
if (j !== keys.length - 1 && eval("typeof " + evalStr) === "undefined") {
eval(evalStr + " = [];");
} | Removed extra ' on line <I>
Removes SyntaxError (Unexpected string) caused when calling cb.__call () on an app-only auth request. | jublo_codebird-js | train | js |
e7cb719f2534a49058b4a03f9ce1beafddf59ecf | diff --git a/lib/actions/user.js b/lib/actions/user.js
index <HASH>..<HASH> 100644
--- a/lib/actions/user.js
+++ b/lib/actions/user.js
@@ -298,7 +298,7 @@ export function verifyPhoneNumber (code) {
}
/**
- * Check itinerary availability (existence) for the given monitored trip.
+ * Check itinerary existence for the given monitored trip.
*/
export function checkItineraryExistence (trip) {
return async function (dispatch, getState) {
@@ -315,7 +315,7 @@ export function checkItineraryExistence (trip) {
if (status === 'success' && data) {
dispatch(setitineraryExistence(data))
} else {
- alert('Error checking the availability of your selected trip.')
+ alert('Error checking whether your selected trip is possible.')
}
}
} | refactor(actions/user): Twaek comment/error msg for itinerary check. | opentripplanner_otp-react-redux | train | js |
e50c34d5925a5db3a72db7c4d2c25c1a6cf4c9e0 | diff --git a/lib/api/express.js b/lib/api/express.js
index <HASH>..<HASH> 100644
--- a/lib/api/express.js
+++ b/lib/api/express.js
@@ -7,7 +7,7 @@
'use strict';
module.exports = function exported(_) {
- return function express(context, pattern, done) {
+ return function express(context, pattern, done) { // eslint-disable-line consistent-return
var api = this;
var handler = api.views.lookup(pattern);
var callback = done ? done : _.noop;
@@ -15,7 +15,5 @@ module.exports = function exported(_) {
if (handler && _.isFunction(handler)) {
return handler.call(api, context, pattern, callback);
}
-
- return null;
};
}; | test: disable `consistent-return` for api#express | eq8_eq8js-api | train | js |
afc8e8b93756931711f771d4c3b577c25816f0e5 | diff --git a/app/controllers/mailkick/subscriptions_controller.rb b/app/controllers/mailkick/subscriptions_controller.rb
index <HASH>..<HASH> 100644
--- a/app/controllers/mailkick/subscriptions_controller.rb
+++ b/app/controllers/mailkick/subscriptions_controller.rb
@@ -1,5 +1,7 @@
module Mailkick
class SubscriptionsController < ActionController::Base
+ protect_from_forgery with: :exception
+
before_action :set_email
def show | Added protect_from_forgery - not an issue since all requests are GET, but safer for the future | ankane_mailkick | train | rb |
7554e22587b371c8c622b8ab180a83a1c42ac241 | diff --git a/src/Enum/Fields/FulfillmentFields.php b/src/Enum/Fields/FulfillmentFields.php
index <HASH>..<HASH> 100644
--- a/src/Enum/Fields/FulfillmentFields.php
+++ b/src/Enum/Fields/FulfillmentFields.php
@@ -14,6 +14,7 @@ class FulfillmentFields extends AbstractObjectEnum
const TRACKING_COMPANY = 'tracking_company';
const TRACKING_NUMBERS = 'tracking_numbers';
const TRACKING_URLS = 'tracking_urls';
+ const LOCATION_ID = 'location_id';
const UPDATED_AT = 'updated_at';
const VARIANT_INVENTORY_MANAGEMENT = 'variant_inventory_management';
@@ -30,6 +31,7 @@ class FulfillmentFields extends AbstractObjectEnum
'tracking_company' => 'string',
'tracking_numbers' => 'array',
'tracking_urls' => 'array',
+ 'location_id' => 'integer',
'updated_at' => 'DateTime',
'variant_inventory_management' => 'string'
); | Add location_id to FulfillmentFields (#<I>) | robwittman_shopify-php-sdk | train | php |
868c3d15b7b55f55cf16d22b46d6d7c54a1345ee | diff --git a/src/adapters/pulsepoint.js b/src/adapters/pulsepoint.js
index <HASH>..<HASH> 100644
--- a/src/adapters/pulsepoint.js
+++ b/src/adapters/pulsepoint.js
@@ -50,6 +50,10 @@ var PulsePointAdapter = function PulsePointAdapter() {
bid.width = adSize[0];
bid.height = adSize[1];
bidmanager.addBidResponse(bidRequest.placementCode, bid);
+ } else {
+ var passback = bidfactory.createBid(2);
+ passback.bidderCode = bidRequest.bidder;
+ bidmanager.addBidResponse(bidRequest.placementCode, passback);
}
} | Pulsepoint: Registering passback | prebid_Prebid.js | train | js |
a494118dc730afcb6aba8e59f1f3d09a7d4f9302 | diff --git a/packages/cli/src/download-npm-package.js b/packages/cli/src/download-npm-package.js
index <HASH>..<HASH> 100644
--- a/packages/cli/src/download-npm-package.js
+++ b/packages/cli/src/download-npm-package.js
@@ -15,7 +15,7 @@ export default async (pkg, dest) => {
const tmpObj = tmp.dirSync({ unsafeCleanup: true });
// step 2: download package from npm
- const result = await spawn.sync("npm", ["pack", pkg], {
+ const result = spawn.sync("npm", ["pack", pkg], {
stdio: "ignore",
cwd: tmpObj.name,
}); | fix: cli await on spawn bug | 36node_sketch | train | js |
7155c23f59666f7246c7d91a42e46f4844dd1fcb | diff --git a/api/opentrons/robot/robot_configs.py b/api/opentrons/robot/robot_configs.py
index <HASH>..<HASH> 100755
--- a/api/opentrons/robot/robot_configs.py
+++ b/api/opentrons/robot/robot_configs.py
@@ -110,12 +110,11 @@ def load(filename=None):
def save(config, filename=None):
filename = filename or environment.get_path('OT_CONFIG_FILE')
- default_dict = dict(children(default._asdict()))
+ _default = children(default._asdict())
diff = build([
- (key, value)
- for key, value in children(config._asdict())
- if default_dict[key] != value
+ item for item in children(config._asdict())
+ if item not in _default
])
with open(filename, 'w') as file: | shorten listcomp that produces diff in save to make it more readable | Opentrons_opentrons | train | py |
3031643c754e8a8da7c57166df008e261c27307a | diff --git a/core/engine.go b/core/engine.go
index <HASH>..<HASH> 100644
--- a/core/engine.go
+++ b/core/engine.go
@@ -285,7 +285,9 @@ func (e *Engine) processMetrics(globalCtx context.Context, processMetricsAfterRu
case <-processMetricsAfterRun:
e.logger.Debug("Processing metrics and thresholds after the test run has ended...")
processSamples()
- e.processThresholds()
+ if !e.NoThresholds {
+ e.processThresholds()
+ }
processMetricsAfterRun <- struct{}{}
case sc := <-e.Samples: | Check if thresholds are enabled before processing them | loadimpact_k6 | train | go |
f3352505128dc74d47476c6812e8cd7a5c7de328 | diff --git a/server.go b/server.go
index <HASH>..<HASH> 100644
--- a/server.go
+++ b/server.go
@@ -1125,10 +1125,17 @@ func (s *server) handleConnectPeer(msg *connectPeerMsg) {
// persistent connection to the peer.
srvrLog.Debugf("Connecting to %v", addr)
if msg.persistent {
- go s.connMgr.Connect(&connmgr.ConnReq{
+ connReq := &connmgr.ConnReq{
Addr: addr,
Permanent: true,
- })
+ }
+
+ s.pendingConnMtx.Lock()
+ s.persistentConnReqs[targetPub] = append(s.persistentConnReqs[targetPub],
+ connReq)
+ s.pendingConnMtx.Unlock()
+
+ go s.connMgr.Connect(connReq)
msg.err <- nil
} else {
// If we're not making a persistent connection, then we'll | server: fix regression in handling persistent connection requesrts
This commit fixes a bug that was introduced when the concurrent
connection handling logic was re-written: if we don’t properly add the
persistent outbound connection to the persistent conn reqs map. The fix
is easy: add the pending conn req to the proper map. | lightningnetwork_lnd | train | go |
ef7236c3e3a36dc9effba0a308811158ab3e25d7 | diff --git a/lib/Alchemy/Phrasea/Model/Entities/User.php b/lib/Alchemy/Phrasea/Model/Entities/User.php
index <HASH>..<HASH> 100644
--- a/lib/Alchemy/Phrasea/Model/Entities/User.php
+++ b/lib/Alchemy/Phrasea/Model/Entities/User.php
@@ -418,19 +418,26 @@ class User
/**
* @param string $gender
*
+ * @return $this
* @throws InvalidArgumentException
*/
public function setGender($gender)
{
- if (null !== $gender && !in_array($gender, [
- self::GENDER_MISS,
- self::GENDER_MR,
- self::GENDER_MRS
+ if (null !== $gender) {
+ $gender = (string)$gender;
+
+ }
+
+ if (!in_array($gender, [
+ null,
+ (string)self::GENDER_MISS,
+ (string)self::GENDER_MR,
+ (string)self::GENDER_MRS,
], true)) {
throw new InvalidArgumentException(sprintf("Invalid gender %s.", $gender));
}
- $this->gender = $gender;
+ $this->gender = $gender ? (int)$gender : null;
return $this;
} | Avoid BC break in User.
Convert gender to string then back to int after strict check.
The other way does not work as false is considered equal to 0 without strict mode. | alchemy-fr_Phraseanet | train | php |
2a3be066a86c46260e1e0b27b7b370468857f6db | diff --git a/addok/helpers/results.py b/addok/helpers/results.py
index <HASH>..<HASH> 100644
--- a/addok/helpers/results.py
+++ b/addok/helpers/results.py
@@ -75,6 +75,8 @@ def score_by_autocomplete_distance(helper, result):
def _score_by_str_distance(helper, result, scale=1.0):
+ if helper.lat is not None and helper.lon is not None:
+ scale = scale * 0.9
result.add_score('str_distance', compare_str(result.labels[0], helper.query) * scale, ceiling=1.0) | reduce str_distance when lat/lon in query | addok_addok | train | py |
4cd29f6c5afce3ce498d19c10d9272bbf44605da | diff --git a/lib/weblib.php b/lib/weblib.php
index <HASH>..<HASH> 100644
--- a/lib/weblib.php
+++ b/lib/weblib.php
@@ -4498,8 +4498,12 @@ function page_id_and_class(&$getid, &$getclass) {
static $class = NULL;
static $id = NULL;
- if(empty($class) || empty($id)) {
- $path = str_replace($CFG->httpswwwroot.'/', '', $ME); //Because the page could be HTTPSPAGEREQUIRED
+ if (empty($CFG->pagepath)) {
+ $CFG->pagepath = $ME;
+ }
+
+ if (empty($class) || empty($id)) {
+ $path = str_replace($CFG->httpswwwroot.'/', '', $CFG->pagepath); //Because the page could be HTTPSPAGEREQUIRED
$path = str_replace('.php', '', $path);
if (substr($path, -1) == '/') {
$path .= 'index';
@@ -4509,7 +4513,6 @@ function page_id_and_class(&$getid, &$getclass) {
$class = 'course';
} else if (substr($path, 0, 5) == 'admin') {
$id = str_replace('/', '-', $path);
- $id = str_replace('admin2', 'admin', $id);
$class = 'admin';
} else {
$id = str_replace('/', '-', $path); | Take $CFG->pagepath into account for page_id_and_class so "weird" pages
can end up wth nice page classes and ids | moodle_moodle | train | php |
3e3e32da2c23f2ee6e04096b4137fdd5421e578c | diff --git a/tests/Objects/DumbModelTest.php b/tests/Objects/DumbModelTest.php
index <HASH>..<HASH> 100644
--- a/tests/Objects/DumbModelTest.php
+++ b/tests/Objects/DumbModelTest.php
@@ -52,10 +52,11 @@ class DumbModelTest extends \PHPUnit_Framework_TestCase {
/**
* @expectedException \Thru\ActiveRecord\DatabaseLayer\TableDoesntExistException
- * @expectedExceptionMessage 42S02: SQLSTATE[42S02]: Base table or view not found: 1146 Table 'active_record_test.doesntexist' doesn't exist
+ * @expectedExceptionMessageRegExp /42S02: SQLSTATE\[42S02\]: Base table or view not found: 1146 Table '(.*).doesntexist' doesn't exist/
*/
public function testTableExistsGotcha(){
$result = DumbModel::queryOne("SELECT * FROM doesntexist", 'Thru\ActiveRecord\Test\Models\NotStdClass');
}
}
+ | Make test regexp a bit less picky. | Thruio_ActiveRecord | train | php |
2b90f2b7c7303366d3c6c7cdab96381583196294 | diff --git a/openupgradelib/openupgrade_120.py b/openupgradelib/openupgrade_120.py
index <HASH>..<HASH> 100644
--- a/openupgradelib/openupgrade_120.py
+++ b/openupgradelib/openupgrade_120.py
@@ -362,7 +362,7 @@ def _convert_field_bootstrap_3to4_orm(env, model_name, field_name,
update_field_multilang(
records,
field_name,
- lambda old, *a, **k: convert_field_bootstrap_3to4(old),
+ lambda old, *a, **k: convert_string_bootstrap_3to4(old),
) | [FIX] typo in <I> tools
Fix typo in openupgrade_<I> in `_convert_field_bootstrap_3to4_orm` method. it have to call string conversion. | OCA_openupgradelib | train | py |
0beca111f1674d0b9442a1dc2a7ccd51c347f7eb | diff --git a/virtualchain/lib/blockchain/bitcoin_blockchain/multisig.py b/virtualchain/lib/blockchain/bitcoin_blockchain/multisig.py
index <HASH>..<HASH> 100644
--- a/virtualchain/lib/blockchain/bitcoin_blockchain/multisig.py
+++ b/virtualchain/lib/blockchain/bitcoin_blockchain/multisig.py
@@ -129,7 +129,7 @@ def make_multisig_segwit_info( m, pks ):
# 1 pubkey means p2wpkh
key_hash = hashing.bin_hash160(pubs[0].decode('hex')).encode('hex')
- script = '0014' + key_hash
+ script = '160014' + key_hash
addr = btc_make_p2sh_address(script)
else: | script for singlesig segwit should start with 0x<I> | blockstack_virtualchain | train | py |
1f0c49248faf1c52f2d627d9bee43958d8cd6ebf | diff --git a/src/main/java/org/osgl/mvc/result/RenderBinary.java b/src/main/java/org/osgl/mvc/result/RenderBinary.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/osgl/mvc/result/RenderBinary.java
+++ b/src/main/java/org/osgl/mvc/result/RenderBinary.java
@@ -168,7 +168,7 @@ public class RenderBinary extends Result {
* readable file to send back
*/
public RenderBinary(File file) {
- this(file, file.getName(), true);
+ this(file, file.getName(), false);
}
/** | RenderBinary(File) shall default to attachment disposition | osglworks_java-mvc | train | java |
ce770ea005d93cdeed8ad56746cc248b6eef8863 | diff --git a/manifest.php b/manifest.php
index <HASH>..<HASH> 100755
--- a/manifest.php
+++ b/manifest.php
@@ -45,10 +45,7 @@ return array(
array('type' => 'CheckFileSystemComponent', 'value' => array('id' => 'fs_taoSubjects_includes', 'location' => 'taoSubjects/includes', 'rights' => 'r'))
)
),
- 'managementRole' => 'http://www.tao.lu/Ontologies/TAOSubject.rdf#SubjectsManagerRole',
- 'optimizableClasses' => array(
- 'http://www.tao.lu/Ontologies/TAOSubject.rdf#Subject'
- ),
+ 'managementRole' => 'http://www.tao.lu/Ontologies/TAOSubject.rdf#SubjectsManagerRole',
'constants' => array(
# actions directory
"DIR_ACTIONS" => $extpath."actions".DIRECTORY_SEPARATOR, | The whole User class hierarchy is considered as optimzable by the generis extension. No need to add the Test Taker class independently.
git-svn-id: <URL> | oat-sa_extension-tao-testtaker | train | php |
9d68fa337586a6a64b6a9f19fc8f2b079376a4db | diff --git a/airflow/hooks/druid_hook.py b/airflow/hooks/druid_hook.py
index <HASH>..<HASH> 100644
--- a/airflow/hooks/druid_hook.py
+++ b/airflow/hooks/druid_hook.py
@@ -85,8 +85,6 @@ class DruidHook(BaseHook):
self.log.info("Job still running for %s seconds...", sec)
- sec = sec + 1
-
if self.max_ingestion_time and sec > self.max_ingestion_time:
# ensure that the job gets killed if the max ingestion time is exceeded
requests.post("{0}/{1}/shutdown".format(url, druid_task_id))
@@ -95,6 +93,8 @@ class DruidHook(BaseHook):
time.sleep(self.timeout)
+ sec = sec + self.timeout
+
status = req_status.json()['status']['status']
if status == 'RUNNING':
running = True | [AIRFLOW-<I>] DruidHook: time check is wrong (#<I>) | apache_airflow | train | py |
6b286f9fcf6b9d1787794ac2caffa01001680ecc | diff --git a/lib/airbrake-ruby/filters/gem_root_filter.rb b/lib/airbrake-ruby/filters/gem_root_filter.rb
index <HASH>..<HASH> 100644
--- a/lib/airbrake-ruby/filters/gem_root_filter.rb
+++ b/lib/airbrake-ruby/filters/gem_root_filter.rb
@@ -24,7 +24,7 @@ module Airbrake
# If the frame is unparseable, then 'file' is nil, thus nothing to
# filter (all frame's data is in 'function' instead).
next unless (file = frame[:file])
- file.sub!(/\A#{gem_path}/, GEM_ROOT_LABEL)
+ frame[:file] = file.sub(/\A#{gem_path}/, GEM_ROOT_LABEL)
end
end
end | filters/gem_root: do not mutate file to avoid side effects
Mutating `file` with `GEM_ROOT` causes problems with code hunks. They rely on
the pristine value. | airbrake_airbrake-ruby | train | rb |
d1e26993200a518c96a873f8d773a1eca3354bf7 | diff --git a/src/server/cmd/pachctl/cmd/cmd.go b/src/server/cmd/pachctl/cmd/cmd.go
index <HASH>..<HASH> 100644
--- a/src/server/cmd/pachctl/cmd/cmd.go
+++ b/src/server/cmd/pachctl/cmd/cmd.go
@@ -175,7 +175,7 @@ kubectl %v port-forward "$pod" %d:8081
}
portForward.Flags().IntVarP(&port, "port", "p", 30650, "The local port to bind to.")
portForward.Flags().IntVarP(&uiPort, "ui-port", "u", 38080, "The local port to bind to.")
- portForward.Flags().IntVarP(&uiWebsocketPort, "proxy-port", "x", 32082, "The local port to bind to.")
+ portForward.Flags().IntVarP(&uiWebsocketPort, "proxy-port", "x", 32081, "The local port to bind to.")
portForward.Flags().StringVarP(&kubeCtlFlags, "kubectlflags", "k", "", "Any kubectl flags to proxy, e.g. --kubectlflags='--kubeconfig /some/path/kubeconfig'")
rootCmd.AddCommand(version) | Fix websocket port forward port <I>-><I>
Makes it consistent with internal/external port exposed by container (<I>;<I>) | pachyderm_pachyderm | train | go |
5f4613d5f05e679b53e0a43468d3afa8fe14901f | diff --git a/spyderlib/app/spyder.py b/spyderlib/app/spyder.py
index <HASH>..<HASH> 100644
--- a/spyderlib/app/spyder.py
+++ b/spyderlib/app/spyder.py
@@ -1020,6 +1020,7 @@ class MainWindow(QMainWindow):
def add_ipm_action(text, path):
"""Add installed Python module doc action to help submenu"""
# QAction.triggered works differently for PySide and PyQt
+ path = file_uri(path)
if not API == 'pyside':
slot=lambda _checked, path=path: programs.start_file(path)
else: | Fix little removal after PR #<I>
[ci skip] | spyder-ide_spyder | train | py |
7c50bc49aa5a884c5f4faaf221df8977e3c52a91 | diff --git a/local_modules/cubemap-to-octmap/test.js b/local_modules/cubemap-to-octmap/test.js
index <HASH>..<HASH> 100644
--- a/local_modules/cubemap-to-octmap/test.js
+++ b/local_modules/cubemap-to-octmap/test.js
@@ -11,8 +11,8 @@ mediump vec2 octahedralProjection(mediump vec3 dir) {
}
*/
-var Vec3 = require('pex-math/Vec3')
-var Vec2 = require('pex-math/Vec2')
+var Vec3 = require('pex-math/vec3')
+var Vec2 = require('pex-math/vec2')
var abs = Math.abs
var random = Math.random | Fix pex-math casing in octomap test | pex-gl_pex-renderer | train | js |
54a5726be60fdd370d82bfb14044c2babb28eaaf | diff --git a/lib/hacker_term/version.rb b/lib/hacker_term/version.rb
index <HASH>..<HASH> 100644
--- a/lib/hacker_term/version.rb
+++ b/lib/hacker_term/version.rb
@@ -1,3 +1,3 @@
module HackerTerm
- VERSION = "0.0.6"
+ VERSION = "0.1.0"
end | Bump to <I>. | ciaranarcher_hacker_term | train | rb |
ea7644db939cff77507de9aab2b12077dca2b361 | diff --git a/lib/alchemy/seeder.rb b/lib/alchemy/seeder.rb
index <HASH>..<HASH> 100644
--- a/lib/alchemy/seeder.rb
+++ b/lib/alchemy/seeder.rb
@@ -78,8 +78,8 @@ module Alchemy
def create_default_site
desc "Creating default site"
site = Alchemy::Site.find_or_initialize_by_host(
- :name => 'Default',
- :host => 'default'
+ :name => 'Default Site',
+ :host => '*'
)
if site.new_record?
site.save!
diff --git a/lib/alchemy/upgrader.rb b/lib/alchemy/upgrader.rb
index <HASH>..<HASH> 100644
--- a/lib/alchemy/upgrader.rb
+++ b/lib/alchemy/upgrader.rb
@@ -20,12 +20,26 @@ module Alchemy
copy_new_config_file
removed_richmedia_essences_notice
convert_picture_storage
+ upgrade_to_sites
display_todos
end
private
+ def upgrade_to_sites
+ desc "Creating default site and migrating existing languages to it"
+ if Site.count == 0
+ Alchemy::Site.transaction do
+ site = Alchemy::Site.create!(host: '*', name: 'Default Site')
+ Alchemy::Language.update_all(site_id: site.id)
+ log "Done."
+ end
+ else
+ log "Site(s) already present.", :skip
+ end
+ end
+
# Creates Language model if it does not exist (Alchemy CMS prior v1.5)
# Also creates missing associations between pages and languages
def upgrade_to_language | Add creation of default site to alchemy:upgrade task. | AlchemyCMS_alchemy_cms | train | rb,rb |
e4d7219077177dbfcefb17151108a61fbe85643e | diff --git a/javascript/WorkflowGridField.js b/javascript/WorkflowGridField.js
index <HASH>..<HASH> 100644
--- a/javascript/WorkflowGridField.js
+++ b/javascript/WorkflowGridField.js
@@ -7,13 +7,6 @@
onmatch: function(e) {
var ele = $(this);
var row = ele.closest('tr');
- row.on('click', function(e) {
- /*
- * Prevent a precursor POST to gridfield record URLs (all GridFields) when clicking on target-object's
- * hyperlinks, which results in a 404.
- */
- e.stopPropagation();
- });
if(ele.find('.col-buttons.disabled').length) {
row
@@ -23,7 +16,7 @@
return (e.target.nodeName === 'A' && e.target.className.match(/edit-link/) === null);
});
ele.find('a.edit-link').attr('title', '');
- }
+ }
}
});
@@ -34,6 +27,16 @@
}
});
+ /*
+ * Prevent a precursor POST to gridfield record URLs (all Pending/Submitted GridFields)
+ * when clicking on target-object's hyperlinks, which results in a 404.
+ */
+ $('.ss-gridfield .ss-gridfield-item td.col-Title a').entwine({
+ onclick: function(e) {
+ e.stopPropagation();
+ }
+ });
+
});
}(jQuery)); | FIX: Fixes regression in <I>eaf1d. Incorrect disabling JS logic order. | symbiote_silverstripe-advancedworkflow | train | js |
30170f898cfe7453f7b6f3b7bc4db501b9c487e8 | diff --git a/lib/determine-basal/determine-basal.js b/lib/determine-basal/determine-basal.js
index <HASH>..<HASH> 100644
--- a/lib/determine-basal/determine-basal.js
+++ b/lib/determine-basal/determine-basal.js
@@ -210,8 +210,9 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_
iobArray.forEach(function(iobTick) {
//console.error(iobTick);
predBGI = Math.round(( -iobTick.activity * sens * 5 )*100)/100;
- // predicted carb impact drops from current carb impact down to zero over duration of carb impact
- predCI = Math.max(0, ci * ( 1 - predBGs.length/Math.max(cid,1) ) );
+ // predicted carb impact drops linearly from current carb impact down to zero
+ // eventually accounting for all carbs (if they can be absorbed over DIA)
+ predCI = Math.max(0, ci * ( 1 - predBGs.length/Math.max(cid*2,1) ) );
predBG = predBGs[predBGs.length-1] + predBGI + predCI;
console.error(predBGI, predCI, predBG);
predBGs.push(predBG); | extend carb impact to account for all carbs (if they can be absorbed over DIA) | openaps_oref0 | train | js |
1f1891c534883c5b5bb7f9dae120d89b0d993809 | diff --git a/rb/lib/selenium/webdriver/firefox/bridge.rb b/rb/lib/selenium/webdriver/firefox/bridge.rb
index <HASH>..<HASH> 100644
--- a/rb/lib/selenium/webdriver/firefox/bridge.rb
+++ b/rb/lib/selenium/webdriver/firefox/bridge.rb
@@ -41,9 +41,9 @@ module Selenium
def quit
super
- @launcher.quit
-
nil
+ ensure
+ @launcher.quit
end
private | JariBakken: Make sure Firefox shutdown happens even if the RPC fails.
r<I> | SeleniumHQ_selenium | train | rb |
3fb05b5eef02cc8d43259e75d780f73e9b28a6d8 | diff --git a/lib/services/db.js b/lib/services/db.js
index <HASH>..<HASH> 100644
--- a/lib/services/db.js
+++ b/lib/services/db.js
@@ -199,14 +199,11 @@ _.each(['put', 'batch'], function (key) {
var args = _.slice(arguments, 1),
result = fn.apply(module.exports, args);
- if (_.isObject(result) && _.isFunction(result.then)) {
- return result.then(function (result) {
- module.exports.trigger.apply(module.exports, [key].concat(args));
+ return result.then(function (result) {
+ module.exports.trigger.apply(module.exports, [key].concat(args));
- // don't wait
- return result;
- });
- }
- return result;
+ // don't wait
+ return result;
+ });
});
}); | remove check if promise, because it's always a promise | clay_amphora | train | js |
31323ba7af030f6079a0eed007ff3fe91053568d | diff --git a/myawis/__init__.py b/myawis/__init__.py
index <HASH>..<HASH> 100644
--- a/myawis/__init__.py
+++ b/myawis/__init__.py
@@ -12,6 +12,11 @@ try:
except ImportError:
from urllib.parse import quote, urlencode
+URLINFO_RESPONSE_GROUPS = ",".join(
+ ["RelatedLinks", "Categories", "Rank", "ContactInfo", "RankByCountry",
+ "UsageStats", "Speed", "Language", "OwnedDomains", "LinksInCount",
+ "SiteData", "AdultContent"])
+
class CallAwis(object): | Add constant with UrlInfo response groups | ashim888_awis | train | py |
47e80f2f1f5c94445323b98b5ba92c19bbb9da97 | diff --git a/th_github/my_github.py b/th_github/my_github.py
index <HASH>..<HASH> 100644
--- a/th_github/my_github.py
+++ b/th_github/my_github.py
@@ -35,7 +35,7 @@ from th_github.models import Github
logger = getLogger('django_th.trigger_happy')
-cache = caches['th_github']
+cache = caches['django_th']
class ServiceGithub(ServicesMgr): | Replaced cache variable
REPLACED:
cache = caches['th_<service>']
With
cache = caches['django_th'] | push-things_django-th | train | py |
8f7a3658d7537a6b910fe470548ddd999039eca3 | diff --git a/switchyard/lib/packet/packet.py b/switchyard/lib/packet/packet.py
index <HASH>..<HASH> 100644
--- a/switchyard/lib/packet/packet.py
+++ b/switchyard/lib/packet/packet.py
@@ -171,7 +171,7 @@ class Packet(object):
def _checkidx(self, index):
if isinstance(index, int):
if index < 0:
- index = len(self._headers) - index
+ index = len(self._headers) + index
if not (0 <= index < len(self._headers)):
raise IndexError("Index out of range")
return index | Fix packet lib negative indexing to access header | jsommers_switchyard | train | py |
6fca4f91a167868c8d6406907d1582533eeca144 | diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -3,7 +3,7 @@ from setuptools import setup
setup(
name='slackdown',
- version='0.0.2',
+ version='0.0.3',
description='A simple Slack message text formatting to HTML code converter.',
author='Andrew Briz',
author_email='briz.andrew@gmail.com', | Updated pypi version number. | datadesk_slackdown | train | py |
857cf7c87d7cebe0fe51409eb73541847be71a97 | diff --git a/lib/natural/tokenizers/tokenizer_ja.js b/lib/natural/tokenizers/tokenizer_ja.js
index <HASH>..<HASH> 100644
--- a/lib/natural/tokenizers/tokenizer_ja.js
+++ b/lib/natural/tokenizers/tokenizer_ja.js
@@ -68,10 +68,10 @@ var Tokenizer = require('./tokenizer'),
*/
var TokenizerJa = function() {
this.chartype_ = [
- [/[一二三四五六七八九十百千万億兆]/, 'M'],
- [/[一-龠〆ヵヶ]/, 'H'],
- [/[ぁ-ゔ]/, 'I'],
- [/[ァ-ヴー]/, 'K'],
+ [/[〇一二三四五六七八九十百千万億兆]/, 'M'],
+ [/[一-龠〆]/, 'H'],
+ [/[ぁ-ゖ]/, 'I'],
+ [/[ァ-ヶー]/, 'K'],
[/[a-zA-Z]/, 'A'],
[/[0-9]/, 'N']
]; | Harmonize regexp with normalizer and add missing characters | NaturalNode_natural | train | js |
68883e30bf1e5df32b951d4d3177ce17f436df5e | diff --git a/lib/container/docker/template.js b/lib/container/docker/template.js
index <HASH>..<HASH> 100644
--- a/lib/container/docker/template.js
+++ b/lib/container/docker/template.js
@@ -133,6 +133,7 @@ const engine = {
update.version = deployment.Version.Index;
if (!update.TaskTemplate) update.TaskTemplate = {};
+ update.TaskTemplate.ForceUpdate = 1;
if (!update.TaskTemplate.ContainerSpec) update.TaskTemplate.ContainerSpec = {};
if (!update.TaskTemplate.ContainerSpec.Env) update.TaskTemplate.ContainerSpec.Env = [];
update.TaskTemplate.ContainerSpec.Env.push('SOAJS_REDEPLOY_TRIGGER=true');
@@ -166,7 +167,7 @@ const engine = {
options.params.newBuild.variables[i] = options.params.newBuild.variables[i].replace("$SOAJS_HA_NAME", "{{.Task.Name}}");
}
}
-
+ update.TaskTemplate.ForceUpdate = 1;
update.TaskTemplate.ContainerSpec.Env = options.params.newBuild.variables;
update.TaskTemplate.ContainerSpec.Image = options.params.newBuild.image;
update.TaskTemplate.ContainerSpec.Command = options.params.newBuild.command; | added force true when updating a service | soajs_soajs.core.drivers | train | js |
eb8d85c1d355d4f13d2778e5ef939fd6d35db2e5 | diff --git a/user/profile.php b/user/profile.php
index <HASH>..<HASH> 100644
--- a/user/profile.php
+++ b/user/profile.php
@@ -57,7 +57,7 @@ $context = $usercontext = get_context_instance(CONTEXT_USER, $userid, MUST_EXIST
if (!$currentuser &&
!empty($CFG->forceloginforprofiles) &&
- !has_capability('moodle/user:viewdetails', $context) &&
+ !has_capability('moodle/user:viewdetails', $context) &&
!has_coursecontact_role($userid)) {
// Course managers can be browsed at site level. If not forceloginforprofiles, allow access (bug #4366)
$struser = get_string('user');
@@ -96,7 +96,11 @@ if (isguestuser()) { // Guests can never edit their profile
}
}
-
+if (has_capability('moodle/user:viewhiddendetails', $context)) {
+ $hiddenfields = array();
+} else {
+ $hiddenfields = array_flip(explode(',', $CFG->hiddenuserfields));
+}
// Start setting up the page
$strpublicprofile = get_string('publicprofile'); | NOBUG Added missing hiddenfields var from user/profile.php | moodle_moodle | train | php |
1127407104f074cf9b9b8e0b53b3a27a56d3be50 | diff --git a/src/instrumentation/index.js b/src/instrumentation/index.js
index <HASH>..<HASH> 100644
--- a/src/instrumentation/index.js
+++ b/src/instrumentation/index.js
@@ -100,6 +100,8 @@ function getRawGroupedTracesTimings (traces, groupedTraces) {
logger.log('%c -- opbeat.instrumentation.getRawGroupedTracesTimings.error.relativeTraceStartLargerThanTransactionDuration', 'color: #ff0000', relativeTraceStart, transaction._start, transaction.duration(), { trace: trace, transaction: transaction })
} else if (relativeTraceStart < 0) {
logger.log('%c -- opbeat.instrumentation.getRawGroupedTracesTimings.error.negativeRelativeTraceStart!', 'color: #ff0000', relativeTraceStart, trace._start, transaction._start, trace)
+ } else if (trace.duration() > transaction.duration()) {
+ logger.log('%c -- opbeat.instrumentation.getRawGroupedTracesTimings.error.traceDurationLargerThanTranscationDuration', 'color: #ff0000', trace.duration(), transaction.duration(), { trace: trace, transaction: transaction })
} else {
data.push([groupIndex, relativeTraceStart, trace.duration()])
} | Don’t send traces where the duration is longer than the transaction duration | opbeat_opbeat-react | train | js |
c11e9d7076573e7cb2dc6d90aab93151c936190d | diff --git a/client/webpack/webpack.config.dev.js b/client/webpack/webpack.config.dev.js
index <HASH>..<HASH> 100644
--- a/client/webpack/webpack.config.dev.js
+++ b/client/webpack/webpack.config.dev.js
@@ -3,6 +3,9 @@ const webpack = require('webpack');
const config = require('./webpack.config.base');
module.exports = Object.assign({}, config, {
+ // See http://webpack.github.io/docs/configuration.html#devtool
+ devtool: 'inline-source-map',
+
plugins: config.plugins.concat([
new webpack.DefinePlugin({
'process.env': { | Add missing source maps in dev mode | springload_wagtaildraftail | train | js |
1f04f0a282774b7f4764cfe6e680d2fd0eb6b70d | diff --git a/ci/publishElmRelease.js b/ci/publishElmRelease.js
index <HASH>..<HASH> 100644
--- a/ci/publishElmRelease.js
+++ b/ci/publishElmRelease.js
@@ -19,7 +19,12 @@ async function tagElmRelease(config, context) {
exec(`elm-package publish`);
- return true;
+ return {
+ name: 'Elm release',
+ url:
+ 'http://package.elm-lang.org/packages/cultureamp/elm-css-modules-loader/' +
+ elmPackageJson.version,
+ };
}
module.exports = tagElmRelease; | fix(ci): Fix semantic-release plugin
Previously this returned true, but it should return an object with
information about the release.
Fixes #<I> | cultureamp_elm-css-modules-loader | train | js |
5d8b7c345e27138fdcb5f8270a50b926940ffdf8 | diff --git a/pygooglechart.py b/pygooglechart.py
index <HASH>..<HASH> 100644
--- a/pygooglechart.py
+++ b/pygooglechart.py
@@ -1,5 +1,5 @@
"""
-PyGoogleChart - A Python wrapper for the Google Chart API
+PyGoogleChart - A complete Python wrapper for the Google Chart API
http://pygooglechart.slowchop.com/ | branched to <I> | gak_pygooglechart | train | py |
1be69cdcaaf524a1571ffd4c4384d96cca000967 | diff --git a/salt/modules/ipset.py b/salt/modules/ipset.py
index <HASH>..<HASH> 100644
--- a/salt/modules/ipset.py
+++ b/salt/modules/ipset.py
@@ -13,7 +13,6 @@ if six.PY3:
import ipaddress
else:
import salt.ext.ipaddress as ipaddress
-from salt.ext.six.moves import range # pylint: disable=import-error,redefined-builtin
# Fix included in py2-ipaddress for 32bit architectures | No longer using this range functions.
So don't import it. | saltstack_salt | train | py |
b69c0feae9d66ce8a7099cc640ff293bd783e0af | diff --git a/src/Task/Task.php b/src/Task/Task.php
index <HASH>..<HASH> 100644
--- a/src/Task/Task.php
+++ b/src/Task/Task.php
@@ -156,7 +156,7 @@ abstract class Task
$this->file->set('log', $logFile);
$this->setStatus(self::STATE_RUNNING);
- $this->addOutput('Task started.' . "\n");
+ $this->addOutput('Task ' . $this->getId() . ' started.' . "\n");
$this->doPerform();
} catch (\Exception $exception) { | Show id in task log file | tenside_core | train | php |
138a157b4471a7a96144fa5ef302007938637105 | diff --git a/go/test/endtoend/vtgate/main_test.go b/go/test/endtoend/vtgate/main_test.go
index <HASH>..<HASH> 100644
--- a/go/test/endtoend/vtgate/main_test.go
+++ b/go/test/endtoend/vtgate/main_test.go
@@ -129,12 +129,6 @@ create table t7_fk(
CONSTRAINT t7_fk_ibfk_1 foreign key (t7_uid) references t7_xxhash(uid)
on delete set null on update cascade
) Engine=InnoDB;
-
-create table t8(
- id8 bigint,
- testId bigint,
- primary key(id8)
-) Engine=InnoDB;
`
VSchema = `
@@ -369,14 +363,6 @@ create table t8(
"name": "unicode_loose_xxhash"
}
]
- },
- "t8": {
- "column_vindexes": [
- {
- "column": "id8",
- "name": "hash"
- }
- ]
}
}
}` | Removed unrequired table from vtgate's main E2E test | vitessio_vitess | train | go |
e5f0fb1b48d2296dfacb73d7ce4591b3f9d1934e | diff --git a/src/MadeYourDay/Contao/ThemeAssistant.php b/src/MadeYourDay/Contao/ThemeAssistant.php
index <HASH>..<HASH> 100644
--- a/src/MadeYourDay/Contao/ThemeAssistant.php
+++ b/src/MadeYourDay/Contao/ThemeAssistant.php
@@ -574,10 +574,15 @@ class ThemeAssistant extends \Backend
}
}
elseif ($data['templateVars'][$key]['type'] === 'length') {
- if ($value && isset($value['value']) && isset($value['unit'])) {
- $value = (trim($value['value']) ? trim($value['value']) : '0') . trim($value['unit']);
+ if (is_array($value)) {
+ $value['value'] = empty($value['value']) ? '0' : $value['value'];
+ $value['unit'] = empty($value['unit']) ? '' : $value['unit'];
+ $value = trim($value['value']) . trim($value['unit']);
}
- if (! $value) {
+ else if (is_string($value) && trim($value)) {
+ $value = trim($value);
+ }
+ else {
$value = '0';
}
} | Fixed bug with missing unit in inputUnid fields | madeyourday_contao-rocksolid-theme-assistant | train | php |
f54ff2083046bda1e1b88b457fc9672b8dc2be16 | diff --git a/lib/collection.js b/lib/collection.js
index <HASH>..<HASH> 100644
--- a/lib/collection.js
+++ b/lib/collection.js
@@ -15,6 +15,19 @@ var Static = require('./static')
, model = require('./model');
/**
+ * Create a collection of sub documents.
+ *
+ * @param {Constructor} Child sub document constructor
+ * @param {Object} parent main document
+ * @return {Object} constructed sub document
+ * @api private
+ */
+function createSubCollection(Child, parent) {
+ Child.prototype.save = Child.prototype.save.bind(parent);
+ return new Child;
+}
+
+/**
* Default model definition of fossa.
*
* Options:
@@ -41,6 +54,17 @@ module.exports = function collection(options) {
* @api public
*/
, initialize: function initialize() {
+ // Initialize subdocuments.
+ var subdocs = options.subdocs;
+ if (subdocs) {
+ Object.keys(subdocs).forEach(function addSubDocs(key) {
+ this[key] = createSubCollection(subdocs[key], this);
+ }.bind(this));
+
+ // Remove the key from the options.
+ delete options.subdocs;
+ }
+
// Store references the collection name, database and fossa.
_.extend(this, options);
} | [minor] bind subdocuments to the collection, save routes to main | observing_fossa | train | js |
672ec99dd923d69ea6cf11d0b6280ad897445868 | diff --git a/ospd/command/command.py b/ospd/command/command.py
index <HASH>..<HASH> 100644
--- a/ospd/command/command.py
+++ b/ospd/command/command.py
@@ -71,7 +71,7 @@ class BaseCommand(metaclass=InitSubclassMeta):
'name': self.get_name(),
'attributes': self.get_attributes(),
'description': self.get_description(),
- 'element': self.get_elements(),
+ 'elements': self.get_elements(),
}
def __repr__(self): | Fix typo in BaseCommand as_dict
It must be elements instead of element. | greenbone_ospd | train | py |
242d3261460a6206ac8fee39f69df33972af113e | diff --git a/btb/benchmark/challenges/atmchallenge.py b/btb/benchmark/challenges/atmchallenge.py
index <HASH>..<HASH> 100644
--- a/btb/benchmark/challenges/atmchallenge.py
+++ b/btb/benchmark/challenges/atmchallenge.py
@@ -113,7 +113,7 @@ class ATMChallenge(MLChallenge):
loaded_challenges = []
for dataset in datasets:
try:
- loaded_challenges.append(cls(dataset=dataset))
+ loaded_challenges.append(cls(dataset))
LOGGER.info('Dataset %s loaded', dataset)
except Exception as ex:
LOGGER.warn('Dataset: %s could not be loaded. Error: %s', dataset, ex)
@@ -139,4 +139,4 @@ class ATMChallenge(MLChallenge):
super().__init__(**kwargs)
def __repr__(self):
- return '{}({})'.format(self.__class__.__name__, self.dataset)
+ return "{}('{}')".format(self.__class__.__name__, self.dataset) | Fix lint / details. | HDI-Project_BTB | train | py |
69aecdf6e3027c44f3f75589cf0f2bab42716cec | diff --git a/bids/variables/io.py b/bids/variables/io.py
index <HASH>..<HASH> 100644
--- a/bids/variables/io.py
+++ b/bids/variables/io.py
@@ -303,10 +303,8 @@ def _load_time_variables(layout, dataset=None, columns=None, scan_length=None,
if regressors:
sub_ents = {k: v for k, v in entities.items()
if k in BASE_ENTITIES}
- # .tsv.gz is not strictly BIDS compatible, but our test data is gzipped,
- # so we'll be forgiving here
confound_files = layout.get(suffix=['regressors', 'timeseries'],
- scope=scope, extension=['.tsv', '.tsv.gz'],
+ scope=scope, extension='.tsv',
**sub_ents)
for cf in confound_files:
_data = pd.read_csv(cf.path, sep='\t', na_values='n/a') | FIX: Go back to .tsv only | bids-standard_pybids | train | py |
7e8dcdb2388e5e17eaa42f15b497ce8496e42c25 | diff --git a/lib/gxapi/google_analytics.rb b/lib/gxapi/google_analytics.rb
index <HASH>..<HASH> 100644
--- a/lib/gxapi/google_analytics.rb
+++ b/lib/gxapi/google_analytics.rb
@@ -100,9 +100,13 @@ module Gxapi
@client ||= begin
client = Google::APIClient.new
# key stuff is hardcoded for now
- key = Google::APIClient::KeyUtils.load_from_pkcs12(
- Gxapi.config.google.private_key_path, 'notasecret'
- )
+ if encoded_key = Gxapi.config.google.private_key
+ key = OpenSSL::PKey::RSA.new Base64.decode64(encoded_key), 'notasecret'
+ else
+ key = Google::APIClient::KeyUtils.load_from_pkcs12(
+ Gxapi.config.google.private_key_path, 'notasecret'
+ )
+ end
client.authorization = Signet::OAuth2::Client.new(
token_credential_uri: 'https://accounts.google.com/o/oauth2/token',
audience: 'https://accounts.google.com/o/oauth2/token', | Allow private key to be passed as a Base<I> string | dlangevin_gxapi_rails | train | rb |
48c678fb7f87b9614d64305f11e47e487f25de47 | diff --git a/upup/pkg/fi/cloudup/bootstrapchannelbuilder.go b/upup/pkg/fi/cloudup/bootstrapchannelbuilder.go
index <HASH>..<HASH> 100644
--- a/upup/pkg/fi/cloudup/bootstrapchannelbuilder.go
+++ b/upup/pkg/fi/cloudup/bootstrapchannelbuilder.go
@@ -203,7 +203,13 @@ func (b *BootstrapChannelBuilder) buildManifest() (*channelsapi.Addons, map[stri
if b.cluster.Spec.Networking.Weave != nil {
key := "networking.weave"
- version := "1.9.4"
+ var version string
+ switch {
+ case kv.Major == 1 && kv.Minor <= 5:
+ version = "1.9.3"
+ default:
+ version = "1.9.4"
+ }
// TODO: Create configuration object for cni providers (maybe create it but orphan it)?
location := key + "/v" + version + ".yaml" | for different k8s version different weave | kubernetes_kops | train | go |
10c5845fe47ebbbb04f09bf44c7af993b457a234 | diff --git a/src/main/java/de/btobastian/javacord/entities/message/MessageDecoration.java b/src/main/java/de/btobastian/javacord/entities/message/MessageDecoration.java
index <HASH>..<HASH> 100644
--- a/src/main/java/de/btobastian/javacord/entities/message/MessageDecoration.java
+++ b/src/main/java/de/btobastian/javacord/entities/message/MessageDecoration.java
@@ -19,20 +19,16 @@
package de.btobastian.javacord.entities.message;
/**
- * All decorations available in discord.
+ * All basic decorations available in discord.
*/
public enum MessageDecoration {
ITALICS("*"),
BOLD("**"),
- BOLD_ITALICS("***"),
STRIKEOUT("~~"),
CODE_SIMPLE("`"),
CODE_LONG("```"),
- UNDERLINE("__"),
- UNDERLINE_ITALICS("__*"),
- UNDERLINE_BOLD("__**"),
- UNDERLINE_BOLD_ITALICS("__***");
+ UNDERLINE("__");
private final String prefix;
private final String suffix; | MessageDecoration does no longer contain combined decorations (e.g. BOLD_ITALICS) | Javacord_Javacord | train | java |
b14251fd1abc533aedae409b6d3545a0023ab5e7 | diff --git a/thunder/series/series.py b/thunder/series/series.py
index <HASH>..<HASH> 100755
--- a/thunder/series/series.py
+++ b/thunder/series/series.py
@@ -881,18 +881,15 @@ class Series(Data):
def convolve(self, signal, mode='full'):
"""
- Conolve series data against another signal.
+ Convolve series data against another signal.
Parameters
----------
signal : array
Signal to convolve with (must be 1D)
- var : str
- Variable name if loading from a MAT file
-
mode : str, optional, default='full'
- Mode of convolution, options are 'full', 'same', and 'same'
+ Mode of convolution, options are 'full', 'same', and 'valid'
"""
from numpy import convolve | Doc fixes in series.convolve | thunder-project_thunder | train | py |
116f578deb5183eddfce493ff4ad47060585ea3a | diff --git a/Gruntfile.js b/Gruntfile.js
index <HASH>..<HASH> 100644
--- a/Gruntfile.js
+++ b/Gruntfile.js
@@ -287,9 +287,27 @@ module.exports = function(grunt) {
return good;
});
- grunt.registerTask('docs', ['eslint:examples', 'clean:docs', 'jsdoc', 'makeindex']);
- grunt.registerTask('build', ['eslint:lib', 'clean:dist', 'requirejs', /*'concat',*/ 'uglify']);
- grunt.registerTask('publish', ['bumpversion', 'build', 'browserify', 'docs']);
+ grunt.registerTask('docs', [
+ 'eslint:examples',
+ 'clean:docs',
+ 'jsdoc',
+ 'makeindex',
+ ]);
+ grunt.registerTask('build', [
+ 'eslint:lib',
+ 'clean:dist',
+ 'requirejs',
+ /*'concat',*/
+ 'uglify',
+ ]);
+ grunt.registerTask('publish', [
+ 'eslint:lib',
+ 'eslint:examples',
+ 'bumpversion',
+ 'build',
+ 'browserify',
+ 'docs',
+ ]);
grunt.registerTask('default', 'build');
setLicense(); | run lint before bumping version | greggman_twgl.js | train | js |
7b38c3571e89ea3880d7d95bd41c72538ab9fafa | diff --git a/h5p-default-storage.class.php b/h5p-default-storage.class.php
index <HASH>..<HASH> 100644
--- a/h5p-default-storage.class.php
+++ b/h5p-default-storage.class.php
@@ -297,13 +297,10 @@ class H5PDefaultStorage implements \H5PFileStorage {
* Recursive function that makes sure the specified directory exists and
* is writable.
*
- * TODO: Will be made private when the editor file handling is done by this
- * class!
- *
* @param string $path
* @return bool
*/
- public static function dirReady($path) {
+ private static function dirReady($path) {
if (!file_exists($path)) {
$parent = preg_replace("/\/[^\/]+\/?$/", '', $path);
if (!self::dirReady($parent)) { | Made dir handling private to prevent abuse
h5p/h5p-moodle-plugin#<I>
HFJ-<I> | h5p_h5p-php-library | train | php |
aa68330bd4c01deee355a86e5f61f4c9d25cc6cc | diff --git a/lib/punchblock/translator/asterisk/call.rb b/lib/punchblock/translator/asterisk/call.rb
index <HASH>..<HASH> 100644
--- a/lib/punchblock/translator/asterisk/call.rb
+++ b/lib/punchblock/translator/asterisk/call.rb
@@ -37,9 +37,9 @@ module Punchblock
when 'Hangup'
pb_logger.debug "Received a Hangup AMI event. Sending End event."
send_pb_event Event::End.new(:reason => :hangup)
- when 'AGIExec'
- if component = component_with_id(ami_event['CommandId'])
+ when 'AsyncAGI'
pb_logger.debug "Received an AsyncAGI event. Looking for matching AGICommand component."
+ if component = component_with_id(ami_event['CommandID'])
pb_logger.debug "Found component #{component.id} for event. Forwarding event..."
component.handle_ami_event! ami_event
end | Calls should send AsyncAGI events to components, not AGIExec events | adhearsion_punchblock | train | rb |
41c9db932730bbf3f2832053ffca9dc557f82970 | diff --git a/json-formatter/go/message_lookup.go b/json-formatter/go/message_lookup.go
index <HASH>..<HASH> 100644
--- a/json-formatter/go/message_lookup.go
+++ b/json-formatter/go/message_lookup.go
@@ -1,7 +1,6 @@
package json
import (
- "fmt"
messages "github.com/cucumber/cucumber-messages-go/v6"
)
@@ -40,10 +39,7 @@ func (self *MessageLookup) ProcessMessage(envelope *messages.Envelope) (err erro
scenario := child.GetScenario()
if scenario != nil {
- fmt.Println("Adding scenario", scenario.Name)
- fmt.Println("Scenario ID", scenario.Id)
self.scenarioByID[scenario.Id] = scenario
- fmt.Println("scenarios stored:", len(self.scenarioByID))
for _, step := range scenario.Steps {
self.stepByID[step.Id] = step
} | Remove STDOUT printing which breaks JSON output | cucumber_cucumber | train | go |
abc7e2cfdfee2449ca3a81e854c307fc534b2713 | diff --git a/assets/shiny-server.js b/assets/shiny-server.js
index <HASH>..<HASH> 100644
--- a/assets/shiny-server.js
+++ b/assets/shiny-server.js
@@ -23,7 +23,7 @@
store = window.localStorage;
whitelistStr = store["shiny.whitelist"];
if (!whitelistStr || whitelistStr === ""){
- whitelist = [];
+ whitelist = availableOptions;
} else{
whitelist = JSON.parse(whitelistStr);
}
@@ -66,7 +66,8 @@
$(document).keydown(function(event){
- if (event.shiftKey && event.ctrlKey && event.keyCode == 65){
+ console.log(event);
+ if (event.shiftKey && event.ctrlKey && event.altKey && event.keyCode == 65){
ShinyServer.toggleNetworkSelector();
}
}); | Changed keyboard shortcut to ctrl+shift+alt+a | rstudio_shiny-server | train | js |
eb88a532231923cf099d4c3625773b7524675f29 | diff --git a/public/app/plugins/datasource/opentsdb/datasource.js b/public/app/plugins/datasource/opentsdb/datasource.js
index <HASH>..<HASH> 100644
--- a/public/app/plugins/datasource/opentsdb/datasource.js
+++ b/public/app/plugins/datasource/opentsdb/datasource.js
@@ -173,11 +173,7 @@ function (angular, _, kbn) {
};
OpenTSDBDatasource.prototype.performAggregatorsQuery = function() {
- var options = {
- method: 'GET',
- url: this.url + '/api/aggregators'
- };
- return $http(options).then(function(result) {
+ return this._get('/api/aggregators', {}).then(function(result) {
if (result.data instanceof Array) {
return result.data.sort();
} else { | fix for a change in datasource object | grafana_grafana | train | js |
be4570689f0a50efdc4ee45d2aa3562cf0f5f202 | diff --git a/src/frontend/org/voltdb/planner/AbstractParsedStmt.java b/src/frontend/org/voltdb/planner/AbstractParsedStmt.java
index <HASH>..<HASH> 100644
--- a/src/frontend/org/voltdb/planner/AbstractParsedStmt.java
+++ b/src/frontend/org/voltdb/planner/AbstractParsedStmt.java
@@ -329,6 +329,10 @@ public abstract class AbstractParsedStmt {
String type = attrs.getNamedItem("type").getNodeValue();
ExpressionType exprType = ExpressionType.get(type);
AbstractExpression expr = null;
+
+ if (exprType == ExpressionType.INVALID) {
+ throw new PlanningErrorException("Unsupported operation type '" + type + "'");
+ }
try {
expr = exprType.getExpressionClass().newInstance();
} catch (Exception e) { | Provide more informative message when invalid operation is encountered during planning. | VoltDB_voltdb | train | java |
a753c7882d4c037d3922378cf6cb3468069575e6 | diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -24,13 +24,16 @@ version = version_mod.version
qml_dir = os.path.abspath('pyblish_qml/qml')
qml_package_data = list()
for root, dirs, files in os.walk(qml_dir):
- for suffix in ("ttf", "qml", "js", "txt", "png", "py", "otf"):
+ for suffix in ("ttf", "qml", "js", "txt", "png", "py", "otf", "ico"):
relpath = os.path.relpath(root, qml_dir)
relpath = relpath.replace("\\", "/")
qml_package_data.append("qml/" + relpath.strip(".") + "/*." + suffix)
# qmldir file has no suffix
qml_package_data.append(os.path.join("qml", "Pyblish", "qmldir"))
+qml_package_data.append(os.path.join("qml", "Pyblish", "Graphs", "qmldir"))
+qml_package_data.append(os.path.join("qml", "Pyblish", "ListItems", "qmldir"))
+qml_package_data.append(os.path.join("qml", "Perspective", "qmldir"))
classifiers = [
"Development Status :: 5 - Production/Stable", | missing some qmldir files and ico file | pyblish_pyblish-qml | train | py |
21726bec6bd62589c81500bc6ad970126e585c76 | diff --git a/src/main/java/skadistats/clarity/decoder/s2/prop/VectorDecoder.java b/src/main/java/skadistats/clarity/decoder/s2/prop/VectorDecoder.java
index <HASH>..<HASH> 100644
--- a/src/main/java/skadistats/clarity/decoder/s2/prop/VectorDecoder.java
+++ b/src/main/java/skadistats/clarity/decoder/s2/prop/VectorDecoder.java
@@ -13,6 +13,7 @@ public class VectorDecoder implements FieldDecoder<Vector> {
return new Vector(
new float[] {
fd.decode(bs, f),
+ fd.decode(bs, f),
fd.decode(bs, f)
}
); | Baseline decoding now finally works!
It WORKS! | skadistats_clarity | train | java |
ca115a92c5805b496df8d64e745d35a27f9e90e1 | diff --git a/jellyfish/_jellyfish.py b/jellyfish/_jellyfish.py
index <HASH>..<HASH> 100644
--- a/jellyfish/_jellyfish.py
+++ b/jellyfish/_jellyfish.py
@@ -466,7 +466,7 @@ def metaphone(s):
i += 1
if nextnext in 'aeiou' or nextnext == '*****':
result.append('w')
- elif next in 'aeiou' or next == '*****':
+ elif next in 'aeiou':
result.append('w')
elif c == 'x':
if i == 0: | fix trailing W in metaphone Python impl, ANDREW->ANTR | jamesturk_jellyfish | train | py |
3f20039791286afcdd854aa01f16cd96b342184e | diff --git a/lib/infusionsoft/version.rb b/lib/infusionsoft/version.rb
index <HASH>..<HASH> 100644
--- a/lib/infusionsoft/version.rb
+++ b/lib/infusionsoft/version.rb
@@ -1,4 +1,4 @@
module Infusionsoft
# The version of the gem
- VERSION = '1.3.3d'.freeze unless defined?(::Infusionsoft::VERSION)
+ VERSION = '1.3.4a'.freeze unless defined?(::Infusionsoft::VERSION)
end | <I> is a bad version history so skipping to <I> | nateleavitt_infusionsoft | train | rb |
9210bac0e4c849270dc5b5d15237a1491c22ef34 | diff --git a/src/Services/FieldSetFieldFinder.php b/src/Services/FieldSetFieldFinder.php
index <HASH>..<HASH> 100644
--- a/src/Services/FieldSetFieldFinder.php
+++ b/src/Services/FieldSetFieldFinder.php
@@ -237,8 +237,12 @@ class FieldSetFieldFinder
}
elseif( $field instanceof HasOne )
{
- /** @var HasOne $field */
- return $field->getRelationFieldSet( $field->getValue() ?: $field->getRelatedModel() );
+ if( $field->getValue() )
+ {
+ return $field->getRelationFieldSet( $field->getValue() );
+ }
+
+ return new FieldSet( $field->getRelatedModel(), $field->getNameSpacedName() );
}
elseif( $field instanceof Translatable )
{ | Fix issue with arbory image field validation in new nodes | arbory_arbory | train | php |
20f59862046b7debdce8e1e859ebb2295cf2a27b | diff --git a/npm/test-integration.js b/npm/test-integration.js
index <HASH>..<HASH> 100644
--- a/npm/test-integration.js
+++ b/npm/test-integration.js
@@ -23,7 +23,7 @@ module.exports = function (exit) {
recursive(SPEC_SOURCE_DIR, function (err, files) {
if (err) { console.error(err); return exit(1); }
- var mocha = new Mocha({timeout: 1000 * 60});
+ var mocha = new Mocha({ timeout: 1000 * 60 });
// specially load bootstrap file
mocha.addFile(path.join(SPEC_SOURCE_DIR, '_bootstrap.js'));
diff --git a/npm/test-system.js b/npm/test-system.js
index <HASH>..<HASH> 100644
--- a/npm/test-system.js
+++ b/npm/test-system.js
@@ -32,7 +32,10 @@ module.exports = function (exit) {
// run test specs using mocha
function (next) {
recursive(SPEC_SOURCE_DIR, function (err, files) {
- if (err) { console.error(err.stack || err); return next(1); }
+ if (err) {
+ console.error(err.stack || err);
+ return next(1);
+ }
var mocha = new Mocha();
@@ -47,7 +50,7 @@ module.exports = function (exit) {
});
},
- // packity
+ // packity
function (next) {
var packity = require('packity'),
options = { | Fixed lint errors in scripts | postmanlabs_uvm | train | js,js |
b908c82b7b60afae16f510f6cbb5093b9a69ee33 | diff --git a/buffer/src/main/java/io/netty/buffer/ByteBufOutputStream.java b/buffer/src/main/java/io/netty/buffer/ByteBufOutputStream.java
index <HASH>..<HASH> 100644
--- a/buffer/src/main/java/io/netty/buffer/ByteBufOutputStream.java
+++ b/buffer/src/main/java/io/netty/buffer/ByteBufOutputStream.java
@@ -40,7 +40,7 @@ public class ByteBufOutputStream extends OutputStream implements DataOutput {
private final ByteBuf buffer;
private final int startIndex;
- private final DataOutputStream utf8out = new DataOutputStream(this);
+ private DataOutputStream utf8out; // lazily-instantiated
/**
* Creates a new stream which writes data to the specified {@code buffer}.
@@ -131,7 +131,11 @@ public class ByteBufOutputStream extends OutputStream implements DataOutput {
@Override
public void writeUTF(String s) throws IOException {
- utf8out.writeUTF(s);
+ DataOutputStream out = utf8out;
+ if (out == null) {
+ utf8out = out = new DataOutputStream(this);
+ }
+ out.writeUTF(s);
}
/** | Lazily construct contained DataOutputStream in ByteBufOutputStream (#<I>)
Motivation
This is used solely for the DataOutput#writeUTF8() method, which may
often not be used.
Modifications
Lazily construct the contained DataOutputStream in ByteBufOutputStream.
Result
Saves an allocation in some common cases | netty_netty | train | java |
87e22d8cc59f5a07cf50f1aa66bb9f321d6f95f7 | diff --git a/packages/mobx-little-router-react/src/components/Link.js b/packages/mobx-little-router-react/src/components/Link.js
index <HASH>..<HASH> 100644
--- a/packages/mobx-little-router-react/src/components/Link.js
+++ b/packages/mobx-little-router-react/src/components/Link.js
@@ -16,12 +16,19 @@ class Link extends Component {
activeClassName?: string,
style?: Object,
children?: React.Element<*>,
- exact?: boolean
+ exact?: boolean,
+ reload?: boolean
}
onClick = (evt: Event) => {
+ const { to, reload } = this.props
+
+ if (reload) {
+ return
+ }
+
evt.preventDefault()
- this.context.router.history.push(this.props.to)
+ this.context.router.history.push(to)
}
render() { | Adding reload property to Link | mobx-little-router_mobx-little-router | train | js |
f389671128d7a9d1fb3a271a8878d0191a4baf31 | diff --git a/query.go b/query.go
index <HASH>..<HASH> 100644
--- a/query.go
+++ b/query.go
@@ -217,11 +217,10 @@ func First(subject Enumerable) (retval interface{}, err error) {
var isOpen bool
- if retval, isOpen = <-subject.Enumerate(nil); isOpen {
+ if retval, isOpen = <-subject.Enumerate(done); isOpen {
err = nil
}
-
- subject.Enumerate(done)
+ close(done)
return
} | Fixing memory leak in `First(Enumerable)` impl. | marstr_collection | train | go |
e47e30c5d6cc44703c118d2141e23263af7d2200 | diff --git a/lxd/storage/drivers/driver_btrfs_volumes.go b/lxd/storage/drivers/driver_btrfs_volumes.go
index <HASH>..<HASH> 100644
--- a/lxd/storage/drivers/driver_btrfs_volumes.go
+++ b/lxd/storage/drivers/driver_btrfs_volumes.go
@@ -507,7 +507,7 @@ func (d *btrfs) SetVolumeQuota(vol Volume, size string, op *operations.Operation
}
} else if qgroup != "" {
// Remove the limit.
- _, err := shared.RunCommand("btrfs", "qgroup", "destroy", qgroup, volPath)
+ _, err := shared.RunCommand("btrfs", "qgroup", "limit", "none", qgroup, volPath)
if err != nil {
return err
} | lxd/storage/drivers/btrfs: Don't destroy qgroups
When deleting a qgroup, it's not possible to get the usage of an
instance or volume anymore. Therefore, instead of deleting the qgroup,
we just don't set a limit. | lxc_lxd | train | go |
495e0c36404e1f11a1b142355e7e59737ee15ad8 | diff --git a/app/angular.audio.js b/app/angular.audio.js
index <HASH>..<HASH> 100644
--- a/app/angular.audio.js
+++ b/app/angular.audio.js
@@ -150,6 +150,12 @@ angular.module('ngAudio', [])
.factory('NgAudioObject', ['cleverAudioFindingService', '$rootScope', '$interval', '$timeout', 'ngAudioGlobals', function(cleverAudioFindingService, $rootScope, $interval, $timeout, ngAudioGlobals) {
return function(id, scope) {
+ function twiddle(){
+ audio.play();
+ audio.pause();
+ window.removeEventListener("click",twiddle);
+ }
+
var $audioWatch,
$intervalWatch,
$willPlay = false,
@@ -287,9 +293,9 @@ angular.module('ngAudio', [])
audio = nativeAudio;
if (ngAudioGlobals.unlock) {
- window.addEventListener("click",function twiddle(){
- audio.play();
- audio.pause();
+ window.addEventListener("click", twiddle);
+
+ audio.addEventListener('playing', function() {
window.removeEventListener("click",twiddle);
}); | Better fix for pr<I>, fix twiddle being called too early. | danielstern_ngAudio | train | js |
71e7f76fb6d362e7574b15efa79822469fb43aea | diff --git a/lib/fabrique/version.rb b/lib/fabrique/version.rb
index <HASH>..<HASH> 100644
--- a/lib/fabrique/version.rb
+++ b/lib/fabrique/version.rb
@@ -1,3 +1,3 @@
module Fabrique
- VERSION = "1.0.1"
+ VERSION = "1.0.2"
end | Release <I>
* Cope with bundler-<I>. | starjuice_fabrique | train | rb |
28de73985d4d7ab5b165e9b031c5cc9f4b7ca5e5 | diff --git a/dom/statusclient/src/main/java/org/isisaddons/module/publishmq/dom/statusclient/StatusMessageClient.java b/dom/statusclient/src/main/java/org/isisaddons/module/publishmq/dom/statusclient/StatusMessageClient.java
index <HASH>..<HASH> 100644
--- a/dom/statusclient/src/main/java/org/isisaddons/module/publishmq/dom/statusclient/StatusMessageClient.java
+++ b/dom/statusclient/src/main/java/org/isisaddons/module/publishmq/dom/statusclient/StatusMessageClient.java
@@ -83,8 +83,10 @@ public class StatusMessageClient {
ensureInitialized();
- Client client = clientBuilder.build();
+ Client client = null;
try {
+ client = clientBuilder.build();
+
final WebTarget webTarget = client.target(uriBuilder.build());
final Invocation.Builder invocationBuilder = webTarget.request();
@@ -103,6 +105,8 @@ public class StatusMessageClient {
// if failed to log message via REST service, then fallback by logging to slf4j
LOG.warn(statusMessage.toString());
}
+ } catch(Exception ex) {
+ LOG.error(statusMessage.toString(), ex);
} finally {
closeQuietly(client);
} | Graciously handle errors of the statusmessage client | isisaddons-legacy_isis-module-publishmq | train | java |
cd052b471ec3e808e293e888ce4670dbf790197f | diff --git a/src/java/com/threerings/gwt/ui/Popups.java b/src/java/com/threerings/gwt/ui/Popups.java
index <HASH>..<HASH> 100644
--- a/src/java/com/threerings/gwt/ui/Popups.java
+++ b/src/java/com/threerings/gwt/ui/Popups.java
@@ -35,9 +35,9 @@ public class Popups
* Displays an info message centered horizontally on the page and centered vertically on the
* specified target widget.
*/
- public static void infoOver (String message, Widget target)
+ public static void infoOn (String message, Widget target)
{
- showOver(new InfoPopup(message), target);
+ centerOn(new InfoPopup(message), target);
}
/** | Actually infoOn() and centerOn() is what we wanted. | threerings_gwt-utils | train | java |
6f3e6485c8bb7d7b585e4d8e681052a807c543ca | diff --git a/azurerm/import_arm_public_ip_test.go b/azurerm/import_arm_public_ip_test.go
index <HASH>..<HASH> 100644
--- a/azurerm/import_arm_public_ip_test.go
+++ b/azurerm/import_arm_public_ip_test.go
@@ -60,7 +60,8 @@ func TestAccAzureRMPublicIpStatic_importBasic_withDNSLabel(t *testing.T) {
resourceName := "azurerm_public_ip.test"
ri := acctest.RandInt()
- config := testAccAzureRMPublicIPStatic_basic_withDNSLabel(ri, testLocation())
+ dnl := fmt.Sprintf("tfacc%d", ri)
+ config := testAccAzureRMPublicIPStatic_basic_withDNSLabel(ri, testLocation(), dnl)
resource.Test(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t) }, | Updated import test to handle additional parameter. (#<I>) | terraform-providers_terraform-provider-azurerm | train | go |
704779dba6b03f557420211e15e907efa46490b7 | diff --git a/unit_lookup_table.py b/unit_lookup_table.py
index <HASH>..<HASH> 100644
--- a/unit_lookup_table.py
+++ b/unit_lookup_table.py
@@ -139,6 +139,7 @@ latex_symbol_lut = {
"code_temperature" : "\\rm{code}\/\\rm{temperature}",
"code_metallicity" : "\\rm{code}\/\\rm{metallicity}",
"code_velocity" : "\\rm{code}\/\\rm{velocity}",
+ "code_magnetic" : "\\rm{code}\/\\rm{magnetic}",
"Msun" : "\\rm{M}_\\odot",
"msun" : "\\rm{M}_\\odot",
"Rsun" : "\\rm{R}_\\odot", | fixing the pluto-specific field list in the docs
--HG--
branch : yt | yt-project_unyt | train | py |
0dbb32d67d53ac2567d43f684553a5a6f2eea2ed | diff --git a/picocli-examples/src/main/java/picocli/examples/subcommands/ParentCommandDemo.java b/picocli-examples/src/main/java/picocli/examples/subcommands/ParentCommandDemo.java
index <HASH>..<HASH> 100644
--- a/picocli-examples/src/main/java/picocli/examples/subcommands/ParentCommandDemo.java
+++ b/picocli-examples/src/main/java/picocli/examples/subcommands/ParentCommandDemo.java
@@ -42,7 +42,6 @@ public class ParentCommandDemo {
@Option(names = {"-r", "--recursive"}, description = "Recursively list subdirectories")
private boolean recursive;
- @Override
public void run() {
list(new File(parent.baseDirectory, "."));
} | fix compile error (Java 5 does not support @Override on interface implementations) | remkop_picocli | train | java |
88c9581217f38f2a1597be577ef33b6be2521072 | diff --git a/selene/core/entity.py b/selene/core/entity.py
index <HASH>..<HASH> 100644
--- a/selene/core/entity.py
+++ b/selene/core/entity.py
@@ -272,7 +272,7 @@ class Element(WaitingEntity):
# also it will make sense to make this behaviour configurable...
return self
- def actual_not_overlapped_element(self):
+ def _actual_not_overlapped_element(self):
element = self()
element_html = re.sub('\\s+', ' ', element.get_attribute('outerHTML'))
@@ -320,7 +320,7 @@ class Element(WaitingEntity):
def type(self, text: Union[str, int]) -> Element:
def fn(element: Element):
if self.config.wait_for_no_overlap_found_by_js:
- element = element.actual_not_overlapped_element()
+ element = element._actual_not_overlapped_element()
else:
element = element()
element.send_keys(str(text)) | [#<I>] REFACTOR: privatizing element.actual_not_overlapped_element
because the method name is pretty risky, might be changed, e.g. to elaborate that it's done by js, or maybe in relation to [#<I>] | yashaka_selene | train | py |
648dd1125330b4363b28c17bf3c1dd24fcc2b1b1 | diff --git a/kappa/event_source.py b/kappa/event_source.py
index <HASH>..<HASH> 100644
--- a/kappa/event_source.py
+++ b/kappa/event_source.py
@@ -179,6 +179,8 @@ class S3EventSource(EventSource):
LOG.debug(exc.response)
LOG.exception('Unable to add S3 event source')
+ enable = add
+
def update(self, function):
self.add(function)
@@ -199,6 +201,8 @@ class S3EventSource(EventSource):
NotificationConfiguration=response)
LOG.debug(response)
+ disable = remove
+
def status(self, function):
LOG.debug('status for s3 notification for %s', function.name)
response = self._s3.call( | Add ability to enable and disable S3EventSource | garnaat_kappa | train | py |
f7c9a9f6f6c53bc5411658d66be4cf4dea0a7141 | diff --git a/spec/unit/resources/base_spec.rb b/spec/unit/resources/base_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/unit/resources/base_spec.rb
+++ b/spec/unit/resources/base_spec.rb
@@ -37,8 +37,9 @@ module ChefAPI
describe '.build' do
it 'creates a new instance' do
described_class.stub(:new)
+ described_class.stub(:schema).and_return(double(attributes: {}))
- expect(described_class).to receive(:new).with(foo: 'bar')
+ expect(described_class).to receive(:new).with({foo: 'bar'}, {})
described_class.build(foo: 'bar')
end
end | Pass attributes into the Base.build method | tas50_chef-api | train | rb |
e9acc7973f39761a237b015a9bfc103c4a10b085 | diff --git a/timeside/core.py b/timeside/core.py
index <HASH>..<HASH> 100644
--- a/timeside/core.py
+++ b/timeside/core.py
@@ -28,7 +28,8 @@ import re
import numpy
import uuid
import networkx as nx
-
+import inspect
+import os
import gobject
gobject.threads_init()
@@ -52,10 +53,14 @@ class MetaProcessor(MetaComponent):
if id in _processors:
# Doctest test can duplicate a processor
# This can be identify by the conditon "module == '__main__'"
+ new_path = os.path.realpath(inspect.getfile(new_class))
+ id_path = os.path.realpath(inspect.getfile(_processors[id]))
if new_class.__module__ == '__main__':
new_class = _processors[id]
elif _processors[id].__module__ == '__main__':
pass
+ elif new_path == id_path:
+ new_class = _processors[id]
else:
raise ApiError("%s and %s have the same id: '%s'"
% (new_class.__name__, | fix(core): fix symlink issue when identifying duplicated processors | Parisson_TimeSide | train | py |
b1c3acd1b3433e494d4ce91896244f4045890817 | diff --git a/lib/mongo/server/connection.rb b/lib/mongo/server/connection.rb
index <HASH>..<HASH> 100644
--- a/lib/mongo/server/connection.rb
+++ b/lib/mongo/server/connection.rb
@@ -169,6 +169,7 @@ module Mongo
ensure_connected do |socket|
socket.write(PING_BYTES)
reply = Protocol::Reply.deserialize(socket)
+ p reply
reply.documents[0][Operation::Result::OK] == 1
end
end | Print reply for jenkins test on ssl | mongodb_mongo-ruby-driver | train | rb |
0519a8031d13da24d7102c763371ee3003294b8f | diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -8,11 +8,11 @@ module.exports = function (path, opts, cb) {
var pos = 0
return function (read) {
fs.open(path, flags, mode, function (err, fd) {
- if(err) return read(err)
+ if(err) return read(err, cb)
read(null, function next (end, data) {
if(end === true) fs.close(fd, cb)
else if(end) cb(end) //error!
- else
+ else
if(typeof data === 'string') data = Buffer.from(data) // convert strings to buffers
fs.write(fd, data, 0, data.length, pos, function (err, bytes) {
if(err) read(err, function () { fs.close(fd, cb) }) | cb after source is aborted | pull-stream_pull-write-file | train | js |
15ddea01bdda109277ebcfe26af22e8ea9ac00ed | diff --git a/src/js/libpannellum.js b/src/js/libpannellum.js
index <HASH>..<HASH> 100644
--- a/src/js/libpannellum.js
+++ b/src/js/libpannellum.js
@@ -266,9 +266,9 @@ function Renderer(container) {
faceImg.onload = onLoad;
faceImg.onerror = incLoaded; // ignore missing face to support partial fallback image
if (imageType == 'multires') {
- faceImg.src = encodeURI(path.replace('%s', sides[s]) + '.' + image.extension);
+ faceImg.src = path.replace('%s', sides[s]) + '.' + image.extension;
} else {
- faceImg.src = encodeURI(image[s].src);
+ faceImg.src = image[s].src;
}
}
fillMissingFaces(fallbackImgSize);
@@ -1196,7 +1196,7 @@ function Renderer(container) {
* @param {MultiresNode} node - Input node.
*/
function processNextTile(node) {
- loadTexture(node, encodeURI(node.path + '.' + image.extension), function(texture, loaded) {
+ loadTexture(node, node.path + '.' + image.extension, function(texture, loaded) {
node.texture = texture;
node.textureLoaded = loaded ? 2 : 1;
}, globalParams.crossOrigin); | Remove unnecessary `encodeURI` calls. | mpetroff_pannellum | train | js |
60d267ac3e9e688ca575c84c376c836a9f694748 | diff --git a/src/main/java/hex/Layer.java b/src/main/java/hex/Layer.java
index <HASH>..<HASH> 100644
--- a/src/main/java/hex/Layer.java
+++ b/src/main/java/hex/Layer.java
@@ -77,7 +77,7 @@ public abstract class Layer extends Iced {
Dropout(int units) {
_bits = new byte[(units+7)/8];
- _rand = new Random();
+ _rand = new Random(0);
}
// for input layer
diff --git a/src/main/java/hex/nn/Dropout.java b/src/main/java/hex/nn/Dropout.java
index <HASH>..<HASH> 100644
--- a/src/main/java/hex/nn/Dropout.java
+++ b/src/main/java/hex/nn/Dropout.java
@@ -28,7 +28,7 @@ public class Dropout {
Dropout(int units) {
_bits = new byte[(units+7)/8];
- _rand = new Random();
+ _rand = new Random(0);
}
// for input layer | Always create deterministic RNG, even though it is seeded later. | h2oai_h2o-2 | train | java,java |
847752a295d411ccff31f3137c140ec0f5445c07 | diff --git a/activerecord/lib/active_record/relation/merger.rb b/activerecord/lib/active_record/relation/merger.rb
index <HASH>..<HASH> 100644
--- a/activerecord/lib/active_record/relation/merger.rb
+++ b/activerecord/lib/active_record/relation/merger.rb
@@ -145,12 +145,17 @@ module ActiveRecord
# Remove equalities from the existing relation with a LHS which is
# present in the relation being merged in.
def reject_overwrites(lhs_wheres, rhs_wheres)
+ partition_overwrites(lhs_wheres, rhs_wheres).last
+ end
+
+ def partition_overwrites(lhs_wheres, rhs_wheres)
nodes = rhs_wheres.find_all do |w|
w.respond_to?(:operator) && w.operator == :==
end
seen = Set.new(nodes) { |node| node.left }
- lhs_wheres.reject do |w|
+ # returns [deleted, keepers]
+ lhs_wheres.partition do |w|
w.respond_to?(:operator) && w.operator == :== && seen.include?(w.left)
end
end | partition the where values so we can access the removed ones | rails_rails | train | rb |
ba61e17b357c05c8fecc275ecc50bc683fc1833e | diff --git a/worker/uniter/context/factory_test.go b/worker/uniter/context/factory_test.go
index <HASH>..<HASH> 100644
--- a/worker/uniter/context/factory_test.go
+++ b/worker/uniter/context/factory_test.go
@@ -142,6 +142,11 @@ func (s *FactorySuite) TestNewRunContextRelationId(c *gc.C) {
s.AssertRelationContext(c, ctx, 0)
}
+func (s *FactorySuite) TestNewRunContextRelationIdDoesNotExist(c *gc.C) {
+ _, err := s.factory.NewRunContext(12, "baz")
+ c.Assert(err, gc.ErrorMatches, `unknown relation id:.*`)
+}
+
func (s *FactorySuite) TestNewHookContext(c *gc.C) {
ctx, err := s.factory.NewHookContext(hook.Info{Kind: hooks.ConfigChanged})
c.Assert(err, gc.IsNil) | juju-run: add missing relationId test for context factory | juju_juju | train | go |
ffcb546137a4ab609f8bef0d39feb2a9526bbade | diff --git a/src/Passport.php b/src/Passport.php
index <HASH>..<HASH> 100644
--- a/src/Passport.php
+++ b/src/Passport.php
@@ -409,11 +409,11 @@ class Passport
}
/**
- * Set the current client for the application with the given scopes.
+ * Set the current client for the application with the given scopes.
*
- * @param \Laravel\Passport\Client $client
- * @param array $scopes
- * @return \Laravel\Passport\Client
+ * @param \Laravel\Passport\Client $client
+ * @param array $scopes
+ * @return \Laravel\Passport\Client
*/
public static function actingAsClient($client, $scopes = [])
{ | style: correct NBSP for regular spaces (#<I>) | laravel_passport | train | php |
3c6f9a5213185455e6bff5f2631c46e14d55ed0d | diff --git a/XBRL-Global.php b/XBRL-Global.php
index <HASH>..<HASH> 100644
--- a/XBRL-Global.php
+++ b/XBRL-Global.php
@@ -429,6 +429,26 @@ class XBRL_Global
}
/**
+ * Recursively remove files
+ * @param string $dir
+ */
+ public static function removeFiles($dir)
+ {
+ foreach ( glob( $dir ) as $file )
+ {
+ if ( is_dir( $file ) )
+ {
+ self::removeFiles( "$file/*" );
+ rmdir( $file );
+ }
+ else
+ {
+ unlink( $file );
+ }
+ }
+ }
+
+ /**
* Removes any cached file and directories
* @return boolean True if the directory exists and has been deleted
*/
@@ -438,23 +458,8 @@ class XBRL_Global
if ( ! is_dir( $this->cacheLocation ) ) return false;
- $rmrf = function ($dir) use ( &$rmrf )
- {
- foreach ( glob( $dir ) as $file )
- {
- if ( is_dir( $file ) )
- {
- $rmrf( "$file/*" );
- rmdir( $file );
- }
- else
- {
- unlink( $file );
- }
- }
- };
-
- $rmrf( $this->cacheLocation );
+
+ self::removeFiles( $this->cacheLocation );
return true;
} | Refactoring to create a removeFiles function | bseddon_XBRL | train | php |
Subsets and Splits
Java Commits in Train Set
Queries for all entries where the diff_languages column is 'java', providing a filtered dataset but without deeper analysis.
Java Commits Test Data
Returns a subset of 5,000 entries from the dataset where the diff_languages column is 'java', providing basic filtering for exploration.
Java Commits Sample
Retrieves the first 1,000 records where the 'diff_languages' column is 'java', providing limited insight into the specific data entries.
Java Commits Validation Sample
Retrieves a sample of entries from the validation dataset where the diff languages are Java, providing limited insight into specific Java-related data points.
Java Commits in Validation
This query retrieves a limited sample of entries from the validation dataset where the programming language difference is Java, providing basic filtering with minimal insight.
Java Commits Sample
This query retrieves a sample of 100 records where the 'diff_languages' is 'java', providing basic filtering but limited analytical value.
Java Commits Sample
Retrieves 100 samples where the language difference is Java, providing basic filtering but minimal analytical value.
Java Commits Sample
Retrieves 10 samples where the diff_languages column is 'java', providing basic examples of data entries with this specific language.
Java Commits Validation Sample
Retrieves 1,000 records where the diff_languages column is 'java', providing a snapshot of that specific subset but limited to raw data.
Java Commits Sample
This query retrieves 1000 random samples from the dataset where the programming language is Java, offering limited insight beyond raw data.