hash stringlengths 40 40 | diff stringlengths 131 26.7k | message stringlengths 7 694 | project stringlengths 5 67 | split stringclasses 1 value | diff_languages stringlengths 2 24 |
|---|---|---|---|---|---|
ef2255b43b53d8add0445a23c279b3dffe1a7f9e | diff --git a/lib/Thelia/Controller/Admin/FileController.php b/lib/Thelia/Controller/Admin/FileController.php
index <HASH>..<HASH> 100644
--- a/lib/Thelia/Controller/Admin/FileController.php
+++ b/lib/Thelia/Controller/Admin/FileController.php
@@ -190,16 +190,10 @@ class FileController extends BaseAdminController
throw new ProcessFileException('', 404);
}
- $defaultTitle = $parentModel->getTitle();
-
- if (empty($defaultTitle)) {
- $defaultTitle = $fileBeingUploaded->getClientOriginalName();
- }
-
$fileModel
->setParentId($parentId)
->setLocale(Lang::getDefaultLanguage()->getLocale())
- ->setTitle($defaultTitle)
+ ->setTitle($parentModel->getTitle())
;
$fileCreateOrUpdateEvent = new FileCreateOrUpdateEvent($parentId); | The image file name is no longer the default image title | thelia_core | train | php |
4745301da61211f83e75b577abbcfd232fa15600 | diff --git a/src/OAuth2ServerServiceProvider.php b/src/OAuth2ServerServiceProvider.php
index <HASH>..<HASH> 100644
--- a/src/OAuth2ServerServiceProvider.php
+++ b/src/OAuth2ServerServiceProvider.php
@@ -81,7 +81,7 @@ class OAuth2ServerServiceProvider extends ServiceProvider
$grant->setVerifyCredentialsCallback($grantParams['callback']);
}
if (array_key_exists('auth_code_ttl', $grantParams)) {
- $grant->setAuthCodeTTL($grantParams['auth_code_ttl']);
+ $grant->setAuthTokenTTL($grantParams['auth_code_ttl']);
}
if (array_key_exists('refresh_token_ttl', $grantParams)) {
$grant->setRefreshTokenTTL($grantParams['refresh_token_ttl']); | change setAuthCodeTTL to setAuthTokenTTL so it matches the library | lucadegasperi_oauth2-server-laravel | train | php |
54a14055afd83665ca29bdfad281be44ebc4c033 | diff --git a/src/Psalm/Internal/Stubs/CoreGenericFunctions.php b/src/Psalm/Internal/Stubs/CoreGenericFunctions.php
index <HASH>..<HASH> 100644
--- a/src/Psalm/Internal/Stubs/CoreGenericFunctions.php
+++ b/src/Psalm/Internal/Stubs/CoreGenericFunctions.php
@@ -126,7 +126,6 @@ function array_search($needle, array $haystack, bool $strict = false) {}
* @param array<mixed,T> $arr
* @param callable(T,T):int $callback
* @param-out array<int,T> $arr
- * @return bool
*/
function usort(array &$arr, callable $callback): bool {} | "usort": revert small change in the phpdoc | vimeo_psalm | train | php |
4ce65709633c28999559e11ac27ff31dfc35897f | diff --git a/app/components/marty/data_import_view.rb b/app/components/marty/data_import_view.rb
index <HASH>..<HASH> 100644
--- a/app/components/marty/data_import_view.rb
+++ b/app/components/marty/data_import_view.rb
@@ -27,6 +27,7 @@ class Marty::DataImportView < Marty::CmFormPanel
comboname.on('select', function(combo, record) {
textname.setValue("");
+ me.netzkeGetComponent('result').updateBodyHtml('');
});
}
JS
diff --git a/lib/marty/version.rb b/lib/marty/version.rb
index <HASH>..<HASH> 100644
--- a/lib/marty/version.rb
+++ b/lib/marty/version.rb
@@ -1,3 +1,3 @@
module Marty
- VERSION = "0.0.22"
+ VERSION = "0.0.23"
end | clear import view result message when new import is selected. | arman000_marty | train | rb,rb |
7bc85cc3967173e723cbadcea1f1b69bfcbc24db | diff --git a/ontrack-extension-git/src/main/java/net/nemerosa/ontrack/extension/git/service/GitServiceImpl.java b/ontrack-extension-git/src/main/java/net/nemerosa/ontrack/extension/git/service/GitServiceImpl.java
index <HASH>..<HASH> 100644
--- a/ontrack-extension-git/src/main/java/net/nemerosa/ontrack/extension/git/service/GitServiceImpl.java
+++ b/ontrack-extension-git/src/main/java/net/nemerosa/ontrack/extension/git/service/GitServiceImpl.java
@@ -320,8 +320,11 @@ public class GitServiceImpl extends AbstractSCMChangeLogService implements GitSe
info.post("Getting list of tags");
Collection<GitTag> tags = gitClient.getTags();
// Pattern for the tags
- // TODO Make the pattern configurable at branch level using a property
- final Pattern tagPattern = Pattern.compile("(.*)");
+ String tagExpression = "(.*)";
+ if (StringUtils.isNotBlank(configuration.getTagPattern())) {
+ tagExpression = configuration.getTagPattern().replace("*", "(.*)");
+ }
+ final Pattern tagPattern = Pattern.compile(tagExpression);
// Creates the builds
info.post("Creating builds from tags");
for (GitTag tag : tags) { | Git: tag pattern for the build/tag sync | nemerosa_ontrack | train | java |
40ecc46963cfa87ced1850ebdea3f63163741468 | diff --git a/salt/modules/yumpkg.py b/salt/modules/yumpkg.py
index <HASH>..<HASH> 100644
--- a/salt/modules/yumpkg.py
+++ b/salt/modules/yumpkg.py
@@ -287,7 +287,7 @@ def _get_yum_config():
# fall back to parsing the config ourselves
# Look for the config the same order yum does
fn = None
- paths = ('/etc/yum/yum.conf', '/etc/yum.conf')
+ paths = ('/etc/yum/yum.conf', '/etc/yum.conf', '/etc/dnf/dnf.conf')
for path in paths:
if os.path.exists(path):
fn = path | fixes pkgrepo for fedora><I> saltstack/salt#<I>
This makes modules.yumpkg complete independend from yum. yum is not
installed in fedora><I>, so this will actually fix pkgrepo in fedora. | saltstack_salt | train | py |
b6dc0a1a84803c442830d92fc9678401280c8af9 | diff --git a/resources/lang/en/laravel-share-fa4.php b/resources/lang/en/laravel-share-fa4.php
index <HASH>..<HASH> 100644
--- a/resources/lang/en/laravel-share-fa4.php
+++ b/resources/lang/en/laravel-share-fa4.php
@@ -5,6 +5,6 @@ return [
'twitter' => '<li><a href=":url" class="social-button :class" id=":id"><span class="fa fa-twitter"></span></a></li>',
'gplus' => '<li><a href=":url" class="social-button :class" id=":id"><span class="fa fa-google-plus"></span></a></li>',
'linkedin' => '<li><a href=":url" class="social-button :class" id=":id"><span class="fa fa-linkedin"></span></a></li>',
- 'whatsapp' => '<li><a target="_blank" href=":url" class=":class" id=":id"><span class="fa fa-whatsapp"></span></a></li>',
+ 'whatsapp' => '<li><a target="_blank" href=":url" class="social-button :class" id=":id"><span class="fa fa-whatsapp"></span></a></li>',
]; | Update laravel-share-fa4.php | jorenvh_laravel-share | train | php |
72271963f0e3a7a91c2fd7dd45f9f94f00b521c1 | diff --git a/notario/exceptions.py b/notario/exceptions.py
index <HASH>..<HASH> 100644
--- a/notario/exceptions.py
+++ b/notario/exceptions.py
@@ -12,10 +12,6 @@ class Invalid(Exception):
self.schema_item = schema_item
self.path = path
self._reason = reason
-
- # FIXME: How the hell are we supposed to persist attributed from our
- # class to the Exception one? Neither of the above persist which is
- # utterly annoying
Exception.__init__(self, self.__str__())
def __str__(self): | remove FIXME as it is already fixed | alfredodeza_notario | train | py |
0429f0371e75598117021a8a75e54116d0eaa1c9 | diff --git a/src/common/models/User.php b/src/common/models/User.php
index <HASH>..<HASH> 100644
--- a/src/common/models/User.php
+++ b/src/common/models/User.php
@@ -35,6 +35,7 @@ use yii\web\IdentityInterface;
class User extends Model implements IdentityInterface
{
public $id;
+ public $name;
public $email;
public $username;
public $type; | + `user` field to User model | hiqdev_hipanel-core | train | php |
4380584db25a9d159fabbf996cb51d4e9c69dc81 | diff --git a/Resources/public/js/controllers/commentController.js b/Resources/public/js/controllers/commentController.js
index <HASH>..<HASH> 100644
--- a/Resources/public/js/controllers/commentController.js
+++ b/Resources/public/js/controllers/commentController.js
@@ -15,8 +15,10 @@ portfolioApp
};
$scope.updateCountViewComments = function () {
- portfolioManager.portfolio.commentsViewAt = new Date();
- portfolioManager.save(portfolioManager.portfolio);
+ if (0 < portfolioManager.portfolio.unreadComments) {
+ portfolioManager.portfolio.commentsViewAt = new Date();
+ portfolioManager.save(portfolioManager.portfolio);
+ }
$scope.displayComment= !$scope.displayComment;
}
}]);
\ No newline at end of file | [PortfolioBundle] Don't update comment view date on portfolio if no new comments | claroline_Distribution | train | js |
f89df0ce77f0b588a6d808405abc6d6d9fb3491f | diff --git a/sharepoint/lists/__init__.py b/sharepoint/lists/__init__.py
index <HASH>..<HASH> 100644
--- a/sharepoint/lists/__init__.py
+++ b/sharepoint/lists/__init__.py
@@ -205,12 +205,28 @@ class SharePointList(object):
response = self.opener.post_soap(LIST_WEBSERVICE, xml,
soapaction='http://schemas.microsoft.com/sharepoint/soap/UpdateListItems')
+ for result in response.xpath('.//sp:Result', namespaces=namespaces):
+ batch_id, batch_result = result.attrib['ID'].split(',')
+ row = rows_by_batch_id[int(batch_id)]
+ if batch_result in ('Update', 'New'):
+ row._update(result.xpath('z:row', namespaces=namespaces)[0],
+ clear=True)
+ else:
+ self._deleted_rows.remove(row)
+
+ assert not self._deleted_rows
+ assert [(not row._changed) for row in self.rows]
+
class SharePointListRow(object):
# fields, list and opener are added as class attributes in SharePointList.row_class
def __init__(self, row={}):
- self._data = {}
- self._changed = set()
+ self._update(row, clear=True)
+
+ def _update(self, row, clear=False):
+ if clear:
+ self._data = {}
+ self._changed = set()
for field in self.fields:
value = field.parse(row)
if value is not None: | SharePointList.save() now pays attention to responses. | ox-it_python-sharepoint | train | py |
cd90140dec4f4fcf0941965d0a33ce27750b5dde | diff --git a/tests/Phive/Tests/Queue/Db/Pdo/AbstractPdoQueueTest.php b/tests/Phive/Tests/Queue/Db/Pdo/AbstractPdoQueueTest.php
index <HASH>..<HASH> 100644
--- a/tests/Phive/Tests/Queue/Db/Pdo/AbstractPdoQueueTest.php
+++ b/tests/Phive/Tests/Queue/Db/Pdo/AbstractPdoQueueTest.php
@@ -7,7 +7,7 @@ use Phive\Tests\Queue\HandlerAwareQueueTest;
abstract class AbstractPdoQueueTest extends HandlerAwareQueueTest
{
- public function testPdoThrowsExceptionOnError()
+ public function testRuntimeExceptionThrowing()
{
$options = static::$handler->getOptions();
$options['table_name'] = uniqid('non_existing_table_name_');
@@ -34,7 +34,7 @@ abstract class AbstractPdoQueueTest extends HandlerAwareQueueTest
continue;
}
- $this->fail('PDO throws \Phive\RuntimeException on error.');
+ $this->fail(get_class($queue).":$method() throws \\Phive\\RuntimeException on error.");
}
} | Replace deprecated Mongo with MongoClient (since mongo extension <I>) | rybakit_phive-queue | train | php |
e0b3d909a3624e862061bd61b499a98215f9b273 | diff --git a/tests/runtests.php b/tests/runtests.php
index <HASH>..<HASH> 100755
--- a/tests/runtests.php
+++ b/tests/runtests.php
@@ -64,8 +64,16 @@ PHP_CodeCoverage::getInstance()->filter()->addFileToBlacklist( __FILE__, 'PHPUNI
//require_once 'bootstrap.php';
-$runner = ezpTestRunner::instance();
-$runner->run($_SERVER['argv']);
+try
+{
+ $runner = ezpTestRunner::instance();
+ $runner->run($_SERVER['argv']);
+}
+catch ( Exception $e )
+{
+ $cli->error( $e->getMessage() . ' in ' . $e->getFile() . ' on line ' . $e->getLine() );
+ $cli->error( $e->getTraceAsString() );
+}
$script->shutdown(); | Added a try/catch block in test runner script in order to increase verbosity in case of an error | ezsystems_ezpublish-legacy | train | php |
e672881ac8378d20f4791d8673775f20ca87357a | diff --git a/FrontBundle/Controller/NodeController.php b/FrontBundle/Controller/NodeController.php
index <HASH>..<HASH> 100644
--- a/FrontBundle/Controller/NodeController.php
+++ b/FrontBundle/Controller/NodeController.php
@@ -27,8 +27,7 @@ class NodeController extends Controller
*/
public function showAction($nodeId)
{
- $nodes = $this->get('php_orchestra_model.repository.node')->findWithPublishedAndLastVersion($nodeId);
- $node = $nodes->toArray();
+ $node = $this->get('php_orchestra_model.repository.node')->findWithPublishedAndLastVersion($nodeId);
if (is_null($node)) {
throw new NonExistingDocumentException();
@@ -37,7 +36,7 @@ class NodeController extends Controller
$response = $this->render(
'PHPOrchestraFrontBundle:Node:show.html.twig',
array(
- 'node' => array_shift($node),
+ 'node' => $node,
'datetime' => time()
)
); | get a single result, remove toArray and array_shift | open-orchestra_open-orchestra-front-bundle | train | php |
58331b4d35a11bc4b2bfb40386bcaa6b6af8aaaf | diff --git a/src/com/mebigfatguy/fbcontrib/detect/BloatedAssignmentScope.java b/src/com/mebigfatguy/fbcontrib/detect/BloatedAssignmentScope.java
index <HASH>..<HASH> 100755
--- a/src/com/mebigfatguy/fbcontrib/detect/BloatedAssignmentScope.java
+++ b/src/com/mebigfatguy/fbcontrib/detect/BloatedAssignmentScope.java
@@ -216,6 +216,12 @@ public class BloatedAssignmentScope extends BytecodeScanningDetector {
if (catchHandlers.get(pc)) {
ignoreRegs.set(reg);
+ ScopeBlock catchSB = findScopeBlock(rootScopeBlock, pc+1);
+ if ((catchSB != null) && (catchSB.getStart() < pc)) {
+ ScopeBlock sb = new ScopeBlock(pc, catchSB.getFinish());
+ catchSB.setFinish(getPC() - 1);
+ rootScopeBlock.addChild(sb);
+ }
} else if (monitorSyncPCs.size() > 0) {
ignoreRegs.set(reg);
} else if (sawNull) { | remove some BAS FPs when variable is used in multiple stacked catches | mebigfatguy_fb-contrib | train | java |
06d4636d0c0b4f62d924c8354a044c8e2c4ca8f2 | diff --git a/src/main/java/com/j256/ormlite/jdbc/JdbcCompiledStatement.java b/src/main/java/com/j256/ormlite/jdbc/JdbcCompiledStatement.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/j256/ormlite/jdbc/JdbcCompiledStatement.java
+++ b/src/main/java/com/j256/ormlite/jdbc/JdbcCompiledStatement.java
@@ -54,11 +54,12 @@ public class JdbcCompiledStatement implements CompiledStatement {
return new JdbcDatabaseResults(preparedStatement, preparedStatement.executeQuery());
}
- public boolean runExecute() throws SQLException {
- if (type != StatementType.SELECT) {
+ public int runExecute() throws SQLException {
+ if (type != StatementType.EXECUTE) {
throw new IllegalArgumentException("Cannot call execute on a " + type + " statement");
}
- return preparedStatement.execute();
+ preparedStatement.execute();
+ return preparedStatement.getUpdateCount();
}
public DatabaseResults getGeneratedKeys() throws SQLException { | Make runExecute return int again after reading the docs more. | j256_ormlite-jdbc | train | java |
09f8206cbe206eef772e0f9cc0a2792b369382ae | diff --git a/examples/Maps/package.js b/examples/Maps/package.js
index <HASH>..<HASH> 100644
--- a/examples/Maps/package.js
+++ b/examples/Maps/package.js
@@ -1,6 +1,6 @@
enyo.depends(
"$lib/onyx",
- "$lib/layout/fittable",
+ "$lib/layout",
"$lib/extra/jsonp",
"maps",
"source/mockdata.js",
diff --git a/examples/Maps/source/Pullout.js b/examples/Maps/source/Pullout.js
index <HASH>..<HASH> 100644
--- a/examples/Maps/source/Pullout.js
+++ b/examples/Maps/source/Pullout.js
@@ -1,6 +1,6 @@
enyo.kind({
name: "Pullout",
- kind: "onyx.Slideable",
+ kind: "enyo.Slideable",
events: {
onDropPin: "",
onShowTraffic: "", | maps app: onyx.Slideable -> enyo.Slideable | enyojs_onyx | train | js,js |
df7ea53c2d3b8fb38be5cbc341c0bfae7950a376 | diff --git a/pyontutils/scigraph_codegen.py b/pyontutils/scigraph_codegen.py
index <HASH>..<HASH> 100755
--- a/pyontutils/scigraph_codegen.py
+++ b/pyontutils/scigraph_codegen.py
@@ -887,8 +887,16 @@ class State2(State):
return None, ''
-def moduleDirect(api_url, basepath, module_name):
+def moduleDirect(basepath, module_name, *, version=2):
""" Avoid the need for dynamics altogether """
+ if version < 2:
+ state = State
+ docs_path = 'api-docs'
+ else:
+ state = State2
+ docs_path = 'swagger.json'
+
+ api_url = f'{basepath}/{docs_path}'
s = state(api_url, basepath)
code = s.code()
return importDirect(code, module_name) | scigraph codegen fix bugs in moduleDirect
now only need to provide basepath, api_uri is generated from that | tgbugs_pyontutils | train | py |
024ea4da90a62bcec00a0d12840968d5f55f1adc | diff --git a/abydos/distance/_token_distance.py b/abydos/distance/_token_distance.py
index <HASH>..<HASH> 100644
--- a/abydos/distance/_token_distance.py
+++ b/abydos/distance/_token_distance.py
@@ -806,9 +806,14 @@ member function, such as Levenshtein."
# A marks array to indicate stars, primes, & covers
# bit 1 = starred
+ MUNKRES_STARRED = 1
# bit 2 = primed
+ MUNKRES_PRIMED = 2
# bit 4 = covered row
+ MUNKRES_ROW_COVERED = 4
# bit 8 = covered col
+ MUNKRES_COL_COVERED = 8
+ MUNKRES_COVERED = MUNKRES_COL_COVERED | MUNKRES_ROW_COVERED
marks = np_zeros((n, n), dtype=np.int8)
for col in range(len(src_only)):
@@ -840,9 +845,9 @@ member function, such as Levenshtein."
for col in range(n):
for row in range(n):
if arr[row, col] == 0:
- if sum(marks[row, :] & 1) == 0 and sum(marks[:, col] & 1) == 0:
- marks[row, col] |= 1
- marks[:, col] |= 8
+ if sum(marks[row, :] & MUNKRES_STARRED) == 0 and sum(marks[:, col] & MUNKRES_STARRED) == 0:
+ marks[row, col] |= MUNKRES_STARRED
+ marks[:, col] |= MUNKRES_COL_COVERED
return intersection | switched from integers to flag value constants | chrislit_abydos | train | py |
f64dc6fe6886ddc472ffa1a3ef8e2581de889e2e | diff --git a/pyresttest/resttest.py b/pyresttest/resttest.py
index <HASH>..<HASH> 100644
--- a/pyresttest/resttest.py
+++ b/pyresttest/resttest.py
@@ -290,7 +290,7 @@ def run_test(mytest, test_config = TestConfig(), context = None):
#print str(test_config.print_bodies) + ',' + str(not result.passed) + ' , ' + str(test_config.print_bodies or not result.passed)
#Print response body if override is set to print all *OR* if test failed (to capture maybe a stack trace)
- if test_config.print_bodies:
+ if test_config.print_bodies or not result.passed:
if test_config.interactive:
print "RESPONSE:"
print result.body
@@ -598,7 +598,7 @@ def main(args):
# Override configs from command line if config set
for t in tests:
- if 'print_bodies' in args and args['print_bodies'] is not None and not bool(args['print_bodies']):
+ if 'print_bodies' in args and args['print_bodies'] is not None and bool(args['print_bodies']):
t.config.print_bodies = safe_to_bool(args['print_bodies'])
if 'interactive' in args and args['interactive'] is not None: | Fix print-bodies command line argument, and print body on failure | svanoort_pyresttest | train | py |
cf0bf27503314be98e678408597c65769994b758 | diff --git a/lib/fit4ruby/version.rb b/lib/fit4ruby/version.rb
index <HASH>..<HASH> 100644
--- a/lib/fit4ruby/version.rb
+++ b/lib/fit4ruby/version.rb
@@ -1,4 +1,4 @@
module Fit4Ruby
# The version number of the library.
- VERSION = '1.6.0'
+ VERSION = '1.6.1'
end | Bumping version to <I> | scrapper_fit4ruby | train | rb |
58007a147221fd167d54972a5db38eadbb69adfb | diff --git a/graphics/widgets.py b/graphics/widgets.py
index <HASH>..<HASH> 100644
--- a/graphics/widgets.py
+++ b/graphics/widgets.py
@@ -42,9 +42,8 @@ class wButton(ptg.Button):
self.status = 0
ptg.Button.__init__(self, *args, **kargs)
-
-def __call__(self, *args, **kargs):
- return self.func(*args, **kargs)
+ def __call__(self, *args, **kargs):
+ return self.func(*args, **kargs)
# Widget to allow toggling between True and False to be collected | Fixed an indent error on the call for the wButton | jbm950_pygame_toolbox | train | py |
3092f0b3db1cee03776e6b71f37cab9ebb2b99f7 | diff --git a/ui/plugins/pinboard.js b/ui/plugins/pinboard.js
index <HASH>..<HASH> 100644
--- a/ui/plugins/pinboard.js
+++ b/ui/plugins/pinboard.js
@@ -126,6 +126,11 @@ treeherder.controller('PinboardCtrl', [
};
$scope.canSaveClassifications = function() {
+ if ($scope.enteringBugNumber) {
+ // we should save this for the user, as they likely
+ // just forgot to hit enter.
+ $scope.saveEnteredBugNumber();
+ }
var thisClass = $scope.classification;
return $scope.hasPinnedJobs() && (thPinboard.hasRelatedBugs() && $scope.user.loggedin ||
thisClass.failure_classification_id !== 4 || | Bug <I> - Save whatever's typed into the bug field before seeing if we can save the classification (#<I>) r=camd | mozilla_treeherder | train | js |
3af12add4455b65c2ba32637acfd1bbebf4e106f | diff --git a/lib/mxit_rails/mxit_api/api_client.rb b/lib/mxit_rails/mxit_api/api_client.rb
index <HASH>..<HASH> 100644
--- a/lib/mxit_rails/mxit_api/api_client.rb
+++ b/lib/mxit_rails/mxit_api/api_client.rb
@@ -224,10 +224,6 @@ module MxitRails::MxitApi
use_ssl = uri.scheme == 'https'
response = Net::HTTP.start(uri.host, uri.port, :use_ssl => use_ssl) do |http|
- if use_ssl
- http.verify_mode = OpenSSL::SSL::VERIFY_NONE
- end
-
yield(http, uri.path)
end
end | Re-enabled SSL verification. OpenSSL::SSL::VERIFY_PEER is the default. | linsen_mxit-rails | train | rb |
c772af34ec44005b55264b60c7cce833d6376157 | diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -8,7 +8,7 @@ install_requires = [
setup(
name='Flask-Table',
packages=['flask_table'],
- version='0.2.8',
+ version='0.2.9',
author='Andrew Plummer',
author_email='plummer574@gmail.com',
url='https://github.com/plumdog/flask_table', | Bump to version <I> | plumdog_flask_table | train | py |
ca3726c8e36dd58285b81332a02bb23d46ab2169 | diff --git a/spec/mongoid/finders_spec.rb b/spec/mongoid/finders_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/mongoid/finders_spec.rb
+++ b/spec/mongoid/finders_spec.rb
@@ -407,7 +407,9 @@ describe Mongoid::Finders do
end
describe ".find_by" do
+
context "when the document is found" do
+
let!(:person) do
Person.create(:ssn => "333-22-1111")
end
@@ -418,13 +420,13 @@ describe Mongoid::Finders do
end
context "when the document is not found" do
+
it "raises an error" do
expect {
Person.find_by(:ssn => "333-22-1111")
}.to raise_error(Mongoid::Errors::DocumentNotFound)
end
end
-
end
describe ".only" do | Fix formatting to match spec refactor | mongodb_mongoid | train | rb |
d0c11964dce13c9c3a44fd5690aac90eb75523e4 | diff --git a/packages/core/resolve-command/src/index.js b/packages/core/resolve-command/src/index.js
index <HASH>..<HASH> 100644
--- a/packages/core/resolve-command/src/index.js
+++ b/packages/core/resolve-command/src/index.js
@@ -352,6 +352,13 @@ const executeCommand = async (pool, { jwtToken, ...command }) => {
throw generateCommandError(`Command type "${type}" does not exist`)
}
+ const encrypt = pool.encryptionAdapter
+ ? await pool.encryptionAdapter.getEncrypter(aggregateId)
+ : () =>
+ throw Error(
+ `data encryption is disabled: no encryption adapter provided`
+ )
+
const commandHandler = async (...args) => {
const segment = pool.performanceTracer
? pool.performanceTracer.getSegment()
@@ -383,7 +390,8 @@ const executeCommand = async (pool, { jwtToken, ...command }) => {
aggregateState,
command,
jwtToken,
- aggregateVersion
+ aggregateVersion,
+ encrypt
)
if (!checkOptionShape(event.type, [String])) {
@@ -469,14 +477,16 @@ const createCommand = ({
eventStore,
aggregates,
snapshotAdapter,
- performanceTracer
+ performanceTracer,
+ encryptionAdapter
}) => {
const pool = {
eventStore,
aggregates,
snapshotAdapter,
isDisposed: false,
- performanceTracer
+ performanceTracer,
+ encryptionAdapter
}
const api = { | Add encrypter to command handler | reimagined_resolve | train | js |
c137ab86377db9d74f9deb523460eaa8ffd2dac8 | diff --git a/lib/tower_cli/resources/job.py b/lib/tower_cli/resources/job.py
index <HASH>..<HASH> 100644
--- a/lib/tower_cli/resources/job.py
+++ b/lib/tower_cli/resources/job.py
@@ -52,7 +52,7 @@ class Resource(models.MonitorableResource):
help='Suppress any requests for input.')
@click.option('--extra-vars', type=types.File('r'), required=False)
@click.option('--tags', required=False)
- def launch(self, job_template, tags, monitor=False, timeout=None,
+ def launch(self, job_template, tags=None, monitor=False, timeout=None,
no_input=True, extra_vars=None):
"""Launch a new job based on a job template. | Resolve unittests failures by provided a default value for tags | ansible_tower-cli | train | py |
083bc191ca7938f151c7eb5aab88a1ef85e4f8a8 | diff --git a/lib/graphql/batch/loader.rb b/lib/graphql/batch/loader.rb
index <HASH>..<HASH> 100644
--- a/lib/graphql/batch/loader.rb
+++ b/lib/graphql/batch/loader.rb
@@ -44,8 +44,8 @@ module GraphQL::Batch
end
def resolve #:nodoc:
+ return if resolved?
load_keys = queue
- return if load_keys.empty?
@queue = nil
perform(load_keys)
check_for_broken_promises(load_keys)
@@ -58,13 +58,16 @@ module GraphQL::Batch
# For Promise#sync
def wait #:nodoc:
if executor
- executor.loaders.delete(loader_key)
executor.resolve(self)
else
resolve
end
end
+ def resolved?
+ @queue.nil? || @queue.empty?
+ end
+
protected
# Fulfill the key with provided value, for use in #perform
diff --git a/test/graphql_test.rb b/test/graphql_test.rb
index <HASH>..<HASH> 100644
--- a/test/graphql_test.rb
+++ b/test/graphql_test.rb
@@ -342,6 +342,6 @@ class GraphQL::GraphQLTest < Minitest::Test
}
}
assert_equal expected, result
- assert_equal ["Product/1,2", "Product/2,3"], queries
+ assert_equal ["Product/1,2", "Product/3"], queries
end
end | Improve loader reuse (#<I>) | Shopify_graphql-batch | train | rb,rb |
93c8d47acc9bc07e9524a8e395e7f77561478d36 | diff --git a/core/src/main/java/com/orientechnologies/orient/core/id/ORecordId.java b/core/src/main/java/com/orientechnologies/orient/core/id/ORecordId.java
index <HASH>..<HASH> 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/id/ORecordId.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/id/ORecordId.java
@@ -280,7 +280,7 @@ public class ORecordId implements ORID {
final ODatabaseRecord db = ODatabaseRecordThreadLocal.INSTANCE.get();
if (db == null)
throw new ODatabaseException(
- "No database found in current thread local space. If you manually control database over threads assure to set the current database before to use it by calling: ODatabaseRecordThreadLocal.INSTANCE.set(db);");
+ "No database found in current thread local space. If you manually control databases over threads assure to set the current database before to use it by calling: ODatabaseRecordThreadLocal.INSTANCE.set(db);");
return db.load(this);
} | Thrown ODatabaseException in case ORecordId.getRecord() has no database set in thread local | orientechnologies_orientdb | train | java |
c99ccab122874e3395b32be9023dd78a6bb2eb7d | diff --git a/lxd/node/raft_test.go b/lxd/node/raft_test.go
index <HASH>..<HASH> 100644
--- a/lxd/node/raft_test.go
+++ b/lxd/node/raft_test.go
@@ -26,7 +26,7 @@ func TestDetermineRaftNode(t *testing.T) {
&db.RaftNode{ID: 1},
},
{
- `cluster.https_address set and and no raft_nodes rows`,
+ `cluster.https_address set and no raft_nodes rows`,
"1.2.3.4:8443",
[]string{},
&db.RaftNode{ID: 1}, | lxd/node/raft/test: Corrects typo | lxc_lxd | train | go |
b5513946bd1a65e116351fb062f9cc044c67daa5 | diff --git a/dbkit.py b/dbkit.py
index <HASH>..<HASH> 100644
--- a/dbkit.py
+++ b/dbkit.py
@@ -477,7 +477,7 @@ def transaction():
import sqlite3
import sys
- from dbkit import connect, transaction, query_value, execute, context
+ from dbkit import connect, transaction, query_value, execute
# ...do some stuff...
@@ -514,7 +514,7 @@ def transactional(wrapped):
import sqlite3
import sys
- from dbkit import connect, transactional, query_value, execute, context
+ from dbkit import connect, transactional, query_value, execute
# ...do some stuff... | context() isn't needed in these examples. | kgaughan_dbkit | train | py |
c0e87d57f2f567b7583d881efa331f9d313d0ccb | diff --git a/lib/core/connection/msg.js b/lib/core/connection/msg.js
index <HASH>..<HASH> 100644
--- a/lib/core/connection/msg.js
+++ b/lib/core/connection/msg.js
@@ -130,7 +130,8 @@ class Msg {
}
Msg.getRequestId = function() {
- return ++_requestId;
+ _requestId = (_requestId + 1) & 0x7fffffff;
+ return _requestId;
};
class BinMsg { | fix(OpMsg): cap requestIds at 0x7fffffff
Since OpMsg uses buffer write methods, these methods can throw if the buffer
attempts to write a number to large for the space. We now cap the requestId
at 0x7fffffff and loop back around to 0
Fixes NODE-<I> | mongodb_node-mongodb-native | train | js |
2eff58c5c8863287aa36e3e2bb686b8066aca021 | diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -27,6 +27,7 @@ setup(
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
+ 'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
),
) | added python <I> to trove categories | jpvanhal_inflection | train | py |
f12b615c40f0af9e5a44ded768389ce84ccebb9a | diff --git a/ores/scoring_contexts/tests/test_scoring_context.py b/ores/scoring_contexts/tests/test_scoring_context.py
index <HASH>..<HASH> 100644
--- a/ores/scoring_contexts/tests/test_scoring_context.py
+++ b/ores/scoring_contexts/tests/test_scoring_context.py
@@ -23,8 +23,8 @@ def test_scoring_context():
FakeExtractor = namedtuple("Extractor", ['extract', 'solve', 'language'])
- def fake_extract(rev_ids, dependents, caches=None):
- caches = caches or defaultdict(dict)
+ def fake_extract(rev_ids, dependents, cache=None):
+ caches = cache or defaultdict(dict)
for rev_id in rev_ids:
cache = caches[rev_id]
if rev_id % 5 != 0: | Fixes a scoring context test that was failing. | wikimedia_ores | train | py |
0ab9416348254a7ce5bbf80b42a5f6d320e20268 | diff --git a/lib/nodes/bundle.js b/lib/nodes/bundle.js
index <HASH>..<HASH> 100644
--- a/lib/nodes/bundle.js
+++ b/lib/nodes/bundle.js
@@ -51,9 +51,10 @@ registry.decl(BundleNodeName, BlockNode, /** @lends BundleNode.prototype */ {
}
// generate targets for page files
+ var optTechs = this.getOptimizerTechs();
this.getTechs().map(function(tech) {
var techNode = this.createTechNode(tech, bundleNode, this);
- if (techNode) {
+ if (techNode && ~optTechs.indexOf(tech)) {
this.createOptimizerNode(tech, techNode, bundleNode);
}
}, this);
@@ -106,6 +107,10 @@ registry.decl(BundleNodeName, BlockNode, /** @lends BundleNode.prototype */ {
];
},
+ getOptimizerTechs: function() {
+ return this.getTechs();
+ },
+
cleanup: function() {
var arch = this.ctx.arch;
if (!arch.hasNode(this.path)) return; | bem make: Ability to configure list of techs to optimize
Close #<I> | bem-archive_bem-tools | train | js |
28b9bb0abeaee53413c071cf0000ec723bd2e3c7 | diff --git a/lib/tack/forked_sandbox.rb b/lib/tack/forked_sandbox.rb
index <HASH>..<HASH> 100644
--- a/lib/tack/forked_sandbox.rb
+++ b/lib/tack/forked_sandbox.rb
@@ -24,14 +24,10 @@ module Tack
@reader.close
result = block.call
- Marshal.dump([:ok, result], @writer)
+
+ @writer.write(Base64.encode64(Marshal.dump([:ok, result])))
rescue Object => error
- Marshal.dump([
- :error,
- #[error.class, error.message, error.backtrace]
- Base64.encode64(error)
- ],
- @writer)
+ @writer.write(Base64.encode64(Marshal.dump([:error, error])))
ensure
@writer.close
exit! error ? 1 : 0
@@ -49,15 +45,12 @@ module Tack
while !(chunk=@reader.read).empty?
data << chunk
end
- status, result = Marshal.load(data)
+ status, result = Marshal.load(Base64.decode64(data))
case status
when :ok
return result
when :error
- #error_class, error_message, backtrace = result
- #error = error_class.new(error_message)
- #error.set_backtrace(backtrace)
- error = Base64.decode64(result)
+ error = result
raise error
else
raise "Unknown status #{status}" | Fix bug with how errors are passed through ForkedSandbox | bhb_tack | train | rb |
2c379508ce80fbf5f3522ba4a1f1d90eda5a6e57 | diff --git a/test.js b/test.js
index <HASH>..<HASH> 100644
--- a/test.js
+++ b/test.js
@@ -126,28 +126,4 @@ describe('#Requests', function() {
});
});
- it('should timeout', function(done) {
- var api = new WooCommerce({
- url: 'https://test.dev',
- consumerKey: 'ck_XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX',
- consumerSecret: 'cs_XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX',
- timeout: 1
- });
-
- nock('https://test.dev/wc-api/v3')
- .get('/orders')
- .reply(function(uri, requestBody, cb) {
- setTimeout(function() {
- return cb(null, {
- orders: []
- });
- }, 2);
- });
-
- api.get('orders', function(err) {
- chai.expect(err).to.be.a('error');
- chai.expect(err.message).to.equal('ETIMEDOUT');
- return done();
- });
- });
}); | No need to test timeout since is a "request" feature | woocommerce_wc-api-node | train | js |
591e620c52c8f4392ba4c34a565b4dccc9687a76 | diff --git a/lib/woodhouse/scheduler.rb b/lib/woodhouse/scheduler.rb
index <HASH>..<HASH> 100644
--- a/lib/woodhouse/scheduler.rb
+++ b/lib/woodhouse/scheduler.rb
@@ -53,7 +53,12 @@ class Woodhouse::Scheduler
def start_worker(worker)
@config.logger.debug "Starting worker #{worker.describe}"
- @worker_sets[worker] = WorkerSet.new_link(Celluloid.current_actor, worker, @config) unless @worker_sets.has_key?(worker)
+ unless @worker_sets.has_key?(worker)
+ @worker_sets[worker] = WorkerSet.new_link(Celluloid.current_actor, worker, @config)
+ true
+ else
+ false
+ end
end
def stop_worker(worker, wait = false)
diff --git a/spec/scheduler_spec.rb b/spec/scheduler_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/scheduler_spec.rb
+++ b/spec/scheduler_spec.rb
@@ -20,9 +20,8 @@ describe Woodhouse::Scheduler do
end
it "should not create a new worker set when an existing worker is sent to #start_worker" do
- pending "figure out how to test this without mocha, which works badly with Celluloid"
- subject.start_worker worker
- subject.start_worker worker
+ subject.start_worker(worker).should be_true
+ subject.start_worker(worker).should be_false
end
it "should spin down and remove a worker set when a worker is sent to #stop_worker" do | Keep Scheduler#start_worker from leaking references to internals | mboeh_woodhouse | train | rb,rb |
d9e7f04014ddac871e1ab2097f21c20c074ce824 | diff --git a/src/test/java/io/nats/streaming/PublishTests.java b/src/test/java/io/nats/streaming/PublishTests.java
index <HASH>..<HASH> 100644
--- a/src/test/java/io/nats/streaming/PublishTests.java
+++ b/src/test/java/io/nats/streaming/PublishTests.java
@@ -95,11 +95,12 @@ public class PublishTests {
@Test
public void testMaxPubAcksInFlight() throws Exception {
+ int timeoutInSeconds = 5;
try (NatsStreamingTestServer srv = new NatsStreamingTestServer(clusterName, false)) {
try (Connection nc = Nats.connect(srv.getURI())) {
Options opts = new Options.Builder()
.maxPubAcksInFlight(1)
- .pubAckWait(Duration.ofSeconds(2))
+ .pubAckWait(Duration.ofSeconds(timeoutInSeconds))
.natsConn(nc)
.build();
@@ -125,7 +126,7 @@ public class PublishTests {
}
Instant end = Instant.now().plusMillis(100);
// So if the loop ended before the PubAckWait timeout, then it's a failure.
- if (Duration.between(start, end).compareTo(Duration.ofSeconds(1)) < 0) {
+ if (Duration.between(start, end).compareTo(Duration.ofSeconds(timeoutInSeconds)) < 0) {
fail("Should have blocked after 1 message sent");
}
} | working on flaky tests, timing for ack timeout | nats-io_java-nats-streaming | train | java |
46f98b8d5cb1fd35acf3460e4c7159d6dab88998 | diff --git a/heyui/plugins/notify/index.js b/heyui/plugins/notify/index.js
index <HASH>..<HASH> 100644
--- a/heyui/plugins/notify/index.js
+++ b/heyui/plugins/notify/index.js
@@ -23,6 +23,7 @@ const notifyContainerCls = 'h-notify-container';
const notifyBodyCls = 'h-notify-body';
const notifyCloseCls = 'h-notify-close';
const notifyMaskCls = 'h-notify-mask';
+const notifyHasMaskCls = 'h-notify-has-mask';
const notifyShowCls = 'h-notify-show';
const closeIcon = 'h-icon-close';
@@ -72,6 +73,9 @@ class Notify {
html += '</div>';
let $body = document.createElement(`div`);
utils.addClass($body, notifyCls);
+ if (param.hasMask) {
+ utils.addClass($body, notifyHasMaskCls);
+ }
if (param.class) {
utils.addClass($body, param.class);
} | fix: modal plugin add notify-has-class on $body | heyui_heyui | train | js |
8a6a520a2243959637a80e7162112ea674b7eb5a | diff --git a/compliance_checker/cf/cf.py b/compliance_checker/cf/cf.py
index <HASH>..<HASH> 100644
--- a/compliance_checker/cf/cf.py
+++ b/compliance_checker/cf/cf.py
@@ -854,11 +854,9 @@ class CFBaseCheck(BaseCheck):
ok_count = 0
same_type = flag_masks.dtype == v.dtype
- type_ok = v.dtype in [np.character,
- np.dtype('b'),
- np.dtype('i4'),
- np.int32]
-
+ type_ok = (np.issubdtype(v.dtype, int) or
+ np.issubdtype(v.dtype, 'S') or
+ np.issubdtype(v.dtype, 'b'))
if same_type:
ok_count += 1
else: | Allow flag_masks to have any bitwise operator as a value. Fixes #<I> | ioos_compliance-checker | train | py |
aa67936ff715279defad46095ba118f479ee982b | diff --git a/lib/LibCharacteristic.js b/lib/LibCharacteristic.js
index <HASH>..<HASH> 100644
--- a/lib/LibCharacteristic.js
+++ b/lib/LibCharacteristic.js
@@ -14,7 +14,7 @@ module.exports = class LibCharacteristic extends LibObject {
// ===== Constructor =========================================================
constructor (parent, params) {
- super(parent._platform, params.name)
+ super(parent._platform, parent.name)
this._parent = parent
this._context = parent._context
this._key = params.key
@@ -99,8 +99,10 @@ module.exports = class LibCharacteristic extends LibObject {
'set %s to %s%s', this._characteristic.displayName,
value, this._unit
)
+ } else if (this._characteristic.eventOnlyCharacteristic) {
+ this.log('%s %s', this._characteristic.displayName, value)
} else {
- if (!this._characteristic.eventOnlyCharacteristic && value === this._value) {
+ if (value === this._value) {
return
}
this.log( | Enhancements
- Different message for event-only characteristic
- Set name to parent's name [TODO: doesn't yet work for _Name_ characteristic]. | ebaauw_homebridge-lib | train | js |
6d5bc75cf36605a547cd319d82285c0e21503197 | diff --git a/test/index.js b/test/index.js
index <HASH>..<HASH> 100644
--- a/test/index.js
+++ b/test/index.js
@@ -1,6 +1,7 @@
-var Emitter = require('../');
var test = require('tape');
+var Emitter = require('../');
+
test('init', function(is) {
is.plan(1); | refactor requires in test js | maxhoffmann_emitter | train | js |
b6ab81dc100755ddd63cad0a752c48188575cbd7 | diff --git a/lib/utilities.js b/lib/utilities.js
index <HASH>..<HASH> 100755
--- a/lib/utilities.js
+++ b/lib/utilities.js
@@ -1 +1,21 @@
var git = require('../');
+
+/**
+ * Check if error is null, if it is not, convert it to a GitError and call
+ * the callback.
+ *
+ * @param {Object} error
+ * @param {Function} callback
+ * @return {Boolean} True if the error was null, false otherwise.
+ */
+exports.success = function(error, callback) {
+ if (error) {
+ if (error instanceof git.error) {
+ callback(error);
+ } else {
+ callback(git.error(error));
+ }
+ return false;
+ }
+ return true;
+}; | Added success to utilities.js to handle if (error) then wrap it and call callback | nodegit_nodegit | train | js |
fcfef21c77b28a2245422a729fabb20772c3e9b8 | diff --git a/test/stripe/stripe_object_test.rb b/test/stripe/stripe_object_test.rb
index <HASH>..<HASH> 100644
--- a/test/stripe/stripe_object_test.rb
+++ b/test/stripe/stripe_object_test.rb
@@ -258,5 +258,19 @@ module Stripe
serialized = Stripe::StripeObject.serialize_params(obj, :force => true)
assert_equal({ :id => 'id', :metadata => { :foo => 'bar' } }, serialized)
end
+
+ should "#dirty! forces an object and its subobjects to be saved" do
+ obj = Stripe::StripeObject.construct_from({
+ :id => 'id',
+ :metadata => Stripe::StripeObject.construct_from({ :foo => 'bar' })
+ })
+
+ # note that `force` and `dirty!` are for different things, but are
+ # functionally equivalent
+ obj.dirty!
+
+ serialized = Stripe::StripeObject.serialize_params(obj)
+ assert_equal({ :id => 'id', :metadata => { :foo => 'bar' } }, serialized)
+ end
end
end | Add spec for `#dirty!` | stripe_stripe-ruby | train | rb |
ef5c10465f4a5eeda444ac931426d9e8f1436773 | diff --git a/quilt/revert.py b/quilt/revert.py
index <HASH>..<HASH> 100644
--- a/quilt/revert.py
+++ b/quilt/revert.py
@@ -59,7 +59,7 @@ class Revert(Command):
raise QuiltError("File %s is modified by patch %s" % \
(filename, patch.get_name()))
- def _apply_patch_tempoary(self, tmpdir, file, patch):
+ def _apply_patch_temporary(self, tmpdir, file, patch):
backup = Backup()
backup_file = backup.backup_file(file, tmpdir)
patch_file = self.quilt_patches + File(patch.get_name()) | Fix typo in method name
Rename _apply_patch_tempoary to _apply_patch_temporary | bjoernricks_python-quilt | train | py |
f60240703256aae47b147bbd7ca5ca8c9d7e94af | diff --git a/src/Sylius/Bundle/ResourceBundle/Resources/public/js/form-collection.js b/src/Sylius/Bundle/ResourceBundle/Resources/public/js/form-collection.js
index <HASH>..<HASH> 100644
--- a/src/Sylius/Bundle/ResourceBundle/Resources/public/js/form-collection.js
+++ b/src/Sylius/Bundle/ResourceBundle/Resources/public/js/form-collection.js
@@ -168,5 +168,7 @@
$(document).trigger('dom-node-inserted', [$(addedElement)]);
});
- $('[data-form-type="collection"]').CollectionForm();
-}(jQuery);
\ No newline at end of file
+ $(document).ready(function () {
+ $('[data-form-type="collection"]').CollectionForm();
+ });
+}(jQuery); | [ResourceBundle] Initialize form collection when DOM is ready | Sylius_Sylius | train | js |
956d232e47c38a557216b95ed5da2264c8c3ae90 | diff --git a/test/connection_test.js b/test/connection_test.js
index <HASH>..<HASH> 100644
--- a/test/connection_test.js
+++ b/test/connection_test.js
@@ -12,6 +12,7 @@ var testCase = require('nodeunit').testCase,
Script = require('vm'),
Collection = mongodb.Collection,
Server = mongodb.Server,
+ ReadPreference = mongodb.ReadPreference,
ServerManager = require('../test/tools/server_manager').ServerManager;
// Test db
@@ -227,6 +228,17 @@ exports.testConnectUsingSocketOptions = function(test) {
})
}
+exports.testConnectUsingSocketOptionsAndReadPreferenceAsObject = function(test) {
+ var db = new Db(MONGODB, new Server("127.0.0.1", mongodb.Connection.DEFAULT_PORT
+ , {readPreference: new ReadPreference("secondary"), auto_reconnect: true, poolSize: 4, socketOptions:{keepAlive:100}, ssl:useSSL}),{w:0, native_parser: (process.env['TEST_NATIVE'] != null)});
+ db.open(function(err, db) {
+ test.equal(null, err);
+ test.equal(100, db.serverConfig.checkoutWriter().socketOptions.keepAlive)
+ test.done();
+ db.close();
+ })
+}
+
/**
* Retrieve the server information for the current
* instance of the db client | Added connection test when readPreference passed to server is a ReadPreference object | mongodb_node-mongodb-native | train | js |
a251dc4c7ac7d5771dd73d3b2653d0ef0f6646e4 | diff --git a/saharaclient/api/client.py b/saharaclient/api/client.py
index <HASH>..<HASH> 100644
--- a/saharaclient/api/client.py
+++ b/saharaclient/api/client.py
@@ -54,17 +54,18 @@ class Client(object):
service_name=service_name,
region_name=region_name)
input_auth_token = keystone.session.get_token(auth)
- try:
- sahara_catalog_url = keystone.session.get_endpoint(
- auth, interface=endpoint_type,
- service_type=service_type)
- except kex.EndpointNotFound:
- # This is support of 'data_processing' service spelling
- # which was used for releases before Kilo
- service_type = service_type.replace('-', '_')
- sahara_catalog_url = keystone.session.get_endpoint(
- auth, interface=endpoint_type,
- service_type=service_type)
+ if not sahara_catalog_url:
+ try:
+ sahara_catalog_url = keystone.session.get_endpoint(
+ auth, interface=endpoint_type,
+ service_type=service_type)
+ except kex.EndpointNotFound:
+ # This is support of 'data_processing' service spelling
+ # which was used for releases before Kilo
+ service_type = service_type.replace('-', '_')
+ sahara_catalog_url = keystone.session.get_endpoint(
+ auth, interface=endpoint_type,
+ service_type=service_type)
else:
keystone = self.get_keystone_client(
username=username, | Added --bypass-url support for keystone 3
Keystone 3 section of code should respect --bypass-url too.
Change-Id: I<I>e7c9bef<I>bcc<I>f<I>d4fcac3
Closes-Bug: #<I> | openstack_python-saharaclient | train | py |
e49a0d566aad790fbf54071269832aa58160e94d | diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -25,12 +25,14 @@ module.exports = function responseTime(){
return function(req, res, next){
next = next || noop;
if (res._responseTime) return next();
- var start = Date.now();
res._responseTime = true;
+ var startAt = process.hrtime()
+
onHeaders(res, function () {
- var duration = Date.now() - start
- this.setHeader('X-Response-Time', duration + 'ms')
+ var diff = process.hrtime(startAt)
+ var ms = diff[0] * 1e3 + diff[1] * 1e-6
+ this.setHeader('X-Response-Time', ms.toFixed(3) + 'ms')
})
next(); | make timer immune to clock drift and more precise | expressjs_response-time | train | js |
a6760d578220fc85b4878e5347a8e2549205f108 | diff --git a/test/AlexaSmartHome/Request/RequestTest.php b/test/AlexaSmartHome/Request/RequestTest.php
index <HASH>..<HASH> 100644
--- a/test/AlexaSmartHome/Request/RequestTest.php
+++ b/test/AlexaSmartHome/Request/RequestTest.php
@@ -3,6 +3,7 @@
namespace Tests\AlexaSmartHome\Request;
use \InternetOfVoice\LibVoice\AlexaSmartHome\Request\Request;
+use \InvalidArgumentException;
use \PHPUnit\Framework\TestCase;
/**
@@ -13,8 +14,6 @@ use \PHPUnit\Framework\TestCase;
*/
class RequestTest extends TestCase {
/**
- * testRequest
- *
* @group smarthome
*/
public function testRequest() {
@@ -29,4 +28,12 @@ class RequestTest extends TestCase {
$this->assertEquals('BearerToken', $request->getDirective()->getPayload()->getScope()->getType());
$this->assertEquals('access-token-send-by-skill', $request->getDirective()->getPayload()->getScope()->getToken());
}
+
+ /**
+ * @group smarthome
+ */
+ public function testMissingDirective() {
+ $this->expectException(InvalidArgumentException::class);
+ new Request([]);
+ }
} | Smart home: Extend RequestTest | internetofvoice_libvoice | train | php |
5b0ab35414214e10fde2e20df3d56ab654d7310e | diff --git a/bcbio/install.py b/bcbio/install.py
index <HASH>..<HASH> 100644
--- a/bcbio/install.py
+++ b/bcbio/install.py
@@ -146,7 +146,8 @@ def upgrade_bcbio_data(args, remotes):
remotes["genome_resources"])
_upgrade_snpeff_data(s["fabricrc_overrides"]["galaxy_home"], args, remotes)
if 'data' in args.toolplus:
- subprocess.check_call(["gemini", "update", "--dataonly"])
+ gemini = os.path.join(os.path.dirname(sys.executable), "gemini")
+ subprocess.check_call([gemini, "update", "--dataonly"])
def _upgrade_genome_resources(galaxy_dir, base_url):
"""Retrieve latest version of genome resource YAML configuration files. | Do not require gemini to be on PATH for data updates. Thanks to Nishanth Dandapanthu | bcbio_bcbio-nextgen | train | py |
bdf1a7cd2d6692f3ed80d4145b6944e435e67edd | diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -5,6 +5,10 @@ var _ = require('lodash');
function changed(Model, options) {
'use strict';
+ if(typeof Model[options.callback] !== 'function') {
+ console.warn('Callback %s is not a model function', options.callback);
+ }
+
debug('Changed mixin for Model %s', Model.modelName);
var loopback = require('loopback');
@@ -68,6 +72,7 @@ function changed(Model, options) {
Model.getIdName()
]
}).then(function(items) {
+ if(typeof Model[options.callback] !== 'function') return false;
return Model[options.callback](Model.extractChangedItemIds(items));
})
.then(function(res) { | Add warning and skip execution if callback function is not defined | fullcube_loopback-ds-changed-mixin | train | js |
679cb8d8691857317dfd323e85e9920e32709a11 | diff --git a/src/test/java/com/github/dockerjava/core/command/StatsCmdImplTest.java b/src/test/java/com/github/dockerjava/core/command/StatsCmdImplTest.java
index <HASH>..<HASH> 100644
--- a/src/test/java/com/github/dockerjava/core/command/StatsCmdImplTest.java
+++ b/src/test/java/com/github/dockerjava/core/command/StatsCmdImplTest.java
@@ -51,7 +51,7 @@ public class StatsCmdImplTest extends AbstractDockerClientTest {
super.afterMethod(result);
}
- @Test
+ @Test(groups = "ignoreInCircleCi")
public void testStatsStreaming() throws InterruptedException, IOException {
TimeUnit.SECONDS.sleep(1); | Ignore with CirecleCI | docker-java_docker-java | train | java |
e118538ca38583e5d27e7b0c5e8504ed8f8b7e66 | diff --git a/src/RdnUpload/File/File.php b/src/RdnUpload/File/File.php
index <HASH>..<HASH> 100755
--- a/src/RdnUpload/File/File.php
+++ b/src/RdnUpload/File/File.php
@@ -25,11 +25,6 @@ class File implements FileInterface
*/
public function __construct($basename, $path)
{
- if (!file_exists($path))
- {
- throw new \RuntimeException("File does not exist ($path)");
- }
-
$this->basename = $basename;
$this->path = $path;
} | don't throw exceptions if path does not exist
could be a temporary file output pointer in which case the file will
exist after the download operation is completed | radnan_rdn-upload | train | php |
1b24008f2b779b6d2e773b29c6041f6a23de1af4 | diff --git a/src/BuiltinServerFactory.php b/src/BuiltinServerFactory.php
index <HASH>..<HASH> 100644
--- a/src/BuiltinServerFactory.php
+++ b/src/BuiltinServerFactory.php
@@ -43,6 +43,11 @@ class BuiltinServerFactory
$timer = new Deferred;
$this->loop->addTimer(0.05, function () use ($timer, $process) {
+ if (DIRECTORY_SEPARATOR === '\\') {
+ // Pipes opened by proc_open() can break stream_select() loop in Windows.
+ // This fix might do the trick...
+ $process->stderr->close();
+ }
$timer->resolve($process);
}); | Improve[?] stream_select() behavior on Windows | mpyw_php-hyper-builtin-server | train | php |
f5db5e53fca696d1d3e5dfe42312be3260a07024 | diff --git a/lib/CaptureClicks.js b/lib/CaptureClicks.js
index <HASH>..<HASH> 100644
--- a/lib/CaptureClicks.js
+++ b/lib/CaptureClicks.js
@@ -99,7 +99,7 @@ var CaptureClicks = React.createClass({
}.bind(this);
this.props.environment.navigate(
- url.pathname,
+ url.pathname + (url.hash.length > 1 ? url.hash : ''),
{onBeforeNavigation: onBeforeNavigation},
function(err, info) {
if (err) { | Allow intra-page hash routing. | STRML_react-router-component | train | js |
1f09c0a487b71cb0ff8067ea388f8394e4684a9b | diff --git a/lib/drivers/docker/container.js b/lib/drivers/docker/container.js
index <HASH>..<HASH> 100644
--- a/lib/drivers/docker/container.js
+++ b/lib/drivers/docker/container.js
@@ -214,7 +214,9 @@ Container.prototype.dockerEnv = function() {
};
Container.prototype.startArgs = function() {
- var args = ['--cluster=0'];
+ var args = [
+ '--cluster=' + this.startOpts.size,
+ ];
if (this.startOpts.trace) {
args.push('--trace');
} | docker: start cluster at full size | strongloop_strong-pm | train | js |
0dda52840af290f5dd289f6c4e268cb9907cfe7e | diff --git a/rest_framework_gis/fields.py b/rest_framework_gis/fields.py
index <HASH>..<HASH> 100644
--- a/rest_framework_gis/fields.py
+++ b/rest_framework_gis/fields.py
@@ -7,6 +7,11 @@ from django.utils.translation import ugettext_lazy as _
from rest_framework.fields import Field
+class JSONDict(dict):
+ def __str__(self):
+ return json.dumps(self)
+
+
class GeometryField(Field):
"""
A field to handle GeoDjango Geometry fields
@@ -20,6 +25,7 @@ class GeometryField(Field):
def to_representation(self, value):
if isinstance(value, dict) or value is None:
return value
+ return JSONDict(json.loads(GEOSGeometry(value).geojson))
# Get GeoDjango geojson serialization and then convert it _back_ to
# a Python object
return json.loads(GEOSGeometry(value).geojson) | Ensure GeoJSON is rendered correctly in browsable API
Valid for python 2 only.
Avoid representations like {u'type': u'Point', u'coordinates':
[<I>, <I>]}
For more information see #<I> | djangonauts_django-rest-framework-gis | train | py |
bb527eb9ad7a5f695fa7668f773bea50b04dcabb | diff --git a/lib/dashboard/public/elements/ncg-dialog.js b/lib/dashboard/public/elements/ncg-dialog.js
index <HASH>..<HASH> 100644
--- a/lib/dashboard/public/elements/ncg-dialog.js
+++ b/lib/dashboard/public/elements/ncg-dialog.js
@@ -19,6 +19,7 @@
listeners: {
'neon-animation-finish': '_onNeonAnimationFinish',
+ 'iron-overlay-opened': '_onIronOverlayOpened',
'iron-overlay-closed': '_onIronOverlayClosed'
},
@@ -51,6 +52,11 @@
}
},
+ _onIronOverlayOpened: function () {
+ var iframeDocument = this.querySelector('iframe').contentDocument;
+ iframeDocument.dispatchEvent(new CustomEvent('dialog-opened'));
+ },
+
_onIronOverlayClosed: function (e) {
var iframeDocument = this.querySelector('iframe').contentDocument;
if (e.detail.confirmed) { | feat(dashboard): emit `dialog-opened` event in a dialog's `document` when it opens | nodecg_nodecg | train | js |
a92db5717b7bd522d62438d46be7d8d9f3503dd5 | diff --git a/packages/wpcom-proxy-request/index.js b/packages/wpcom-proxy-request/index.js
index <HASH>..<HASH> 100644
--- a/packages/wpcom-proxy-request/index.js
+++ b/packages/wpcom-proxy-request/index.js
@@ -195,24 +195,24 @@ function onmessage (e) {
var params = requests[id];
delete requests[id];
- var res = data[0];
+ var body = data[0];
var statusCode = data[1];
var headers = data[2];
debug('got %s status code for URL: %s', statusCode, params.path);
- if (res && headers) {
- res._headers = headers;
+ if (body && headers) {
+ body._headers = headers;
}
if (null == statusCode || 2 === Math.floor(statusCode / 100)) {
// 2xx status code, success
- params.resolve(res);
+ params.resolve(body);
} else {
// any other status code is a failure
var err = new Error();
err.statusCode = statusCode;
- for (var i in res) err[i] = res[i];
- if (res.error) err.name = toTitle(res.error) + 'Error';
+ for (var i in body) err[i] = body[i];
+ if (body.error) err.name = toTitle(body.error) + 'Error';
params.reject(err);
} | index: rename `res` variable to `body`
Matches the `wpcom-xhr-request` logic is the only reason :p | Automattic_wp-calypso | train | js |
44ab140b0749698769df5f16e028720c82116884 | diff --git a/openquake/calculators/risk/scenario_damage/core.py b/openquake/calculators/risk/scenario_damage/core.py
index <HASH>..<HASH> 100644
--- a/openquake/calculators/risk/scenario_damage/core.py
+++ b/openquake/calculators/risk/scenario_damage/core.py
@@ -139,13 +139,15 @@ class ScenarioDamageRiskCalculator(general.BaseRiskCalculator):
# sum per taxonomy
if not fractions:
- self.ddt_fractions[taxonomy] = bfractions[taxonomy]
+ self.ddt_fractions[taxonomy] = numpy.array(
+ bfractions[taxonomy])
else:
self.ddt_fractions[taxonomy] += bfractions[taxonomy]
# global sum
if self.total_fractions is None:
- self.total_fractions = bfractions[taxonomy]
+ self.total_fractions = numpy.array(
+ bfractions[taxonomy])
else:
self.total_fractions += bfractions[taxonomy] | Fixed bug when computing the total damage distribution | gem_oq-engine | train | py |
429e3c7b7de2291a75239871d3af49968fe11487 | diff --git a/test/youtube-dl_test.rb b/test/youtube-dl_test.rb
index <HASH>..<HASH> 100644
--- a/test/youtube-dl_test.rb
+++ b/test/youtube-dl_test.rb
@@ -43,7 +43,7 @@ describe YoutubeDL do
@extractors = YoutubeDL.extractors
end
- it 'should return a Hash' do
+ it 'should return an Array' do
assert_instance_of Array, @extractors
end | .extractors Returns an array, not a Hash [ci skip] | layer8x_youtube-dl.rb | train | rb |
35aa5f0afbfea8f42cb7ff54198f81ab94d12386 | diff --git a/test/client/cluster_test.rb b/test/client/cluster_test.rb
index <HASH>..<HASH> 100644
--- a/test/client/cluster_test.rb
+++ b/test/client/cluster_test.rb
@@ -27,11 +27,11 @@ describe Elastomer::Client::Cluster do
it 'updates the cluster settings' do
@cluster.update_settings :transient => { 'cluster.blocks.read_only' => true }
- h = @cluster.settings
+ h = @cluster.settings :flat_settings => true
assert_equal 'true', h['transient']['cluster.blocks.read_only']
@cluster.update_settings :transient => { 'cluster.blocks.read_only' => false }
- h = @cluster.settings
+ h = @cluster.settings :flat_settings => true
assert_equal 'false', h['transient']['cluster.blocks.read_only']
end | couldn't resist making this test work with ES <I> | github_elastomer-client | train | rb |
31d73a73f49f0a17e205a85ab35aa8a322ee6dba | diff --git a/app/models/shipit/commit_deployment.rb b/app/models/shipit/commit_deployment.rb
index <HASH>..<HASH> 100644
--- a/app/models/shipit/commit_deployment.rb
+++ b/app/models/shipit/commit_deployment.rb
@@ -2,7 +2,7 @@ module Shipit
class CommitDeployment < ActiveRecord::Base
belongs_to :commit
belongs_to :task
- has_many :statuses, class_name: 'CommitDeploymentStatus'
+ has_many :statuses, dependent: :destroy, class_name: 'CommitDeploymentStatus'
after_commit :schedule_create_on_github, on: :create
diff --git a/app/models/shipit/deploy.rb b/app/models/shipit/deploy.rb
index <HASH>..<HASH> 100644
--- a/app/models/shipit/deploy.rb
+++ b/app/models/shipit/deploy.rb
@@ -11,7 +11,7 @@ module Shipit
after_transition any => any, do: :update_commit_deployments
end
- has_many :commit_deployments, inverse_of: :task, foreign_key: :task_id do
+ has_many :commit_deployments, dependent: :destroy, inverse_of: :task, foreign_key: :task_id do
GITHUB_STATUSES = {
'pending' => 'pending',
'failed' => 'failure', | Add missing dependent: :destroy for CommitDeployment | Shopify_shipit-engine | train | rb,rb |
ae352b48cb3c92296bf5f6ecf14764fb0a968232 | diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -17,7 +17,7 @@ except ImportError:
MAJOR = 5
MINOR = 0
MICRO = 0
-ISRELEASED = False
+ISRELEASED = True
VERSION = '%d.%d.%d' % (MAJOR, MINOR, MICRO) | * Set is_released flag in setup.py | explosion_thinc | train | py |
f511e8273b408cb4cdb23f14d990f07781827bd5 | diff --git a/src/Controller/ScriptController.php b/src/Controller/ScriptController.php
index <HASH>..<HASH> 100644
--- a/src/Controller/ScriptController.php
+++ b/src/Controller/ScriptController.php
@@ -40,9 +40,8 @@ class ScriptController
$appNamespace = key($autoload["psr-4"]);
$appSourceDir .= current($autoload["psr-4"]);
}
- $output->write("Compiling {$appNamespace}PuzzleConfig to $appSourceDir/PuzzleConfig.php");
+ $output->write("<info>PuzzleDI: Compiling</info> <comment>{$appNamespace}PuzzleConfig</comment> <info>to</info> <comment>$appSourceDir/PuzzleConfig.php</comment>");
$compiler->compile($data, $appNamespace, $appSourceDir);
- $output->write("PuzzleConfig compiled");
}
}
\ No newline at end of file | Added styling to console output
removed unnecessary output line | lexide_puzzle-di | train | php |
ff8d2541244e9c88d71759a41f68ad412c9235b2 | diff --git a/lib/webhooks/webhooks.js b/lib/webhooks/webhooks.js
index <HASH>..<HASH> 100644
--- a/lib/webhooks/webhooks.js
+++ b/lib/webhooks/webhooks.js
@@ -28,7 +28,7 @@ function validateRequest(authToken, twilioHeader, url, params) {
@param {object} request - An expressjs request object (http://expressjs.com/api.html#req.params)
@param {string} authToken - The auth token, as seen in the Twilio portal
@param {object} opts - options for request validation:
- - webhookUrl: The full URL (with query string) you used to configure the webhook with Twilio - overrides host/protocol options
+ - url: The full URL (with query string) you used to configure the webhook with Twilio - overrides host/protocol options
- host: manually specify the host name used by Twilio in a number's webhook config
- protocol: manually specify the protocol used by Twilio in a number's webhook config
*/ | Correcting the hint for webhook URL option (#<I>)
The option for the webhook URL is actually called `url` not `webhookUrl`, as seen here:
```
if (options.url) {
// Let the user specify the full URL
webhookUrl = options.url;
}
``` | twilio_twilio-node | train | js |
dae55b5dec7b8b6d6ec2f906e0fe0e72797802dd | diff --git a/drivers/gpio/motor_driver_test.go b/drivers/gpio/motor_driver_test.go
index <HASH>..<HASH> 100644
--- a/drivers/gpio/motor_driver_test.go
+++ b/drivers/gpio/motor_driver_test.go
@@ -113,18 +113,16 @@ func TestMotorDriverDirection(t *testing.T) {
d.Direction("backward")
}
-func TestMotorDriverDigitalForwardBackward(t *testing.T) {
+func TestMotorDriverDigital(t *testing.T) {
d := initTestMotorDriver()
d.CurrentMode = "digital"
d.ForwardPin = "2"
+ d.BackwardPin = "3"
- d.Forward(100)
- gobottest.Assert(t, d.CurrentSpeed, uint8(100))
- gobottest.Assert(t, d.CurrentDirection, "forward")
-
- d.Backward(100)
- gobottest.Assert(t, d.CurrentSpeed, uint8(100))
- gobottest.Assert(t, d.CurrentDirection, "backward")
+ d.On()
+ gobottest.Assert(t, d.CurrentState, uint8(1))
+ d.Off()
+ gobottest.Assert(t, d.CurrentState, uint8(0))
}
func TestMotorDriverDefaultName(t *testing.T) { | gpio: increase test coverage for motor driver | hybridgroup_gobot | train | go |
567ba6eed72b809a39011017388d5a033cc95723 | diff --git a/connection_test.go b/connection_test.go
index <HASH>..<HASH> 100644
--- a/connection_test.go
+++ b/connection_test.go
@@ -505,6 +505,7 @@ func TestReadTimeout(t *testing.T) {
opts := testutils.NewOpts().
AddLogFilter("Couldn't send outbound error frame", 1).
AddLogFilter("Connection error", 1, "site", "read frames").
+ AddLogFilter("Connection error", 1, "site", "write frames").
AddLogFilter("simpleHandler OnError", 1,
"error", "failed to send error frame, connection state connectionClosed")
WithVerifiedServer(t, opts, func(ch *Channel, hostPort string) { | TestReadTimeout should ignore errors in writeFrames | uber_tchannel-go | train | go |
6a5b1d2416e98843731c7286454adc63e9180fef | diff --git a/go/test/endtoend/sharded/shared_keyspace_test.go b/go/test/endtoend/sharded/shared_keyspace_test.go
index <HASH>..<HASH> 100644
--- a/go/test/endtoend/sharded/shared_keyspace_test.go
+++ b/go/test/endtoend/sharded/shared_keyspace_test.go
@@ -259,4 +259,4 @@ func initCluster(shardNames []string, totalTabletsRequired int) {
keyspace.Shards = append(keyspace.Shards, *shard)
}
clusterInstance.Keyspaces = append(clusterInstance.Keyspaces, keyspace)
-}
+}
\ No newline at end of file | Converted sharded test from py to go (#<I>)
* Converted sharded test from py to go | vitessio_vitess | train | go |
4db12d33bb446fbf969441e85e403f980c1415e2 | diff --git a/ui/js/controllers/perf/compare.js b/ui/js/controllers/perf/compare.js
index <HASH>..<HASH> 100644
--- a/ui/js/controllers/perf/compare.js
+++ b/ui/js/controllers/perf/compare.js
@@ -142,6 +142,12 @@ perf.controller('CompareChooserCtrl', [
}
);
};
+
+ // if we have a try push prepopulated, automatically offer a new revision
+ if ($scope.newRevision.length === 12) {
+ $scope.updateNewRevisionTips();
+ $scope.getPreviousRevision();
+ }
});
}]); | Bug <I> - Automatically offer to set base revision when preloading perf compare chooser | mozilla_treeherder | train | js |
bcdf73db52ed24044616162784b843aece2a9856 | diff --git a/lib/etl/control/destination/database_destination.rb b/lib/etl/control/destination/database_destination.rb
index <HASH>..<HASH> 100644
--- a/lib/etl/control/destination/database_destination.rb
+++ b/lib/etl/control/destination/database_destination.rb
@@ -59,10 +59,10 @@ module ETL #:nodoc:
names = []
values = []
order.each do |name|
- names << name
+ names << "`#{name}`"
values << conn.quote(row[name]) # TODO: this is probably not database agnostic
end
- q = "INSERT INTO #{table_name} (#{names.join(',')}) VALUES (#{values.join(',')})"
+ q = "INSERT INTO `#{table_name}` (#{names.join(',')}) VALUES (#{values.join(',')})"
ETL::Engine.logger.debug("Executing insert: #{q}")
conn.insert(q, "Insert row #{current_row}")
@current_row += 1 | Added backticking to table and column names for database destinations | activewarehouse_activewarehouse-etl | train | rb |
5ab423854de8d22cbcf1df451da816a41d563b7a | diff --git a/tests/test_simplenote.py b/tests/test_simplenote.py
index <HASH>..<HASH> 100644
--- a/tests/test_simplenote.py
+++ b/tests/test_simplenote.py
@@ -8,10 +8,10 @@ from simplenote import Simplenote
class TestSimplenote(unittest.TestCase):
def setUp(self):
- res, status = Simplenote(self.user, self.password).get_note_list()
- [Simplenote(self.user, self.password).delete_note(n["key"]) for n in res]
self.user = "simplenote-test@lordofhosts.de"
self.password = "foobar"
+ res, status = Simplenote(self.user, self.password).get_note_list()
+ [Simplenote(self.user, self.password).delete_note(n["key"]) for n in res]
self.unicode_note = "∮ E⋅da = Q, n → ∞, ∑ f(i) = ∏ g(i), ⎧⎡⎛┌─────┐⎞⎤⎫"
Simplenote(self.user, self.password).add_note("First Note.")
Simplenote(self.user, self.password).add_note("Second Note.") | assign before usage, d'oh | mrtazz_simplenote.py | train | py |
6c38d54d7c84d47b65aaa10ceb8d8b764c5d93c2 | diff --git a/pyemma/coordinates/data/sparsifier.py b/pyemma/coordinates/data/sparsifier.py
index <HASH>..<HASH> 100644
--- a/pyemma/coordinates/data/sparsifier.py
+++ b/pyemma/coordinates/data/sparsifier.py
@@ -58,5 +58,5 @@ class Sparsifier(Transformer):
% (self.data_producer.dimension() - self.dimension()))
self._varying_indices = np.array(self._varying_indices, dtype=int)
- def _map_array(self, X):
+ def _transform_array(self, X):
return X[:, self._varying_indices] | [coor/sparsifier] adopted to map method refactoring | markovmodel_PyEMMA | train | py |
2f6534b212cc8479e971abcf0727e01555d68ff0 | diff --git a/lib/instance/login_user_manager.rb b/lib/instance/login_user_manager.rb
index <HASH>..<HASH> 100644
--- a/lib/instance/login_user_manager.rb
+++ b/lib/instance/login_user_manager.rb
@@ -86,8 +86,11 @@ module RightScale
# nil
def add_user(username, uid)
uid = Integer(uid)
-
- %x(sudo useradd -s /bin/bash -u #{uid} -m #{Shellwords.escape(username)})
+
+ useradd = ['usr/bin/useradd', 'usr/sbin/useradd', 'bin/useradd', 'sbin/useradd'].collect { |key| key if File.executable? key }.first
+ raise SystemConflict, "Failed to find a suitable implementation of 'useradd'." unless useradd
+
+ %x(sudo #{useradd} -s /bin/bash -u #{uid} -m #{Shellwords.escape(username)})
case $?.exitstatus
when 0 | Fix: When logging in as the Rightscale user, 'useradd' is not on the path | rightscale_right_link | train | rb |
f722cd49a86a9c820475fb028bf8e6a12d118ec6 | diff --git a/src/Sulu/Bundle/MediaBundle/Media/Manager/DefaultMediaManager.php b/src/Sulu/Bundle/MediaBundle/Media/Manager/DefaultMediaManager.php
index <HASH>..<HASH> 100644
--- a/src/Sulu/Bundle/MediaBundle/Media/Manager/DefaultMediaManager.php
+++ b/src/Sulu/Bundle/MediaBundle/Media/Manager/DefaultMediaManager.php
@@ -823,7 +823,7 @@ class DefaultMediaManager implements MediaManagerInterface
* @param Media $media
* @return Media
*/
- protected function addFormatsAndUrl(Media $media)
+ public function addFormatsAndUrl(Media $media)
{
$media->setFormats(
$this->formatManager->getFormats( | changed addFormatsAndUrl function from private to protected | sulu_sulu | train | php |
a1758c4b9ba1f908b50552b92db0845686dbd6a3 | diff --git a/src/main/java/io/reactivex/Completable.java b/src/main/java/io/reactivex/Completable.java
index <HASH>..<HASH> 100644
--- a/src/main/java/io/reactivex/Completable.java
+++ b/src/main/java/io/reactivex/Completable.java
@@ -1227,6 +1227,7 @@ public abstract class Completable implements CompletableSource {
* @return the throwable if this terminated with an error, null otherwise
* @throws RuntimeException that wraps an InterruptedException if the wait is interrupted
*/
+ @Nullable
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
public final Throwable blockingGet() {
@@ -1250,6 +1251,7 @@ public abstract class Completable implements CompletableSource {
* @throws RuntimeException that wraps an InterruptedException if the wait is interrupted or
* TimeoutException if the specified timeout elapsed before it
*/
+ @Nullable
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
public final Throwable blockingGet(long timeout, TimeUnit unit) { | Add Nullable annotations for blocking methods in Completable (#<I>) | ReactiveX_RxJava | train | java |
01f35f05b16160cbc892777e84458d97a4be2873 | diff --git a/core/azure-core/src/main/java/com/azure/core/http/policy/HttpLoggingPolicy.java b/core/azure-core/src/main/java/com/azure/core/http/policy/HttpLoggingPolicy.java
index <HASH>..<HASH> 100644
--- a/core/azure-core/src/main/java/com/azure/core/http/policy/HttpLoggingPolicy.java
+++ b/core/azure-core/src/main/java/com/azure/core/http/policy/HttpLoggingPolicy.java
@@ -145,7 +145,7 @@ public class HttpLoggingPolicy implements HttpPipelinePolicy {
logger.asInfo().log("Response body:\n{}", bodyStr);
logger.asInfo().log("<-- END HTTP");
return bufferedResponse;
- });
+ }).switchIfEmpty(Mono.defer(() -> Mono.just(bufferedResponse)));
} else {
logger.asInfo().log("(body content not logged)");
logger.asInfo().log("<-- END HTTP"); | If body is empty then return publisher emitting response instead of propagating empty publisher (#<I>) | Azure_azure-sdk-for-java | train | java |
d05cbcc81665499994ebc1ab25d0495a51a99f66 | diff --git a/lib/sprockets/railtie.rb b/lib/sprockets/railtie.rb
index <HASH>..<HASH> 100644
--- a/lib/sprockets/railtie.rb
+++ b/lib/sprockets/railtie.rb
@@ -4,6 +4,7 @@ require 'action_controller/railtie'
require 'active_support/core_ext/module/remove_method'
require 'sprockets'
require 'sprockets/rails/helper'
+require 'sprockets/rails/version'
module Rails
class Application | railtie.rb gets required directly, and needs VERSION.
Fixes #<I>. | rails_sprockets-rails | train | rb |
5731d2f1a64e0ffc0aa6d8585ec8513d1d7c4d70 | diff --git a/src/Controller/Component/PaginatorComponent.php b/src/Controller/Component/PaginatorComponent.php
index <HASH>..<HASH> 100644
--- a/src/Controller/Component/PaginatorComponent.php
+++ b/src/Controller/Component/PaginatorComponent.php
@@ -35,7 +35,7 @@ use UnexpectedValueException;
*
* @link https://book.cakephp.org/4/en/controllers/components/pagination.html
* @mixin \Cake\Datasource\Paginator
- * @deprecated 4.4.0 Use Cake\Datasource\Paginator directly.
+ * @deprecated 4.4.0 Use Cake\Datasource\Paginator directly. Will be removed in 6.0.
*/
class PaginatorComponent extends Component
{ | Update deprecated tag with <I> removal | cakephp_cakephp | train | php |
8d454205db421fcc656bdd3eb2d0851cef642a5f | diff --git a/stubilous/server.py b/stubilous/server.py
index <HASH>..<HASH> 100644
--- a/stubilous/server.py
+++ b/stubilous/server.py
@@ -1,3 +1,4 @@
+import logging
from flask import Flask, make_response
@@ -31,6 +32,10 @@ def init_routes(app, routes):
route.status,
route.headers)))
+ @app.errorhandler(404)
+ def incorrect_route(ex):
+ logging.error(ex)
+
def run(config):
app = create_app() | Notify about not found urls | CodersOfTheNight_stubilous | train | py |
03e4ba33c70eef73009f89956be533ea556abb31 | diff --git a/elasticsearch_dsl/filter.py b/elasticsearch_dsl/filter.py
index <HASH>..<HASH> 100644
--- a/elasticsearch_dsl/filter.py
+++ b/elasticsearch_dsl/filter.py
@@ -53,4 +53,14 @@ class Bool(BoolMixin, Filter):
# register this as Bool for Filter
Filter._bool = Bool
-EMPTY_FILTER = Bool()
+class MatchAll(Filter):
+ name = 'match_all'
+ def __add__(self, other):
+ return other._clone()
+ __and__ = __rand__ = __radd__ = __add__
+
+ def __or__(self, other):
+ return self
+ __ror__ = __or__
+
+EMPTY_FILTER = MatchAll()
diff --git a/test_elasticsearch_dsl/test_search.py b/test_elasticsearch_dsl/test_search.py
index <HASH>..<HASH> 100644
--- a/test_elasticsearch_dsl/test_search.py
+++ b/test_elasticsearch_dsl/test_search.py
@@ -181,7 +181,7 @@ def test_complex_example():
}
},
'post_filter': {
- 'bool': {'must': [{'terms': {'tags': ['prague', 'czech']}}]}
+ 'terms': {'tags': ['prague', 'czech']}
},
'aggs': {
'per_country': { | Don't wrap all filters in Bool, introduce MatchAll instead | elastic_elasticsearch-dsl-py | train | py,py |
2513f06f43ccd149fda195d4460de042a64efa02 | diff --git a/system/Router/Router.php b/system/Router/Router.php
index <HASH>..<HASH> 100644
--- a/system/Router/Router.php
+++ b/system/Router/Router.php
@@ -389,9 +389,9 @@ class Router implements RouterInterface
foreach ($routes as $key => $value)
{
- $key = $key === '/' ? $key : ltrim($key, '/ ');
- $priority = $this->collection->getRoutesOptions($key)['order'] ?? 0;
- $order[$priority][$key] = $value;
+ $key = $key === '/' ? $key : ltrim($key, '/ ');
+ $priority = $this->collection->getRoutesOptions($key)['order'] ?? 0;
+ $order[abs((int)$priority)][$key] = $value;
}
ksort($order);
$routes = array_merge(...$order); | Added casting to integer and absolute value | codeigniter4_CodeIgniter4 | train | php |
c9d19e0426734d052ac6b09458e49b34aaa5796c | diff --git a/javascript/libjoynr-js/src/main/js/joynr/proxy/ProxyOperation.js b/javascript/libjoynr-js/src/main/js/joynr/proxy/ProxyOperation.js
index <HASH>..<HASH> 100644
--- a/javascript/libjoynr-js/src/main/js/joynr/proxy/ProxyOperation.js
+++ b/javascript/libjoynr-js/src/main/js/joynr/proxy/ProxyOperation.js
@@ -95,7 +95,7 @@ function checkArguments(operationArguments) {
try {
if (Constructor && Constructor.checkMembers) {
- Constructor.checkMembers(argumentValue, Typing.checkPropertyIfDefined);
+ Constructor.checkMembers(argumentValue, Typing.checkProperty);
}
} catch (error) {
errors.push(error.message);
@@ -350,4 +350,4 @@ function ProxyOperation(parent, settings, operationName, operationSignatures) {
return Object.freeze(this);
}
-module.exports = ProxyOperation;
\ No newline at end of file
+module.exports = ProxyOperation; | [JS] Use Typing.checkProperty for parameter checks in ProxyOperation
* if a parameter of a method call is a struct, use checkProperty
instead of checkPropertyIfDefined to make sure missing members
are detected.
Change-Id: Idb8a<I>fa9e<I>adc<I>ddf<I>a<I>d<I>b9d<I> | bmwcarit_joynr | train | js |
a2e27a331fa2dfb6208c0094a62c2b7a62510cef | diff --git a/Net/Gearman/Job.php b/Net/Gearman/Job.php
index <HASH>..<HASH> 100644
--- a/Net/Gearman/Job.php
+++ b/Net/Gearman/Job.php
@@ -30,6 +30,11 @@ if (!defined('NET_GEARMAN_JOB_PATH')) {
define('NET_GEARMAN_JOB_PATH', 'Net/Gearman/Job');
}
+// Define this if you want your Jobs to have a prefix requirement
+if (!defined('NET_GEARMAN_JOB_CLASS_PREFIX')) {
+ define('NET_GEARMAN_JOB_CLASS_PREFIX', 'Net_Gearman_Job_');
+}
+
/**
* Job creation class
*
@@ -63,7 +68,7 @@ abstract class Net_Gearman_Job
{
$file = NET_GEARMAN_JOB_PATH . '/' . $job . '.php';
include_once $file;
- $class = 'Net_Gearman_Job_' . $job;
+ $class = NET_GEARMAN_JOB_CLASS_PREFIX . $job;
if (!class_exists($class)) {
throw new Net_Gearman_Job_Exception('Invalid Job class');
} | Added a define for job class prefix | mhlavac_gearman | train | php |
0e6fa7760b9ad2ccc410186259248d0385d0cf22 | diff --git a/lib/strong_migrations/migration.rb b/lib/strong_migrations/migration.rb
index <HASH>..<HASH> 100644
--- a/lib/strong_migrations/migration.rb
+++ b/lib/strong_migrations/migration.rb
@@ -222,7 +222,7 @@ end"
def backfill_code(table, column, default)
model = table.to_s.classify
- "#{model}.in_batches.update_all #{column}: #{default.inspect}"
+ "#{model}.in_batches do |relation| \n relation.update_all #{column}: #{default.inspect}\n sleep(0.1)\n end"
end
def stop!(message, header: "Custom check") | Added throttling to backfill instructions | ankane_strong_migrations | train | rb |
41b888de7411558aed1b9656b92b8f36237e494b | diff --git a/src/extensions/reorder-columns/bootstrap-table-reorder-columns.js b/src/extensions/reorder-columns/bootstrap-table-reorder-columns.js
index <HASH>..<HASH> 100644
--- a/src/extensions/reorder-columns/bootstrap-table-reorder-columns.js
+++ b/src/extensions/reorder-columns/bootstrap-table-reorder-columns.js
@@ -129,7 +129,9 @@ $.BootstrapTable = class extends $.BootstrapTable {
})
this.columnsSortOrder = sortOrder
- this.persistReorderColumnsState(this)
+ if (this.options.cookie) {
+ this.persistReorderColumnsState(this)
+ }
const ths = []
const formatters = [] | only execute that function if the cookie plugin is enabled (#<I>) | wenzhixin_bootstrap-table | train | js |
0a7003604d0af17d9aa5f10e33f1d35f0fddc2f7 | diff --git a/pysat/_files.py b/pysat/_files.py
index <HASH>..<HASH> 100644
--- a/pysat/_files.py
+++ b/pysat/_files.py
@@ -152,7 +152,7 @@ class Files(object):
# store write to disk preference
self.write_to_disk = write_to_disk
if self.write_to_disk is False:
- # using blank memory rather than loading from diisk
+ # using blank memory rather than loading from disk
self._previous_file_list = pds.Series([], dtype='a')
self._current_file_list = pds.Series([], dtype='a')
@@ -720,11 +720,11 @@ def parse_delimited_filenames(files, format_str, delimiter):
import collections
# create storage for data to be parsed from filenames
- stored = collections.OrderedDict()
- stored['year'] = []; stored['month'] = []; stored['day'] = [];
- stored['hour'] = []; stored['min'] = []; stored['sec'] = [];
- stored['version'] = []; stored['revision'] = [];
-
+ ordered_keys = ['year', 'month', 'day', 'hour', 'min', 'sec',
+ 'version', 'revision']
+ stored = collections.OrderedDict({kk:list() for kk in ordered_keys})
+
+ # exit early if there are no files
if len(files) == 0:
stored['files'] = []
# include format string as convenience for later functions | Fixed comment typo. Cleaned up generation of OrderedDict | rstoneback_pysat | train | py |
42bcdee60f9814f6c01a39f8c7b847847bc1fe0d | diff --git a/src/Repositories/PdoRepository.php b/src/Repositories/PdoRepository.php
index <HASH>..<HASH> 100644
--- a/src/Repositories/PdoRepository.php
+++ b/src/Repositories/PdoRepository.php
@@ -61,18 +61,15 @@ abstract class PdoRepository extends Repository
*/
public static function getPdoParamName($columnName)
{
- $alias = $columnName;
- $count = 1;
+ if (isset(self::$pdoParamAliasesUsed[$columnName])) {
+ self::$pdoParamAliasesUsed[$columnName]++;
- while(in_array($alias, self::$pdoParamAliasesUsed)){
- $count++;
+ return $columnName . self::$pdoParamAliasesUsed[$columnName];
+ } else {
+ self::$pdoParamAliasesUsed[$columnName] = 1;
- $alias = $columnName.$count;
+ return $columnName;
}
-
- self::$pdoParamAliasesUsed[] = $alias;
-
- return $alias;
}
/** | Speed improvement by using isset instead of in_array | RhubarbPHP_Module.Stem | train | php |
915eb64d0e29d1bc0d6b17c2345b31f1607999ca | diff --git a/src/Http/Validation/SeatSettings.php b/src/Http/Validation/SeatSettings.php
index <HASH>..<HASH> 100644
--- a/src/Http/Validation/SeatSettings.php
+++ b/src/Http/Validation/SeatSettings.php
@@ -54,6 +54,7 @@ class SeatSettings extends FormRequest
$allowed_force_min_mask = implode(',', Seat::$options['force_min_mask']);
$allowed_sso = implode(',', Seat::$options['allow_sso']);
$allowed_tracking = implode(',', Seat::$options['allow_tracking']);
+ $require_activation = implode(',', Seat::$options['require_activation']);
return [
'registration' => 'required|in:' . $allowed_registration,
@@ -63,6 +64,7 @@ class SeatSettings extends FormRequest
'min_corporation_access_mask' => 'required|numeric',
'allow_sso' => 'required|in:' . $allowed_sso,
'allow_tracking' => 'required|in:' . $allowed_tracking,
+ 'require_activation' => 'required|in:' . $require_activation,
];
}
} | Ensure require_activation is validated. | eveseat_web | train | php |
f33aa9c94557e634e72caf868872d7d8662cc24a | diff --git a/neurom/core/__init__.py b/neurom/core/__init__.py
index <HASH>..<HASH> 100644
--- a/neurom/core/__init__.py
+++ b/neurom/core/__init__.py
@@ -30,7 +30,6 @@
from .tree import i_chain2 as _chain_neurites
from .tree import Tree as _Tree
-from .types import NeuriteType
def iter_neurites(obj, mapfun=None, filt=None):
diff --git a/neurom/core/section_neuron.py b/neurom/core/section_neuron.py
index <HASH>..<HASH> 100644
--- a/neurom/core/section_neuron.py
+++ b/neurom/core/section_neuron.py
@@ -33,7 +33,7 @@ from collections import defaultdict
from collections import namedtuple
import numpy as np
from neurom.io.hdf5 import H5
-from neurom.core import NeuriteType
+from neurom.core.types import NeuriteType
from neurom.core.tree import Tree, ipreorder, ibifurcation_point
from neurom.core.types import tree_type_checker as is_type
from neurom.core.dataformat import POINT_TYPE | Remove import of core.types.NeuriteTypes in core.__init__.py
This import was causing problems in setup.py, which is run before
the Enum<I> package has been installed. | BlueBrain_NeuroM | train | py,py |
a9e1d11af4334052365514088385ac700f0048b6 | diff --git a/src/CachePlugin.php b/src/CachePlugin.php
index <HASH>..<HASH> 100644
--- a/src/CachePlugin.php
+++ b/src/CachePlugin.php
@@ -76,7 +76,6 @@ final class CachePlugin implements Plugin
throw new \InvalidArgumentException('You can\'t provide config option "respect_cache_headers" and "respect_response_cache_directives". '.'Use "respect_response_cache_directives" instead.');
}
-
$optionsResolver = new OptionsResolver();
$this->configureOptions($optionsResolver);
$this->config = $optionsResolver->resolve($config); | Fix CI # 3
- that has been a fine restriction | php-http_cache-plugin | train | php |
20bdef77472cd64cf6f5a8ed679998234792c28c | diff --git a/orianna/src/main/java/com/merakianalytics/orianna/datapipeline/ImageDownloader.java b/orianna/src/main/java/com/merakianalytics/orianna/datapipeline/ImageDownloader.java
index <HASH>..<HASH> 100644
--- a/orianna/src/main/java/com/merakianalytics/orianna/datapipeline/ImageDownloader.java
+++ b/orianna/src/main/java/com/merakianalytics/orianna/datapipeline/ImageDownloader.java
@@ -15,6 +15,7 @@ import com.merakianalytics.datapipelines.sources.AbstractDataSource;
import com.merakianalytics.datapipelines.sources.Get;
import com.merakianalytics.datapipelines.sources.GetMany;
import com.merakianalytics.orianna.datapipeline.common.HTTPClient;
+import com.merakianalytics.orianna.datapipeline.common.HTTPClient.Configuration;
import com.merakianalytics.orianna.datapipeline.common.HTTPClient.Response;
import com.merakianalytics.orianna.datapipeline.common.Utilities;
import com.merakianalytics.orianna.types.common.OriannaException;
@@ -23,7 +24,9 @@ public class ImageDownloader extends AbstractDataSource {
private final HTTPClient client;
public ImageDownloader() {
- client = new HTTPClient();
+ final Configuration config = new Configuration();
+ config.setHttps(false); // TODO: Make this configurable
+ client = new HTTPClient(config);
}
@Get(BufferedImage.class) | Don't try to use SSL for ddrgon images | meraki-analytics_orianna | train | java |
1dcdd3fb3f139d9fad3df072104af3110cb31733 | diff --git a/gson/src/main/java/com/google/gson/JsonSerializationVisitor.java b/gson/src/main/java/com/google/gson/JsonSerializationVisitor.java
index <HASH>..<HASH> 100644
--- a/gson/src/main/java/com/google/gson/JsonSerializationVisitor.java
+++ b/gson/src/main/java/com/google/gson/JsonSerializationVisitor.java
@@ -135,10 +135,8 @@ final class JsonSerializationVisitor implements ObjectNavigator.Visitor {
}
public void visitPrimitive(Object obj) {
- if (obj != null) {
- JsonElement json = new JsonPrimitive(obj);
- assignToRoot(json);
- }
+ JsonElement json = obj == null ? JsonNull.createJsonNull() : new JsonPrimitive(obj);
+ assignToRoot(json);
}
private void addAsChildOfObject(Field f, Type fieldType, Object fieldValue) { | Implemented suggestions from code review r<I> by adding a JsonNull for primitives if the value is null. | google_gson | train | java |
495cd4dee041fd8ad09d8a751ae1d019da535444 | diff --git a/cohorts/load.py b/cohorts/load.py
index <HASH>..<HASH> 100644
--- a/cohorts/load.py
+++ b/cohorts/load.py
@@ -273,7 +273,7 @@ class Cohort(object):
def plot_benefit(self, on, col=None, col_equals=None):
plot_col, df = self.plot_init(on, col, col_equals)
original_len = len(df)
- df = df[~df[self.benefit_col].isnull()]
+ df = df[df[self.benefit_col].notnull()]
updated_len = len(df)
df[self.benefit_col] = df[self.benefit_col].apply(bool)
if updated_len < original_len: | not isnull to notnull | hammerlab_cohorts | train | py |
1f6ceefedac6a802fa50e8f342d6dda424141e6e | diff --git a/helios-client/src/main/java/com/spotify/helios/client/AuthenticatingHttpConnector.java b/helios-client/src/main/java/com/spotify/helios/client/AuthenticatingHttpConnector.java
index <HASH>..<HASH> 100644
--- a/helios-client/src/main/java/com/spotify/helios/client/AuthenticatingHttpConnector.java
+++ b/helios-client/src/main/java/com/spotify/helios/client/AuthenticatingHttpConnector.java
@@ -210,7 +210,9 @@ public class AuthenticatingHttpConnector implements HttpConnector {
} catch (Exception e) {
// We catch everything because right now the masters do not require authentication.
// So delay reporting errors to the user until the servers return 401 Unauthorized.
- log.debug("Couldn't get identities from ssh-agent", e);
+ log.debug("Unable to get identities from ssh-agent. Note that this might not indicate"
+ + " an actual problem unless your Helios cluster requires authentication"
+ + " for all requests.", e);
}
} | less-scary error message when SSH_AUTH_SOCK is not set
Although we only output the previous message at DEBUG, this log message
can be confusing for users if they have failed tests or another problem
with their client talking to Helios that leads them to think that the
SSH_AUTH_SOCK is the real issue.
So try to wordsmith the message a little bit to indicate it probably
isn't a real issue in all cases. | spotify_helios | train | java |
eb93956f749578984f952b1268cde140d41e795e | diff --git a/lib/Rails/ActiveRecord/Base.php b/lib/Rails/ActiveRecord/Base.php
index <HASH>..<HASH> 100755
--- a/lib/Rails/ActiveRecord/Base.php
+++ b/lib/Rails/ActiveRecord/Base.php
@@ -878,7 +878,7 @@ abstract class Base
static::connection()->executeSql($d);
if (ActiveRecord::lastError()) {
- $this->errors()->addToBase(ActiveRecord::lastError());
+ # The error is logged by Connection.
return false;
}
}
diff --git a/lib/Rails/ActiveRecord/Connection.php b/lib/Rails/ActiveRecord/Connection.php
index <HASH>..<HASH> 100755
--- a/lib/Rails/ActiveRecord/Connection.php
+++ b/lib/Rails/ActiveRecord/Connection.php
@@ -133,7 +133,7 @@ class Connection
$msg = "Error on database query execution\n";
$msg .= $e->getMessage();
- Rails::log()->message($msg);
+ Rails::log()->warning($msg);
}
return $stmt;
} | Changed error handling upon save.
If an error occured in AR\Base when saving a model, the whole array would be added to the errors object of the model, instead of only adding the message of the error.
This would cause an error when trying to implode the error messages with fullMessages(). Furthermore, storing database errors in the model could be dangerous as they could be shown to the users.
Now if an error occurs, it's ignored, because AR\Connection already logs them as warnings. | railsphp_railsphp | train | php,php |
91c87018227b948bba1359391b0416804e0a5d66 | diff --git a/panphon/_panphon.py b/panphon/_panphon.py
index <HASH>..<HASH> 100644
--- a/panphon/_panphon.py
+++ b/panphon/_panphon.py
@@ -388,3 +388,7 @@ class FeatureTable(object):
pattern = u''.join(sequence)
regex = re.compile(pattern)
return regex
+
+ def segment_to_vector(self, seg):
+ ft_dict = dict(self.seg_dict[seg])
+ return [ft_dict[name] for name in self.names] | Added segment_to_vector | dmort27_panphon | train | py |
Subsets and Splits
Java Commits in Train Set
Queries for all entries where the diff_languages column is 'java', providing a filtered dataset but without deeper analysis.
Java Commits Test Data
Returns a subset of 5,000 entries from the dataset where the diff_languages column is 'java', providing basic filtering for exploration.
Java Commits Sample
Retrieves the first 1,000 records where the 'diff_languages' column is 'java', providing limited insight into the specific data entries.
Java Commits Validation Sample
Retrieves a sample of entries from the validation dataset where the diff languages are Java, providing limited insight into specific Java-related data points.
Java Commits in Validation
This query retrieves a limited sample of entries from the validation dataset where the diff_languages column is 'java', providing basic filtering with minimal insight.
Java Commits Sample
This query retrieves a sample of 100 records where the 'diff_languages' is 'java', providing basic filtering but limited analytical value.
Java Commits Sample
Retrieves 100 samples where the language difference is Java, providing basic filtering but minimal analytical value.
Java Commits Sample
Retrieves 10 samples where the diff_languages column is 'java', providing basic examples of data entries with this specific language.
Java Commits Validation Sample
Retrieves 1,000 records where the differences in languages are marked as Java, providing a snapshot of that specific subset but limited to raw data.
Java Commits Sample
This query retrieves 1000 random samples from the dataset where the programming language is Java, offering limited insight beyond raw data.