hash stringlengths 40 40 | diff stringlengths 131 26.7k | message stringlengths 7 694 | project stringlengths 5 67 | split stringclasses 1 value | diff_languages stringlengths 2 24 |
|---|---|---|---|---|---|
73fc6c58366c19e3ceec15e89f193398297c7f9c | diff --git a/sling/core/commons/src/main/java/com/composum/sling/cpnl/CpnlElFunctions.java b/sling/core/commons/src/main/java/com/composum/sling/cpnl/CpnlElFunctions.java
index <HASH>..<HASH> 100644
--- a/sling/core/commons/src/main/java/com/composum/sling/cpnl/CpnlElFunctions.java
+++ b/sling/core/commons/src/main/java/com/composum/sling/cpnl/CpnlElFunctions.java
@@ -19,7 +19,7 @@ public class CpnlElFunctions {
private static final Logger LOG = LoggerFactory.getLogger(CpnlElFunctions.class);
- public static final Pattern HREF_PATTERN = Pattern.compile("(<a(\\s*.*)?\\s*href\\s*=\\s*['\"])([^'\"]+)([\"'])");
+ public static final Pattern HREF_PATTERN = Pattern.compile("(<a(\\s*[^>]*)?\\s*href\\s*=\\s*['\"])([^'\"]+)([\"'])");
public static String i18n(SlingHttpServletRequest request, String text) {
String translated = null; | fix for the content link transformation in rich text properties | ist-dresden_composum | train | java |
1a45db0fb7669e21a038d8faee4b761021b01760 | diff --git a/salt/modules/win_dns_client.py b/salt/modules/win_dns_client.py
index <HASH>..<HASH> 100644
--- a/salt/modules/win_dns_client.py
+++ b/salt/modules/win_dns_client.py
@@ -45,7 +45,10 @@ def get_dns_servers(interface='Local Area Connection'):
for iface in c.Win32_NetworkAdapter(NetEnabled=True):
if interface == iface.NetConnectionID:
iface_config = c.Win32_NetworkAdapterConfiguration(Index=iface.Index).pop()
- return list(iface_config.DNSServerSearchOrder)
++ try:
++ return list(iface_config.DNSServerSearchOrder)
++ except:
++ return []
log.debug('Interface "{0}" not found'.format(interface))
return False | Update win_dns_client.py
If there is no dns address setted in the windows client, the iface_config.DNSServerSearchOrder method will return None, therefore this module will generate TypeError, 'NoneType' object is not iterable. | saltstack_salt | train | py |
e49a011721c8cf8dc9a09071280fa7ecf4a6bf90 | diff --git a/server.go b/server.go
index <HASH>..<HASH> 100644
--- a/server.go
+++ b/server.go
@@ -180,6 +180,9 @@ func (s *Server) processPacket(b []byte, addr Addr) {
}
s.mu.Lock()
defer s.mu.Unlock()
+ if s.closed.IsSet() {
+ return
+ }
if d.Y == "q" {
readQuery.Add(1)
s.handleQuery(addr, d) | dht: Stop processing packet if server closes after unmarshalling | anacrolix_dht | train | go |
5638adb16cd3f10bc587c249783715596b8a1bac | diff --git a/modules/quality-immutable-object/src/main/java/net/sf/qualitycheck/immutableobject/domain/Package.java b/modules/quality-immutable-object/src/main/java/net/sf/qualitycheck/immutableobject/domain/Package.java
index <HASH>..<HASH> 100644
--- a/modules/quality-immutable-object/src/main/java/net/sf/qualitycheck/immutableobject/domain/Package.java
+++ b/modules/quality-immutable-object/src/main/java/net/sf/qualitycheck/immutableobject/domain/Package.java
@@ -16,6 +16,11 @@ public final class Package implements Characters {
public static final Package UNDEFINED = new Package();
/**
+ * Represents the package <code>java.lang</code>
+ */
+ public static final Package JAVA_LANG = new Package("java.lang");
+
+ /**
* Prefix when written in class file
*/
private static final String PREFIX = "package"; | Added type definition to be able to query them in StringTemplate
templates which have very limited conditionals | before_quality-check | train | java |
63b8f3e059c9c9e96420c15d155400011de4effa | diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -6,8 +6,8 @@ import sys
import distutils.errors
import setuptools
-if not hasattr(sys, "hexversion") or sys.hexversion < 0x02040000:
- raise distutils.errors.DistutilsError("Python 2.4 or newer is required")
+if not hasattr(sys, "hexversion") or sys.hexversion < 0x02060000:
+ raise distutils.errors.DistutilsError("Python 2.6 or newer is required")
if os.name == "posix":
from setup_posix import get_config | Requires at least Python <I> | PyMySQL_mysqlclient-python | train | py |
11d39125dee8da4551814129d304bfcf6a971c16 | diff --git a/jsonapi.go b/jsonapi.go
index <HASH>..<HASH> 100644
--- a/jsonapi.go
+++ b/jsonapi.go
@@ -103,7 +103,7 @@ type Vin struct {
Txid string `json:"txid,omitempty"`
Vout int `json:"vout,omitempty"`
ScriptSig *ScriptSig `json:"scriptSig,omitempty"`
- Sequence float64 `json:"sequence"`
+ Sequence uint32 `json:"sequence"`
}
// Vout models parts of the tx data. It is defined seperately since both | The sequence num of a tx input is a uint<I>. | btcsuite_btcd | train | go |
2e2d074566c9ce21c559d7b62eff2e4414a8d72d | diff --git a/tests/test_common.py b/tests/test_common.py
index <HASH>..<HASH> 100644
--- a/tests/test_common.py
+++ b/tests/test_common.py
@@ -68,9 +68,8 @@ def test_normalization_by_one(left, right, alg):
assert isclose(s + d, 1)
-# TODO: fix 0-size strings
@pytest.mark.parametrize('alg', ALGS)
-@hypothesis.given(text=hypothesis.strategies.text(min_size=1))
+@hypothesis.given(text=hypothesis.strategies.text())
def test_normalization_same(text, alg):
assert alg.normalized_distance(text, text) == 0
assert alg.distance(text, text) == 0
diff --git a/textdistance/algorithms/base.py b/textdistance/algorithms/base.py
index <HASH>..<HASH> 100644
--- a/textdistance/algorithms/base.py
+++ b/textdistance/algorithms/base.py
@@ -41,7 +41,7 @@ class Base:
"""
maximum = self.maximum(*sequences)
if maximum == 0:
- return 1
+ return 0
return self.distance(*sequences) / maximum
def normalized_similarity(self, *sequences): | fix normalized distance for empty strings. CLose #<I> | orsinium_textdistance | train | py,py |
7b9fd1f9a1000d43a3d6372d0115dc83c75a237c | diff --git a/spyder_kernels/console/tests/test_console_kernel.py b/spyder_kernels/console/tests/test_console_kernel.py
index <HASH>..<HASH> 100644
--- a/spyder_kernels/console/tests/test_console_kernel.py
+++ b/spyder_kernels/console/tests/test_console_kernel.py
@@ -27,7 +27,6 @@ import pytest
from flaky import flaky
from jupyter_core import paths
from jupyter_client import BlockingKernelClient
-from ipython_genutils import py3compat
import numpy as np
# Local imports
@@ -77,7 +76,8 @@ def setup_kernel(cmd):
if kernel.poll() is not None:
o,e = kernel.communicate()
- e = py3compat.cast_unicode(e)
+ if not PY3 and isinstance(e, bytes):
+ e = e.decode()
raise IOError("Kernel failed to start:\n%s" % e)
if not os.path.exists(connection_file): | Remove deps on ipython_genutils
It requires nose in test suite, that is not package on many linux distrib
and should not be used anymore.
Technically this change is a bit more conservative than what genutils
was doing when trying to detect the default encoding, but that should
not matter much now. | spyder-ide_spyder-kernels | train | py |
37e4eaf5e3fa489bd0597f330ead0c3c11b9a091 | diff --git a/condoor/version.py b/condoor/version.py
index <HASH>..<HASH> 100644
--- a/condoor/version.py
+++ b/condoor/version.py
@@ -1,3 +1,3 @@
"""Version information."""
-__version__ = '1.0.15'
+__version__ = '1.0.16' | Bumping version number to <I> | kstaniek_condoor | train | py |
cbe60067a9a657bcbd5d2f4546280471d30e24b2 | diff --git a/packages/origin.js/src/resources/purchases.js b/packages/origin.js/src/resources/purchases.js
index <HASH>..<HASH> 100644
--- a/packages/origin.js/src/resources/purchases.js
+++ b/packages/origin.js/src/resources/purchases.js
@@ -4,12 +4,18 @@ class Purchases {
this.ipfsService = ipfsService
}
- async get(address){
+ async contractFn(address, functionName, args=[]){
const purchaseContract = this.contractService.purchaseContract
const purchase = await purchaseContract.at(address)
const account = await this.contractService.currentAccount()
- const contractData = await purchase.data(
- { from: account }
+ args.push({ from: account })
+ return await purchase[functionName](args)
+ }
+
+ async get(address){
+ const contractData = await this.contractFn(
+ address,
+ 'data'
)
return {
address: address, | Refactor out helper for calling contract functions | OriginProtocol_origin-js | train | js |
a6cf314a0b3f453597ef84b03ac8b5c8e0f0d4b0 | diff --git a/ledgerautosync/ledger.py b/ledgerautosync/ledger.py
index <HASH>..<HASH> 100644
--- a/ledgerautosync/ledger.py
+++ b/ledgerautosync/ledger.py
@@ -81,7 +81,11 @@ class Ledger(object):
self.t.daemon = True # thread dies with the program
self.t.start()
# read output until prompt
- self.q.get()
+ try:
+ self.q.get(True, 5)
+ except Empty:
+ logging.error("Could not get prompt from ledger!")
+ exit(1)
def run(self, cmd):
if self.use_pipe:
@@ -89,7 +93,11 @@ class Ledger(object):
self.p.stdin.write(" ".join(pipe_clean(cmd)))
self.p.stdin.write("\n")
logging.debug(" ".join(pipe_clean(cmd)))
- return ET.fromstring(self.q.get())
+ try:
+ return ET.fromstring(self.q.get(True, 5))
+ except Empty:
+ logging.error("Could not get prompt from ledger!")
+ exit(1)
else:
cmd = self.args + ["xml"] + cmd
if os.name == 'nt': | add timeouts for reading prompts | egh_ledger-autosync | train | py |
31cad9151aa2a7070e73bc53dc2d104fdce7e0c6 | diff --git a/src/FontLib/AdobeFontMetrics.php b/src/FontLib/AdobeFontMetrics.php
index <HASH>..<HASH> 100644
--- a/src/FontLib/AdobeFontMetrics.php
+++ b/src/FontLib/AdobeFontMetrics.php
@@ -139,7 +139,7 @@ class AdobeFontMetrics {
$this->endSection("CharMetrics");
$kern = $font->getData("kern", "subtable");
- $tree = $kern["tree"];
+ $tree = is_array($kern) ? $kern["tree"] : null;
if (!$encoding && is_array($tree)) {
$this->startSection("KernData"); | Fixes a problem with php <I>
This would fix the issue #<I> | PhenX_php-font-lib | train | php |
b1bdcbe6db407d34ae1d9a35e85cf5d144b40b6e | diff --git a/rinoh/style.py b/rinoh/style.py
index <HASH>..<HASH> 100644
--- a/rinoh/style.py
+++ b/rinoh/style.py
@@ -197,6 +197,10 @@ class SelectorWithPriority(Selector):
return self.selector.get_style_name(matcher)
@property
+ def selectors(self):
+ return (self, )
+
+ @property
def referenced_selectors(self):
return self.selector.referenced_selectors | Implement SelectorWithPriority.selectors
The Selector class hierarchy can use some cleaning up. | brechtm_rinohtype | train | py |
8c6f66a7ab5fcd69c64ad468d79238701a0fac70 | diff --git a/imgaug/augmenters/meta.py b/imgaug/augmenters/meta.py
index <HASH>..<HASH> 100644
--- a/imgaug/augmenters/meta.py
+++ b/imgaug/augmenters/meta.py
@@ -3830,8 +3830,8 @@ class Noop(Augmenter):
super(Noop, self).__init__(name=name, deterministic=deterministic,
random_state=random_state)
- def _augment_images(self, images, random_state, parents, hooks):
- return images
+ def _augment_batch(self, batch, random_state, parents, hooks):
+ return batch
def get_parameters(self):
return [] | Switch Noop to augment_batch() interface | aleju_imgaug | train | py |
292011eac33e96c93cf6c01a1338c2f009e11f07 | diff --git a/tests/test_pages.py b/tests/test_pages.py
index <HASH>..<HASH> 100644
--- a/tests/test_pages.py
+++ b/tests/test_pages.py
@@ -11,7 +11,16 @@ except ImportError:
import pytest
from conftest import skip_if_pypy
-from pikepdf import Array, Dictionary, Name, Page, Pdf, PdfMatrix, Stream
+from pikepdf import (
+ Array,
+ Dictionary,
+ Name,
+ Page,
+ Pdf,
+ PdfMatrix,
+ Stream,
+ __libqpdf_version__,
+)
from pikepdf._cpphelpers import label_from_label_dict
# pylint: disable=redefined-outer-name,pointless-statement
@@ -343,7 +352,7 @@ def test_add_foreign_twice(graph, outpdf):
out.save(outpdf)
-@pytest.mark.xfail(reason="needs qpdf fix to issue 514")
+@pytest.mark.xfail(__libqpdf_version__ < '10.3.2', reason="qpdf issue 514")
def test_add_twice_without_copy_foreign(graph, outpdf):
out = Pdf.new()
out.pages.append(graph.pages[0]) | tests: mark test as passing newer qpdf versions | pikepdf_pikepdf | train | py |
f8633e061c4198083aa3b8fdcb52840d33084660 | diff --git a/test/model_test.rb b/test/model_test.rb
index <HASH>..<HASH> 100644
--- a/test/model_test.rb
+++ b/test/model_test.rb
@@ -974,7 +974,7 @@ class ModelTest < Test::Unit::TestCase
end
context "Exporting" do
- class Person < Ohm::Model
+ class Venue < Ohm::Model
attribute :name
def validate
@@ -984,14 +984,14 @@ class ModelTest < Test::Unit::TestCase
context "a new model without errors" do
should "export an empty hash via to_hash" do
- person = Person.new
+ person = Venue.new
assert_equal({}, person.to_hash)
end
end
context "a new model with some errors" do
should "export a hash with the errors" do
- person = Person.new
+ person = Venue.new
person.valid?
assert_equal({ :errors => [[:name, :not_present]] }, person.to_hash)
@@ -1000,14 +1000,14 @@ class ModelTest < Test::Unit::TestCase
context "an existing model" do
should "export a hash with the its id" do
- person = Person.create(:name => "John Doe")
+ person = Venue.create(:name => "John Doe")
assert_equal({ :id => '1' }, person.to_hash)
end
end
context "an existing model with validation errors" do
should "export a hash with its id and the errors" do
- person = Person.create(:name => "John Doe")
+ person = Venue.create(:name => "John Doe")
person.name = nil
person.valid? | Fix build under <I>.
Need to investigate if this is an issue in real-world apps or just
in our test suite. | soveran_ohm | train | rb |
ccf63acf80a9d881f40d1433166c7c7c44b067e8 | diff --git a/service-catalog-ui/src/components/ServiceInstanceList/ServiceInstanceTable/ServiceInstanceRowRenderer.js b/service-catalog-ui/src/components/ServiceInstanceList/ServiceInstanceTable/ServiceInstanceRowRenderer.js
index <HASH>..<HASH> 100644
--- a/service-catalog-ui/src/components/ServiceInstanceList/ServiceInstanceTable/ServiceInstanceRowRenderer.js
+++ b/service-catalog-ui/src/components/ServiceInstanceList/ServiceInstanceTable/ServiceInstanceRowRenderer.js
@@ -157,7 +157,6 @@ const BindingUsagesCount = ({ instance }) => {
};
const Status = ({ instance }) => {
- instance.status = undefined;
const type = instance.status ? instance.status.type : 'UNKNOWN';
return (
<StatusBadge | Remove forgotten testing assignment (#<I>) | kyma-project_console | train | js |
6a1b583f1eb79bb24a5424337c171c0289f07f61 | diff --git a/app/models/content.rb b/app/models/content.rb
index <HASH>..<HASH> 100644
--- a/app/models/content.rb
+++ b/app/models/content.rb
@@ -46,6 +46,18 @@ class Content < ActiveRecord::Base
end
end
+ # Set the text filter for this object.
+ # NOTE: Due to how Rails injects association methods, this cannot be put in ContentBase
+ # TODO: Allowing assignment of a string here is not very clean.
+ def text_filter= filter
+ filter_object = filter.to_text_filter
+ if filter_object
+ self.text_filter_id = filter_object.id
+ else
+ self.text_filter_id = filter.to_i
+ end
+ end
+
def shorten_url
return unless self.published
diff --git a/app/models/content_base.rb b/app/models/content_base.rb
index <HASH>..<HASH> 100644
--- a/app/models/content_base.rb
+++ b/app/models/content_base.rb
@@ -10,16 +10,6 @@ module ContentBase
attr_accessor :just_changed_published_status
alias_method :just_changed_published_status?, :just_changed_published_status
- # Set the text filter for this object.
- def text_filter= filter
- filter_object = filter.to_text_filter
- if filter_object
- self.text_filter_id = filter_object.id
- else
- self.text_filter_id = filter.to_i
- end
- end
-
def really_send_notifications
interested_users.each do |value|
send_notification_to_user(value) | Move #text_filter= override to where it will be picked up | publify_publify | train | rb,rb |
d482aec18212730b92b623fccb0bc5303e8523d1 | diff --git a/lib/searchkick/reindex.rb b/lib/searchkick/reindex.rb
index <HASH>..<HASH> 100644
--- a/lib/searchkick/reindex.rb
+++ b/lib/searchkick/reindex.rb
@@ -17,7 +17,7 @@ module Searchkick
# check if alias exists
if Searchkick.client.indices.exists_alias(name: alias_name)
# import before swap
- searchkick_import(index) unless skip_import
+ searchkick_import(index: index) unless skip_import
# get existing indices to remove
old_indices = Searchkick.client.indices.get_alias(name: alias_name).keys
@@ -29,7 +29,7 @@ module Searchkick
Searchkick.client.indices.update_aliases body: {actions: [{add: {index: new_name, alias: alias_name}}]}
# import after swap
- searchkick_import(index) unless skip_import
+ searchkick_import(index: index) unless skip_import
end
index.refresh
@@ -52,9 +52,8 @@ module Searchkick
@descendents << klass unless @descendents.include?(klass)
end
- private
-
- def searchkick_import(index)
+ def searchkick_import(options = {})
+ index = options[:index] || searchkick_index
batch_size = searchkick_options[:batch_size] || 1000
# use scope for import | Expose searchkick_import and searchkick_index_options | ankane_searchkick | train | rb |
72db25876961114b9eb4585eb0332b6424286702 | diff --git a/tests/test_s3_calling_format.py b/tests/test_s3_calling_format.py
index <HASH>..<HASH> 100644
--- a/tests/test_s3_calling_format.py
+++ b/tests/test_s3_calling_format.py
@@ -1,4 +1,5 @@
import boto
+import inspect
import os
import pytest
@@ -272,7 +273,15 @@ def test_cipher_suites():
# seems to be a more natural choice, but leaves the '.sock'
# attribute null.
conn.get_all_buckets()
- htcon = conn._pool.get_http_connection('s3.amazonaws.com', True)
+
+ # Set up 'port' keyword argument for newer Botos that require it.
+ spec = inspect.getargspec(conn._pool.get_http_connection)
+ kw = {'host': 's3.amazonaws.com',
+ 'is_secure': True}
+ if 'port' in spec.args:
+ kw['port'] = 443
+
+ htcon = conn._pool.get_http_connection(**kw)
chosen_cipher_suite = htcon.sock.cipher()[0].split('-') | Test compatability with newer boto
New Boto versions require a 'port' argument be supplied to
get_http_connection. | wal-e_wal-e | train | py |
8cd0d1fd9ad11e4da2f4d8b9c943704b809adc52 | diff --git a/src/Type/AbstractPhpEnumType.php b/src/Type/AbstractPhpEnumType.php
index <HASH>..<HASH> 100644
--- a/src/Type/AbstractPhpEnumType.php
+++ b/src/Type/AbstractPhpEnumType.php
@@ -41,7 +41,7 @@ abstract class AbstractPhpEnumType extends Type
*/
public function getSQLDeclaration(array $fieldDeclaration, AbstractPlatform $platform)
{
- return 'VARCHAR(256) COMMENT "php_enum"';
+ return $platform->getVarcharTypeDeclarationSQL([]);
}
/**
diff --git a/tests/Type/AbstractPhpEnumTypeTest.php b/tests/Type/AbstractPhpEnumTypeTest.php
index <HASH>..<HASH> 100644
--- a/tests/Type/AbstractPhpEnumTypeTest.php
+++ b/tests/Type/AbstractPhpEnumTypeTest.php
@@ -42,8 +42,12 @@ class AbstractPhpEnumTypeTest extends TestCase
public function testGetSQLDeclaration()
{
+ $this->platform
+ ->method('getVarcharTypeDeclarationSQL')
+ ->will($this->returnValue('declaration'));
+
$this->assertEquals(
- 'VARCHAR(256) COMMENT "php_enum"',
+ 'declaration',
$this->type->getSQLDeclaration([], $this->platform)
);
} | Removed column comment from type SQL declaration.
Some platforms supported by doctrine don't support comments (e.g sqlite, which I'm trying to use for integration tests). Looking into the primary doctrine column definitions it doesn't seem like doctrine uses comments anywhere.
Seeing as the comment doesn't seem to have any programmtic use I've removed it. | acelaya_doctrine-enum-type | train | php,php |
eeff63e15758b30a7ec0cfa8d37af68eb5519664 | diff --git a/alphatwirl/concurrently/run.py b/alphatwirl/concurrently/run.py
index <HASH>..<HASH> 100755
--- a/alphatwirl/concurrently/run.py
+++ b/alphatwirl/concurrently/run.py
@@ -140,8 +140,8 @@ def compose_result_path(package_path):
##__________________________________________________________________||
def store_result(result, result_path):
mkdir_p(os.path.dirname(result_path))
- f = gzip.open(result_path, 'wb')
- pickle.dump(result, f, protocol=pickle.HIGHEST_PROTOCOL)
+ with gzip.open(result_path, 'wb') as f:
+ pickle.dump(result, f, protocol=pickle.HIGHEST_PROTOCOL)
##__________________________________________________________________||
def mkdir_p(path): | use with for file in run.py | alphatwirl_alphatwirl | train | py |
8319e5db9fbf45d18f89881b3f2ba9cb361d878e | diff --git a/src/Elements/CssStore.js b/src/Elements/CssStore.js
index <HASH>..<HASH> 100644
--- a/src/Elements/CssStore.js
+++ b/src/Elements/CssStore.js
@@ -1,4 +1,4 @@
-import {each, has} from '../lib/util';
+import {each} from '../lib/util';
function formatStyle(style)
{
@@ -46,8 +46,14 @@ export default class CssStore
each(document.styleSheets, (styleSheet) =>
{
- // Started with version 64, Chrome does not allow cross origin script to access this property.
- if (!has(styleSheet, 'cssRules')) return;
+ try
+ {
+ // Started with version 64, Chrome does not allow cross origin script to access this property.
+ if (!styleSheet.cssRules) return;
+ } catch (e)
+ {
+ return;
+ }
each(styleSheet.cssRules, (cssRule) =>
{ | Dev: Revert to use try catch #<I> | liriliri_eruda | train | js |
6fb36ba6ca57dc7820ab9ce755632346f1ba760f | diff --git a/Library/Installation/Updater/Updater020500.php b/Library/Installation/Updater/Updater020500.php
index <HASH>..<HASH> 100644
--- a/Library/Installation/Updater/Updater020500.php
+++ b/Library/Installation/Updater/Updater020500.php
@@ -12,6 +12,7 @@
namespace Claroline\CoreBundle\Library\Installation\Updater;
use Claroline\CoreBundle\Entity\Tool\Tool;
+use Claroline\CoreBundle\Entity\Widget\Widget;
class Updater020500
{
@@ -57,6 +58,7 @@ class Updater020500
$em->flush();
}
+ $this->log('Adding agenda widget...');
$widget = new Widget();
$widget->setName('agenda');
$widget->setConfigurable(false); | [CoreBundle] added widget for the desktop | claroline_Distribution | train | php |
940f1acb9897eb73ad6d57959c8de895afecbba9 | diff --git a/anyconfig/mergeabledict.py b/anyconfig/mergeabledict.py
index <HASH>..<HASH> 100644
--- a/anyconfig/mergeabledict.py
+++ b/anyconfig/mergeabledict.py
@@ -8,6 +8,9 @@
.. versionadded: 0.3.1
Added naive and partial implementation of JSON Pointer support
+.. versionchanged: 0.5.0
+ Convert collections.namedtuple objects to dicts recursively
+
.. note::
JSON Pointer: http://tools.ietf.org/html/rfc6901
""" | fix: add a note about collections.namedtuple conversion | ssato_python-anyconfig | train | py |
4bc36b751e0fe4b6a82d87a99bd73ac9ede71871 | diff --git a/msm/skill_repo.py b/msm/skill_repo.py
index <HASH>..<HASH> 100644
--- a/msm/skill_repo.py
+++ b/msm/skill_repo.py
@@ -74,7 +74,7 @@ class SkillRepo(object):
def __init__(self, path=None, url=None, branch=None):
self.path = path or "/opt/mycroft/.skills-repo"
self.url = url or "https://github.com/MycroftAI/mycroft-skills"
- self.branch = branch or "19.08"
+ self.branch = branch or "20.02"
self.repo_info = {}
@cached_property(ttl=FIVE_MINUTES) | Update default branch to <I> | MycroftAI_mycroft-skills-manager | train | py |
2ff19db0274c39ec675ce5466a9151546ecd0ad3 | diff --git a/addon/components/mobile/object-list-view-row.js b/addon/components/mobile/object-list-view-row.js
index <HASH>..<HASH> 100644
--- a/addon/components/mobile/object-list-view-row.js
+++ b/addon/components/mobile/object-list-view-row.js
@@ -13,10 +13,21 @@ import ObjectListViewRowComponent from '../object-list-view-row';
*/
export default ObjectListViewRowComponent.extend({
/**
+ Stores the number of pixels to isolate one level of hierarchy.
+
+ @property _hierarchicalIndent
+ @type Number
+ @default 10
+ @private
*/
_hierarchicalIndent: 10,
/**
+ Number of pixels to isolate the current level of the hierarchy.
+
+ @property hierarchicalIndent
+ @type Number
+ @default 10
*/
hierarchicalIndent: Ember.computed({
get() { | Docs object-list-view-row mobile | Flexberry_ember-flexberry | train | js |
96e18d8710083e737f637f88b85801950ab3648e | diff --git a/lib/extract.js b/lib/extract.js
index <HASH>..<HASH> 100644
--- a/lib/extract.js
+++ b/lib/extract.js
@@ -203,19 +203,19 @@ module.exports = function (grunt) {
return key;
}
- function MissingContextException(item, collidingPoItem) {
- this.item = item;
+ function MissingContextException(poItem, collidingPoItem) {
+ this.poItem = poItem;
this.collidingPoItem = collidingPoItem;
this.toString = function () {
return [
'Usage of singular and plural form of "',
- item.msgid,
+ poItem.msgid,
'" in the same context: ',
(collidingPoItem.msgctxt || '[no context]'),
- '\n\nReferences: ',
- item.fileName, ':', item.line,
+ '\n\nReferences:\n',
+ poItem.references.join(', '),
'\n',
- collidingPoItem.references,
+ collidingPoItem.references.join(', '),
'\n\nChange the context of at least one of the strings (pgettext) or use ngettext/npgettext in both cases.'
].join('');
}; | cleanup output of exception
the item indeed is a po item and has references. The old behaviour was
tailored to an older implementation of the fix. | Open-Xchange-Frontend_grunt-require-gettext | train | js |
95ac461a290b80cb355ffab5d702cd3ddf3b77b0 | diff --git a/staging/src/k8s.io/legacy-cloud-providers/azure/azure_standard.go b/staging/src/k8s.io/legacy-cloud-providers/azure/azure_standard.go
index <HASH>..<HASH> 100644
--- a/staging/src/k8s.io/legacy-cloud-providers/azure/azure_standard.go
+++ b/staging/src/k8s.io/legacy-cloud-providers/azure/azure_standard.go
@@ -381,7 +381,7 @@ func (az *Cloud) serviceOwnsFrontendIP(fip network.FrontendIPConfiguration, serv
return false, isPrimaryService, nil
}
- return false, isPrimaryService, fmt.Errorf("serviceOwnsFrontendIP: wrong parameters")
+ return false, isPrimaryService, nil
}
// for internal secondary service the private IP address on the frontend IP config should be checked | serviceOwnsFrontendIP shouldn't report error when the public IP doesn't match | kubernetes_kubernetes | train | go |
78801ef32cdc8328cdfc70bc99b041868259f650 | diff --git a/types/model.go b/types/model.go
index <HASH>..<HASH> 100644
--- a/types/model.go
+++ b/types/model.go
@@ -1,5 +1,7 @@
package types
+import "time"
+
// FunctionDeployment represents a request to create or update a Function.
type FunctionDeployment struct {
@@ -100,7 +102,9 @@ type FunctionStatus struct {
// mount-point.
ReadOnlyRootFilesystem bool `json:"readOnlyRootFilesystem,omitempty"`
- // ** Status fields *8
+ // ================
+ // Fields for status
+ // ================
// InvocationCount count of invocations
InvocationCount float64 `json:"invocationCount,omitempty"`
@@ -111,4 +115,8 @@ type FunctionStatus struct {
// AvailableReplicas is the count of replicas ready to receive
// invocations as reported by the faas-provider
AvailableReplicas uint64 `json:"availableReplicas,omitempty"`
+
+ // CreatedAt is the time read back from the faas backend's
+ // data store for when the function or its container was created.
+ CreatedAt time.Time `json:"created_at,omitempty"`
} | Add CreatedAt to schema
Enables contributors to proceed with changes for #<I> in
the various back-ends | openfaas_faas-provider | train | go |
fa4aca1b628eba16a2dff5d342cf43e2baba4216 | diff --git a/lib/simple_notifications/base.rb b/lib/simple_notifications/base.rb
index <HASH>..<HASH> 100644
--- a/lib/simple_notifications/base.rb
+++ b/lib/simple_notifications/base.rb
@@ -93,7 +93,7 @@ module SimpleNotifications
raise 'SimpleNotification::SenderReceiverError' unless @@options[:sender] && @@options[:receivers]
@message = options[:message] if options[:message]
notification = notifications.build(entity: self, sender: get_obj(options[:sender]), message: default_message(self, get_obj(options[:sender]), 'created'))
- get_obj(options[:receivers]).each {|receiver| notification.deliveries.build(receiver: receiver)}
+ [get_obj(options[:receivers])].flatten.each {|receiver| notification.deliveries.build(receiver: receiver)}
notification.save
end
end
diff --git a/lib/simple_notifications/version.rb b/lib/simple_notifications/version.rb
index <HASH>..<HASH> 100644
--- a/lib/simple_notifications/version.rb
+++ b/lib/simple_notifications/version.rb
@@ -1,3 +1,3 @@
module SimpleNotifications
- VERSION = "1.1.6"
+ VERSION = "1.1.7"
end | Issue Fixed: receiver association can be singular | aashishgarg_simple_notifications | train | rb,rb |
efd8c35841034625ff5365efc1abc9d34daecadb | diff --git a/salt/modules/iptables.py b/salt/modules/iptables.py
index <HASH>..<HASH> 100644
--- a/salt/modules/iptables.py
+++ b/salt/modules/iptables.py
@@ -818,6 +818,8 @@ def insert(table='filter', chain=None, position=None, rule=None, family='ipv4'):
position = 1
wait = '--wait' if _has_option('--wait', family) else ''
+ if check(table, chain, rule, family):
+ return True
cmd = '{0} {1} -t {2} -I {3} {4} {5}'.format(
_iptables_cmd(family), wait, table, chain, position, rule)
out = __salt__['cmd.run'](cmd) | Add Check to the Rule
Check to see if the rule exists before insert | saltstack_salt | train | py |
d075961591b04bb041824ab22de04128f0eafdd7 | diff --git a/tests/settings.py b/tests/settings.py
index <HASH>..<HASH> 100644
--- a/tests/settings.py
+++ b/tests/settings.py
@@ -29,7 +29,4 @@ INSTALLED_APPS = [
SITE_ID = 1
-if django.VERSION >= (1, 10):
- MIDDLEWARE = ()
-else:
- MIDDLEWARE_CLASSES = ()
+MIDDLEWARE = () | Remove compat with Django <<I> | dbrgn_drf-dynamic-fields | train | py |
44fff6e26bf8ab185b632bdaedcd4b7789a7c791 | diff --git a/lib/ruby2d/window.rb b/lib/ruby2d/window.rb
index <HASH>..<HASH> 100644
--- a/lib/ruby2d/window.rb
+++ b/lib/ruby2d/window.rb
@@ -38,16 +38,10 @@ module Ruby2D
end
def set(opts)
- valid_keys = [:title, :width, :height]
- valid_opts = opts.reject { |k| !valid_keys.include?(k) }
- if !valid_opts.empty?
- @title = valid_opts[:title]
- @width = valid_opts[:width]
- @height = valid_opts[:height]
- return true
- else
- return false
- end
+ # Store new window attributes, or ignore if nil
+ @title = opts[:title] || @title
+ @width = opts[:width] || @width
+ @height = opts[:height] || @height
end
def add(o) | Fix failing DSL test
`set` must be able set a single window attribute without making others
nil | ruby2d_ruby2d | train | rb |
8c2ed05df5be6b48f5b93cc81410a3ed7fa6dd86 | diff --git a/lib/read_concern.js b/lib/read_concern.js
index <HASH>..<HASH> 100644
--- a/lib/read_concern.js
+++ b/lib/read_concern.js
@@ -29,10 +29,16 @@ class ReadConcern {
}
if (options.readConcern) {
+ if (options.readConcern instanceof ReadConcern) {
+ return options.readConcern;
+ }
+
return new ReadConcern(options.readConcern.level);
}
- return new ReadConcern(options.level);
+ if (options.level) {
+ return new ReadConcern(options.level);
+ }
}
static get MAJORITY() { | refactor: improve robustness of `ReadConcern.fromOptions`
We made some assumptions in this method about the existence of
keys, as well as that no `readConcern` passed in would ever already
be of the type we were looking to cast to. | mongodb_node-mongodb-native | train | js |
6880072dd0d3b7ff3d7427f7ed6677f848cee03b | diff --git a/Python/ibmcloudsql/SQLQuery.py b/Python/ibmcloudsql/SQLQuery.py
index <HASH>..<HASH> 100644
--- a/Python/ibmcloudsql/SQLQuery.py
+++ b/Python/ibmcloudsql/SQLQuery.py
@@ -857,7 +857,7 @@ class SQLQuery(COSClient, SQLBuilder, HiveMetastore):
)
)
- if "resultset_location" not in job_details or not job_details["resultset_format"]:
+ if "resultset_location" not in job_details or "resultset_format" not in job_details:
return None
url_parsed = self.analyze_cos_url(job_details["resultset_location"])
diff --git a/Python/ibmcloudsql/__init__.py b/Python/ibmcloudsql/__init__.py
index <HASH>..<HASH> 100644
--- a/Python/ibmcloudsql/__init__.py
+++ b/Python/ibmcloudsql/__init__.py
@@ -14,7 +14,7 @@
# limitations under the License.
# ------------------------------------------------------------------------------
-__version__ = "0.5.3"
+__version__ = "0.5.4"
# flake8: noqa F401
from .SQLQuery import SQLQuery
from .sql_query_ts import SQLClientTimeSeries | Fixing the previous improvement :-) | IBM-Cloud_sql-query-clients | train | py,py |
c66ce6655a990f5d094b345b24d769ad478059c3 | diff --git a/lib/magic_grid/html_grid.rb b/lib/magic_grid/html_grid.rb
index <HASH>..<HASH> 100644
--- a/lib/magic_grid/html_grid.rb
+++ b/lib/magic_grid/html_grid.rb
@@ -149,7 +149,7 @@ module MagicGrid
my_params = grid.base_params.merge(grid.param_key(:col) => id)
default_sort_order = Order.from_param(grid.default_order)
params = HashWithIndifferentAccess.new(my_params)
- if id.to_s == grid.current_sort_col.to_s and Order.from_param(params[grid.param_key(:order)]) != default_sort_order
+ if id.to_s == grid.current_sort_col.to_s
params[grid.param_key(:order)] = grid.current_order.reverse.to_param
else
params.delete(grid.param_key(:order)) | Remove conditional that is redundant with polymorphism | rmg_magic_grid | train | rb |
37e96ba47160427800c27ab0b42feb98485a3217 | diff --git a/src/AccordionItem/accordion-item.js b/src/AccordionItem/accordion-item.js
index <HASH>..<HASH> 100644
--- a/src/AccordionItem/accordion-item.js
+++ b/src/AccordionItem/accordion-item.js
@@ -15,7 +15,7 @@ type AccordionItemProps = ElementProps<'div'> & {
};
class AccordionItem extends Component<AccordionItemProps, *> {
- async componentDidMount() {
+ componentDidMount() {
const { uuid, accordionStore, disabled } = this.props;
accordionStore.addItem({ | Remove async/await from accordion-item | springload_react-accessible-accordion | train | js |
b9805efd99df7e21c8a9eba5fe5b1d19e6be843a | diff --git a/openstack_dashboard/karma.conf.js b/openstack_dashboard/karma.conf.js
index <HASH>..<HASH> 100644
--- a/openstack_dashboard/karma.conf.js
+++ b/openstack_dashboard/karma.conf.js
@@ -46,7 +46,8 @@ module.exports = function (config) {
// NOTE: the templates must also be listed in the files section below.
'./**/*.html': ['ng-html2js'],
// Used to indicate files requiring coverage reports.
- './**/!(*.spec).js': ['coverage']
+ '../static/**/!(*.spec).js': ['coverage'],
+ '../dashboards/**/static/**/!(*.spec).js': ['coverage']
},
// Sets up module to process templates.
@@ -160,10 +161,10 @@ module.exports = function (config) {
// Coverage threshold values.
thresholdReporter: {
- statements: 93, // target 100
- branches: 93, // target 100
- functions: 93, // target 100
- lines: 93 // target 100
+ statements: 89, // target 100
+ branches: 82, // target 100
+ functions: 88, // target 100
+ lines: 89 // target 100
}
});
}; | Include JS from openstack_dashboard/**/static for code coverage
Code coverage metrics in Horizon for openstack_dashboard do not include
metrics from JS files in openstack_dashboard/**/static.
Change-Id: I<I>c<I>e<I>e<I>ed<I>dfdeb<I>c8e<I>
Closes-Bug: #<I> | openstack_horizon | train | js |
4f610521dd97e8f2eda41af544ccebea85a91a95 | diff --git a/src/Alaouy/Youtube/YoutubeServiceProvider.php b/src/Alaouy/Youtube/YoutubeServiceProvider.php
index <HASH>..<HASH> 100644
--- a/src/Alaouy/Youtube/YoutubeServiceProvider.php
+++ b/src/Alaouy/Youtube/YoutubeServiceProvider.php
@@ -49,7 +49,7 @@ class YoutubeServiceProvider extends ServiceProvider
$this->publishes(array(__DIR__ . '/../../config/youtube.php' => config_path('youtube.php')));
//Laravel 5.1+ fix
- if(Str::startsWith(Application::VERSION, "5.1.")){
+ if(floatval(Application::VERSION,10) >= 5.1){
$this->app->bind("youtube", null, true, function(){
return $this->app->make('Alaouy\Youtube\Youtube', [config('youtube.KEY')]);
}); | Changes to apply for <I> and higher
Must easier fix for upcoming versions. Sorry to bother you twice. | alaouy_Youtube | train | php |
b69d83bfd5237fe3a826418c1397101f98cdb96f | diff --git a/pysoundfile.py b/pysoundfile.py
index <HASH>..<HASH> 100644
--- a/pysoundfile.py
+++ b/pysoundfile.py
@@ -662,9 +662,6 @@ class SoundFile(object):
"Not allowed for existing files (except 'RAW'): "
"samplerate, channels, format, subtype, endian")
- if not closefd and not isinstance(file, int):
- raise ValueError("closefd=False only allowed for file descriptors")
-
if isinstance(file, str):
if _os.path.isfile(file):
if 'x' in modes:
diff --git a/tests/test_pysoundfile.py b/tests/test_pysoundfile.py
index <HASH>..<HASH> 100644
--- a/tests/test_pysoundfile.py
+++ b/tests/test_pysoundfile.py
@@ -356,9 +356,6 @@ def test_open_with_more_invalid_arguments():
with pytest.raises(ValueError) as excinfo:
sf.SoundFile(filename_new, 'w', 44100, 2, endian='BOTH', format='WAV')
assert "Invalid endian-ness" in str(excinfo.value)
- with pytest.raises(ValueError) as excinfo:
- sf.SoundFile(filename_stereo, closefd=False)
- assert "closefd=False" in str(excinfo.value)
def test_open_r_and_rplus_with_too_many_arguments(): | Don't throw exception if closefd is given for non-fd
The documentation already mentions that closefd is only applicable if a
file descriptor is used. | bastibe_SoundFile | train | py,py |
80626843135fe39d35734f15261c0b3828d67abe | diff --git a/lib/emir/recipes/aiv/mask.py b/lib/emir/recipes/aiv/mask.py
index <HASH>..<HASH> 100644
--- a/lib/emir/recipes/aiv/mask.py
+++ b/lib/emir/recipes/aiv/mask.py
@@ -110,16 +110,18 @@ def _centering_centroid_loop(data, center, box):
# returns y,x
def centering_centroid(data, center, box, nloop=10, toldist=1e-3, maxdist=10):
- icenter = center.copy()
+
+ # Store original center
+ ocenter = center.copy()
for i in range(nloop):
ncenter = _centering_centroid_loop(data, center, box)
#_logger.debug('new center is %s', ncenter)
# if we are to far away from the initial point, break
- dst = distance.euclidean(icenter, ncenter)
+ dst = distance.euclidean(ocenter, ncenter)
if dst > maxdist:
- return icenter, 'maximum distance (%i) from origin reached' % maxdist
+ return center, 'maximum distance (%i) from origin reached' % maxdist
# check convergence
dst = distance.euclidean(ncenter, center) | Return the center in the last iteration of recentering | guaix-ucm_pyemir | train | py |
8b6661ee469b774f5a30d4a850e56c9a050a02c0 | diff --git a/Generator/PHPFile.php b/Generator/PHPFile.php
index <HASH>..<HASH> 100644
--- a/Generator/PHPFile.php
+++ b/Generator/PHPFile.php
@@ -195,14 +195,14 @@ class PHPFile extends File {
$imports = [];
if ($imported_classes) {
- // Sort the imported classes.
- natcasesort($imported_classes);
-
foreach ($imported_classes as $fully_qualified_class_name) {
$fully_qualified_class_name = ltrim($fully_qualified_class_name, '\\');
$imports[] = "use $fully_qualified_class_name;";
}
+ // Sort the imported classes.
+ natcasesort($imports);
+
$imports[] = '';
} | Fixed sorting should happen after trimming. | drupal-code-builder_drupal-code-builder | train | php |
11a4d2ef3a6aabeae2787d5aac79172c38897ec7 | diff --git a/Schema/Grammars/MySqlGrammar.php b/Schema/Grammars/MySqlGrammar.php
index <HASH>..<HASH> 100755
--- a/Schema/Grammars/MySqlGrammar.php
+++ b/Schema/Grammars/MySqlGrammar.php
@@ -14,7 +14,7 @@ class MySqlGrammar extends Grammar
* @var array
*/
protected $modifiers = [
- 'VirtualAs', 'StoredAs', 'Unsigned', 'Charset', 'Collate', 'Nullable',
+ 'Unsigned', 'VirtualAs', 'StoredAs', 'Charset', 'Collate', 'Nullable',
'Default', 'Increment', 'Comment', 'After', 'First',
]; | [<I>] Make sure sql for virtual columns is added after the unsigned modifier (#<I>)
* re-order
* append unsigned modifier before virtual columns to prevent a sql syntax error | illuminate_database | train | php |
f08221ccfebd5daa887bd7e482faef1b2abf62ce | diff --git a/tests/test_yaml.py b/tests/test_yaml.py
index <HASH>..<HASH> 100644
--- a/tests/test_yaml.py
+++ b/tests/test_yaml.py
@@ -1,4 +1,5 @@
from __future__ import unicode_literals
+import sys
from config_loader.loader import ConfigLoader
from config_loader.config import ConfigBaseWildcardDict
@@ -70,7 +71,11 @@ line_b: !2
# --------
line_c: 8"""
- assert isinstance(config.as_text(), unicode)
+ # We want to make sure we're getting unicode back
+ if sys.version_info.major < 3:
+ assert isinstance(config.as_text(), unicode)
+ else:
+ assert isinstance(config.as_text(), str)
assert config.as_dict() == {'config': None,
'config_text': 'line_a: True\nline_b: !2\nline_c: 8', | do robust testing with python 2/3 compatibility | dropseed_configyaml | train | py |
2572ef91d590c8644c1a309bfb8c08ca3baf4c1a | diff --git a/demo/index.php b/demo/index.php
index <HASH>..<HASH> 100644
--- a/demo/index.php
+++ b/demo/index.php
@@ -33,6 +33,11 @@ if (!empty($_GET['input'])) {
$feed->input_encoding($_GET['input']);
}
+// Allow us to snap into IHBB mode.
+if (!empty($_GET['image'])) {
+ $feed->bypass_image_hotlink($_GET['image']);
+}
+
// Initialize the whole SimplePie object. Read the feed, process it, parse it, cache it, and
// all that other good stuff. The feed's information will not be available to SimplePie before
// this is called. | Allow us to snap the demo into "Image Hotlink Block Bypass" (IHBB) mode. | simplepie_simplepie | train | php |
3edf8a2c7061f2c92d5d201406d72904db2f25d1 | diff --git a/search.py b/search.py
index <HASH>..<HASH> 100644
--- a/search.py
+++ b/search.py
@@ -151,9 +151,9 @@ def graph_search(problem, frontier):
if problem.goal_test(node.state):
return node
explored.add(node.state)
- frontier.extend(successor for successor in node.expand(problem)
- if successor.state not in explored
- and successor.state not in frontier)
+ frontier.extend(child for child in node.expand(problem)
+ if child.state not in explored
+ and child.state not in frontier)
return None
def breadth_first_tree_search(problem):
@@ -189,8 +189,8 @@ def depth_limited_search(problem, limit=50):
return 'cutoff'
else:
cutoff_occurred = False
- for successor in node.expand(problem):
- result = recursive_dls(successor, problem, limit)
+ for child in node.expand(problem):
+ result = recursive_dls(child, problem, limit)
if result == 'cutoff':
cutoff_occurred = True
elif result is not None: | s/successor/child/ | hobson_aima | train | py |
0adefe783bc13f50fbcca7c28eb47d4361cc3fb9 | diff --git a/examples/fib/wasm/js/index.js b/examples/fib/wasm/js/index.js
index <HASH>..<HASH> 100644
--- a/examples/fib/wasm/js/index.js
+++ b/examples/fib/wasm/js/index.js
@@ -1,5 +1,5 @@
import { JsByteStorage} from 'prototty-wasm-storage-js';
-const wasm = import('../wasm_out/web');
+const wasm = import('../wasm_out/wasm');
wasm.then(async wasm => {
let storage = await JsByteStorage.make_async("fib"); | Fix crate name in fib wasm example | stevebob_prototty | train | js |
2740a7fb8bbf235310fd820582b10d7d29363cf6 | diff --git a/php/WP_CLI/REPL.php b/php/WP_CLI/REPL.php
index <HASH>..<HASH> 100644
--- a/php/WP_CLI/REPL.php
+++ b/php/WP_CLI/REPL.php
@@ -21,7 +21,13 @@ class REPL {
$line = rtrim( $line, ';' ) . ';';
if ( self::starts_with( self::non_expressions(), $line ) ) {
+ ob_start();
eval( $line );
+ $out = ob_get_clean();
+ if ( 0 < strlen ( $out ) ) {
+ $out = rtrim( $out, "\n" ) . "\n";
+ }
+ fwrite( STDOUT, $out );
} else {
if ( !self::starts_with( 'return', $line ) )
$line = 'return ' . $line;
@@ -29,7 +35,8 @@ class REPL {
// Write directly to STDOUT, to sidestep any output buffers created by plugins
ob_start();
var_dump( eval( $line ) );
- fwrite( STDOUT, ob_get_clean() );
+ $out = rtrim( ob_get_clean(), "\n" ) . "\n";
+ fwrite( STDOUT, $out );
}
}
} | always include a newline after non-null eval output | wp-cli_shell-command | train | php |
921fe194bf5df616a747e78c2b6ee3e12481e706 | diff --git a/app/models/concerns/searchable_post.rb b/app/models/concerns/searchable_post.rb
index <HASH>..<HASH> 100644
--- a/app/models/concerns/searchable_post.rb
+++ b/app/models/concerns/searchable_post.rb
@@ -19,7 +19,7 @@ module SearchablePost
indexes :categories, :analyzer => :keyword, :as => 'categories.collect{ |c| c.name }'
indexes :job_phase, :analyzer => :keyword
indexes :type, :analyzer => :keyword, :as => 'type'
- indexes :industries, :analyzer => :keyword, :as => 'industry.title'
+ indexes :industries, :analyzer => :keyword, :as => 'industry.soc'
end
def related(published = nil) | Use SOC code for Post/Industry mapping | cortex-cms_cortex | train | rb |
36a6178ab5ffab94c0a1718ad4bcd47abf536808 | diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -3,7 +3,14 @@ import os
import shutil
from setuptools import setup, find_packages
-from txtorcon import __version__, __author__, __contact__, __copyright__, __license__, __url__
+## can't just naively import these from txtorcon, as that will only
+## work if you already installed the dependencies
+__version__ = '0.5'
+__author__ = 'meejah'
+__contact__ = 'meejah@meejah.ca'
+__url__ = 'https://github.com/meejah/txtorcon'
+__license__ = 'MIT'
+__copyright__ = 'Copyright 2012'
setup(name = 'txtorcon',
version = __version__,
diff --git a/txtorcon/__init__.py b/txtorcon/__init__.py
index <HASH>..<HASH> 100644
--- a/txtorcon/__init__.py
+++ b/txtorcon/__init__.py
@@ -1,4 +1,6 @@
+## for now, this needs to be changed in setup.py also until I find a
+## better solution
__version__ = '0.5'
__author__ = 'meejah'
__contact__ = 'meejah@meejah.ca' | change where setup.py gets information from | meejah_txtorcon | train | py,py |
55446d29be48a92710d34d41632d6279f7a9dbe6 | diff --git a/master/buildbot/process/buildrequest.py b/master/buildbot/process/buildrequest.py
index <HASH>..<HASH> 100644
--- a/master/buildbot/process/buildrequest.py
+++ b/master/buildbot/process/buildrequest.py
@@ -118,6 +118,18 @@ class TempSourceStamp(object):
return result
+class TempChange(object):
+ # temporary fake change; attributes are added below
+
+ def __init__(self, d):
+ for k, v in d.items():
+ setattr(self, k, v)
+ self.properties = properties.Properties()
+ for k, v in d['properties'].items():
+ self.properties.setProperty(k, v[0], v[1])
+ self.who = d['author']
+
+
class BuildRequest(object):
"""
@@ -218,6 +230,9 @@ class BuildRequest(object):
ss.patch = None
ss.patch_info = (None, None)
ss.changes = []
+ change = yield master.db.changes.getChangeFromSSid(ss.ssid)
+ if change:
+ ss.changes.append(TempChange(change))
# XXX: sourcestamps don't have changes anymore; this affects merging!!
defer.returnValue(buildrequest) | populate the sourcestamp change list
Some build step are needing change properties to be mixed in build properties | buildbot_buildbot | train | py |
0ea9d94c152379eac4a2f8bc31ce2ff466fb8838 | diff --git a/tests/integration/array/reverse-test.js b/tests/integration/array/reverse-test.js
index <HASH>..<HASH> 100644
--- a/tests/integration/array/reverse-test.js
+++ b/tests/integration/array/reverse-test.js
@@ -31,6 +31,17 @@ test('it calls reverse on array', function(assert) {
});
});
+test('it doesn\'t mutate original array', function(assert) {
+ compute({
+ computed: reverse('array'),
+ properties: {
+ array
+ }
+ });
+
+ assert.deepEqual(array, [1, 2]);
+});
+
test('it responds to length changes', function(assert) {
let { subject } = compute({
computed: reverse('array'), | verify reverse doesn't mutate array | kellyselden_ember-awesome-macros | train | js |
e2df00d1d92acda2ba416367a831f8e2dc0d090e | diff --git a/source/git-promise.js b/source/git-promise.js
index <HASH>..<HASH> 100644
--- a/source/git-promise.js
+++ b/source/git-promise.js
@@ -271,7 +271,7 @@ git.binaryFileContent = (repoPath, filename, version, outPipe) => {
}
git.diffFile = (repoPath, filename, sha1, ignoreWhiteSpace) => {
- const newFileDiffArgs = ['diff', '--no-index', isWindows ? 'NUL' : '/dev/null', '--', filename.trim()];
+ const newFileDiffArgs = ['diff', '--no-index', isWindows ? 'NUL' : '/dev/null', filename.trim()];
return git.revParse(repoPath)
.then((revParse) => { return revParse.type === 'bare' ? { files: {} } : git.status(repoPath) }) // if bare do not call status
.then((status) => { | git doesn't like "--" for new file diffs... | FredrikNoren_ungit | train | js |
e9a3ec52b6bbbcaf47709117c39d27196a16eb0c | diff --git a/lib/clenver/repository.rb b/lib/clenver/repository.rb
index <HASH>..<HASH> 100644
--- a/lib/clenver/repository.rb
+++ b/lib/clenver/repository.rb
@@ -5,15 +5,19 @@ class Repository
@repo_uri = repo
@dst = dst
@abs_path = nil
+ @repo = nil
end
def clone
repo_basename = File.basename("#{@repo_uri}",".git")
- Git.clone(@repo_uri, repo_basename)
+ @repo = Git.clone(@repo_uri, repo_basename)
@abs_path = Dir::pwd + "/" + repo_basename
end
def get_abs_path
@abs_path
end
+ def add_remote(name, uri)
+ @repo.add_remote(name, uri)
+ end
end | add_remote support in Repository class | pietrushnic_clenver | train | rb |
c4d1f071080c10875293b65f67ea04c7779906f5 | diff --git a/python/dllib/src/bigdl/dllib/keras/engine/topology.py b/python/dllib/src/bigdl/dllib/keras/engine/topology.py
index <HASH>..<HASH> 100644
--- a/python/dllib/src/bigdl/dllib/keras/engine/topology.py
+++ b/python/dllib/src/bigdl/dllib/keras/engine/topology.py
@@ -190,7 +190,7 @@ class KerasNet(ZooKerasLayer):
distributed: Boolean. Whether to do prediction in distributed mode or local mode.
Default is True. In local mode, x must be a Numpy array.
"""
- if is_distributed:
+ if distributed:
if isinstance(x, np.ndarray):
features = to_sample_rdd(x, np.zeros([x.shape[0]]))
elif isinstance(x, RDD): | fix predict (#<I>) | intel-analytics_BigDL | train | py |
9f846ad2d737e4e64dfe884498c364c104f8e9d0 | diff --git a/niftypet/nimpa/prc/prc.py b/niftypet/nimpa/prc/prc.py
index <HASH>..<HASH> 100644
--- a/niftypet/nimpa/prc/prc.py
+++ b/niftypet/nimpa/prc/prc.py
@@ -448,7 +448,7 @@ def pvc_iyang(
#if affine transf. (faff) is given then take the T1 and resample it too.
if isinstance(faff, basestring) and not os.path.isfile(faff):
# faff is not given; get it by running the affine; get T1w to PET space
- faff = reg_mr2pet(fpet, mridct, Cnt, outpath=outpath, fcomment=fcomment)
+ faff, _ = reg_mr2pet(fpet, mridct, Cnt, outpath=outpath, fcomment=fcomment)
# establish the output folder
if outpath=='':
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -182,7 +182,7 @@ elif platform.system() == 'Windows' :
setup(
name='nimpa',
license = 'Apache 2.0',
- version='1.0.14',
+ version='1.0.15',
description='CUDA-accelerated Python utilities for high-throughput PET/MR image processing and analysis.',
long_description=long_description,
author='Pawel J. Markiewicz', | fixed bug with new registration in prc.py in nimpa | pjmark_NIMPA | train | py,py |
da586f947c2d28c92bdc23fc4cd83cc27e15ff8f | diff --git a/bigquery.js b/bigquery.js
index <HASH>..<HASH> 100644
--- a/bigquery.js
+++ b/bigquery.js
@@ -21,6 +21,7 @@
var assert = require('assert');
var async = require('async');
var Dataset = require('../lib/bigquery/dataset');
+var Table = require('../lib/bigquery/table');
var env = require('./env');
var fs = require('fs');
var Job = require('../lib/bigquery/job');
@@ -272,6 +273,28 @@ describe('BigQuery', function() {
});
});
});
+
+ it('should get tables', function(done) {
+ dataset.getTables(function(err, tables) {
+ assert.ifError(err);
+ assert(tables[0] instanceof Table);
+ done();
+ });
+ });
+
+ it('should get tables as a stream', function(done) {
+ var tableEmitted = false;
+
+ dataset.getTables()
+ .on('error', done)
+ .on('data', function(table) {
+ tableEmitted = table instanceof Table;
+ })
+ .on('end', function() {
+ assert.strictEqual(tableEmitted, true);
+ done();
+ });
+ });
});
describe('BigQuery/Table', function() { | bigquery: implemented streamrouter in dataset
updated unit and system tests
added a bit more documentation | googleapis_nodejs-bigquery | train | js |
6f40e31766c2fe378d31369d7b2b4c9535f5cb24 | diff --git a/examples/pytorch/language-modeling/run_clm_no_trainer.py b/examples/pytorch/language-modeling/run_clm_no_trainer.py
index <HASH>..<HASH> 100755
--- a/examples/pytorch/language-modeling/run_clm_no_trainer.py
+++ b/examples/pytorch/language-modeling/run_clm_no_trainer.py
@@ -14,7 +14,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
"""
-Fine-tuning the library models for causal language modeling (BERT, ALBERT, RoBERTa...)
+Fine-tuning the library models for causal language modeling (GPT, GPT-2, CTRL, ...)
on a text file or a dataset without using HuggingFace Trainer.
Here is the full list of checkpoints on the hub that can be fine-tuned by this script: | Fix comment in run_clm_no_trainer.py (#<I>) | huggingface_pytorch-pretrained-BERT | train | py |
d21bd9249b50fead8843d6b326b189ea898874b9 | diff --git a/azure-eventhubs/src/main/java/com/microsoft/azure/servicebus/MessageSender.java b/azure-eventhubs/src/main/java/com/microsoft/azure/servicebus/MessageSender.java
index <HASH>..<HASH> 100644
--- a/azure-eventhubs/src/main/java/com/microsoft/azure/servicebus/MessageSender.java
+++ b/azure-eventhubs/src/main/java/com/microsoft/azure/servicebus/MessageSender.java
@@ -303,6 +303,7 @@ public class MessageSender extends ClientEntity implements IAmqpSender, IErrorCo
@Override
public void onClose(ErrorCondition condition)
{
+ // TODO: code-refactor pending - refer to issue: https://github.com/Azure/azure-event-hubs/issues/73
Exception completionException = condition != null ? ExceptionUtil.toException(condition)
: new ServiceBusException(ClientConstants.DEFAULT_IS_TRANSIENT,
"The entity has been close due to transient failures (underlying link closed), please retry the operation."); | add code reference to issue#<I> | Azure_azure-sdk-for-java | train | java |
fb1e86ca5d3b0c2f961ddf09523473e744d91325 | diff --git a/lib/rest-ftp-daemon/remote.rb b/lib/rest-ftp-daemon/remote.rb
index <HASH>..<HASH> 100644
--- a/lib/rest-ftp-daemon/remote.rb
+++ b/lib/rest-ftp-daemon/remote.rb
@@ -14,7 +14,7 @@ module RestFtpDaemon
@url.user ||= "anonymous"
# Annnounce object
- log_info "Remote.initialize [#{url.to_s}]"
+ log_info "Remote.initialize [#{url}]"
end
def connect
diff --git a/lib/rest-ftp-daemon/remote_ftp.rb b/lib/rest-ftp-daemon/remote_ftp.rb
index <HASH>..<HASH> 100644
--- a/lib/rest-ftp-daemon/remote_ftp.rb
+++ b/lib/rest-ftp-daemon/remote_ftp.rb
@@ -135,7 +135,7 @@ module RestFtpDaemon
@ftp = DoubleBagFTPS.new
@ftp.ssl_context = DoubleBagFTPS.create_ssl_context(verify_mode: OpenSSL::SSL::VERIFY_NONE)
@ftp.ftps_mode = DoubleBagFTPS::EXPLICIT
- end
+ end
end | Rubocop: alignments, redundant use of Object#to_s in interpolation | bmedici_rest-ftp-daemon | train | rb,rb |
d7c827ed0ce9d171ef5dbd555ac6e7c5c1dc4196 | diff --git a/tools/c7n_azure/c7n_azure/filters.py b/tools/c7n_azure/c7n_azure/filters.py
index <HASH>..<HASH> 100644
--- a/tools/c7n_azure/c7n_azure/filters.py
+++ b/tools/c7n_azure/c7n_azure/filters.py
@@ -113,8 +113,8 @@ class MetricFilter(Filter):
'average': Math.mean,
'total': Math.sum,
'count': Math.sum,
- 'minimum': Math.max,
- 'maximum': Math.min
+ 'minimum': Math.min,
+ 'maximum': Math.max
}
schema = { | azure - metrics filter - fix aggregation funcs (#<I>) | cloud-custodian_cloud-custodian | train | py |
cc82286aa115f6fb4034f9c664bc7d91d05bf32c | diff --git a/v2client/src/test/java/com/yubico/client/v2/YubicoClientTest.java b/v2client/src/test/java/com/yubico/client/v2/YubicoClientTest.java
index <HASH>..<HASH> 100644
--- a/v2client/src/test/java/com/yubico/client/v2/YubicoClientTest.java
+++ b/v2client/src/test/java/com/yubico/client/v2/YubicoClientTest.java
@@ -88,6 +88,7 @@ public class YubicoClientTest {
assertTrue(response.getStatus() == YubicoResponseStatus.REPLAYED_OTP);
}
+ @Test
public void testBadSignature() throws YubicoValidationException, YubicoValidationFailure {
String otp = "cccccccfhcbelrhifnjrrddcgrburluurftrgfdrdifj";
client.setKey("bAX9u78e8BRHXPGDVV3lQUm4yVw="); | Added missing @Test to test method | Yubico_yubico-java-client | train | java |
70647448ce4e82775132e8acfc8ee70be85cb7ee | diff --git a/src/com/google/javascript/jscomp/NodeUtil.java b/src/com/google/javascript/jscomp/NodeUtil.java
index <HASH>..<HASH> 100644
--- a/src/com/google/javascript/jscomp/NodeUtil.java
+++ b/src/com/google/javascript/jscomp/NodeUtil.java
@@ -2546,7 +2546,7 @@ public final class NodeUtil {
static final Predicate<Node> createsScope = new Predicate<Node>() {
@Override
public boolean apply(Node n) {
- return createsBlockScope(n) || n.isFunction()
+ return createsBlockScope(n) || n.isFunction() || n.isModuleBody()
// The ROOT nodes that are the root of the externs tree or main JS tree do not
// create scopes. The parent of those two, which is the root of the entire AST and
// therefore has no parent, is the only ROOT node that creates a scope. | Small correction: Module bodies are also scope roots.
-------------
Created by MOE: <URL> | google_closure-compiler | train | java |
0fc3d41d2dc5167adbb470b0fd073f494521e642 | diff --git a/pyphi/connectivity.py b/pyphi/connectivity.py
index <HASH>..<HASH> 100644
--- a/pyphi/connectivity.py
+++ b/pyphi/connectivity.py
@@ -78,7 +78,7 @@ def block_cm(cm):
For example, the following connectivity matrix represents connections from
``nodes1 = A, B, C`` to ``nodes2 = D, E, F, G`` (without loss of
- generality—note that ``nodes1`` and ``nodes2`` may share elements)::
+ generality, note that ``nodes1`` and ``nodes2`` may share elements)::
D E F G
A [1, 1, 0, 0]
@@ -143,8 +143,9 @@ def block_reducible(cm, nodes1, nodes2):
nodes1 (tuple[int]): Source nodes
nodes2 (tuple[int]): Sink nodes
"""
+ # Trivial case
if not nodes1 or not nodes2:
- return True # trivially
+ return True
cm = cm[np.ix_(nodes1, nodes2)] | Fix code style in `connectivity` | wmayner_pyphi | train | py |
9e733c26a5a4c4d04c4c8ed3359eb812fa37b6f6 | diff --git a/SafeMarkup.php b/SafeMarkup.php
index <HASH>..<HASH> 100644
--- a/SafeMarkup.php
+++ b/SafeMarkup.php
@@ -169,6 +169,13 @@ class SafeMarkup {
*
* @ingroup sanitization
*
+ * @deprecated Will be removed before Drupal 8.0.0. Rely on Twig's
+ * auto-escaping feature, or use the @link theme_render #plain_text @endlink
+ * key when constructing a render array that contains plain text in order to
+ * use the renderer's auto-escaping feature. If neither of these are
+ * possible, \Drupal\Component\Utility\Html::escape() can be used in places
+ * where explicit escaping is needed.
+ *
* @see drupal_validate_utf8()
*/
public static function checkPlain($text) { | Issue #<I> by stefan.r, cilefen, alexpott, plach: Deprecate SafeMarkup::checkPlain() for Drupal <I>.x | drupal_core-utility | train | php |
cf47927d3a9760ca878c361f87ea1cf1aa1b4cca | diff --git a/littletable.py b/littletable.py
index <HASH>..<HASH> 100644
--- a/littletable.py
+++ b/littletable.py
@@ -777,10 +777,7 @@ class Table(object):
val = fn(rec)
except Exception:
val = default
- if isinstance(rec, DataObject):
- object.__setattr__(rec, attrname, val)
- else:
- setattr(rec, attrname, val)
+ object.__setattr__(rec, attrname, val)
def groupby(self, keyexpr, **outexprs):
"""simple prototype of group by, with support for expressions in the group-by clause
@@ -1071,7 +1068,7 @@ if __name__ == "__main__":
dict(state="TX"),
dict(city="New York"),
dict(city="Phoenix", _orderby="stn"),
- dict(city="Phoenix", _orderbydesc="stn"),
+ dict(city="Phoenix", _orderby="stn desc"),
]:
print queryargs,
result = stations.query(**queryargs) | Fix compute to handle namedtuple records correctly.
Fixed demo code to use _orderby instead of deprecated _orderbydesc | ptmcg_littletable | train | py |
ba3b1d8524d41772087ff1ca924101c4c1f5e638 | diff --git a/src/edu/jhu/hltcoe/data/DepTree.java b/src/edu/jhu/hltcoe/data/DepTree.java
index <HASH>..<HASH> 100644
--- a/src/edu/jhu/hltcoe/data/DepTree.java
+++ b/src/edu/jhu/hltcoe/data/DepTree.java
@@ -121,13 +121,10 @@ public class DepTree implements Iterable<DepTreeNode> {
private void addParentChildLinksToNodes() {
checkTree();
for (int i=0; i<parents.length; i++) {
- NonprojDepTreeNode node = (NonprojDepTreeNode)getNodeByPosition(i);
- node.setParent((NonprojDepTreeNode)getNodeByPosition(parents[i]));
- for (int j=0; j<parents.length; j++) {
- if (parents[j] == i) {
- node.addChild((NonprojDepTreeNode)getNodeByPosition(j));
- }
- }
+ NonprojDepTreeNode child = (NonprojDepTreeNode)getNodeByPosition(i);
+ NonprojDepTreeNode parent = (NonprojDepTreeNode)getNodeByPosition(parents[i]);
+ child.setParent(parent);
+ parent.addChild(child);
}
} | Bug fix: wasn't adding child to wall node
git-svn-id: svn+ssh://external.hltcoe.jhu.edu/home/hltcoe/mgormley/public/repos/dep_parse_filtered/trunk@<I> <I>f-cb4b-<I>-8b<I>-c<I>bcb<I> | mgormley_pacaya | train | java |
3474a31345597eaa5e1b6f22e263f33959d0a335 | diff --git a/tilequeue/process.py b/tilequeue/process.py
index <HASH>..<HASH> 100644
--- a/tilequeue/process.py
+++ b/tilequeue/process.py
@@ -72,9 +72,14 @@ def _postprocess_data(feature_layers, post_process_data):
for index, feature_layer in enumerate(feature_layers):
layer_datum = feature_layer['layer_datum']
layer_name = layer_datum['name']
- if layer_name == layer['name']:
+ if layer_name == layer['layer_datum']['name']:
feature_layers[index] = layer
+ layer = None
break
+ # if this layer isn't replacing an old layer, then
+ # append it.
+ if layer is not None:
+ feature_layers.append(layer)
return feature_layers | Fix bug where post-processing would only replace existing layers, not add new ones. | tilezen_tilequeue | train | py |
686286e7fa6051b0a63d4d8eaa13a4e5e341d1fd | diff --git a/test/visible.js b/test/visible.js
index <HASH>..<HASH> 100644
--- a/test/visible.js
+++ b/test/visible.js
@@ -2,10 +2,11 @@ describe('asking if a visible div scrolled', function() {
var scrolled = false;
var test = createTest();
- before(function() {
+ before(function(done) {
insertTest(test);
inViewport(test, function() {
scrolled = true;
+ done();
});
}); | make visible test async
While the callback is immediately called because element is immediately
visible, it must be/could be async | vvo_in-viewport | train | js |
19e0609e5e4dcc222b53a3730c4f9cdea65745b1 | diff --git a/src/main/java/com/twilio/converter/DateConverter.java b/src/main/java/com/twilio/converter/DateConverter.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/twilio/converter/DateConverter.java
+++ b/src/main/java/com/twilio/converter/DateConverter.java
@@ -1,5 +1,6 @@
package com.twilio.converter;
+import java.util.Locale;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.LocalDate;
@@ -16,7 +17,7 @@ public class DateConverter {
DateTimeFormat.forPattern(DATE_PATTERN).withZone(DateTimeZone.UTC);
private static final DateTimeFormatter RFC2822_DATE_TIME_FORMATTER =
- DateTimeFormat.forPattern(RFC2822_DATE_TIME).withZone(DateTimeZone.UTC);
+ DateTimeFormat.forPattern(RFC2822_DATE_TIME).withZone(DateTimeZone.UTC).withLocale(new Locale("en_US"));
private static final DateTimeFormatter ISO8601_DATE_TIME_FORMATTER =
DateTimeFormat.forPattern(ISO8601_DATE_TIME).withZone(DateTimeZone.UTC); | Fix exception parsing rfc<I> dates for some locales, always use us locale | twilio_twilio-java | train | java |
c65d4650a552c41ac24bbc24449fcaaf4052c996 | diff --git a/src/Filesystems/GcsFilesystem.php b/src/Filesystems/GcsFilesystem.php
index <HASH>..<HASH> 100644
--- a/src/Filesystems/GcsFilesystem.php
+++ b/src/Filesystems/GcsFilesystem.php
@@ -26,6 +26,7 @@ class GcsFilesystem implements Filesystem {
$storageClient = new StorageClient([
'projectId' => $config['project'],
+ 'keyFilePath' => isset($config['keyFilePath']) ? $config['keyFilePath'] : null,
]);
$bucket = $storageClient->bucket($config['bucket']); | Pass keyFilePath option to GCS client | backup-manager_backup-manager | train | php |
ed494bb3ff688b066c72ec9c0c4ff1841739af8a | diff --git a/nsq/__init__.py b/nsq/__init__.py
index <HASH>..<HASH> 100644
--- a/nsq/__init__.py
+++ b/nsq/__init__.py
@@ -17,4 +17,4 @@ except ImportError: # pragma: no cover
import json
# The current version
-__version__ = '0.1.8'
+__version__ = '0.1.9'
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100755
--- a/setup.py
+++ b/setup.py
@@ -29,7 +29,7 @@ setup(name = 'nsq-py',
install_requires=[
'requests',
'decorator',
- 'url<=0.2.0',
+ 'url<0.2.0',
'statsd'
],
tests_requires=[ | Correctly pin url dependency to <<I> | dlecocq_nsq-py | train | py,py |
d04ebb5ebf3ff8f6316224ab517caf2c6175b0b7 | diff --git a/ontrack-web/src/app/view/view.search.js b/ontrack-web/src/app/view/view.search.js
index <HASH>..<HASH> 100644
--- a/ontrack-web/src/app/view/view.search.js
+++ b/ontrack-web/src/app/view/view.search.js
@@ -66,17 +66,24 @@ angular.module('ot.view.search', [
// Search type
let type = $location.search().type;
+ // Offset reset
+ if (request.token !== token || request.type !== type) {
+ request.offset = 0;
+ }
+
// Request
request.token = token;
- if (type) {
- request.type = type;
- }
+ request.type = type;
// Launching the search
otGraphqlService.pageGraphQLCall(query, request).then(function (data) {
$scope.searchDone = true;
$scope.pageInfo = data.search.pageInfo;
- $scope.results = $scope.results.concat(data.search.pageItems);
+ if (request.offset > 0) {
+ $scope.results = $scope.results.concat(data.search.pageItems);
+ } else {
+ $scope.results = data.search.pageItems;
+ }
// If only one result, switches directly to the correct page
if ($scope.results.length === 1) {
let result = $scope.results[0]; | #<I> When launching a NEW search, erase previous results | nemerosa_ontrack | train | js |
4b8064786f417a010e50de094219b68ce4268c51 | diff --git a/lib/social_snippet/resolvers/dep_resolver.rb b/lib/social_snippet/resolvers/dep_resolver.rb
index <HASH>..<HASH> 100644
--- a/lib/social_snippet/resolvers/dep_resolver.rb
+++ b/lib/social_snippet/resolvers/dep_resolver.rb
@@ -49,7 +49,7 @@ module SocialSnippet
:context => new_context,
})
# find more deps
- found_tags.push *find_func(snippet.lines, new_context, tag)
+ found_tags.push *find_func(snippet.lines.to_a, new_context, tag)
end
return found_tags | dep_resolver: normalize String.lines() to Array | social-snippet_social-snippet | train | rb |
38735117af08becd1f28dd24efff562b66e8498d | diff --git a/classes/PodsAPI.php b/classes/PodsAPI.php
index <HASH>..<HASH> 100644
--- a/classes/PodsAPI.php
+++ b/classes/PodsAPI.php
@@ -5046,6 +5046,11 @@ class PodsAPI {
$wpdb->query( "DELETE FROM `{$wpdb->options}` WHERE `option_name` LIKE '_transient_pods_%'" );
+ if ( in_array( $pod[ 'type' ], array( 'taxonomy', 'post_type' ) ) ) {
+ global $wp_rewrite;
+ $wp_rewrite->flush_rules();
+ }
+
wp_cache_flush();
} | Also flush rewrite rules when flushing a cache for posts and taxonomies. | pods-framework_pods | train | php |
328f0e45efe51d2beeb4422d77dd6364a07ffe2a | diff --git a/pyusps/address_information.py b/pyusps/address_information.py
index <HASH>..<HASH> 100644
--- a/pyusps/address_information.py
+++ b/pyusps/address_information.py
@@ -2,7 +2,10 @@ import urllib2
import urllib
from lxml import etree
-from collections import OrderedDict
+try:
+ from collections import OrderedDict
+except ImportError:
+ from ordereddict import OrderedDict
api_url = 'http://production.shippingapis.com/ShippingAPI.dll'
address_max = 5
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100755
--- a/setup.py
+++ b/setup.py
@@ -30,6 +30,7 @@ setup(
install_requires=[
'setuptools>=0.6c11',
'lxml>=2.3.3',
+ 'ordereddict==1.1',
],
extras_require=EXTRAS_REQUIRES,
) | Adds support for python < <I> | thelinuxkid_pyusps | train | py,py |
860edd6e1e05428fd103589de5f44c2e7eec564f | diff --git a/lib/pundit.rb b/lib/pundit.rb
index <HASH>..<HASH> 100644
--- a/lib/pundit.rb
+++ b/lib/pundit.rb
@@ -243,7 +243,7 @@ protected
else
"permitted_attributes"
end
- params.require(param_key).permit(policy.public_send(method_name))
+ params.require(param_key).permit(*policy.public_send(method_name))
end
# Cache of policies. You should not rely on this method. | Add splat lost in merge, it was fixed elabs/pundit#<I>, closes elabs/pundit#<I> | varvet_pundit | train | rb |
02e7e96fac18129f0deb8bfb446339e203e94ac8 | diff --git a/config/deploy.rb b/config/deploy.rb
index <HASH>..<HASH> 100644
--- a/config/deploy.rb
+++ b/config/deploy.rb
@@ -54,7 +54,7 @@ namespace :deploy do
on roles(:app) do
%w(log tmp/pids tmp/cache tmp/sockets public/system public/assets).each do |path|
execute "rm -rf #{release_path}/spec/dummy/#{path}"
- execute "ln -s #{release_path}/spec/dummy/log #{shared_path}/#{path}"
+ execute "ln -s #{shared_path}/#{path} #{release_path}/spec/dummy/#{path}"
end
end
end | update deploy to symlink dummy directories | wearefine_fae | train | rb |
eb24b35c5708669841bbe1ca7c8dc2f012e0ca98 | diff --git a/lib/capnotify.rb b/lib/capnotify.rb
index <HASH>..<HASH> 100644
--- a/lib/capnotify.rb
+++ b/lib/capnotify.rb
@@ -49,7 +49,7 @@ module Capnotify
# by default, the output should be: "STAGE APPNAME @ BRANCH"
# override this to change the default behavior for capnotify.appname
_cset(:capnotify_appname) do
- name = [ fetch(:stage, nil), fetch(:application, nil) ].compact.map{|c| c.capitalize}.join(" ")
+ name = [ fetch(:stage, nil), fetch(:application, nil) ].compact.join(" ")
if fetch(:branch, nil)
name = "#{ name } @ #{ branch }"
end | yeah, probably shouldn't capitalize the appname | spikegrobstein_capnotify | train | rb |
e161bab02981d30bcf0731bda4cdaf06802d48dc | diff --git a/lib/adhearsion/punchblock_plugin.rb b/lib/adhearsion/punchblock_plugin.rb
index <HASH>..<HASH> 100644
--- a/lib/adhearsion/punchblock_plugin.rb
+++ b/lib/adhearsion/punchblock_plugin.rb
@@ -29,11 +29,11 @@ module Adhearsion
end
init :punchblock do
- Initializer.init if Adhearsion.config[:punchblock].enabled
+ Initializer.init if config.enabled
end
run :punchblock do
- Initializer.run if Adhearsion.config[:punchblock].enabled
+ Initializer.run if config.enabled
end
class << self | [CS] Cleanup access to config in punchblock plugin | adhearsion_adhearsion | train | rb |
55bd401aea98bd37d986bd626889ee9afcffe8fd | diff --git a/src/openaccess_epub/main.py b/src/openaccess_epub/main.py
index <HASH>..<HASH> 100755
--- a/src/openaccess_epub/main.py
+++ b/src/openaccess_epub/main.py
@@ -189,6 +189,7 @@ def batch_input(args):
None, # Does not use custom image path
batch=True)
except:
+ error_file.write(item_path + '\n')
traceback.print_exc(file=error_file)
#Cleanup output directory, keeps EPUB and log
diff --git a/src/openaccess_epub/utils/element_methods.py b/src/openaccess_epub/utils/element_methods.py
index <HASH>..<HASH> 100644
--- a/src/openaccess_epub/utils/element_methods.py
+++ b/src/openaccess_epub/utils/element_methods.py
@@ -39,6 +39,23 @@ def getChildrenByTagName(self, tagName):
return child_list
+def getOptionalChild(self, tagName, not_found=None):
+ """
+ This method is used to return the first child with the supplied tagName
+ when the child may or may not exist.
+
+ This saves repetitive coding of blocks to check for child existence.
+
+ The optional not_found argument (default None) can be used to define what
+ should be returned by the method if the child does not exist.
+ """
+ try:
+ child = self.getChildrenByTagName(tagName)[0]
+ except IndexError:
+ child = not_found
+ return child
+
+
def removeAllAttributes(self):
"""
This method will remove all attributes of any provided element. | adding file name to batch_traceback collection, and providing a new method to refactor optional children | SavinaRoja_OpenAccess_EPUB | train | py,py |
3fea31ba5d731ad803828bea0128f66f1e524cf8 | diff --git a/services/config.js b/services/config.js
index <HASH>..<HASH> 100644
--- a/services/config.js
+++ b/services/config.js
@@ -38,6 +38,10 @@ exports.init = function(callback) {
return callback(err);
} else {
exports.decryptionKey = result;
+ security.decryptObject(config, function (str) {
+ //Decryption function
+ return security.decrypt(str, result);
+ });
return callback();
}
}); | Encrypted properties aren't getting decrypted properly in a single-server environment | BlueOakJS_blueoak-server | train | js |
bb8db1e5a16dbc8b77e2b1b2138eea52ed330e14 | diff --git a/websockets/protocol.py b/websockets/protocol.py
index <HASH>..<HASH> 100644
--- a/websockets/protocol.py
+++ b/websockets/protocol.py
@@ -391,11 +391,8 @@ class WebSocketCommonProtocol(asyncio.StreamReaderProtocol):
# Handle flow control automatically.
yield from self.writer.drain()
except ConnectionResetError:
- # Terminate the connection if the socket died,
- # unless it's already being closed.
- if expected_state != CLOSING:
- self.state = CLOSING
- yield from self.fail_connection(1006)
+ # Terminate the connection if the socket died.
+ yield from self.fail_connection(1006)
@asyncio.coroutine
def close_connection(self): | Remove supefluous check.
fail_connection() can now be called safely multiple times in parallel,
solving the general problem, while this check only adressed an instance. | aaugustin_websockets | train | py |
dd660c2e0397683ce34926042c68da1c07db351d | diff --git a/getgauge/static_loader.py b/getgauge/static_loader.py
index <HASH>..<HASH> 100644
--- a/getgauge/static_loader.py
+++ b/getgauge/static_loader.py
@@ -16,8 +16,9 @@ def load_steps(content, file_name):
if decorator.value.__str__() == 'step':
steps = re.findall(r'[\'"](.*?)[\'"]', decorator.call.__str__())
add_steps(file_name, func, steps)
- except BaronError:
- pass
+ except BaronError as e:
+ print(e.message[:-640])
+
def reload_steps(content, file_name):
diff --git a/start.py b/start.py
index <HASH>..<HASH> 100755
--- a/start.py
+++ b/start.py
@@ -2,7 +2,7 @@ import platform
import sys
from colorama import Style, init
-
+from os import path
from getgauge import connection, processor
from getgauge.impl_loader import copy_skel_files
from getgauge.static_loader import load_files
@@ -16,7 +16,11 @@ def main():
copy_skel_files()
else:
s = connection.connect()
- load_files(get_step_impl_dir())
+ dir = get_step_impl_dir()
+ if path.exists(dir):
+ load_files(dir)
+ else:
+ print('can not load implementations from {}. {} does not exist.'.format(dir, dir))
processor.dispatch_messages(s) | loading steps only if step_impl_dir exist. | getgauge_gauge-python | train | py,py |
949f95b74a310017937af807457c613dbde14f54 | diff --git a/core/client/src/test/java/alluxio/client/file/URIStatusTest.java b/core/client/src/test/java/alluxio/client/file/URIStatusTest.java
index <HASH>..<HASH> 100644
--- a/core/client/src/test/java/alluxio/client/file/URIStatusTest.java
+++ b/core/client/src/test/java/alluxio/client/file/URIStatusTest.java
@@ -37,7 +37,7 @@ public class URIStatusTest {
/**
* Tests getting and setting fields.
*/
- @Test (timeout = 10000)
+ @Test
public void fieldsTest() {
FileInfo fileInfo = FileInfoTest.createRandom();
URIStatus uriStatus = new URIStatus(fileInfo);
@@ -72,7 +72,7 @@ public class URIStatusTest {
Assert.assertEquals(uriStatus.toString(), fileInfo.toString());
}
- @Test (timeout = 10000)
+ @Test
public void testEquals() throws Exception {
FileInfo fileInfo = FileInfoTest.createRandom();
URIStatus uriStatus1 = new URIStatus(fileInfo); | [Alluxio-<I>] Add unit tests for URIStatus. Address comments on pull
request-<I>-<I>-<I> | Alluxio_alluxio | train | java |
2db366bd9eb3a9c3402d11f5ed9117e853873581 | diff --git a/bitshares/wallet.py b/bitshares/wallet.py
index <HASH>..<HASH> 100644
--- a/bitshares/wallet.py
+++ b/bitshares/wallet.py
@@ -339,12 +339,12 @@ class Wallet():
def getKeyType(self, account, pub):
""" Get key type
"""
- if pub == account["options"]["memo_key"]:
- return "memo"
for authority in ["owner", "active"]:
for key in account[authority]["key_auths"]:
if pub == key[0]:
return authority
+ if pub == account["options"]["memo_key"]:
+ return "memo"
return None
def getAccounts(self): | [wallet] active keys have priority over memo keys | bitshares_python-bitshares | train | py |
bd7058b6cfa344801e2502895430cf8742b84e73 | diff --git a/benchexec/tools/coveriteam-verifier-validator.py b/benchexec/tools/coveriteam-verifier-validator.py
index <HASH>..<HASH> 100644
--- a/benchexec/tools/coveriteam-verifier-validator.py
+++ b/benchexec/tools/coveriteam-verifier-validator.py
@@ -30,7 +30,7 @@ class Tool(coveriteam.Tool):
task, {ILP32: "ILP32", LP64: "LP64"}
)
if data_model_param and not any(
- [option.startswith("data_model=") for option in options]
+ re.match("data_model *=", option) for option in options
):
options += ["--input", "data_model=" + data_model_param] | Moved to regex match instead of startswith, removed unnecessary list comprehension | sosy-lab_benchexec | train | py |
0647ceee2f11b1817f93f6020bfcc929528eedcb | diff --git a/packages/input-select/src/Select.js b/packages/input-select/src/Select.js
index <HASH>..<HASH> 100644
--- a/packages/input-select/src/Select.js
+++ b/packages/input-select/src/Select.js
@@ -148,7 +148,7 @@ class Select extends React.Component {
) : (
<Item
label={item.label}
- id={this.props.id && `${this.props.id}-item-${item.label}`}
+ id={this.props.id && `${this.props.id}-item-${item.value}`}
image={item.image}
selected={selected}
/> | feat(Select): Use value to compose Item id | CraveFood_farmblocks | train | js |
99a81442aae0f95d46e01f3e63da6186c0c90c65 | diff --git a/src/progress.js b/src/progress.js
index <HASH>..<HASH> 100644
--- a/src/progress.js
+++ b/src/progress.js
@@ -110,7 +110,7 @@ Progress.prototype.setText = function setText(text) {
if (this._progressPath === null) throw new Error(DESTROYED_ERROR);
if (this.text === null) {
- this.text = this._createTextElement(this._container, text);
+ this.text = this._createTextElement(this._opts, this._container);
this._container.appendChild(this.text);
return;
} | Fix bug where setting text without initial text definition failed | kimmobrunfeldt_progressbar.js | train | js |
e9c3f9e36b249bc88032fc7b2fa717f58a64c235 | diff --git a/lib/health-data-standards/import/cat1/lab_result_importer.rb b/lib/health-data-standards/import/cat1/lab_result_importer.rb
index <HASH>..<HASH> 100644
--- a/lib/health-data-standards/import/cat1/lab_result_importer.rb
+++ b/lib/health-data-standards/import/cat1/lab_result_importer.rb
@@ -6,10 +6,6 @@ module HealthDataStandards
super(entry_finder)
@entry_class = LabResult
end
-
- def create_entry(entry_element, nrh = CDA::NarrativeReferenceHandler.new)
- super
- end
end
end
end | Removing a method that only called super | projectcypress_health-data-standards | train | rb |
9e62a78da02e4e5f7193bafb0cddb66c966e10b6 | diff --git a/examples/simple.go b/examples/simple.go
index <HASH>..<HASH> 100644
--- a/examples/simple.go
+++ b/examples/simple.go
@@ -20,6 +20,11 @@ var qs = []*probe.Question{
}
func main() {
- answers := probe.Ask(qs)
+ answers, err := probe.Ask(qs)
+ if err != nil {
+ fmt.Println("\n", err.Error())
+ return
+ }
+
fmt.Printf("%s chose %s.\n", answers["name"], answers["color"])
}
diff --git a/probe.go b/probe.go
index <HASH>..<HASH> 100644
--- a/probe.go
+++ b/probe.go
@@ -14,7 +14,7 @@ type Prompt interface {
Prompt() (string, error)
}
-func Ask(qs []*Question) map[string]string {
+func Ask(qs []*Question) (map[string]string, error) {
// the response map
res := make(map[string]string)
// go over every question
@@ -23,11 +23,12 @@ func Ask(qs []*Question) map[string]string {
ans, err := q.Prompt.Prompt()
// if something went wrong
if err != nil {
- panic(err)
+ // stop listening
+ return nil, err
}
// add it to the map
res[q.Name] = ans
}
// return the response
- return res
+ return res, nil
} | ask no longer panics, just returns error | AlecAivazis_survey | train | go,go |
a3b1f5a14e966eb15cccd70e1a02be51efde5845 | diff --git a/intranet/apps/auth/backends.py b/intranet/apps/auth/backends.py
index <HASH>..<HASH> 100644
--- a/intranet/apps/auth/backends.py
+++ b/intranet/apps/auth/backends.py
@@ -62,7 +62,8 @@ class KerberosAuthenticationBackend(object):
return True
else:
logger.debug("Kerberos failed to authorize {}".format(username))
- del os.environ["KRB5CCNAME"]
+ if "KRB5CCNAME" in os.environ:
+ del os.environ["KRB5CCNAME"]
return False
def authenticate(self, username=None, password=None): | Check for env var before deleting it | tjcsl_ion | train | py |
1f157c9d28d4f705f5f2a48cc93dad67683b9e5e | diff --git a/spyder/plugins/completion/plugin.py b/spyder/plugins/completion/plugin.py
index <HASH>..<HASH> 100644
--- a/spyder/plugins/completion/plugin.py
+++ b/spyder/plugins/completion/plugin.py
@@ -756,7 +756,8 @@ class CompletionPlugin(SpyderPluginV2):
self.requests[req_id]['timed_out'] = True
# Send request to all running completion providers
- for provider_name in self.providers:
+ providers = self.available_providers_for_language(language.lower())
+ for provider_name in providers:
provider_info = self.providers[provider_name]
provider_info['instance'].send_request(
language, req_type, req, req_id)
@@ -781,7 +782,8 @@ class CompletionPlugin(SpyderPluginV2):
**kwargs: notification-specific parameters
}
"""
- for provider_name in self.providers:
+ providers = self.available_providers_for_language(language.lower())
+ for provider_name in providers:
provider_info = self.providers[provider_name]
if provider_info['status'] == self.RUNNING:
provider_info['instance'].send_notification( | Ensure that requests are only dispatched to servers that have a language available | spyder-ide_spyder | train | py |
5379e5de58c7ba9db2c9b4070621ad2c95e8fb9f | diff --git a/client/state/sharing/keyring/selectors.js b/client/state/sharing/keyring/selectors.js
index <HASH>..<HASH> 100644
--- a/client/state/sharing/keyring/selectors.js
+++ b/client/state/sharing/keyring/selectors.js
@@ -7,6 +7,11 @@
import { filter, values } from 'lodash';
/**
+ * Internal dependencies
+ */
+import createSelector from 'lib/create-selector';
+
+/**
* Returns an array of keyring connection objects.
*
* @param {Object} state Global state tree
@@ -34,9 +39,10 @@ export function getKeyringConnectionById( state, keyringConnectionId ) {
* @param {String} service Service slug.
* @return {Array} Keyring connections, if known.
*/
-export function getKeyringConnectionsByName( state, service ) {
- return filter( getKeyringConnections( state ), { service } );
-}
+export const getKeyringConnectionsByName = createSelector(
+ ( state, service ) => filter( getKeyringConnections( state ), { service } ),
+ state => [ state.sharing.keyring.items ]
+);
/**
* Returns an array of keyring connection objects for a specific user. | Unless the dependecy changed, we shouldn't change this selector's result | Automattic_wp-calypso | train | js |
ba01c031c0630da041bb2f4922023e12b4684418 | diff --git a/molgenis-omx-dataexplorer/src/main/java/org/molgenis/dataexplorer/controller/DataExplorerController.java b/molgenis-omx-dataexplorer/src/main/java/org/molgenis/dataexplorer/controller/DataExplorerController.java
index <HASH>..<HASH> 100644
--- a/molgenis-omx-dataexplorer/src/main/java/org/molgenis/dataexplorer/controller/DataExplorerController.java
+++ b/molgenis-omx-dataexplorer/src/main/java/org/molgenis/dataexplorer/controller/DataExplorerController.java
@@ -104,9 +104,6 @@ public class DataExplorerController extends MolgenisPluginController
@Autowired
private MolgenisSettings molgenisSettings;
- @Autowired
- private SearchService searchService;
-
public DataExplorerController()
{
super(URI); | remove unused searchservice + fix bug: error occurs when selecting "select..." in aggregates | molgenis_molgenis | train | java |
b9f4e9f84b947a9dfa577087a5eb21cdd8789cf6 | diff --git a/src/main/java/com/github/maven_nar/NarSystemMojo.java b/src/main/java/com/github/maven_nar/NarSystemMojo.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/github/maven_nar/NarSystemMojo.java
+++ b/src/main/java/com/github/maven_nar/NarSystemMojo.java
@@ -208,7 +208,7 @@ public class NarSystemMojo
builder.append(delimiter);
delimiter = " else ";
- builder.append("if (ao.equals(\"").append(entry.getKey()).append("\")) {\n");
+ builder.append("if (ao.startsWith(\"").append(entry.getKey()).append("\")) {\n");
builder.append(" return new String[] {\n");
String delimiter2 = " ";
for (final String aol : entry.getValue()) { | Better "Windows" OS compatibility in native-lib-loader context.
In Windows case, operating systems are started-with "Windows *". Instead of using ao.equals(), ao.startsWith() is suitable for deciding OS types in such a case. | maven-nar_nar-maven-plugin | train | java |
97b9c41a56c067b92590c4816abf398bcae0a078 | diff --git a/test/generator_test.rb b/test/generator_test.rb
index <HASH>..<HASH> 100644
--- a/test/generator_test.rb
+++ b/test/generator_test.rb
@@ -1,4 +1,3 @@
-require 'minitest/unit'
require 'minitest/autorun'
require 'token_phrase' | change require order for minitest on <I> | genericsteele_token_phrase | train | rb |
c4fd070115d0dec0bbd80215fd043768d7a7726d | diff --git a/presence.go b/presence.go
index <HASH>..<HASH> 100644
--- a/presence.go
+++ b/presence.go
@@ -134,25 +134,19 @@ func LastIndexOf(in interface{}, elem interface{}) int {
// Contains returns true if an element is present in a iteratee.
func Contains(in interface{}, elem interface{}) bool {
inValue := reflect.ValueOf(in)
-
elemValue := reflect.ValueOf(elem)
-
inType := inValue.Type()
- if inType.Kind() == reflect.String {
+ switch inType.Kind() {
+ case reflect.String:
return strings.Contains(inValue.String(), elemValue.String())
- }
-
- if inType.Kind() == reflect.Map {
- keys := inValue.MapKeys()
- for i := 0; i < len(keys); i++ {
- if equal(keys[i].Interface(), elem) {
+ case reflect.Map:
+ for _, key := range inValue.MapKeys() {
+ if equal(key.Interface(), elem) {
return true
}
}
- }
-
- if inType.Kind() == reflect.Slice {
+ case reflect.Slice:
for i := 0; i < inValue.Len(); i++ {
if equal(inValue.Index(i).Interface(), elem) {
return true | In 'Contains', save one comparison against reflect.Slice for Map that does not contain the key | thoas_go-funk | train | go |
f5d18e2e86c0db3be6aeb4cfd7638d8f1d04b300 | diff --git a/src/infi/docopt_completion/bash.py b/src/infi/docopt_completion/bash.py
index <HASH>..<HASH> 100644
--- a/src/infi/docopt_completion/bash.py
+++ b/src/infi/docopt_completion/bash.py
@@ -48,8 +48,8 @@ class BashCompletion(CompletionGenerator):
return SUBCOMMAND_SWITCH_TEMPLATE.format(level_num=level_num, subcommand_cases=subcommand_cases)
def create_compreply(self, subcommands, opts):
- return " ".join(opts) + " ".join(subcommands.keys())
-
+ return " ".join(opts) + " " + " ".join(subcommands.keys())
+
def create_section(self, cmd_name, param_tree, option_help, level_num):
subcommands = param_tree.subcommands
opts = param_tree.options | fix bad bash reply (reported in issue #2) | Infinidat_infi.docopt_completion | train | py |
Subsets and Splits
Java Commits in Train Set
Queries for all entries where the diff_languages column is 'java', providing a filtered dataset but without deeper analysis.
Java Commits Test Data
Returns a subset of 5000 entries from the dataset where the programming language difference is Java, providing basic filtering for exploration.
Java Commits Sample
Retrieves the first 1,000 records where the 'diff_languages' column is 'java', providing limited insight into the specific data entries.
Java Commits Validation Sample
Retrieves a sample of entries from the validation dataset where the diff_languages column is 'java', providing limited insight into specific Java-related data points.
Java Commits in Validation
This query retrieves a limited sample of entries from the validation dataset where the diff_languages column is 'java', providing basic filtering with minimal insight.
Java Commits Sample
This query retrieves a sample of 100 records where the 'diff_languages' is 'java', providing basic filtering but limited analytical value.
Java Commits Sample
Retrieves 100 samples where the diff_languages column is 'java', providing basic filtering but minimal analytical value.
Java Commits Sample
Retrieves 10 samples where the diff_languages column is 'java', providing basic examples of data entries with this specific language.
Java Commits Validation Sample
Retrieves 1,000 records where the diff_languages column is 'java', providing a snapshot of that specific subset but limited to raw data.
Java Commits Sample
This query retrieves 1000 random samples from the dataset where the programming language is Java, offering limited insight beyond raw data.