| hash (stringlengths 40–40) | diff (stringlengths 131–26.7k) | message (stringlengths 7–694) | project (stringlengths 5–67) | split (stringclasses, 1 value) | diff_languages (stringlengths 2–24) |
|---|---|---|---|---|---|
fbbf1b63c113c8f9c1aac16d7ad6bb7280e55a4e
|
diff --git a/api/src/opentrons/hardware_control/controller.py b/api/src/opentrons/hardware_control/controller.py
index <HASH>..<HASH> 100644
--- a/api/src/opentrons/hardware_control/controller.py
+++ b/api/src/opentrons/hardware_control/controller.py
@@ -41,7 +41,9 @@ class Controller:
'This is intended to run on a robot, and while it can connect '
'to a smoothie via a usb/serial adapter unexpected things '
'using gpios (such as smoothie reset or light management) '
- 'will fail')
+ 'will fail. If you are seeing this message and you are '
+ 'running on a robot, you need to set the RUNNING_ON_PI '
+ 'environmental variable to 1.')
self.config = config or opentrons.config.robot_configs.load()
|
feat(api): add info to debug warning on how IS_ROBOT is determined (#<I>)
If you run a robot without RUNNING_ON_PI in the environment you get this warning message (and substantially reduced functionality), but it is not clear how to resolve it. This provides that information. In my case it was because I was invoking one Python script from another with subprocess.
|
Opentrons_opentrons
|
train
|
py
|
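The commit message above notes that the warning appears when one Python script launches another via `subprocess`, so the child never sees the robot flag. A minimal sketch of forwarding `RUNNING_ON_PI` to such a child process (the child script name is a placeholder, not part of the Opentrons code):
```python
# Sketch only: forward RUNNING_ON_PI=1 to a child Python process so the
# hardware controller in the child does not emit the warning above.
import os
import subprocess

child_env = dict(os.environ)        # start from the parent's environment
child_env["RUNNING_ON_PI"] = "1"    # value suggested by the warning text

# "robot_script.py" is a hypothetical placeholder for the invoked script.
subprocess.run(["python", "robot_script.py"], env=child_env, check=True)
```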
5212351177bb9e4251f663e58022d4adbcebcaa9
|
diff --git a/salt/utils/verify.py b/salt/utils/verify.py
index <HASH>..<HASH> 100644
--- a/salt/utils/verify.py
+++ b/salt/utils/verify.py
@@ -69,8 +69,10 @@ def verify_socket(interface, pub_port, ret_port):
pubsock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
retsock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
+ pubsock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
pubsock.bind((interface, int(pub_port)))
pubsock.close()
+ retsock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
retsock.bind((interface, int(ret_port)))
retsock.close()
result = True
@@ -82,8 +84,8 @@ def verify_socket(interface, pub_port, ret_port):
pubsock.close()
retsock.close()
- return True # TODO: Make this test actually function as advertised
- # Disabled check as per github issue number 1594
+ return result
+
def verify_env(dirs, user):
'''
|
Set the SO_REUSEADDR option when performing the bind() test.
Make failure of this test fatal again as well.
|
saltstack_salt
|
train
|
py
|
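For context, `SO_REUSEADDR` lets the probe socket bind a port that a previous process left in the TIME_WAIT state, so the availability check above does not fail spuriously. A standalone sketch of that probe pattern (the helper name is illustrative, not Salt's API):
```python
# Minimal bind() probe with SO_REUSEADDR, mirroring the pattern in the diff.
import socket

def port_is_bindable(interface: str, port: int) -> bool:
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        sock.bind((interface, int(port)))
        return True
    except OSError:
        return False
    finally:
        sock.close()
```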
a5705919b065d981cc8ecd95170c353bdd0663bd
|
diff --git a/lib/devise_token/rails/routes.rb b/lib/devise_token/rails/routes.rb
index <HASH>..<HASH> 100644
--- a/lib/devise_token/rails/routes.rb
+++ b/lib/devise_token/rails/routes.rb
@@ -23,7 +23,7 @@ module ActionDispatch::Routing
:module => :devise,
:path => "#{opts[:at]}",
:controllers => controllers,
- :skip => opts[:skip]
+ :skip => opts[:skip] + [:omniauth_callbacks]
unnest_namespace do
# get full url path as if it were namespaced
|
skips omniauth_callback
|
akrahdan_devise_token
|
train
|
rb
|
2df09f264989c425f6df5ed85329e8a374c19eb7
|
diff --git a/lib/sensu/server/process.rb b/lib/sensu/server/process.rb
index <HASH>..<HASH> 100644
--- a/lib/sensu/server/process.rb
+++ b/lib/sensu/server/process.rb
@@ -128,12 +128,10 @@ module Sensu
# Process an event: filter -> mutate -> handle.
#
- # This method runs event bridges, relaying the event data to
- # other services. This method also determines the appropriate
- # handlers for the event, filtering and mutating the event data
- # for each of them. The `@handling_event_count` is incremented
- # by `1`, for each event handler chain (filter -> mutate ->
- # handle).
+ # This method determines the appropriate handlers for an event,
+ # filtering and mutating the event data for each of them. The
+ # `@handling_event_count` is incremented by `1`, for each event
+ # handler chain (filter -> mutate -> handle).
#
# @param event [Hash]
def process_event(event)
@@ -299,8 +297,10 @@ module Sensu
# registry. If the previous conditions are not met, and check
# `:type` is `metric` and the `:status` is `0`, the event
# registry is not updated, but the provided callback is called
- # with the event data. JSON serialization is used when storing
- # data in the registry.
+ # with the event data. All event data is sent to event bridge
+ # extensions, including events that do not normally produce an
+ # action. JSON serialization is used when storing data in the
+ # registry.
#
# @param client [Hash]
# @param check [Hash]
|
[event-bridge] updated yardoc, event_bridges() moved
|
sensu_sensu
|
train
|
rb
|
4922a82b2499a124e51ba495086b824c2ba324bf
|
diff --git a/tff/io.py b/tff/io.py
index <HASH>..<HASH> 100644
--- a/tff/io.py
+++ b/tff/io.py
@@ -343,6 +343,9 @@ class DefaultPTY(PTY):
new[6][termios.VSUSP] = vdisable # Ctrl-Z
new[6][termios.VQUIT] = vdisable # Ctrl-\
+ VDSUSP = 11
+ new[6][VDSUSP] = vdisable # Ctrl-Y
+
termios.tcsetattr(self.__stdin_fileno, termios.TCSANOW, new)
pid, master = pty.fork()
if not pid:
|
Fix VDSUSP (Ctrl-y) issue
|
saitoha_tff
|
train
|
py
|
1c36d79aeaa0a098d855b2ac3b2d4477ea2bccef
|
diff --git a/src/python/dxpy/utils/describe.py b/src/python/dxpy/utils/describe.py
index <HASH>..<HASH> 100644
--- a/src/python/dxpy/utils/describe.py
+++ b/src/python/dxpy/utils/describe.py
@@ -378,7 +378,7 @@ def print_data_obj_desc(desc, verbose=False):
if desc["class"] == "file" or desc["class"] == "gtable":
sponsored_str = ""
if 'sponsored' in desc and desc['sponsored']:
- sponsored_str = ", sponsored by DNAnexus"
+ sponsored_str = DELIMITER(", ") + "sponsored by DNAnexus"
print_field("Size", get_size_str(desc['size']) + sponsored_str)
else:
print_field("Size", str(desc['size']))
|
PTFM-<I>-sidenote: adding delimiter between size string and "sponsored by DNAnexus" string
|
dnanexus_dx-toolkit
|
train
|
py
|
36c63b285439ee9de4ef1a59aa6eea40f981a0ab
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100755
--- a/setup.py
+++ b/setup.py
@@ -5,7 +5,7 @@ long_description = open('README.rst').read()
setup(
name = 'templated-emails',
- version = "0.4",
+ version = "0.5",
url = 'https://github.com/philippWassibauer/templated-emails',
author = "Philipp Wassibauer",
author_email = "phil@gidsy.com",
|
browsing emails in the backend now possible. this renders the blocks, but not the templates itself; therefore showing the whole logic of the template and data is not required for the rendering process. this is especially useful for complex, timebased emails that are almost impossible to recreate manually.
|
philippWassibauer_templated-emails
|
train
|
py
|
86bc7b0cf7e751c539c7797bf481f15f4aece7d2
|
diff --git a/lib/App.js b/lib/App.js
index <HASH>..<HASH> 100644
--- a/lib/App.js
+++ b/lib/App.js
@@ -92,7 +92,9 @@ inherit(App, EventEmitter, [
this.intialized = true;
this.emit('init', settings);
return this;
- }
+ },
+ _App.screenHeight,
+ _App.screenWidth,
]);
|
accidentally removed screenWidth and screenHeight
|
appjs_appjs
|
train
|
js
|
eed5b18dc5fbd2efe20d0a7545b0fe3ff5efaf9a
|
diff --git a/synapse/tests/test_lib_agenda.py b/synapse/tests/test_lib_agenda.py
index <HASH>..<HASH> 100644
--- a/synapse/tests/test_lib_agenda.py
+++ b/synapse/tests/test_lib_agenda.py
@@ -324,8 +324,8 @@ class AgendaTest(s_t_utils.SynTest):
await agenda.add('visi', '[teststr=baz]', {s_tu.HOUR: (7, 8), s_tu.MINUTE: 0, s_tu.DAYOFMONTH: 6},
incunit=s_agenda.TimeUnit.MONTH, incvals=1)
- xmas = {s_tu.DAYOFMONTH: 25, s_tu.MONTH: 12, s_tu.YEAR: 2018}
- lasthanu = {s_tu.DAYOFMONTH: 10, s_tu.MONTH: 12, s_tu.YEAR: 2018}
+ xmas = {s_tu.DAYOFMONTH: 25, s_tu.MONTH: 12, s_tu.YEAR: 2099}
+ lasthanu = {s_tu.DAYOFMONTH: 10, s_tu.MONTH: 12, s_tu.YEAR: 2099}
await agenda.delete(guid1)
|
Fix agenda persistence fail (#<I>)
|
vertexproject_synapse
|
train
|
py
|
0f0632da80c713df4288d1e14492a42d78e9938f
|
diff --git a/jdk8/src/main/java/com/google/errorprone/dataflow/DataFlow.java b/jdk8/src/main/java/com/google/errorprone/dataflow/DataFlow.java
index <HASH>..<HASH> 100644
--- a/jdk8/src/main/java/com/google/errorprone/dataflow/DataFlow.java
+++ b/jdk8/src/main/java/com/google/errorprone/dataflow/DataFlow.java
@@ -134,7 +134,7 @@ public final class DataFlow {
final MethodTree method = (MethodTree) leaf;
Preconditions.checkNotNull(method.getBody(),
- "Method to analyze must have a body. Method passed in: %s in file %s",
+ "Method to analyze must have a body. Method passed in: %s() in file %s",
method.getName(),
methodPath.getCompilationUnit().getSourceFile().getName());
@@ -177,6 +177,15 @@ public final class DataFlow {
// Currently not supported because it only happens in ~2% of cases.
return null;
}
+
+ final MethodTree method = (MethodTree) enclosingMethodPath.getLeaf();
+ if (method.getBody() == null) {
+ // expressions can occur in abstract methods, for example {@code Map.Entry} in:
+ //
+ // abstract Set<Map.Entry<K, V>> entries();
+ return null;
+ }
+
return methodDataflow(enclosingMethodPath, context, transfer).getAnalysis().getValue(expr);
}
|
Fix bug where expressionDataflow would throw an exception for expressions in a method that doesn't have a body, e.g. Map.Entry in:
abstract Set<Map.Entry<K, V>> entries();
-------------
Created by MOE: <URL>
|
google_error-prone
|
train
|
java
|
5e787a928b66a2cd4c9fa75bf7a4888ab0b69657
|
diff --git a/luigi/contrib/bigquery.py b/luigi/contrib/bigquery.py
index <HASH>..<HASH> 100644
--- a/luigi/contrib/bigquery.py
+++ b/luigi/contrib/bigquery.py
@@ -326,7 +326,7 @@ class BigQueryClient(object):
job_id = new_job['jobReference']['jobId']
logger.info('Started import job %s:%s', project_id, job_id)
while True:
- status = self.client.jobs().get(projectId=project_id, jobId=job_id).execute()
+ status = self.client.jobs().get(projectId=project_id, jobId=job_id).execute(num_retries=10)
if status['status']['state'] == 'DONE':
if status['status'].get('errorResult'):
raise Exception('BigQuery job failed: {}'.format(status['status']['errorResult']))
|
Add retries for <I> errors in BigQuery
According to <URL>
|
spotify_luigi
|
train
|
py
|
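Passing `num_retries` makes the Google API client retry transient HTTP failures on each status poll instead of letting a single error abort the wait. A hedged sketch of the surrounding polling loop, adapted from the diff (the `wait_for_job` helper and its parameters are illustrative, not Luigi's API):
```python
# Poll a BigQuery job until completion; each poll is retried by the client.
import time

def wait_for_job(client, project_id, job_id, poll_seconds=5):
    while True:
        status = client.jobs().get(projectId=project_id,
                                   jobId=job_id).execute(num_retries=10)
        if status['status']['state'] == 'DONE':
            if status['status'].get('errorResult'):
                raise Exception('BigQuery job failed: {}'.format(
                    status['status']['errorResult']))
            return status
        time.sleep(poll_seconds)
```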
48b404a62d8514656cf93f3f41a025c3491ec393
|
diff --git a/spec/integration/util/rdoc/parser_spec.rb b/spec/integration/util/rdoc/parser_spec.rb
index <HASH>..<HASH> 100755
--- a/spec/integration/util/rdoc/parser_spec.rb
+++ b/spec/integration/util/rdoc/parser_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
require 'puppet/util/rdoc'
-describe "RDoc::Parser" do
+describe "RDoc::Parser", :unless => Puppet.features.microsoft_windows? do
require 'puppet_spec/files'
include PuppetSpec::Files
|
(PUP-<I>) Skip rdoc tests on windows
Appveyor has transient failures running rdoc parser tests on windows
only. Puppet uses rdoc as part of its `puppet doc` command. The
puppet-strings project has mostly, but not yet completely, replaced the
`puppet doc` command. At a future date `puppet doc` will be deprecated
and removed. This commit skips the tests on windows.
|
puppetlabs_puppet
|
train
|
rb
|
510cf4634cf379cda64915bcbd5573de2f165d06
|
diff --git a/lib/ffi-glib/ptr_array.rb b/lib/ffi-glib/ptr_array.rb
index <HASH>..<HASH> 100644
--- a/lib/ffi-glib/ptr_array.rb
+++ b/lib/ffi-glib/ptr_array.rb
@@ -28,12 +28,17 @@ module GLib
Lib.g_ptr_array_add self, ptr
end
+ # Re-implementation of the g_ptr_array_index macro
+ def index idx
+ sz = FFI.type_size :pointer
+ ptr = @struct[:pdata].get_pointer(idx * sz)
+ GirFFI::ArgHelper.cast_from_pointer(element_type, ptr)
+ end
+
def each
- prc = Proc.new {|valptr, userdata|
- val = GirFFI::ArgHelper.cast_from_pointer element_type, valptr
- yield val
- }
- Lib.g_ptr_array_foreach self.to_ptr, prc, nil
+ @struct[:len].times.each do |idx|
+ yield index(idx)
+ end
end
end
end
|
Make PtrArray#each behave properly with early exit in JRuby.
|
mvz_gir_ffi
|
train
|
rb
|
ef0de1536a3aa4bacc422e3dc7b5ede1f9ee1710
|
diff --git a/pyqg/layered_model.py b/pyqg/layered_model.py
index <HASH>..<HASH> 100644
--- a/pyqg/layered_model.py
+++ b/pyqg/layered_model.py
@@ -99,15 +99,11 @@ class LayeredModel(model.Model):
Parameters
----------
- g : number
- Gravitational acceleration. Units: meters second :sup:`-2`
nz : integer number
Number of layers (> 1)
beta : number
Gradient of coriolis parameter. Units: meters :sup:`-1`
seconds :sup:`-1`
- rek : number
- Linear drag in lower layer. Units: seconds :sup:`-1`
rd : number
Deformation radius. Units: meters. Only necessary for
the two-layer (nz=2) case.
|
Removes unused parameters from docstrings
|
pyqg_pyqg
|
train
|
py
|
3109f93529754bd32602f20d28d80cb921a8bcc1
|
diff --git a/lib/haml_lint/tree/root_node.rb b/lib/haml_lint/tree/root_node.rb
index <HASH>..<HASH> 100644
--- a/lib/haml_lint/tree/root_node.rb
+++ b/lib/haml_lint/tree/root_node.rb
@@ -17,7 +17,9 @@ module HamlLint::Tree
# @param line [Integer] the line number of the node
# @return [HamlLint::Node]
def node_for_line(line)
- find(-> { HamlLint::Tree::NullNode.new }) { |node| node.line_numbers.cover?(line) }
+ find(-> { HamlLint::Tree::NullNode.new }) do |node|
+ node.line_numbers.cover?(line) && node != self
+ end
end
end
end
|
Fix RootNode#node_for_line method on Ruby <I>
This method broke on Ruby <I> since `Range#cover?` now returns true for
beginless ranges where it previously returned false. This meant the
`RootNode` would match (as its `line_numbers` have no start nor end).
Fix by excluding the root node if it matches.
|
sds_haml-lint
|
train
|
rb
|
43ddfcbbe00a19e629f2706406d96d9a683418de
|
diff --git a/untwisted/network.py b/untwisted/network.py
index <HASH>..<HASH> 100644
--- a/untwisted/network.py
+++ b/untwisted/network.py
@@ -23,9 +23,9 @@ class SuperSocket(Dispatcher):
core.gear.scale(self)
def destroy(self):
- self.base.clear()
- SSL.base.clear()
- del self.pool[:]
+ # self.base.clear()
+ # SSL.base.clear()
+ # del self.pool[:]
core.gear.unregister(self)
class SSL(SuperSocket):
@@ -69,3 +69,4 @@ class Device(SuperSocket):
+
|
Fixing misbehavior with Supersocket class.
|
untwisted_untwisted
|
train
|
py
|
b6870add90a9baf0b202f6b5e484a491706bf2e3
|
diff --git a/core/Core.php b/core/Core.php
index <HASH>..<HASH> 100644
--- a/core/Core.php
+++ b/core/Core.php
@@ -304,19 +304,6 @@ Debug::loadErrorHandlers();
///////////////////////////////////////////////////////////////////////////////
// HELPER FUNCTIONS
-function getSysTempDir() {
- Deprecation::notice(3.0, 'Please use PHP function get_sys_temp_dir() instead.');
- return sys_get_temp_dir();
-}
-
-/**
- * @deprecated 3.0 Please use {@link SS_ClassManifest::getItemPath()}.
- */
-function getClassFile($className) {
- Deprecation::notice('3.0', 'Use SS_ClassManifest::getItemPath() instead.');
- return SS_ClassLoader::instance()->getManifest()->getItemPath($className);
-}
-
/**
* Creates a class instance by the "singleton" design pattern.
* It will always return the same instance for this class,
|
Removing deprecated Core.php functions
|
silverstripe_silverstripe-framework
|
train
|
php
|
2ce25e19a24f7242939bf4f26bf81c9f78164a7c
|
diff --git a/eZ/Publish/Core/Search/Legacy/Content/Gateway/DoctrineDatabase.php b/eZ/Publish/Core/Search/Legacy/Content/Gateway/DoctrineDatabase.php
index <HASH>..<HASH> 100644
--- a/eZ/Publish/Core/Search/Legacy/Content/Gateway/DoctrineDatabase.php
+++ b/eZ/Publish/Core/Search/Legacy/Content/Gateway/DoctrineDatabase.php
@@ -85,7 +85,7 @@ class DoctrineDatabase extends Gateway
$doCount = true
)
{
- $count = $doCount ? $this->getResultCount( $criterion, $sort, $fieldFilters ) : null;
+ $count = $doCount ? $this->getResultCount( $criterion, null, $fieldFilters ) : null;
if ( !$doCount && $limit === 0 )
{
|
There is no need for sorting when just getting the result count
|
ezsystems_ezpublish-kernel
|
train
|
php
|
7fe85af9605de82fcfd0dcb707500efb975646b1
|
diff --git a/app/models/no_cms/blocks/block_slot.rb b/app/models/no_cms/blocks/block_slot.rb
index <HASH>..<HASH> 100644
--- a/app/models/no_cms/blocks/block_slot.rb
+++ b/app/models/no_cms/blocks/block_slot.rb
@@ -7,5 +7,11 @@ module NoCms::Blocks
belongs_to :block, class_name: "NoCms::Blocks::Block"
accepts_nested_attributes_for :block
+
+ scope :for_bone, -> (bone) { where(bone: bone) }
+
+ validates :bone, presence: true
+
+
end
end
|
Scopes and validations for bones in the blocks
|
simplelogica_nocms-blocks
|
train
|
rb
|
5c0545e75b16984cfd4fbe953c3b76adcec65d0c
|
diff --git a/geomet/tests/wkb_test.py b/geomet/tests/wkb_test.py
index <HASH>..<HASH> 100644
--- a/geomet/tests/wkb_test.py
+++ b/geomet/tests/wkb_test.py
@@ -45,6 +45,10 @@ class PointTestCase(unittest.TestCase):
)
self.assertEqual(expected, wkb.dumps(pt, big_endian=True))
+ # We skip this because, right now, we have no way of signalling with
+ # GeoJSON that an object with 3-dimensional coordinates is XYZ or XYM.
+ # XYZ is more common, so we assume this.
+ @unittest.skip
def test_dumps_point_m(self):
# Test for an XYM Point:
pt = dict(type='Point', coordinates=[0.0, 1.0, 2.0])
|
tests/wkb_test:
Skip `test_dumps_point_m` and explain why (with a comment).
|
geomet_geomet
|
train
|
py
|
a76a398d9dfac039dd8960680597695897e7d40d
|
diff --git a/ui/app/milestone/milestone-controllers.js b/ui/app/milestone/milestone-controllers.js
index <HASH>..<HASH> 100644
--- a/ui/app/milestone/milestone-controllers.js
+++ b/ui/app/milestone/milestone-controllers.js
@@ -188,6 +188,8 @@
that.data = milestoneDetail;
+ that.data.endDate = dateUtilConverter.convertFromTimestampNoonUTC(that.data.endDate);
+
that.submit = function() {
that.data.endDate = dateUtilConverter.convertToTimestampNoonUTC(that.data.endDate);
|
NCL-<I> Missing convertor added
|
project-ncl_pnc
|
train
|
js
|
d6f3c5b672f9e1576dde5ffa1f81f767dbbd91a5
|
diff --git a/update.go b/update.go
index <HASH>..<HASH> 100644
--- a/update.go
+++ b/update.go
@@ -48,6 +48,10 @@ func init() {
func Update(channel string) {
golock.Lock(updateLockPath)
defer golock.Unlock(updateLockPath)
+ if !IsUpdateNeeded("soft") {
+ // update no longer needed
+ return
+ }
done := make(chan bool)
go func() {
touchAutoupdateFile()
|
skip updating if it is no longer needed
|
heroku_cli
|
train
|
go
|
2d9c93a7b1aa6964bf2d319453e060983bd64eff
|
diff --git a/adafruit_bme280.py b/adafruit_bme280.py
index <HASH>..<HASH> 100644
--- a/adafruit_bme280.py
+++ b/adafruit_bme280.py
@@ -213,7 +213,7 @@ class Adafruit_BME280_I2C(Adafruit_BME280):
with self._i2c as i2c:
i2c.write(bytes([register & 0xFF]))
result = bytearray(length)
- i2c.read_into(result)
+ i2c.readinto(result)
#print("$%02X => %s" % (register, [hex(i) for i in result]))
return result
|
changed read_into to readinto
|
adafruit_Adafruit_CircuitPython_BME280
|
train
|
py
|
ba943d7ec291aeff293331b1bc0f648d153501ea
|
diff --git a/src/shims/forms-picker.js b/src/shims/forms-picker.js
index <HASH>..<HASH> 100644
--- a/src/shims/forms-picker.js
+++ b/src/shims/forms-picker.js
@@ -8,7 +8,7 @@ webshims.register('forms-picker', function($, webshims, window, document, undefi
var ret = [date.getFullYear(), moduleOpts.addZero(date.getMonth() + 1), moduleOpts.addZero(date.getDate())];
ret.month = ret[0]+'-'+ret[1];
ret.date = ret[0]+'-'+ret[1]+'-'+ret[2];
- ret.time = date.getHours() +':'+ date.getMinutes();
+ ret.time = moduleOpts.addZero(date.getHours()) +':'+ moduleOpts.addZero(date.getMinutes());
ret['datetime-local'] = ret.date +'T'+ ret.time;
return ret;
|
getDateArray should add zeros to time as well
|
aFarkas_webshim
|
train
|
js
|
ba3f901263dd122f0c8f5ecc894d4caebbeed3f3
|
diff --git a/xtuml/load.py b/xtuml/load.py
index <HASH>..<HASH> 100644
--- a/xtuml/load.py
+++ b/xtuml/load.py
@@ -111,8 +111,9 @@ class ModelLoader(object):
'''
Parse input as raw data.
'''
- s = self.parser.parse(lexer=self.lexer, input=data)
- self.statements.extend(s)
+ if data:
+ s = self.parser.parse(lexer=self.lexer, input=data)
+ self.statements.extend(s)
def filename_input(self, filename):
|
load: don't report syntax errors on empty input
|
xtuml_pyxtuml
|
train
|
py
|
ec8f44abd7f8dc62fc8c7af530813e7c5c67dffb
|
diff --git a/test/benchmarks/active_record_ips_test.rb b/test/benchmarks/active_record_ips_test.rb
index <HASH>..<HASH> 100644
--- a/test/benchmarks/active_record_ips_test.rb
+++ b/test/benchmarks/active_record_ips_test.rb
@@ -16,6 +16,6 @@ class Blueprinter::ActiveRecordIPSTest < Minitest::Test
def test_render
result = iterate {@blueprinter.render(@prepared_objects)}
puts "\nActiveRecord IPS: #{result}"
- assert_operator(result, :>=, 2500)
+ assert_operator(result, :>=, 2000)
end
end
diff --git a/test/benchmarks/ips_test.rb b/test/benchmarks/ips_test.rb
index <HASH>..<HASH> 100644
--- a/test/benchmarks/ips_test.rb
+++ b/test/benchmarks/ips_test.rb
@@ -16,6 +16,6 @@ class Blueprinter::IPSTest < Minitest::Test
def test_render
result = iterate {@blueprinter.render(@prepared_objects)}
puts "\nBasic IPS: #{result}"
- assert_operator(result, :>=, 3000)
+ assert_operator(result, :>=, 2500)
end
end
|
IPS are slower due to ruby downgrade.
Previously, we were building and running benchmarks on circle ci using
ruby <I>. We now build and run benchmarks on <I>. Ruby <I> is
slower than <I>, so we need to reduce the IPS benchmarks.
|
procore_blueprinter
|
train
|
rb,rb
|
ada58af10bee2ceb5486df9bec85eb584f0c1980
|
diff --git a/test_everything.py b/test_everything.py
index <HASH>..<HASH> 100644
--- a/test_everything.py
+++ b/test_everything.py
@@ -61,7 +61,6 @@ pub.options.reprcomments = False
import hydpy
doctests = {}
for dirinfo in os.walk(hydpy.__path__[0]):
- print(dirinfo[0])
if dirinfo[0].endswith('unittests') or not '__init__.py' in dirinfo[2]:
continue
packagename = dirinfo[0].replace(os.sep, '.')+'.'
@@ -69,7 +68,9 @@ for dirinfo in os.walk(hydpy.__path__[0]):
level = packagename.count('.')-1
modulenames = [packagename+fn.split('.')[0]
for fn in dirinfo[2] if fn.endswith('.py')]
+ print(dirinfo[0], packagename)
for modulename in modulenames:
+ print(' '+modulename)
module = importlib.import_module(modulename)
runner = unittest.TextTestRunner(stream=open(os.devnull, 'w'))
suite = unittest.TestSuite()
|
still debugging
going into the details of doctesting hland
|
hydpy-dev_hydpy
|
train
|
py
|
3a2ed9671403c2407229484031d119b4c45f5ec4
|
diff --git a/src/transformers/trainer_seq2seq.py b/src/transformers/trainer_seq2seq.py
index <HASH>..<HASH> 100644
--- a/src/transformers/trainer_seq2seq.py
+++ b/src/transformers/trainer_seq2seq.py
@@ -161,6 +161,9 @@ class Seq2SeqTrainer(Trainer):
"synced_gpus": True if is_deepspeed_zero3_enabled() else False,
}
+ if "attention_mask" in inputs:
+ gen_kwargs["attention_mask"] = inputs.get("attention_mask", None)
+
# prepare generation inputs
# some encoder-decoder models can have varying encder's and thus
# varying model input names
@@ -171,7 +174,6 @@ class Seq2SeqTrainer(Trainer):
generated_tokens = self.model.generate(
generation_inputs,
- attention_mask=inputs.get("attention_mask", None),
**gen_kwargs,
)
# in case the batch is shorter than max length, the output should be padded
|
Fix Seq2SeqTrainer (#<I>)
|
huggingface_pytorch-pretrained-BERT
|
train
|
py
|
049ae93465c823c0fd6335278b502ee99889ebd2
|
diff --git a/tensorflow_datasets/core/utils/read_config.py b/tensorflow_datasets/core/utils/read_config.py
index <HASH>..<HASH> 100644
--- a/tensorflow_datasets/core/utils/read_config.py
+++ b/tensorflow_datasets/core/utils/read_config.py
@@ -77,10 +77,11 @@ class ReadConfig(_ReadConfig):
try_autocache: If True (default) and the dataset satisfy the right
conditions (dataset small enough, files not shuffled,...) the dataset
will be cached during the first iteration (through `ds = ds.cache()`).
- shuffle_seed: `tf.int64`, seeds forwarded to `tf.data.Dataset.shuffle` when
- `shuffle_files=True`.
+ shuffle_seed: `tf.int64`, seed forwarded to `tf.data.Dataset.shuffle` during
+ file shuffling (which happens when `tfds.load(..., shuffle_files=True)`).
shuffle_reshuffle_each_iteration: `bool`, forwarded to
- `tf.data.Dataset.shuffle` when `shuffle_files=True`.
+ `tf.data.Dataset.shuffle` during file shuffling (which happens when
+ `tfds.load(..., shuffle_files=True)`).
interleave_cycle_length: `int`, forwarded to `tf.data.Dataset.interleave`.
Default to 16.
interleave_block_length: `int`, forwarded to `tf.data.Dataset.interleave`.
|
Modifying docstring of ReadConfig to clarify that `shuffle_seed` and `shuffle_reshuffle_each_iteration` are only used for file shuffling.
PiperOrigin-RevId: <I>
|
tensorflow_datasets
|
train
|
py
|
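A short usage sketch for the clarified docstring: `shuffle_seed` and `shuffle_reshuffle_each_iteration` only take effect when files are shuffled, i.e. with `shuffle_files=True` (the dataset name below is just an example):
```python
# Seeded file shuffling via ReadConfig; no effect unless shuffle_files=True.
import tensorflow_datasets as tfds

read_config = tfds.ReadConfig(
    shuffle_seed=42,                         # forwarded to tf.data.Dataset.shuffle
    shuffle_reshuffle_each_iteration=False,  # keep the same file order each epoch
)
ds = tfds.load('mnist', split='train',
               shuffle_files=True, read_config=read_config)
```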
cc7fb996464002772668e4f925056d866de16909
|
diff --git a/simuvex/s_irexpr.py b/simuvex/s_irexpr.py
index <HASH>..<HASH> 100644
--- a/simuvex/s_irexpr.py
+++ b/simuvex/s_irexpr.py
@@ -20,7 +20,10 @@ class SimIRExpr(object):
self._post_processed = False
self.expr = None
- self.type = tyenv.typeOf(expr)
+ if expr.tag in ('Iex_BBPTR', 'Iex_VECRET'):
+ self.type = None
+ else:
+ self.type = tyenv.typeOf(expr)
self.state._inspect('expr', BP_BEFORE)
@@ -100,6 +103,16 @@ class SimIRExpr(object):
### expression handlers ###
###########################
+ def _handle_BBPTR(self, expr):
+ l.warning("BBPTR IRExpr encountered. This is (probably) not bad, but we have no real idea how to handle it.")
+ self.type = "Ity_I32"
+ self.expr = self.state.BVV("WTF!")
+
+ def _handle_VECRET(self, expr):
+ l.warning("VECRET IRExpr encountered. This is (probably) not bad, but we have no real idea how to handle it.")
+ self.type = "Ity_I32"
+ self.expr = self.state.BVV("OMG!")
+
def _handle_Get(self, expr):
size = size_bytes(expr.type)
self.type = expr.type
|
handle BBPTR and VECRET, at least as stubs
|
angr_angr
|
train
|
py
|
2d6cb482e663c7bc5037daba6ed50c1cccb4e4a0
|
diff --git a/lib/hashie/extensions/coercion.rb b/lib/hashie/extensions/coercion.rb
index <HASH>..<HASH> 100644
--- a/lib/hashie/extensions/coercion.rb
+++ b/lib/hashie/extensions/coercion.rb
@@ -2,7 +2,7 @@ module Hashie
module Extensions
module Coercion
def self.included(base)
- base.send :extend, ClassMethods
+ base.extend ClassMethods
base.send :include, InstanceMethods
end
|
Object#extend is a public method
|
intridea_hashie
|
train
|
rb
|
a95f2c3ad52f62bad032d8b2e2371d42fb59f057
|
diff --git a/chickpea/base_models.py b/chickpea/base_models.py
index <HASH>..<HASH> 100644
--- a/chickpea/base_models.py
+++ b/chickpea/base_models.py
@@ -89,7 +89,7 @@ class Category(models.Model):
map = models.ForeignKey(Map)
name = models.CharField(max_length=50)
description = models.TextField(blank=True, null=True)
- color = models.CharField(max_length=32)
+ color = models.CharField(max_length=32, default="DarkBlue")
icon = models.ForeignKey(Icon, null=True, blank=True)
preset = models.BooleanField(default=False, help_text="Display this category on load.")
rank = models.IntegerField(null=True, blank=True)
diff --git a/chickpea/views.py b/chickpea/views.py
index <HASH>..<HASH> 100644
--- a/chickpea/views.py
+++ b/chickpea/views.py
@@ -76,6 +76,7 @@ class QuickMapCreate(CreateView):
self.object = form.save()
layer = TileLayer.get_default()
MapToTileLayer.objects.create(map=self.object, tilelayer=layer, rank=1)
+ Category.objects.create(map=self.object, name="POIs", preset=True)
response = {
"redirect": self.get_success_url()
}
|
Create a default category during map quick create process
|
umap-project_django-leaflet-storage
|
train
|
py,py
|
212d3a0cb1d1ee9f4e20d844a5b1a82660042fa3
|
diff --git a/lib/info.rb b/lib/info.rb
index <HASH>..<HASH> 100644
--- a/lib/info.rb
+++ b/lib/info.rb
@@ -1,5 +1,5 @@
module MultiRepo
NAME = "git-multirepo"
- VERSION = "1.0.0.beta2"
+ VERSION = "1.0.0.beta3"
DESCRIPTION = "Track multiple Git repositories side-by-side."
end
\ No newline at end of file
|
Updated gem version to <I>.beta3 (yanked gem version forces version bump).
|
fortinmike_git-multirepo
|
train
|
rb
|
17718f4ecfe807a1233df52c17ff96fd1592b4c4
|
diff --git a/django_ssh/models.py b/django_ssh/models.py
index <HASH>..<HASH> 100644
--- a/django_ssh/models.py
+++ b/django_ssh/models.py
@@ -23,3 +23,6 @@ class Key(models.Model):
data = models.TextField(db_index=True, unique=True)
comment = models.TextField()
fingerprint = models.CharField(max_length=47)
+
+ class Meta:
+ db_table = 'ssh_key'
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -19,11 +19,11 @@ from distutils.core import setup
setup(
name = 'django-ssh',
packages = ['django_ssh'],
- version = '0.0.2',
+ version = '0.0.3',
description = 'A basic Django app for storing SSH keys',
author = 'Jon Eyolfson',
author_email = 'jon@eyl.io',
url = 'https://github.com/eyolfson/django-ssh/',
download_url = ('https://github.com/eyolfson/django-ssh/archive/'
- 'v0.0.2.tar.gz'),
+ 'v0.0.3.tar.gz'),
)
|
Changed table names and bumped version
|
eyolfson_django-ssh
|
train
|
py,py
|
947049fac1c75b89368d7f329e5acf727855cdf5
|
diff --git a/src/trumbowyg.js b/src/trumbowyg.js
index <HASH>..<HASH> 100644
--- a/src/trumbowyg.js
+++ b/src/trumbowyg.js
@@ -1225,8 +1225,9 @@ Object.defineProperty(jQuery.trumbowyg, 'defaultOptions', {
createLink: function () {
var t = this,
documentSelection = t.doc.getSelection(),
+ selectedRange = documentSelection.getRangeAt(0),
node = documentSelection.focusNode,
- text = new XMLSerializer().serializeToString(documentSelection.getRangeAt(0).cloneContents()),
+ text = new XMLSerializer().serializeToString(selectedRange.cloneContents()) || selectedRange + '',
url,
title,
target;
@@ -1263,7 +1264,7 @@ Object.defineProperty(jQuery.trumbowyg, 'defaultOptions', {
}
};
if (!t.o.minimalLinks) {
- Object.assign(options, {
+ $.extend(options, {
title: {
label: t.lang.title,
value: title
|
fix: makes links work on IE<I>
|
Alex-D_Trumbowyg
|
train
|
js
|
5576d01ca09e6936eb39ada5159d10c925dc00e3
|
diff --git a/java-allocation-instrumenter/src/main/java/com/google/monitoring/runtime/instrumentation/ConstructorInstrumenter.java b/java-allocation-instrumenter/src/main/java/com/google/monitoring/runtime/instrumentation/ConstructorInstrumenter.java
index <HASH>..<HASH> 100644
--- a/java-allocation-instrumenter/src/main/java/com/google/monitoring/runtime/instrumentation/ConstructorInstrumenter.java
+++ b/java-allocation-instrumenter/src/main/java/com/google/monitoring/runtime/instrumentation/ConstructorInstrumenter.java
@@ -39,9 +39,7 @@ import java.util.logging.Logger;
* Instruments bytecode by inserting a specified call in the
* constructor of a given class. This class is intended to be loaded
* by a javaagent; end-users will want to add {@link ConstructorCallback}s by
- * invoking {@link
- * com.google.monitoring.runtime.allocation.AllocationInspector#
- * addConstructorCallback(Class, ConstructorCallback)}.
+ * invoking {@link #instrumentClass(Class, ConstructorCallback)}.
*
* @author Jeremy Manson
*/
|
Doc fix: javadoc points to incorrect method for instrumentation.
|
google_allocation-instrumenter
|
train
|
java
|
5114f6c764a8346cce96dc1a7498ab2a0d31d2d0
|
diff --git a/lib/builder.js b/lib/builder.js
index <HASH>..<HASH> 100644
--- a/lib/builder.js
+++ b/lib/builder.js
@@ -219,9 +219,16 @@ lunr.Builder.prototype.createDocumentVectors = function () {
tf = termFrequencies[term],
termIndex = this.invertedIndex[term]._index,
idf = lunr.idf(this.invertedIndex[term], this.documentCount),
- score = idf * ((this._k1 + 1) * tf) / (this._k1 * (1 - this._b + this._b * (documentLength / this.averageDocumentLength)) + tf)
-
- documentVector.insert(termIndex, score)
+ score = idf * ((this._k1 + 1) * tf) / (this._k1 * (1 - this._b + this._b * (documentLength / this.averageDocumentLength)) + tf),
+ scoreWithPrecision = Math.round(score * 1000) / 1000
+ // Converts 1.23456789 to 1.234.
+ // Reducing the precision so that the vectors take up less
+ // space when serialised. Doing it now so that they behave
+ // the same before and after serialisation. Also, this is
+ // the fastest approach to reducing a number's precision in
+ // JavaScript.
+
+ documentVector.insert(termIndex, scoreWithPrecision)
}
documentVectors[docRef] = documentVector
|
Reduce term score precision in vectors
This change reduces the number of decimal places used to represent the
score of a term within a document vector. Through testing this has been
seen to not have any impact to the relevance of the search results but
does lead to a ~<I>% size reduction when serialising indexes, before and
after compression with gzip.
|
olivernn_lunr.js
|
train
|
js
|
19732cc07a653280e4ada179d195551b1249eab4
|
diff --git a/examples/pytorch/speech-recognition/run_speech_recognition_ctc.py b/examples/pytorch/speech-recognition/run_speech_recognition_ctc.py
index <HASH>..<HASH> 100755
--- a/examples/pytorch/speech-recognition/run_speech_recognition_ctc.py
+++ b/examples/pytorch/speech-recognition/run_speech_recognition_ctc.py
@@ -152,7 +152,7 @@ class DataTrainingArguments:
eval_split_name: str = field(
default="test",
metadata={
- "help": "The name of the training data set split to use (via the datasets library). Defaults to 'train'"
+ "help": "The name of the training data set split to use (via the datasets library). Defaults to 'test'"
},
)
audio_column_name: str = field(
|
Fix 'eval_split_name' described as defaulting to 'train' (#<I>)
The default is correct (`test`) but the description is not.
|
huggingface_pytorch-pretrained-BERT
|
train
|
py
|
9df42b64058bd32f1f977f12bc17c22790e6c0f1
|
diff --git a/pycbc/results/legacy_grb.py b/pycbc/results/legacy_grb.py
index <HASH>..<HASH> 100755
--- a/pycbc/results/legacy_grb.py
+++ b/pycbc/results/legacy_grb.py
@@ -36,7 +36,6 @@ if 'matplotlib.backends' not in sys.modules:
matplotlib.use('agg')
import matplotlib.pyplot as plt
from pycbc_glue import markup, segments
-from lal.gpstime import gps_to_utc, LIGOTimeGPS
from matplotlib.patches import Rectangle
from matplotlib.lines import Line2D
from matplotlib.ticker import ScalarFormatter
@@ -132,6 +131,7 @@ def write_summary(page, args, ifos, skyError=None, ipn=False, ipnError=False):
Write summary of information to markup.page object page
"""
from pylal import antenna
+ from lal.gpstime import gps_to_utc, LIGOTimeGPS
gps = args.start_time
grbdate = gps_to_utc(LIGOTimeGPS(gps))\
|
don't explicitly require lal gpstime python module (#<I>)
|
gwastro_pycbc
|
train
|
py
|
0f0cc7344654c6df33d3b9652c430740ea5620f4
|
diff --git a/security/src/test/java/com/networknt/security/JwtHelperTest.java b/security/src/test/java/com/networknt/security/JwtHelperTest.java
index <HASH>..<HASH> 100644
--- a/security/src/test/java/com/networknt/security/JwtHelperTest.java
+++ b/security/src/test/java/com/networknt/security/JwtHelperTest.java
@@ -51,6 +51,14 @@ public class JwtHelperTest {
}
@Test
+ public void longLivedAPIAJwt() throws Exception {
+ JwtClaims claims = getTestClaims("Steve", "EMPLOYEE", "f7d42348-c647-4efb-a52d-4c5787421e72", Arrays.asList("api_a.w", "api_b.w", "api_c.w", "api_d.w", "server.info.r"));
+ claims.setExpirationTimeMinutesInTheFuture(5256000);
+ String jwt = JwtHelper.getJwt(claims);
+ System.out.println("***LongLived APIA JWT***: " + jwt);
+ }
+
+ @Test
public void longLivedATMP1000Jwt() throws Exception {
JwtClaims claims = getTestClaims("eric", "EMPLOYEE", "f7d42348-c647-4efb-a52d-4c5787421e72", Arrays.asList("ATMP1000.w", "ATMP1000.r"));
claims.setExpirationTimeMinutesInTheFuture(5256000);
|
add a test case to generate long lived token for calling API A
|
networknt_light-4j
|
train
|
java
|
b385fb22ebfcabe91fd31dd316d95539d09ef713
|
diff --git a/email_extras/admin.py b/email_extras/admin.py
index <HASH>..<HASH> 100644
--- a/email_extras/admin.py
+++ b/email_extras/admin.py
@@ -10,5 +10,9 @@ if USE_GNUPG:
class KeyAdmin(admin.ModelAdmin):
form = KeyForm
+ class AddressAdmin(admin.ModelAdmin):
+ def has_add_permission(self, request):
+ return False
+
admin.site.register(Key, KeyAdmin)
- admin.site.register(Address)
+ admin.site.register(Address, AddressAdmin)
|
Addresses can't be added without keys
|
stephenmcd_django-email-extras
|
train
|
py
|
344e1723c5877b5d1944e7e3db36d02f850908c8
|
diff --git a/bottom.py b/bottom.py
index <HASH>..<HASH> 100644
--- a/bottom.py
+++ b/bottom.py
@@ -35,9 +35,11 @@ class Client(object):
bot.run()
'''
+ command = rfc.unique_command(command)
+
def wrap(func):
''' Add the function to this client's handlers and return it '''
- self.handler.add(command.upper(), func)
+ self.handler.add(command, func)
return func
return wrap
@@ -104,13 +106,13 @@ class Handler(object):
def add(self, command, func):
# Wrap the function in a coroutine so that we can
- # crete a task list and use asyncio.wait
- command = command.upper()
+ # create a task list and use asyncio.wait
+ command = rfc.unique_command(command)
coro = asyncio.coroutine(func)
self.coros[command].add(coro)
@asyncio.coroutine
def __call__(self, command, *args, **kwargs):
- coros = self.coros[command.upper()]
+ coros = self.coros[rfc.unique_command(command)]
tasks = [coro(*args, **kwargs) for coro in coros]
asyncio.wait(tasks)
|
Replace command.upper() with rfc.unique_command() for consistency
|
numberoverzero_bottom
|
train
|
py
|
ce819bdd0d859fcd3c72367f57409947ebc94c1d
|
diff --git a/structr/structr-core/src/main/java/org/structr/core/resource/constraint/RelationshipConstraint.java b/structr/structr-core/src/main/java/org/structr/core/resource/constraint/RelationshipConstraint.java
index <HASH>..<HASH> 100644
--- a/structr/structr-core/src/main/java/org/structr/core/resource/constraint/RelationshipConstraint.java
+++ b/structr/structr-core/src/main/java/org/structr/core/resource/constraint/RelationshipConstraint.java
@@ -85,7 +85,7 @@ public class RelationshipConstraint extends ResourceConstraint {
@Override
public boolean supportsNesting() {
- return false;
+ return true;
}
@Override
|
Allowed nesting of RelationshipConstraint to fix paging for relationships.
|
structr_structr
|
train
|
java
|
8bee7ae96f8c0a9111d25ed31761c1d996ec5c7f
|
diff --git a/foolbox/adversarial.py b/foolbox/adversarial.py
index <HASH>..<HASH> 100644
--- a/foolbox/adversarial.py
+++ b/foolbox/adversarial.py
@@ -5,16 +5,16 @@ Provides a class that represents an adversarial example.
import numpy as np
-from .adversarial import Adversarial
-from .adversarial import StopAttack
+from v1.adversarial import Adversarial as BaseAdversarial
+from v1.adversarial import StopAttack
-class YieldingAdversarial(Adversarial):
+class Adversarial(BaseAdversarial):
def _check_unperturbed(self):
try:
# for now, we use the non-yielding implementation in the super-class
# TODO: add support for batching this first call as well
- super(YieldingAdversarial, self).forward_one(self._Adversarial__unperturbed)
+ super(Adversarial, self).forward_one(self._Adversarial__unperturbed)
except StopAttack:
# if a threshold is specified and the unperturbed input is
# misclassified, this can already cause a StopAttack
|
renamed YieldingAdversarial to Adversarial and based it on v1
|
bethgelab_foolbox
|
train
|
py
|
2bebbe348a3f9c738f905a1a00a000d4f8d492c0
|
diff --git a/code/controller/context/context.php b/code/controller/context/context.php
index <HASH>..<HASH> 100644
--- a/code/controller/context/context.php
+++ b/code/controller/context/context.php
@@ -16,6 +16,16 @@
class KControllerContext extends KCommand implements KControllerContextInterface
{
/**
+ * Constructor.
+ *
+ * @param array|\Traversable $attributes An associative array or a Traversable object instance
+ */
+ public function __construct($attributes = array())
+ {
+ KObjectConfig::__construct($attributes);
+ }
+
+ /**
* Get the request object
*
* @return KControllerRequestInterface
|
re #<I> - Override ControllerContext constructor to only accept an array of attributes
|
timble_kodekit
|
train
|
php
|
c34046977d948272e6abf704a16849835bc8541f
|
diff --git a/application/modules/g/controllers/AuthController.php b/application/modules/g/controllers/AuthController.php
index <HASH>..<HASH> 100755
--- a/application/modules/g/controllers/AuthController.php
+++ b/application/modules/g/controllers/AuthController.php
@@ -291,7 +291,7 @@ class G_AuthController extends Garp_Controller_Action {
return;
}
- if (!empty($this->getRequest()->getPost(self::HONEYPOT_COLUMN))) {
+ if ($this->getRequest()->getPost(self::HONEYPOT_COLUMN)) {
throw new Garp_Auth_Exception(__('honeypot error'));
}
|
added functionality to add/remove sprint alert to event
|
grrr-amsterdam_garp3
|
train
|
php
|
81466fd8a7553e6ae582ab738b7bcb171f3c7640
|
diff --git a/lib/matestack/ui/vue_js/components/async.rb b/lib/matestack/ui/vue_js/components/async.rb
index <HASH>..<HASH> 100644
--- a/lib/matestack/ui/vue_js/components/async.rb
+++ b/lib/matestack/ui/vue_js/components/async.rb
@@ -29,7 +29,13 @@ module Matestack
div class: 'matestack-async-component-container', 'v-bind:class': '{ "loading": loading === true }' do
div class: 'matestack-async-component-wrapper', 'v-if': 'asyncTemplate == null', 'v-bind:class': '{ "loading": loading === true }' do
div async_attributes do
- yield unless is_deferred?
+ if params[:component_key]
+ # we need to yield if a request is looking for a async component, indicated through present params[:component_key]
+ # the requested component could be hidden within this deferred async!
+ yield
+ else
+ yield unless is_deferred?
+ end
end
end
div class: 'matestack-async-component-wrapper', 'v-if': 'asyncTemplate != null', 'v-bind:class': '{ "loading": loading === true }' do
|
fixed async component resolve within deferred async components
|
basemate_matestack-ui-core
|
train
|
rb
|
62078b7d40f76fae1a7a847845fc1b0b24f2d755
|
diff --git a/src/AutoRotatingCarousel.js b/src/AutoRotatingCarousel.js
index <HASH>..<HASH> 100644
--- a/src/AutoRotatingCarousel.js
+++ b/src/AutoRotatingCarousel.js
@@ -10,6 +10,7 @@ import ArrowBackIcon from '@material-ui/icons/ArrowBack'
import ArrowForwardIcon from '@material-ui/icons/ArrowForward'
import Modal from '@material-ui/core/Modal'
import Fade from '@material-ui/core/Fade'
+import Backdrop from '@material-ui/core/Backdrop'
import Dots from 'material-ui-dots'
import classNames from 'classnames'
import Carousel from './SwipableCarouselView'
@@ -188,6 +189,7 @@ class AutoRotatingCarousel extends Component {
})}
open={open}
onClose={onClose}
+ BackdropComponent={Backdrop}
BackdropProps={ModalProps ? { transitionDuration, ...ModalProps.BackdropProps } : { transitionDuration }}
{...ModalProps}
>
|
Add BackdropComponent prop to resolve issue with transitionDuration prop on MUI v4 (#<I>)
|
TeamWertarbyte_material-auto-rotating-carousel
|
train
|
js
|
34a8157c62c49d10e574670e45c51f3380602137
|
diff --git a/tasks/cucumber.js b/tasks/cucumber.js
index <HASH>..<HASH> 100644
--- a/tasks/cucumber.js
+++ b/tasks/cucumber.js
@@ -41,6 +41,10 @@ module.exports = function(grunt) {
var commands = [];
+ if (grunt.option('rerun')) {
+ commands.push(grunt.option('rerun'));
+ }
+
if (options.executeParallel && options.workers) {
commands.push('-w', options.workers);
}
|
rerun the cucumber failed scenario. Pass the path of @rerun.txt file
|
mavdi_grunt-cucumberjs
|
train
|
js
|
8b139575f8f2bcb995db917a1f3eefe3234a447e
|
diff --git a/src/exporter/file.js b/src/exporter/file.js
index <HASH>..<HASH> 100644
--- a/src/exporter/file.js
+++ b/src/exporter/file.js
@@ -40,6 +40,7 @@ module.exports = (node, name, pathRest, ipldResolver) => {
return pull.values([{
content: content,
path: name,
+ hash: node.multihash,
size: file.fileSize()
}])
}
diff --git a/test/test-exporter.js b/test/test-exporter.js
index <HASH>..<HASH> 100644
--- a/test/test-exporter.js
+++ b/test/test-exporter.js
@@ -45,8 +45,8 @@ module.exports = (repo) => {
function onFiles (err, files) {
expect(err).to.not.exist()
expect(files).to.have.length(1)
+ expect(files[0]).to.have.property('hash')
expect(files[0]).to.have.property('path', hash)
-
fileEql(files[0], unmarsh.data, done)
}
})
@@ -117,6 +117,7 @@ module.exports = (repo) => {
pull(
exporter(hash, ipldResolver),
pull.collect((err, files) => {
+ files.forEach(file => expect(file).to.have.property('hash'))
expect(err).to.not.exist()
expect(
|
feat: Include hash field for exported files (#<I>)
|
ipfs_js-ipfs-unixfs
|
train
|
js,js
|
483ef6de737f61a74d56d33e40aea6ef5a5762d4
|
diff --git a/lib/dexter/indexer.rb b/lib/dexter/indexer.rb
index <HASH>..<HASH> 100644
--- a/lib/dexter/indexer.rb
+++ b/lib/dexter/indexer.rb
@@ -99,6 +99,8 @@ module Dexter
log "Index created: #{((Time.now - started_at) * 1000).to_i} ms"
end
end
+ else
+ log "No indexes found"
end
new_indexes
|
Added no indexes found message
|
ankane_dexter
|
train
|
rb
|
8f819eaba7e137f1778c42c83067fb5cc8b53787
|
diff --git a/src/harvesters/core.py b/src/harvesters/core.py
index <HASH>..<HASH> 100644
--- a/src/harvesters/core.py
+++ b/src/harvesters/core.py
@@ -1592,6 +1592,10 @@ class ImageAcquirer:
self._num_images_to_acquire = num_images_to_acquire
+ # We're ready to start image acquisition. Lock the device's transport
+ # layer related features:
+ self.device.node_map.TLParamsLocked.value = 1
+
# Start image acquisition.
self._is_acquiring_images = True
@@ -1911,6 +1915,10 @@ class ImageAcquirer:
#
self.device.node_map.AcquisitionStop.execute()
+ # Unlock TLParamsLocked in order to allow full device
+ # configuration:
+ self.device.node_map.TLParamsLocked.value = 0
+
for data_stream in self._data_streams:
# Stop image acquisition.
try:
|
Resolve issue #<I>
|
genicam_harvesters
|
train
|
py
|
f3d837bb76e184462caa94cb5f3532a8235aa797
|
diff --git a/src/Helpers/view/TinyMCE.php b/src/Helpers/view/TinyMCE.php
index <HASH>..<HASH> 100644
--- a/src/Helpers/view/TinyMCE.php
+++ b/src/Helpers/view/TinyMCE.php
@@ -17,9 +17,9 @@ class TinyMCE extends AbstractHelper
{
if ($this->_enabled) {
$this->getView()->Scripts()->setPack(false)
- ->add($this->getBase() . '/jquery.tinymce.min', 'tinymce')
- ->add($this->getBase() . '/tinymce.min', 'tinymce')
- ->add($this->getBase() . '/init', 'tinymce');
+ ->add($this->getBase() . '/jquery.tinymce.min.js', 'tinymce')
+ ->add($this->getBase() . '/tinymce.min.js', 'tinymce')
+ ->add($this->getBase() . '/init.js', 'tinymce');
}
return $this->getView()->Scripts()->render('tinymce');
|
Add SetBase to TinyMCE
|
bytic_framework
|
train
|
php
|
b4fc8d365447336cf13380cefb29e636d2de5b52
|
diff --git a/openquake/calculators/views.py b/openquake/calculators/views.py
index <HASH>..<HASH> 100644
--- a/openquake/calculators/views.py
+++ b/openquake/calculators/views.py
@@ -695,7 +695,7 @@ def view_dupl_sources_time(token, dstore):
calc_time = records['calc_time'].sum()
tot_time += calc_time
tbl.append((source_id, calc_time, len(records)))
- if tbl and info.attrs.get('has_dupl_sources'):
+ if tbl:
tot = info['calc_time'].sum() + info['split_time'].sum()
percent = tot_time / tot * 100
m = '\nTotal time in duplicated sources: %d/%d (%d%%)' % (
@@ -854,7 +854,7 @@ def view_dupl_sources(token, dstore):
if not dupl:
return ''
msg = ('Found %d source(s) with the same ID and %d true duplicate(s): %s'
- % (len(sameid), len(dupl), dupl))
+ % (len(sameid), len(dupl), numpy.array(dupl)))
fakedupl = set(sameid) - set(dupl)
if fakedupl:
msg += '\nHere is a fake duplicate: %s' % fakedupl.pop()
|
Reduced logging in view_dupl_sources [skip hazardlib]
Former-commit-id: <I>f0a<I>d0bd9f7f<I>e9e1f<I>c<I>ab1ca5a6
|
gem_oq-engine
|
train
|
py
|
8b5f798868a94a82946bd8666fc5f3033a1a8ff9
|
diff --git a/test.js b/test.js
index <HASH>..<HASH> 100644
--- a/test.js
+++ b/test.js
@@ -135,7 +135,7 @@ test('Works for iterable objects', function(is) {
"value" : 1,
"@@iterator" : function(){
var hasValue = true;
- value = this.value;
+ var value = this.value;
return {
next: function(){
if(hasValue) {
|
Added missing `var` statement.
|
studio-b12_array-from
|
train
|
js
|
8f6009a8f6c16408c850a97cfaa9e5fd90ae6ab2
|
diff --git a/cmd/jujud/run.go b/cmd/jujud/run.go
index <HASH>..<HASH> 100644
--- a/cmd/jujud/run.go
+++ b/cmd/jujud/run.go
@@ -141,7 +141,10 @@ func (c *RunCommand) executeNoContext() (*exec.ExecResponse, error) {
if err != nil {
return nil, err
}
- lock.Lock("juju-run")
+ err = lock.Lock("juju-run")
+ if err != nil {
+ return nil, err
+ }
defer lock.Unlock()
runCmd := `[ -f "/home/ubuntu/.juju-proxy" ] && . "/home/ubuntu/.juju-proxy"` + "\n" + c.commands
|
Check the err response from the fslock Lock method.
|
juju_juju
|
train
|
go
|
a6f728ac16129cf1418366f73d9c6e48c8bd4e29
|
diff --git a/src/main/java/org/paumard/spliterators/GatingSpliterator.java b/src/main/java/org/paumard/spliterators/GatingSpliterator.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/paumard/spliterators/GatingSpliterator.java
+++ b/src/main/java/org/paumard/spliterators/GatingSpliterator.java
@@ -88,11 +88,7 @@ public class GatingSpliterator<E> implements Spliterator<E> {
@Override
public long estimateSize() {
- if (gateIsOpenned) {
- return this.spliterator.estimateSize();
- } else {
- return 0;
- }
+ return 0L;
}
@Override
|
Fixed the estimated size of the gating spliterator
|
JosePaumard_streams-utils
|
train
|
java
|
c57799cb9bcc9d75d20e2a8e8154f801772dc1a7
|
diff --git a/ripe/atlas/sagan/helpers/abuf.py b/ripe/atlas/sagan/helpers/abuf.py
index <HASH>..<HASH> 100644
--- a/ripe/atlas/sagan/helpers/abuf.py
+++ b/ripe/atlas/sagan/helpers/abuf.py
@@ -290,12 +290,14 @@ class AbufParser(object):
edns0 = {
'UDPsize': res[1],
'ExtendedReturnCode': res[2] >> 24,
- 'Version': (res[2] and 0x0f00) >> 16,
- 'Z': (res[2] and 0x00ff),
+ 'Version': (res[2] & 0x00ff0000) >> 16,
+ 'Z': (res[2] & 0x007fff),
'Type': 'OPT',
'Option': [],
'Name': name,
}
+ if res[2] & 0x8000:
+ edns0['DO']= True
o = 0
while o < len(rdata):
|
Fixed some bugs in EDNS0 parsing and extract DO flag.
|
RIPE-NCC_ripe.atlas.sagan
|
train
|
py
|
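The root cause fixed above is a classic confusion between Python's logical `and`, which merely returns one of its operands, and the bitwise `&` needed to mask out the Version and DO bits. A tiny illustration with a made-up OPT-record TTL value:
```python
# flags packs (extended RCODE | version | DO | Z) as in an EDNS0 OPT record.
flags = 0x00018000                         # example value: version 1, DO bit set

wrong = (flags and 0x0f00) >> 16           # "and" returns 0x0f00, so this is 0
version = (flags & 0x00ff0000) >> 16       # bitwise mask: extracts 1
do_flag = bool(flags & 0x8000)             # DNSSEC OK flag: True

print(wrong, version, do_flag)             # 0 1 True
```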
d50df2f116bfb1f3e897746fb345a5e945650e31
|
diff --git a/actionpack/lib/action_controller/metal/rendering.rb b/actionpack/lib/action_controller/metal/rendering.rb
index <HASH>..<HASH> 100644
--- a/actionpack/lib/action_controller/metal/rendering.rb
+++ b/actionpack/lib/action_controller/metal/rendering.rb
@@ -6,7 +6,7 @@ module ActionController
# Before processing, set the request formats in current controller formats.
def process_action(*) #:nodoc:
- self.formats = request.formats.map { |x| x.ref }
+ self.formats = request.formats.select { |x| !x.nil? }.map(&:ref)
super
end
diff --git a/actionpack/lib/action_dispatch/http/mime_type.rb b/actionpack/lib/action_dispatch/http/mime_type.rb
index <HASH>..<HASH> 100644
--- a/actionpack/lib/action_dispatch/http/mime_type.rb
+++ b/actionpack/lib/action_dispatch/http/mime_type.rb
@@ -306,12 +306,16 @@ module Mime
method.to_s.ends_with? '?'
end
end
-
+
class NullType
def nil?
true
end
+ def respond_to_missing?(method, include_private = false)
+ method.to_s.ends_with? '?'
+ end
+
private
def method_missing(method, *args)
false if method.to_s.ends_with? '?'
|
Reverts rendering behavior when format is unknown
If a request has unknown format (eg. /foo.bar), the renderer
fallbacks to default format.
This patch reverts Rails <I> behavior after c<I>db commit.
Fixes issue #<I>.
|
rails_rails
|
train
|
rb,rb
|
870cc765f7bc4bc615c60f714710b365df8cd3db
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -8,7 +8,6 @@ setup(
description="Python API and CLI for KeePassX",
long_description=open(os.path.join(os.path.dirname(__file__),
'README.rst')).read(),
- license='BSD',
author='James Saryerwinnie',
author_email='js@jamesls.com',
packages=find_packages(),
|
Remove license field
This is specified in the trove classifier as GPLv2.
|
jamesls_python-keepassx
|
train
|
py
|
fb65eaf8549dc4a9d06b0ac6529907484cbd21cd
|
diff --git a/django_extensions/management/shells.py b/django_extensions/management/shells.py
index <HASH>..<HASH> 100644
--- a/django_extensions/management/shells.py
+++ b/django_extensions/management/shells.py
@@ -153,7 +153,11 @@ def import_objects(options, style):
if not quiet_load:
print(style.SQL_TABLE("# Shell Plus Model Imports"))
for app_mod, models in sorted(six.iteritems(load_models)):
- app_name = app_mod.split('.')[-2]
+ try:
+ app_name = app_mod.split('.')[-2]
+ except IndexError:
+ # Some weird model naming scheme like in Sentry.
+ app_name = app_mod
app_aliases = model_aliases.get(app_name, {})
model_labels = []
|
Fix model loading for sentry
Sentry has a weird model naming scheme where all model files are inside
a single directory called models and each 'app' models is in a file of
its own (I guess the authors thought this was a better idea when they
didn't have any views etc. but models only).
With the models directory being in the import path, one can import by
from useroption import UserOption
for example.
If there are no app names, use the file name as app name.
|
django-extensions_django-extensions
|
train
|
py
|
2b43805053b525534e7eeb90bcb46123687c17ca
|
diff --git a/public/javascripts/promotion.js b/public/javascripts/promotion.js
index <HASH>..<HASH> 100644
--- a/public/javascripts/promotion.js
+++ b/public/javascripts/promotion.js
@@ -184,7 +184,7 @@ var promotion_page = {
if (promotion_page.current_changeset) {
if (promotion_page.current_product) {
var product = promotion_page.current_changeset.products[promotion_page.current_product];
- if( product.all ){
+ if( product !== undefined && product.all !== undefined ){
promotion_page.disable_all();
} else {
jQuery.each(promotion_page.subtypes, function(index, type){
|
Adds extra check to ensure product in reset_page exists when doing an all check.
|
Katello_katello
|
train
|
js
|
ecc3a9c90d20699c6f0bf18600cf9bd755b56d65
|
diff --git a/rollbar/contrib/fastapi/utils.py b/rollbar/contrib/fastapi/utils.py
index <HASH>..<HASH> 100644
--- a/rollbar/contrib/fastapi/utils.py
+++ b/rollbar/contrib/fastapi/utils.py
@@ -1,5 +1,8 @@
+import functools
import logging
+import fastapi
+
log = logging.getLogger(__name__)
@@ -11,3 +14,20 @@ class FastAPIVersionError(Exception):
log.error(err_msg)
return super().__init__(err_msg)
+
+
+class fastapi_min_version:
+ def __init__(self, min_version):
+ self.min_version = min_version
+
+ def __call__(self, func):
+ @functools.wraps(func)
+ def wrapper(*args, **kwargs):
+ if fastapi.__version__ < self.min_version:
+ raise FastAPIVersionError(
+ '0.41.0', reason=f'to use {func.__name__}() function'
+ )
+
+ return func(*args, **kwargs)
+
+ return wrapper
|
Add decorator to check minimum required FastAPI version
|
rollbar_pyrollbar
|
train
|
py
|
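A hedged usage sketch for the decorator added above: it raises `FastAPIVersionError` at call time when the installed FastAPI is older than the requested minimum (the import path follows the file location in the diff; the decorated function is hypothetical):
```python
from rollbar.contrib.fastapi.utils import fastapi_min_version

@fastapi_min_version('0.41.0')
def add_to_app(app):
    # runs only if fastapi.__version__ >= '0.41.0' (string comparison)
    return app
```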
786a3a55a2d5973e4d4a8e72230774f0f40d3f0b
|
diff --git a/src/main/java/io/airlift/slice/XxHash64.java b/src/main/java/io/airlift/slice/XxHash64.java
index <HASH>..<HASH> 100644
--- a/src/main/java/io/airlift/slice/XxHash64.java
+++ b/src/main/java/io/airlift/slice/XxHash64.java
@@ -85,16 +85,9 @@ public class XxHash64
hash = rotateLeft(v1, 1) + rotateLeft(v2, 7) + rotateLeft(v3, 12) + rotateLeft(v4, 18);
- v1 = mix(0, v1);
hash = update(hash, v1);
-
- v2 = mix(0, v2);
hash = update(hash, v2);
-
- v3 = mix(0, v3);
hash = update(hash, v3);
-
- v4 = mix(0, v4);
hash = update(hash, v4);
}
else {
@@ -130,7 +123,7 @@ public class XxHash64
private static long update(long hash, long value)
{
- long temp = hash ^ value;
+ long temp = hash ^ mix(0, value);
return temp * PRIME64_1 + PRIME64_4;
}
|
More simplification of xxh<I>
|
airlift_slice
|
train
|
java
|
6ff56c8a72b772b2857355d998bbda0011733ce2
|
diff --git a/Tests/TestServiceSoundCloud.php b/Tests/TestServiceSoundCloud.php
index <HASH>..<HASH> 100755
--- a/Tests/TestServiceSoundCloud.php
+++ b/Tests/TestServiceSoundCloud.php
@@ -34,7 +34,6 @@ class TestServiceSoundCloud extends TestProviders
'https://soundcloud.com/explore',
'https://soundcloud.com/groups',
'https://soundcloud.com',
- '',
),
);
|
Remove empty url from SoundCloud Test
|
mpratt_Embera
|
train
|
php
|
5f2dbb5ae8ff534e91429646db593910967a912c
|
diff --git a/lib/ponder/irc.rb b/lib/ponder/irc.rb
index <HASH>..<HASH> 100644
--- a/lib/ponder/irc.rb
+++ b/lib/ponder/irc.rb
@@ -12,9 +12,10 @@ module Ponder
raw "PRIVMSG #{recipient} :#{message}"
end
+ # register when connected
def register
raw "NICK #{@config.nick}"
- raw "USER #{@config.nick} 0 * :#{@config.realname}"
+ raw "USER #{@config.username} * * :#{@config.real_name}"
raw "PASS #{@config.password}" if @config.password
end
diff --git a/lib/ponder/thaum.rb b/lib/ponder/thaum.rb
index <HASH>..<HASH> 100644
--- a/lib/ponder/thaum.rb
+++ b/lib/ponder/thaum.rb
@@ -18,7 +18,8 @@ module Ponder
@config = OpenStruct.new(:server => 'localhost',
:port => 6667,
:nick => 'Ponder',
- :realname => 'Ponder',
+ :username => 'Ponder',
+ :real_name => 'Ponder',
:verbose => true,
:logging => false,
:reconnect => true,
|
added the functionality of choosing a username
|
tbuehlmann_ponder
|
train
|
rb,rb
|
f2abb27d197de936347e0b2c302307aa725b248f
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -137,6 +137,7 @@ setup(
'boto',
'jellyfish',
'nilsimsa >= 0.3',
+ 'regex != 2014.08.28',
'chromium_compact_language_detector',
'sortedcollection',
'python-docx',
|
we should be using the new regex wherever we use `re`
|
trec-kba_streamcorpus-pipeline
|
train
|
py
|
37cc870708e937cdc78e2634152f49933c7aaa90
|
diff --git a/djpaypal/models/billing.py b/djpaypal/models/billing.py
index <HASH>..<HASH> 100644
--- a/djpaypal/models/billing.py
+++ b/djpaypal/models/billing.py
@@ -143,7 +143,8 @@ class BillingAgreement(PaypalObject):
if ba.error:
raise PaypalApiError(str(ba.error)) # , ba.error)
- return cls.get_or_update_from_api_data(ba, always_sync=True)
+ obj, created = cls.get_or_update_from_api_data(ba, always_sync=True)
+ return obj
class PaymentDefinition(PaypalObject):
|
Return only the object from BillingAgreement.execute
|
HearthSim_dj-paypal
|
train
|
py
|
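The dj-paypal fix above unpacks the Django-style (obj, created) pair and returns only the object. A toy sketch of that convention (the helper names are hypothetical, not the library's API):

def get_or_update(store, key, data):
    created = key not in store
    store[key] = data
    return store[key], created   # mirrors Model.objects.get_or_create()

def execute(store, key, data):
    obj, _created = get_or_update(store, key, data)
    return obj                   # callers only need the object itself

db = {}
assert execute(db, "BA-1", {"state": "Active"}) == {"state": "Active"}
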
6ac250bc8863723dd8a838e5847415a86e8fb54f
|
diff --git a/tests/tests/lib/ezutils/ezmail_test.php b/tests/tests/lib/ezutils/ezmail_test.php
index <HASH>..<HASH> 100644
--- a/tests/tests/lib/ezutils/ezmail_test.php
+++ b/tests/tests/lib/ezutils/ezmail_test.php
@@ -800,7 +800,7 @@ class eZMailTest extends ezpTestCase
}
// Open mailbox and delete all existing emails in the account
- $mbox = imap_open( $mboxString, $recipient['username'], $recipient['password'] );
+ $mbox = @imap_open( $mboxString, $recipient['username'], $recipient['password'] );
if ( !$mbox )
{
$this->markTestSkipped( 'Cannot open mailbox for ' . $recipient['username'] . ': ' . imap_last_error() );
@@ -860,7 +860,7 @@ class eZMailTest extends ezpTestCase
// Read emails
foreach ( $recipients as $recipient )
{
- $mbox = imap_open( $mboxString, $recipient['username'], $recipient['password'] );
+ $mbox = @imap_open( $mboxString, $recipient['username'], $recipient['password'] );
if ( !$mbox )
{
$this->markTestSkipped( 'Cannot open mailbox for ' . $recipient['username'] . ': ' . imap_last_error() );
|
Fix failing imap tests, silence errors causing failures
|
ezsystems_ezpublish-legacy
|
train
|
php
|
4af6e42b3bd5bc9e676e4ec8db3d2d48429d77a6
|
diff --git a/website/resources/js/main.js b/website/resources/js/main.js
index <HASH>..<HASH> 100644
--- a/website/resources/js/main.js
+++ b/website/resources/js/main.js
@@ -37,7 +37,7 @@
if (self.data("clc")) return;
var href = self.attr("href");
self.data("clc", true);
- if (!href || href.substr(0, 4) === "http" || href === "/api/" || href === "/" || href === "/supporters" || href === "/order-license") return;
+ if (!href || href.substr(0, 4) === "http" || href.substr(0, 1) === "#" || href === "/api/" || href === "/" || href === "/supporters" || href === "/order-license") return;
var ext = href.substr(href.length - 4, 4);
if (ext === ".xml" || ext === ".jar" || ext === ".pdf") return;
self.on("click", function(evt) {
|
[website] fixed history/ajax-load system to deal with anchor links
|
rzwitserloot_lombok
|
train
|
js
|
46f9be6c6610a33f78ec35825d5fd98f4093ee31
|
diff --git a/src/main/java/com/hmsonline/cassandra/triggers/Trigger.java b/src/main/java/com/hmsonline/cassandra/triggers/Trigger.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/hmsonline/cassandra/triggers/Trigger.java
+++ b/src/main/java/com/hmsonline/cassandra/triggers/Trigger.java
@@ -1,8 +1,14 @@
package com.hmsonline.cassandra.triggers;
-
+/**
+ * A trigger that can be invoked upon a database mutation.
+ */
public interface Trigger {
- public void process(LogEntry loEntry);
-
+ /**
+ * Causes this trigger to process the given {@link LogEntry}.
+ *
+ * @param logEntry the log entry to process (never <code>null</code>)
+ */
+ void process(LogEntry logEntry);
}
|
Fixed "loEntry" typo, removed redundant public modifier, and added JavaDoc.
|
hmsonline_cassandra-triggers
|
train
|
java
|
7f0ff74e951a8d759079d0508044f4e9ea7cb65d
|
diff --git a/girder/utility/__init__.py b/girder/utility/__init__.py
index <HASH>..<HASH> 100644
--- a/girder/utility/__init__.py
+++ b/girder/utility/__init__.py
@@ -40,7 +40,7 @@ except NotImplementedError: # pragma: no cover
def parseTimestamp(x, naive=True):
- '''
+ """
Parse a datetime string using the python-dateutil package.
If no timezone information is included, assume UTC. If timezone information
@@ -48,7 +48,7 @@ def parseTimestamp(x, naive=True):
If naive is True (the default), drop the timezone information such that a
naive datetime is returned.
- '''
+ """
dt = dateutil.parser.parse(x)
if dt.tzinfo:
dt = dt.astimezone(pytz.utc).replace(tzinfo=None)
|
Fix static analysis (double triple quotes)
|
girder_girder
|
train
|
py
|
50954cd3b3bbdb476456994b1e8af9a5c3e646ad
|
diff --git a/git_repo/services/ext/gogs.py b/git_repo/services/ext/gogs.py
index <HASH>..<HASH> 100644
--- a/git_repo/services/ext/gogs.py
+++ b/git_repo/services/ext/gogs.py
@@ -77,14 +77,13 @@ class GogsService(RepositoryService):
super().__init__(*args, **kwargs)
+ def connect(self):
self.gg.setup(self.url_ro)
self.gg.set_token(self._privatekey)
self.gg.set_default_private(self.default_create_private)
self.gg.setup_session(
self.session_certificate or not self.session_insecure,
self.session_proxy)
-
- def connect(self):
try:
self.username = self.user # Call to self.gg.authenticated_user()
except HTTPError as err:
|
🚒 connection order issue (fixes #<I>)
|
guyzmo_git-repo
|
train
|
py
|
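The git-repo fix above moves session setup out of the constructor and into connect(), so configuration applied after construction is honoured. A generic sketch of that lazy-connection pattern (class and attribute names are illustrative only):

class Service:
    def __init__(self, url, token):
        self.url = url
        self.token = token
        self.session = None          # nothing touches the network yet

    def connect(self):
        # Build the authenticated session only when it is actually needed.
        self.session = {"base_url": self.url, "auth": self.token}
        return self.session

svc = Service("https://gogs.example.org", "secret-token")
svc.token = "rotated-token"          # late configuration still takes effect
assert svc.connect()["auth"] == "rotated-token"
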
09381314f4b8474d2fd9b7e9cbdb5c31a5322741
|
diff --git a/ella/newman/utils.py b/ella/newman/utils.py
index <HASH>..<HASH> 100644
--- a/ella/newman/utils.py
+++ b/ella/newman/utils.py
@@ -30,6 +30,9 @@ def JsonResponse(message, data={}, errors={}, status=200):
'message': message,
}
if data:
+ try: data = json_decode(data)
+ except: pass
+
out_dict['data'] = data
if errors:
out_dict['errors'] = errors
|
JsonResponse will not try to expand JSON data.
|
ella_ella
|
train
|
py
|
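The ella change above opportunistically json-decodes the data payload and keeps the original value when decoding fails. A slightly more explicit Python sketch of that tolerant behaviour (names are illustrative):

import json

def normalise_payload(data):
    if isinstance(data, str):
        try:
            return json.loads(data)   # already-encoded JSON gets expanded
        except ValueError:
            pass                      # plain strings are passed through untouched
    return data

assert normalise_payload('{"id": 1}') == {"id": 1}
assert normalise_payload({"id": 1}) == {"id": 1}
assert normalise_payload("not json") == "not json"
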
93712bdfc669ac1005cb9f6c5bdc6fdd5c0a88bf
|
diff --git a/lib/ezsession/classes/ezsession.php b/lib/ezsession/classes/ezsession.php
index <HASH>..<HASH> 100644
--- a/lib/ezsession/classes/ezsession.php
+++ b/lib/ezsession/classes/ezsession.php
@@ -446,11 +446,7 @@ class eZSession
*/
static protected function forceStart()
{
- if ( self::$handlerInstance instanceof ezpSessionHandler )
- return self::$hasStarted = self::$handlerInstance->sessionStart();
-
- session_start();
- return self::$hasStarted = true;
+ return self::$hasStarted = self::getHandlerInstance()->sessionStart();
}
/**
|
Additional fix EZP-<I>: Warning: session already started when logging in
Make sure handler is always used in standalone mode.
|
ezsystems_ezpublish-legacy
|
train
|
php
|
beb785804e805462477e66c6099b7509138a9aaf
|
diff --git a/geshi/geshi/php.php b/geshi/geshi/php.php
index <HASH>..<HASH> 100644
--- a/geshi/geshi/php.php
+++ b/geshi/geshi/php.php
@@ -90,7 +90,7 @@ $language_data = array(
'as','break','case','continue','default','do','else','elseif',
'endfor','endforeach','endif','endswitch','endwhile','for',
'foreach','if','include','include_once','require','require_once',
- 'return','switch','throw','while','namespace','use',
+ 'return','switch','throw','while','namespace','use', 'try', 'catch',
'echo','print'
),
|
[GeSHi] Adding try & catch
|
Gregwar_Slidey
|
train
|
php
|
eb0df236093fcae85b320dfd1f17e60d52da3b0c
|
diff --git a/src/resources/js/controllers.js b/src/resources/js/controllers.js
index <HASH>..<HASH> 100644
--- a/src/resources/js/controllers.js
+++ b/src/resources/js/controllers.js
@@ -112,6 +112,8 @@
return;
}
+ var blockRequest = false;
+
if ($scope.pager) {
if (n.length == 0) {
$timeout.cancel($scope.searchPromise);
@@ -119,15 +121,22 @@
$scope.config.pagerHiddenByAjaxSearch = false;
} else {
$timeout.cancel($scope.searchPromise);
+
+ if (blockRequest) {
+ return;
+ }
+
$scope.searchPromise = $timeout(function() {
if ($scope.config.fullSearchContainer) {
$scope.data.listArray = $filter('filter')($scope.config.fullSearchContainer, n);
$scope.config.pagerHiddenByAjaxSearch = true;
} else {
+ blockRequest = true;
$http.post($scope.config.apiEndpoint + '/full-response?' + $scope.config.apiListQueryString, {query: n}).success(function(response) {
$scope.config.pagerHiddenByAjaxSearch = true;
$scope.config.fullSearchContainer = response;
$scope.data.listArray = $filter('filter')(response, n);
+ blockRequest = false;
});
}
}, 500)
|
added block request var in order to make sure the full response request
can only run once, as it usually takes longer to resolve the request. #<I>
|
luyadev_luya-module-admin
|
train
|
js
|
3fa0e645487ae66ce6bd9ee935a9afff2fe33cf1
|
diff --git a/libdokan/dokan_info.go b/libdokan/dokan_info.go
index <HASH>..<HASH> 100644
--- a/libdokan/dokan_info.go
+++ b/libdokan/dokan_info.go
@@ -58,9 +58,9 @@ func logDokanFilesInfo(epc *errorPrinter) {
debugFileInfo(epc, d+shortPath)
}
for _, d := range []string{syswow64, system32} {
- debugFileInfo(epc, d+`DOKAN.SYS`)
- debugFileInfo(epc, d+`DOKAN1.SYS`)
- debugFileInfo(epc, d+`DOKAN2.SYS`)
+ debugFileInfo(epc, d+`DRIVERS\DOKAN.SYS`)
+ debugFileInfo(epc, d+`DRIVERS\DOKAN1.SYS`)
+ debugFileInfo(epc, d+`DRIVERS\DOKAN2.SYS`)
}
}
|
Use correct path for dokanX.sys in debug output
|
keybase_client
|
train
|
go
|
71739901b869127ed544864c1bca4ad53a85b28f
|
diff --git a/src/adapt/cssstyler.js b/src/adapt/cssstyler.js
index <HASH>..<HASH> 100644
--- a/src/adapt/cssstyler.js
+++ b/src/adapt/cssstyler.js
@@ -532,14 +532,12 @@ adapt.cssstyler.columnProps = ["column-count", "column-width", "column-fill"];
* @return {void}
*/
adapt.cssstyler.Styler.prototype.postprocessTopStyle = function(elemStyle, isBody) {
- if (!isBody) {
- ["writing-mode", "direction"].forEach(function(propName) {
- if (elemStyle[propName]) {
- // Copy it over, but keep it at the root element as well.
- this.rootStyle[propName] = elemStyle[propName];
- }
- }, this);
- }
+ ["writing-mode", "direction"].forEach(propName => {
+ if (elemStyle[propName] && !(isBody && this.rootStyle[propName])) {
+ // Copy it over, but keep it at the root element as well.
+ this.rootStyle[propName] = elemStyle[propName];
+ }
+ });
if (!this.rootBackgroundAssigned) {
const backgroundColor = /** @type {adapt.css.Val} */
(this.hasProp(elemStyle, this.validatorSet.backgroundProps, "background-color") ?
|
Fix the bug that the writing mode specified on the body element did not determine the principal (root) writing mode
|
vivliostyle_vivliostyle.js
|
train
|
js
|
31d2278d88b9ef08432f31f023f9a2cfb453dc9e
|
diff --git a/pygsp/graphs/graph.py b/pygsp/graphs/graph.py
index <HASH>..<HASH> 100644
--- a/pygsp/graphs/graph.py
+++ b/pygsp/graphs/graph.py
@@ -80,6 +80,11 @@ class Graph(fourier.GraphFourier, difference.GraphDifference):
if len(W.shape) != 2 or W.shape[0] != W.shape[1]:
raise ValueError('W has incorrect shape {}'.format(W.shape))
+ # Don't keep edges of 0 weight. Otherwise Ne will not correspond to the
+ # real number of edges. Problematic when e.g. plotting.
+ W = sparse.csr_matrix(W)
+ W.eliminate_zeros()
+
self.N = W.shape[0]
self.W = sparse.lil_matrix(W)
|
graphs: eliminate edges of weight 0
|
epfl-lts2_pygsp
|
train
|
py
|
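The pygsp change above converts W to CSR and drops explicitly stored zero entries so that nnz matches the real number of edges. A minimal scipy sketch of the difference that makes:

import numpy as np
from scipy import sparse

row = np.array([0, 1, 2])
col = np.array([1, 0, 2])
data = np.array([1.0, 1.0, 0.0])   # the last entry is an explicitly stored zero
W = sparse.csr_matrix((data, (row, col)), shape=(3, 3))
print(W.nnz)            # 3 -- the stored zero still counts as an "edge"
W.eliminate_zeros()
print(W.nnz)            # 2 -- only edges with non-zero weight remain
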
5b104b7b8f1a4de5a57df20751ad7c9a043ce367
|
diff --git a/javascript/LeftAndMain.Tree.js b/javascript/LeftAndMain.Tree.js
index <HASH>..<HASH> 100755
--- a/javascript/LeftAndMain.Tree.js
+++ b/javascript/LeftAndMain.Tree.js
@@ -116,7 +116,7 @@
});
$.ajax({
- 'url': this.data('url-savetreenode'),
+ 'url': self.data('url-savetreenode'),
'data': {
ID: $(movedNode).data('id'),
ParentID: $(newParentNode).data('id') || 0,
|
MINOR Fixed LeftAndMain.Tree.js scope
|
silverstripe_silverstripe-siteconfig
|
train
|
js
|
307c9aa92fb1012bcb9894658533219230179c28
|
diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -39,11 +39,12 @@ class RustPlugin {
this.serverless.service.package.excludeDevDependencies = false;
}
- runDocker(funcArgs, cargoPackage) {
+ runDocker(funcArgs, cargoPackage, binary) {
const defaultArgs = [
'run',
'--rm',
'-t',
+ '-e', `BIN=${binary}`,
`-v`, `${this.servicePath}:/code`,
`-v`, `${process.env['HOME']}/.cargo/registry:/root/.cargo/registry`,
`-v`, `${process.env['HOME']}/.cargo/git:/root/.cargo/git`,
@@ -100,7 +101,7 @@ class RustPlugin {
binary = cargoPackage;
}
this.serverless.cli.log(`Building native Rust ${func.handler} func...`);
- const res = this.runDocker(func.rust, cargoPackage);
+ const res = this.runDocker(func.rust, cargoPackage, binary);
if (res.error || res.status > 0) {
this.serverless.cli.log(`Dockerized Rust build encountered an error: ${res.error} ${res.status}.`);
throw new Error(res.error);
|
target binary by name (for upcoming release)
|
softprops_serverless-rust
|
train
|
js
|
5ed8adfcfd691787cf977f46a13850a9c7e5fc1c
|
diff --git a/web/concrete/blocks/form/controller.php b/web/concrete/blocks/form/controller.php
index <HASH>..<HASH> 100644
--- a/web/concrete/blocks/form/controller.php
+++ b/web/concrete/blocks/form/controller.php
@@ -578,7 +578,9 @@ class Controller extends BlockController
}
if (!$this->noSubmitFormRedirect) {
- if ($this->redirectCID > 0) {
+ if ($this->redirectCID == HOME_CID) {
+ $this->redirect(Core::make('url/canonical'));
+ } elseif ($this->redirectCID > 0) {
$pg = Page::getByID($this->redirectCID);
if (is_object($pg) && $pg->cID) {
$this->redirect($pg->getCollectionPath());
|
Fix: unable to redirect to home on submit form block
Former-commit-id: <I>f<I>be3ed<I>b<I>ce5fb6ad<I>e<I>ea4
Former-commit-id: ba3f<I>facbc<I>e<I>d<I>ecc6c<I>b6e<I>
|
concrete5_concrete5
|
train
|
php
|
8562e60825f3b7517a7e0674885292da9f5be8a6
|
diff --git a/lib/oauth/token.rb b/lib/oauth/token.rb
index <HASH>..<HASH> 100644
--- a/lib/oauth/token.rb
+++ b/lib/oauth/token.rb
@@ -69,6 +69,20 @@ module OAuth
# The Access Token is used for the actual "real" web service calls thatyou perform against the server
class AccessToken<ConsumerToken
+
+ # The less intrusive way. Otherwise, if we are to do it correctly inside consumer,
+ # we need to restructure and touch more methods: requst(), sign!(), etc.
+ def request(http_method, path, *arguments)
+ request_uri = URI.parse(path)
+ site_uri = consumer.uri
+ is_service_uri_different = (request_uri != site_uri)
+ consumer.uri(request_uri) if is_service_uri_different
+ resp = super(http_method, path, *arguments)
+ # NOTE: reset for wholesomeness? meaning that we admit only AccessToken service calls may use different URIs?
+ # so reset in case consumer is still used for other token-management tasks subsequently?
+ consumer.uri(site_uri) if is_service_uri_different
+ resp
+ end
# Make a regular get request using AccessToken
#
|
enhancement to allow service (non-token-management-related) requests to
use URIs different from initial site URI.
|
oauth-xx_oauth-ruby
|
train
|
rb
|
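The oauth-ruby enhancement above temporarily points the consumer at the request's URI and restores the original site URI afterwards. A generic Python sketch of that save-and-restore pattern (names are placeholders, not the oauth gem's API):

from contextlib import contextmanager

class Consumer:
    def __init__(self, site_uri):
        self.site_uri = site_uri

@contextmanager
def temporary_uri(consumer, request_uri):
    original = consumer.site_uri
    if request_uri != original:
        consumer.site_uri = request_uri
    try:
        yield consumer
    finally:
        consumer.site_uri = original   # reset so later token-management calls still work

consumer = Consumer("https://api.example.com")
with temporary_uri(consumer, "https://service.example.com") as c:
    assert c.site_uri == "https://service.example.com"
assert consumer.site_uri == "https://api.example.com"
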
28024b7ce2c3a54d695060ad557fdda9f8eb2b3e
|
diff --git a/src/Sessions/NativeSession.php b/src/Sessions/NativeSession.php
index <HASH>..<HASH> 100644
--- a/src/Sessions/NativeSession.php
+++ b/src/Sessions/NativeSession.php
@@ -86,7 +86,7 @@ class NativeSession implements SessionInterface
protected function startSession()
{
// Check that the session hasn't already been started
- if (session_id() == '' && ! headers_sent()) {
+ if (session_status() != PHP_SESSION_ACTIVE && ! headers_sent()) {
session_start();
}
}
|
Changing session closed test to PHP <I> construct
If a session has previously been opened then closed, session_id() will have a value. The session then fails to open. In PHP <I> session_status() was introduced which allows testing whether the session is actually open or closed.
With this change, the session is correctly re-opened after it has previously been opened and closed. See <URL>
|
cartalyst_sentinel
|
train
|
php
|
0895de873b259fb32091302a6e18b88fa5d809c9
|
diff --git a/styx-scheduler-service/src/main/java/com/spotify/styx/docker/KubernetesDockerRunner.java b/styx-scheduler-service/src/main/java/com/spotify/styx/docker/KubernetesDockerRunner.java
index <HASH>..<HASH> 100644
--- a/styx-scheduler-service/src/main/java/com/spotify/styx/docker/KubernetesDockerRunner.java
+++ b/styx-scheduler-service/src/main/java/com/spotify/styx/docker/KubernetesDockerRunner.java
@@ -446,7 +446,7 @@ class KubernetesDockerRunner implements DockerRunner {
.getOrDefault(KubernetesDockerRunner.STYX_WORKFLOW_INSTANCE_ANNOTATION, "N/A");
final String status = readStatus(pod);
- LOG.info("{}Pod event for {} at resource version {}, action: {}, workflow instance: {}, status: {}",
+ LOG.debug("{}Pod event for {} at resource version {}, action: {}, workflow instance: {}, status: {}",
polled ? "Polled: " : "", podName, resourceVersion, action, workflowInstance, status);
}
|
log k8s events at debug level
|
spotify_styx
|
train
|
java
|
061edfbb8615e38a54f867ea424a19eb76eff976
|
diff --git a/proxmoxer/backends/ssh_paramiko.py b/proxmoxer/backends/ssh_paramiko.py
index <HASH>..<HASH> 100644
--- a/proxmoxer/backends/ssh_paramiko.py
+++ b/proxmoxer/backends/ssh_paramiko.py
@@ -56,8 +56,8 @@ class ProxmoxParamikoSession(ProxmoxBaseSSHSession):
cmd = 'sudo ' + cmd
session = self.ssh_client.get_transport().open_session()
session.exec_command(cmd)
- stdout = session.makefile('rb', -1).read().decode()
- stderr = session.makefile_stderr('rb', -1).read().decode()
+ stdout = session.makefile('r', -1).read().decode()
+ stderr = session.makefile_stderr('r', -1).read().decode()
return stdout, stderr
def upload_file_obj(self, file_obj, remote_path):
|
fix from mihailstoynov
|
swayf_proxmoxer
|
train
|
py
|
ba7e3b5e95252583fd06c93683b02c73efedd6f0
|
diff --git a/lib/chef/provider/service.rb b/lib/chef/provider/service.rb
index <HASH>..<HASH> 100644
--- a/lib/chef/provider/service.rb
+++ b/lib/chef/provider/service.rb
@@ -44,16 +44,19 @@ class Chef
supports[:restart] = false if supports[:restart].nil?
end
- def load_new_resource_state
- # If the user didn't specify a change in enabled state,
- # it will be the same as the old resource
- if ( @new_resource.enabled.nil? )
- @new_resource.enabled(@current_resource.enabled)
- end
- if ( @new_resource.running.nil? )
- @new_resource.running(@current_resource.running)
- end
- end
+ # the new_resource#enabled and #running variables are not user input, but when we
+ # do (e.g.) action_enable we want to set new_resource.enabled so that the comparison
+ # between desired and current state produces the correct change in reporting.
+ # XXX?: the #nil? check below will likely fail if this is a cloned resource or if
+ # we just run multiple actions.
+ def load_new_resource_state
+ if ( @new_resource.enabled.nil? )
+ @new_resource.enabled(@current_resource.enabled)
+ end
+ if ( @new_resource.running.nil? )
+ @new_resource.running(@current_resource.running)
+ end
+ end
def shared_resource_requirements
end
|
add better documentation to this method
it violates our dont-violate-the-new-resource policy for a reason, but
is likely still buggy if the resource gets reused.
|
chef_chef
|
train
|
rb
|
c614a20e02cb8fd5531bce1adcb34a1210f79ac6
|
diff --git a/lib/Doctrine/Common/ClassLoader.php b/lib/Doctrine/Common/ClassLoader.php
index <HASH>..<HASH> 100644
--- a/lib/Doctrine/Common/ClassLoader.php
+++ b/lib/Doctrine/Common/ClassLoader.php
@@ -33,10 +33,25 @@ namespace Doctrine\Common;
*/
class ClassLoader
{
- private $fileExtension = '.php';
- private $namespace;
- private $includePath;
- private $namespaceSeparator = '\\';
+ /**
+ * @var string PHP file extension
+ */
+ protected $fileExtension = '.php';
+
+ /**
+ * @var string Current namespace
+ */
+ protected $namespace;
+
+ /**
+ * @var string Current include path
+ */
+ protected $includePath;
+
+ /**
+ * @var string PHP namespace separator
+ */
+ protected $namespaceSeparator = '\\';
/**
* Creates a new <tt>ClassLoader</tt> that loads classes of the
|
[DCOM-<I>] Increased visibility of members in ClassLoader in order to simplify extensibility through methods' override.
|
doctrine_common
|
train
|
php
|
0e9116a8f700f3159112066a5487700fe503cbff
|
diff --git a/server/src/main/java/org/jboss/as/server/deployment/module/ModuleSpecification.java b/server/src/main/java/org/jboss/as/server/deployment/module/ModuleSpecification.java
index <HASH>..<HASH> 100644
--- a/server/src/main/java/org/jboss/as/server/deployment/module/ModuleSpecification.java
+++ b/server/src/main/java/org/jboss/as/server/deployment/module/ModuleSpecification.java
@@ -254,6 +254,24 @@ public class ModuleSpecification extends SimpleAttachable {
this.localDependenciesTransitive = localDependenciesTransitive;
}
+ /**
+ * @deprecated since AS 8.x. Use {@link #isLocalDependenciesTransitive()} instead
+ * @return
+ */
+ @Deprecated
+ public boolean isRequiresTransitiveDependencies() {
+ return localDependenciesTransitive;
+ }
+
+ /**
+ * @deprecated since AS 8.x. Use {@link #setLocalDependenciesTransitive(boolean)} instead
+ * @param requiresTransitiveDependencies
+ */
+ @Deprecated
+ public void setRequiresTransitiveDependencies(final boolean requiresTransitiveDependencies) {
+ this.localDependenciesTransitive = requiresTransitiveDependencies;
+ }
+
public boolean isLocalLast() {
return localLast;
}
|
AS7-<I> Deprecate the methods on ModuleSpecification instead of removing those
|
wildfly_wildfly
|
train
|
java
|
2e6ef522134daed48c6dae0dd672680e80cade91
|
diff --git a/classes/Kohana/Jam/Model.php b/classes/Kohana/Jam/Model.php
index <HASH>..<HASH> 100755
--- a/classes/Kohana/Jam/Model.php
+++ b/classes/Kohana/Jam/Model.php
@@ -397,7 +397,7 @@ abstract class Kohana_Jam_Model extends Jam_Validated {
public function get_insist($attribute_name)
{
- $attribute = $this->{$attribute_name};
+ $attribute = $this->__get($attribute_name);
if ($attribute === NULL)
throw new Jam_Exception_Notfound('The association :name was empty on :model', NULL, array(
|
a small performance optimization for get_insist
|
OpenBuildings_jam
|
train
|
php
|
f8eee55e064f332a27852243d13871fa56fc17d9
|
diff --git a/src/Geometry.php b/src/Geometry.php
index <HASH>..<HASH> 100644
--- a/src/Geometry.php
+++ b/src/Geometry.php
@@ -237,7 +237,13 @@ abstract class Geometry
*/
public function asText()
{
- return (new WKTWriter())->write($this);
+ static $wktWriter;
+
+ if ($wktWriter === null) {
+ $wktWriter = new WKTWriter();
+ }
+
+ return $wktWriter->write($this);
}
/**
@@ -249,7 +255,13 @@ abstract class Geometry
*/
public function asBinary()
{
- return (new WKBWriter())->write($this);
+ static $wkbWriter;
+
+ if ($wkbWriter === null) {
+ $wkbWriter = new WKBWriter();
+ }
+
+ return $wkbWriter->write($this);
}
/**
|
Static instances of WKTWriter and WKBWriter in Geometry export methods
|
brick_geo
|
train
|
php
|
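The brick/geo change above keeps one static writer per export method instead of constructing a new writer on every call. A Python sketch of the same memoisation idea (the writer class here is a stand-in, not the library's API):

from functools import lru_cache

class WktWriter:
    def write(self, geometry):
        return f"POINT ({geometry[0]} {geometry[1]})"

@lru_cache(maxsize=1)
def _wkt_writer():
    return WktWriter()             # built once, reused for every export

def as_text(geometry):
    return _wkt_writer().write(geometry)

assert as_text((1, 2)) == "POINT (1 2)"
assert _wkt_writer() is _wkt_writer()   # same instance on repeated calls
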
8312661d33cc6c8a98e342a3e28b713446beb2cf
|
diff --git a/app/adapters/application.js b/app/adapters/application.js
index <HASH>..<HASH> 100644
--- a/app/adapters/application.js
+++ b/app/adapters/application.js
@@ -95,6 +95,10 @@ export default Adapter.extend(PouchAdapterUtils, {
return haveSpecialCharacters;
},
+ generateIdForRecord: function() {
+ return PouchDB.utils.uuid();
+ },
+
findQuery: function(store, type, query, options) {
var specialQuery = false;
for (var i = 0; i < this._specialQueries.length; i++) {
|
Make sure new records have id on creation.
|
HospitalRun_hospitalrun-frontend
|
train
|
js
|
cc1cdc577acaac071e63231054114b4feb9c474d
|
diff --git a/src/Provider/Hook_Event_Provider.php b/src/Provider/Hook_Event_Provider.php
index <HASH>..<HASH> 100644
--- a/src/Provider/Hook_Event_Provider.php
+++ b/src/Provider/Hook_Event_Provider.php
@@ -20,6 +20,8 @@ class Hook_Event_Provider implements ServiceProviderInterface {
/**
* @param Container $pimple Container instance.
+ *
+ * @psalm-suppress DeprecatedClass
*/
public function register( Container $pimple ): void {
|
Disabled Psalm error for known deprecated class use.
|
Rarst_laps
|
train
|
php
|
c20f1aa7b4208e7d381654c0f9d03b395e6bbf54
|
diff --git a/src/Torann/LaravelRepository/Repositories/AbstractCacheDecorator.php b/src/Torann/LaravelRepository/Repositories/AbstractCacheDecorator.php
index <HASH>..<HASH> 100644
--- a/src/Torann/LaravelRepository/Repositories/AbstractCacheDecorator.php
+++ b/src/Torann/LaravelRepository/Repositories/AbstractCacheDecorator.php
@@ -239,6 +239,12 @@ abstract class AbstractCacheDecorator implements RepositoryInterface
*/
public function getCacheKey($method, $args = null)
{
+ foreach($args as &$a) {
+ if ($a instanceof Model) {
+ $a = get_class($a).'|'.$a->getKey();
+ }
+ }
+
$args = serialize($args)
. serialize($this->repo->getScopeQuery())
. serialize($this->repo->getWith());
@@ -304,4 +310,4 @@ abstract class AbstractCacheDecorator implements RepositoryInterface
{
return call_user_func_array([$this->repo, $method], $parameters);
}
-}
\ No newline at end of file
+}
|
Enable closures with model arguments.
An Exception is thrown if you use the Model typehint / type as an argument in your repository methods, a pretty common practice: `Serialization of 'Closure' is not allowed`. There may be a more clever way to support more types of objects, but this was fast, easy and worked for my purposes.
|
Torann_laravel-repository
|
train
|
php
|
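The laravel-repository fix above replaces model arguments with a stable "Class|key" string before serialising them into the cache key. A rough Python analogue (the Model class and the hashing choice below are illustrative assumptions):

import hashlib
import json

class Model:
    def __init__(self, pk):
        self.pk = pk
    def get_key(self):
        return self.pk

def cache_key(method, args):
    # Swap un-serialisable model instances for a stable class|primary-key token.
    normalised = [
        f"{type(a).__name__}|{a.get_key()}" if isinstance(a, Model) else a
        for a in args
    ]
    digest = hashlib.md5(json.dumps(normalised, default=str).encode()).hexdigest()
    return f"{method}.{digest}"

assert cache_key("find", [Model(7)]) == cache_key("find", [Model(7)])
assert cache_key("find", [Model(7)]) != cache_key("find", [Model(8)])
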
5fcb920f969d8ea13e92d4c4b11870afb883653d
|
diff --git a/connection.go b/connection.go
index <HASH>..<HASH> 100644
--- a/connection.go
+++ b/connection.go
@@ -6,7 +6,6 @@ import (
"errors"
"fmt"
"io"
- "log"
"math/rand"
"net"
"strconv"
@@ -14,6 +13,8 @@ import (
"sync"
"time"
+ "github.com/anacrolix/log"
+
"github.com/anacrolix/missinggo"
"github.com/anacrolix/missinggo/bitmap"
"github.com/anacrolix/missinggo/iter"
@@ -1203,7 +1204,7 @@ another:
break another
}
}
- log.Printf("error sending chunk %+v to peer: %s", r, err)
+ log.Str("error sending chunk to peer").AddValues(c, r, err).Log(c.t.logger)
// If we failed to send a chunk, choke the peer to ensure they
// flush all their requests. We've probably dropped a piece,
// but there's no way to communicate this to the peer. If they
|
Use new logging in connection.go
|
anacrolix_torrent
|
train
|
go
|
47d4fad4964dda1503a280da7aff2b9f3eeca188
|
diff --git a/src/itertools/autoload.php b/src/itertools/autoload.php
index <HASH>..<HASH> 100644
--- a/src/itertools/autoload.php
+++ b/src/itertools/autoload.php
@@ -17,6 +17,7 @@ spl_autoload_register(function ($class) {
'itertools\ForkingIterator',
'itertools\HistoryIterator',
'itertools\IterUtil',
+ 'itertools\LockingIterator',
'itertools\MapIterator',
'itertools\PdoIterator',
'itertools\Queue',
|
Added LockingIterator to autoload
|
SuRaMoN_itertools
|
train
|
php
|
4b5096b827a723a1384617546bd288d7087911b2
|
diff --git a/lib/Elastica/Query/Ids.php b/lib/Elastica/Query/Ids.php
index <HASH>..<HASH> 100644
--- a/lib/Elastica/Query/Ids.php
+++ b/lib/Elastica/Query/Ids.php
@@ -6,7 +6,7 @@
* @category Xodoa
* @package Elastica
* @author Lee Parker, Nicolas Ruflin <spam@ruflin.com>, Tim Rupp
- * @link http://www.elasticsearch.org/guide/reference/query-dsl/ids-filter.html
+ * @link http://www.elasticsearch.org/guide/reference/query-dsl/ids-query.html
*/
class Elastica_Query_Ids extends Elastica_Query_Abstract
{
|
Fixed link to elasticsearch documentation for Query_Ids
|
ruflin_Elastica
|
train
|
php
|
ddad35288e7b0cd442f260efdabc660b2a76599b
|
diff --git a/werkzeug/testsuite/formparser.py b/werkzeug/testsuite/formparser.py
index <HASH>..<HASH> 100644
--- a/werkzeug/testsuite/formparser.py
+++ b/werkzeug/testsuite/formparser.py
@@ -132,6 +132,8 @@ class FormParserTestCase(WerkzeugTestCase):
# make sure we have a real file here, because we expect to be
# on the disk. > 1024 * 500
self.assert_true(hasattr(req.files['foo'].stream, u'fileno'))
+ # close file to prevent fds from leaking
+ req.files['foo'].close()
def test_streaming_parse(self):
data = b'x' * (1024 * 600)
|
Close file to prevent leaking fd
|
pallets_werkzeug
|
train
|
py
|
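The werkzeug test fix above closes the uploaded file so its descriptor is released. The underlying concern, in plain Python terms: a temporary file that has spilled to disk keeps an OS file descriptor until it is explicitly closed (or used as a context manager).

import tempfile

spooled = tempfile.SpooledTemporaryFile(max_size=1024)
spooled.write(b"x" * 4096)        # exceeds max_size, so it rolls over to disk
spooled.seek(0)
assert spooled.read(1) == b"x"
spooled.close()                   # releases the underlying descriptor
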
3f9cdf303727c109092cd6ab375dfe255aa07a86
|
diff --git a/spec/unit/mongoid/config_spec.rb b/spec/unit/mongoid/config_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/unit/mongoid/config_spec.rb
+++ b/spec/unit/mongoid/config_spec.rb
@@ -1,13 +1,12 @@
require "spec_helper"
describe Mongoid::Config do
+ let(:config) { Mongoid::Config.instance }
after do
- Mongoid::Config.instance.reset
+ config.reset
end
- let(:config) { Mongoid::Config.instance }
-
describe "#database=" do
context "when object provided is not a Mongo::DB" do
diff --git a/spec/unit/mongoid_spec.rb b/spec/unit/mongoid_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/unit/mongoid_spec.rb
+++ b/spec/unit/mongoid_spec.rb
@@ -32,14 +32,14 @@ describe Mongoid do
end
describe ".deprecate" do
+ let(:deprecation) { stub }
before do
- @deprecation = mock
- Mongoid::Deprecation.expects(:instance).returns(@deprecation)
+ Mongoid::Deprecation.expects(:instance).returns(deprecation)
end
it "calls alert on the deprecation singleton" do
- @deprecation.expects(:alert).with("testing")
+ deprecation.expects(:alert).with("testing")
Mongoid.deprecate("testing")
end
end
|
Specfix for better use of rspec
|
mongodb_mongoid
|
train
|
rb,rb
|