Dataset schema (each record below is one row: hash, diff, message, project, split):

  column  | type          | range
  hash    | stringlengths | 40 to 40
  diff    | stringlengths | 131 to 114k
  message | stringlengths | 7 to 980
  project | stringlengths | 5 to 67
  split   | stringclasses | 1 value
3193c6f3d543320cdca574c95d6b241359823d66
diff --git a/package.json b/package.json index <HASH>..<HASH> 100644 --- a/package.json +++ b/package.json @@ -16,7 +16,7 @@ }, "dependencies": { "address-rfc2821" : "^1.1.1", - "address-rfc2822" : "*", + "address-rfc2822" : "^2.0.0", "async" : "~2.4.0", "daemon" : "~1.1.0", "generic-pool" : "~2.5.0", diff --git a/plugins/dkim_sign.js b/plugins/dkim_sign.js index <HASH>..<HASH> 100644 --- a/plugins/dkim_sign.js +++ b/plugins/dkim_sign.js @@ -309,7 +309,7 @@ exports.get_sender_domain = function (txn) { if (!addrs || ! addrs.length) { return domain; } // If From has a single address, we're done - if (addrs.length === 1) { + if (addrs.length === 1 && addrs[0].host) { var fromHost = addrs[0].host(); if (fromHost) { // don't attempt to lower a null or undefined value #1575 diff --git a/tests/plugins/dkim_sign.js b/tests/plugins/dkim_sign.js index <HASH>..<HASH> 100644 --- a/tests/plugins/dkim_sign.js +++ b/tests/plugins/dkim_sign.js @@ -85,7 +85,7 @@ exports.get_sender_domain = { }, 'from header, RFC 5322 odd': function (test) { test.expect(1); - this.connection.transaction.header.add('From', 'Pete(A nice \) chap) <pete(his account)@silly.test(his host)>'); + this.connection.transaction.header.add('From', 'Pete(A nice \\) chap) <pete(his account)@silly.test(his host)>'); var r = this.plugin.get_sender_domain(this.connection.transaction); test.equal('silly.test', r); test.done();
Support the new address-rfc<I> (#<I>) * Support the new address-rfc<I> New module supports Groups, which have no host method * Force version of rfc<I> required by the fix * Fix missing backslash
haraka_Haraka
train
7494e16d2cce260922682abb63c649e6bb270352
diff --git a/spec/unit/resource/api1600/synergy/volume_attachment_spec.rb b/spec/unit/resource/api1600/synergy/volume_attachment_spec.rb index <HASH>..<HASH> 100644 --- a/spec/unit/resource/api1600/synergy/volume_attachment_spec.rb +++ b/spec/unit/resource/api1600/synergy/volume_attachment_spec.rb @@ -1,3 +1,14 @@ +# (C) Copyright 2017 Hewlett Packard Enterprise Development LP +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# You may not use this file except in compliance with the License. +# You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software distributed +# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +# CONDITIONS OF ANY KIND, either express or implied. See the License for the +# specific language governing permissions and limitations under the License. + require 'spec_helper' RSpec.describe OneviewSDK::API1600::Synergy::VolumeAttachment do
Fixed API<I> spec file
HewlettPackard_oneview-sdk-ruby
train
327db11cda3c67cdd585a48604c02b8274c1c7c2
diff --git a/travis-ci/settings.py b/travis-ci/settings.py index <HASH>..<HASH> 100644 --- a/travis-ci/settings.py +++ b/travis-ci/settings.py @@ -91,4 +91,4 @@ STATIC_URL = '/static/' # Test the memcached cache code RESTCLIENTS_TEST_MEMCACHED = True -RESTCLIENTS_MEMCACHED_SERVERS = ('127.0.0.1:11211', ) +RESTCLIENTS_MEMCACHED_SERVERS = ('localhost:11211', )
trying another way to get memcached running on travis
uw-it-aca_uw-restclients
train
c1affade9ad71014c844832a2319297f40fd764c
diff --git a/nodeserver/src/client/js/Client/ClientMaster.js b/nodeserver/src/client/js/Client/ClientMaster.js index <HASH>..<HASH> 100644 --- a/nodeserver/src/client/js/Client/ClientMaster.js +++ b/nodeserver/src/client/js/Client/ClientMaster.js @@ -342,11 +342,21 @@ define([ return activeActor.getCurrentCommit(); } }; + self.getActualBranch = function(){ + if(activeProject && activeActor){ + return activeActor.getCurrentBranch(); + } + }; self.getRootKey = function(){ if(activeProject && activeActor){ return activeActor.getRootKey(); } }; + self.commit = function(message,branch){ + if(branch && branch !== self.getActualBranch()){ + + } + }; //functions handling UI components self.addUI = function(ui,oneevent,guid){ diff --git a/nodeserver/src/client/js/Client/ClientProject.js b/nodeserver/src/client/js/Client/ClientProject.js index <HASH>..<HASH> 100644 --- a/nodeserver/src/client/js/Client/ClientProject.js +++ b/nodeserver/src/client/js/Client/ClientProject.js @@ -208,6 +208,9 @@ define([ var getCurrentCommit = function(){ return mycommit[KEY]; }; + var getCurrentBranch = function(){ + return mycommit['name']; + }; /*MGA like functions*/ var startTransaction = function(){ @@ -795,6 +798,7 @@ define([ getNode : getNode, getRootKey : getRootKey, getCurrentCommit : getCurrentCommit, + getCurrentBranch : getCurrentBranch, //MGAlike startTransaction : startTransaction, completeTransaction : completeTransaction,
getActualBranch has been added to client functionality Former-commit-id: ead2abe2c<I>b<I>c8bf<I>b<I>f<I>e<I>a
webgme_webgme-engine
train
dbbf34dc65ee6b3afdc79af1f320c78ebb28cf35
diff --git a/src/main/java/net/openhft/chronicle/network/connection/AbstractStatelessClient.java b/src/main/java/net/openhft/chronicle/network/connection/AbstractStatelessClient.java index <HASH>..<HASH> 100755 --- a/src/main/java/net/openhft/chronicle/network/connection/AbstractStatelessClient.java +++ b/src/main/java/net/openhft/chronicle/network/connection/AbstractStatelessClient.java @@ -320,7 +320,7 @@ public abstract class AbstractStatelessClient<E extends ParameterizeWireKey> imp consumer.accept(valueOut); }); - hub.asyncWriteSocket(hub.outWire()); + hub.writeSocket(hub.outWire()); } /** diff --git a/src/main/java/net/openhft/chronicle/network/connection/TcpChannelHub.java b/src/main/java/net/openhft/chronicle/network/connection/TcpChannelHub.java index <HASH>..<HASH> 100755 --- a/src/main/java/net/openhft/chronicle/network/connection/TcpChannelHub.java +++ b/src/main/java/net/openhft/chronicle/network/connection/TcpChannelHub.java @@ -397,25 +397,28 @@ public class TcpChannelHub implements Closeable { tcpSocketConsumer.prepareToShutdown(); + if (shouldSendCloseMessage) - try { + asycnWriteExecutor.submit(() -> { + try { - tcpSocketConsumer.stop(); + tcpSocketConsumer.stop(); - sendCloseMessage(); - closed = true; + sendCloseMessage(); + closed = true; - if (LOG.isDebugEnabled()) - LOG.debug("closing connection to " + socketAddressSupplier); + if (LOG.isDebugEnabled()) + LOG.debug("closing connection to " + socketAddressSupplier); - while (clientChannel != null) { + while (clientChannel != null) { - if (LOG.isDebugEnabled()) - LOG.debug("waiting for disconnect to " + socketAddressSupplier); - } - } catch (ConnectionDroppedException ignore) { + if (LOG.isDebugEnabled()) + LOG.debug("waiting for disconnect to " + socketAddressSupplier); + } + } catch (ConnectionDroppedException ignore) { - } + } + }); } /** @@ -476,6 +479,7 @@ public class TcpChannelHub implements Closeable { /** * dispatcahes the task onto the async write thred and ensures that + * * @param r */ public void asyncWriteTask(@NotNull final Runnable r) { @@ -590,7 +594,6 @@ public class TcpChannelHub implements Closeable { while (outBuffer.remaining() > 0) { - // if the socket was changed, we need to resend using this one instead // unless the client channel still has not be set, then we will use this one // this can happen during the handshaking phase of a new connection @@ -608,7 +611,7 @@ public class TcpChannelHub implements Closeable { if (prevRemaining != outBuffer.remaining()) { start = Time.currentTimeMillis(); prevRemaining = outBuffer.remaining(); - }else { + } else { long writeTime = Time.currentTimeMillis() - start; if (writeTime > 20_000) { @@ -1449,6 +1452,7 @@ public class TcpChannelHub implements Closeable { if (isShutdown()) continue OUTER; + if (start + socketAddressSupplier.timeoutMS() < System.currentTimeMillis()) { String oldAddress = socketAddressSupplier.toString();
CE-<I> Fixing performance issues
OpenHFT_Chronicle-Network
train
a80ade66e8b58a3eaa0789073a238fc254494ae5
diff --git a/es6/lib/builder/DelegateBuilder.js b/es6/lib/builder/DelegateBuilder.js index <HASH>..<HASH> 100644 --- a/es6/lib/builder/DelegateBuilder.js +++ b/es6/lib/builder/DelegateBuilder.js @@ -87,19 +87,23 @@ export class DelegateBuilder extends Builder { fieldBuilderData.getterFactory = (attrName, isBoolean) => { return (el) => { let target = el.querySelector(data.selector); - return targetedAttrName ? getAttValue(target, targetedAttrName, isBoolean) : target[targetedPropName]; + if (target) { + return targetedAttrName ? getAttValue(target, targetedAttrName, isBoolean) : target[targetedPropName]; + } }; }; fieldBuilderData.setterFactory = (attrName, isBoolean, attSetter) => { return (el, value) => { let target = el.querySelector(data.selector), attrValue = isFunction(attSetter) ? attSetter.call(el, el, value) : value; - if (targetedAttrName) { - setAttValue(target, targetedAttrName, isBoolean, attrValue); - } else { - target[targetedPropName] = attrValue; + if (target) { + if (targetedAttrName) { + setAttValue(target, targetedAttrName, isBoolean, attrValue); + } else { + target[targetedPropName] = attrValue; + } + setAttValue(el, attrName, isBoolean, attrValue); } - setAttValue(el, attrName, isBoolean, attrValue); }; }; } else if (fieldBuilderData.propName) { @@ -107,20 +111,24 @@ export class DelegateBuilder extends Builder { fieldBuilderData.getter = (el) => { let target = el.querySelector(data.selector), targetValue; - if (targetedAttrName) { - targetValue = target.getAttribute(targetedAttrName); - } else { - targetValue = target[targetedPropName]; + if (target) { + if (targetedAttrName) { + targetValue = target.getAttribute(targetedAttrName); + } else { + targetValue = target[targetedPropName]; + } } return isFunction(fieldGetter) ? fieldGetter.call(this, this, targetValue) : targetValue; }; fieldBuilderData.setter = (el, value) => { let target = el.querySelector(data.selector), targetValue = isFunction(fieldSetter) ? fieldSetter.call(this, this, value) : value; - if (targetedAttrName) { - target.setAttribute(targetedAttrName, targetValue); - } else { - target[targetedPropName] = targetValue; + if (target) { + if (targetedAttrName) { + target.setAttribute(targetedAttrName, targetValue); + } else { + target[targetedPropName] = targetValue; + } } }; }
fix delegation with no target
tmorin_ceb
train
0b3e836cc004c71b441551b3c374410f07968aa5
diff --git a/vendor/require_definition.js b/vendor/require_definition.js index <HASH>..<HASH> 100644 --- a/vendor/require_definition.js +++ b/vendor/require_definition.js @@ -77,6 +77,6 @@ globals.require.define = define; globals.require.register = define; globals.require.brunch = true; - globals.require.modules = modules; + globals.require.cache = modules; })();
Renames exposed require modules to require.cache
brunch_brunch
train
f9592dc668cc78c2fb7f784c0efdb923c4ddc148
diff --git a/nomad/structs/structs.go b/nomad/structs/structs.go index <HASH>..<HASH> 100644 --- a/nomad/structs/structs.go +++ b/nomad/structs/structs.go @@ -1069,6 +1069,18 @@ type Evaluation struct { // StatusDescription is meant to provide more human useful information StatusDescription string + // Wait is a minimum wait time for running the eval. This is used to + // support a rolling upgrade. + Wait time.Duration + + // NextEval is the evaluation ID for the eval created to do a followup. + // This is used to support rolling upgrades, where we need a chain of evaluations. + NextEval string + + // PreviousEval is the evaluation ID for the eval creating this one to do a followup. + // This is used to support rolling upgrades, where we need a chain of evaluations. + PreviousEval string + // Raft Indexes CreateIndex uint64 ModifyIndex uint64
nomad: adding fields to cause an evaluation to 'wait'
hashicorp_nomad
train
b82b3826dfcd36edca031b3440bdc4fd177ae4a0
diff --git a/pyemu/la.py b/pyemu/la.py index <HASH>..<HASH> 100644 --- a/pyemu/la.py +++ b/pyemu/la.py @@ -539,6 +539,18 @@ class LinearAnalysis(object): self.__load_obscov() return self.__obscov + @property + def nnz_obs_names(self): + if self.__pst is not None: + return self.pst.nnz_obs_names + else: + return self.jco.obs_names + + def adj_par_names(self): + if self.__pst is not None: + return self.pst.adj_par_names + else: + return self.jco.par_names @property def jco(self): diff --git a/pyemu/sc.py b/pyemu/sc.py index <HASH>..<HASH> 100644 --- a/pyemu/sc.py +++ b/pyemu/sc.py @@ -547,7 +547,9 @@ class Schur(LinearAnalysis): raise Exception("case {0} has observation names ".format(case_name) + \ "not found: " + ','.join(missing_onames)) # find the set difference between obslist and jco obs names - diff_onames = [oname for oname in self.jco.obs_names if oname not in obslist] + #diff_onames = [oname for oname in self.jco.obs_names if oname not in obslist] + diff_onames = [oname for oname in self.nnz_obs_names if oname not in obslist] + # calculate the increase in forecast variance by not using the obs # in obslist diff --git a/pyemu/tests/la_tests.py b/pyemu/tests/la_tests.py index <HASH>..<HASH> 100644 --- a/pyemu/tests/la_tests.py +++ b/pyemu/tests/la_tests.py @@ -259,10 +259,10 @@ if __name__ == "__main__": #par_contrib_test() #dataworth_test() #dataworth_next_test() - #schur_test_nonpest() + schur_test_nonpest() #schur_test() #errvar_test_nonpest() #errvar_test() - css_test() + #css_test() #inf_test() #inf2_test() \ No newline at end of file
bug fix in data worth - using adj pars and nnz obs instead of jco row and col names
jtwhite79_pyemu
train
923d6064a69c6863938d657db4154429b982031e
diff --git a/bees/huebee/huebee.go b/bees/huebee/huebee.go index <HASH>..<HASH> 100644 --- a/bees/huebee/huebee.go +++ b/bees/huebee/huebee.go @@ -50,10 +50,12 @@ func (mod *HueBee) Action(action bees.Action) []bees.Placeholder { case "setcolor": var lightID int var color string + var alert int brightness := 254 action.Options.Bind("light", &lightID) action.Options.Bind("color", &color) action.Options.Bind("brightness", &brightness) + action.Options.Bind("alert", &alert) light, err := mod.client.FindLightById(strconv.Itoa(lightID)) if err != nil { @@ -61,9 +63,15 @@ func (mod *HueBee) Action(action bees.Action) []bees.Placeholder { } state := hue.SetLightState{ - On: "true", - Bri: strconv.FormatInt(int64(brightness), 10), - Sat: "254", + On: "true", + Bri: strconv.FormatInt(int64(brightness), 10), + Effect: "", + } + switch alert { + case 1: + state.Alert = "select" + case 2: + state.Alert = "lselect" } switch strings.ToLower(color) { diff --git a/bees/huebee/huebeefactory.go b/bees/huebee/huebeefactory.go index <HASH>..<HASH> 100644 --- a/bees/huebee/huebeefactory.go +++ b/bees/huebee/huebeefactory.go @@ -126,13 +126,19 @@ func (factory *HueBeeFactory) Actions() []bees.ActionDescriptor { Name: "color", Description: "New color of the light you want to change", Type: "string", - Mandatory: true, + Mandatory: false, }, { Name: "brightness", Description: "New brightness of the light you want to change", Type: "int", - Mandatory: true, + Mandatory: false, + }, + { + Name: "alert", + Description: "0: no alert, 1: short alert, 2: long alert", + Type: "int", + Mandatory: false, }, }, },
Support Hue alerts in HueBee
muesli_beehive
train
c4bdb48f78f6c96d692f55b722202e338c75a3a6
diff --git a/lib/gliffy/api/facade.rb b/lib/gliffy/api/facade.rb index <HASH>..<HASH> 100644 --- a/lib/gliffy/api/facade.rb +++ b/lib/gliffy/api/facade.rb @@ -47,6 +47,11 @@ module Gliffy params) end + def delete_document(document_id) + post("/accounts/#{account_id}/documents/#{document_id}.xml", + :action => "delete") + end + private def api diff --git a/lib/gliffy/document.rb b/lib/gliffy/document.rb index <HASH>..<HASH> 100644 --- a/lib/gliffy/document.rb +++ b/lib/gliffy/document.rb @@ -37,6 +37,10 @@ module Gliffy api.update_document_metadata(id, new_name, nil) end + def delete + api.delete_document(id) + end + def editor(return_url, return_text) api.web( "/gliffy/", diff --git a/spec/lib/gliffy/api/facade_spec.rb b/spec/lib/gliffy/api/facade_spec.rb index <HASH>..<HASH> 100644 --- a/spec/lib/gliffy/api/facade_spec.rb +++ b/spec/lib/gliffy/api/facade_spec.rb @@ -103,6 +103,23 @@ shared_examples_for "an API facade" do facade.update_document_metadata(document_id, document_name, nil) end end + + it "allows user to delete a document" do + expect(facade).to respond_to :delete_document + end + + context "when deleting a document" do + let(:document_id) { 221 } + + it "sends POST request" do + facade + .should_receive(:post) + .with("/accounts/#{account_id}/documents/#{document_id}.xml", + hash_including(:action => "delete")) + + facade.delete_document(document_id) + end + end end describe Gliffy::API::Facade do diff --git a/spec/lib/gliffy/document_spec.rb b/spec/lib/gliffy/document_spec.rb index <HASH>..<HASH> 100644 --- a/spec/lib/gliffy/document_spec.rb +++ b/spec/lib/gliffy/document_spec.rb @@ -136,4 +136,20 @@ describe Gliffy::Document do .with(document_id, new_name, nil) end end + + it "can be deleted" do + expect(document).to respond_to :delete + end + + context "when deleted" do + before :each do + api.stub(:delete_document) + document.delete + end + + it "calls REST API" do + expect(api).to have_received(:delete_document) + .with(document_id) + end + end end
'Delete a document' functionality.
bkon_gliffy
train
943467bfc8cbf2f0ccb13cd9f696ff66ce054259
diff --git a/stagemonitor-core/src/main/java/org/stagemonitor/core/MeasurementSession.java b/stagemonitor-core/src/main/java/org/stagemonitor/core/MeasurementSession.java index <HASH>..<HASH> 100644 --- a/stagemonitor-core/src/main/java/org/stagemonitor/core/MeasurementSession.java +++ b/stagemonitor-core/src/main/java/org/stagemonitor/core/MeasurementSession.java @@ -3,6 +3,7 @@ package org.stagemonitor.core; import static com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility.ANY; import static com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility.NONE; +import static org.stagemonitor.core.util.StringUtils.replaceWhitespacesWithDash; import java.net.InetAddress; import java.util.Collections; @@ -31,9 +32,9 @@ public class MeasurementSession { @JsonProperty("hostName") String hostName, @JsonProperty("instanceName") String instanceName) { - this.applicationName = applicationName; - this.hostName = hostName; - this.instanceName = instanceName; + this.applicationName = replaceWhitespacesWithDash(applicationName); + this.hostName = replaceWhitespacesWithDash(hostName); + this.instanceName = replaceWhitespacesWithDash(instanceName); stringRepresentation = "[application=" + applicationName + "] [instance=" + instanceName + "] [host=" + hostName + "]"; startTimestamp = System.currentTimeMillis(); } diff --git a/stagemonitor-core/src/main/java/org/stagemonitor/core/util/StringUtils.java b/stagemonitor-core/src/main/java/org/stagemonitor/core/util/StringUtils.java index <HASH>..<HASH> 100644 --- a/stagemonitor-core/src/main/java/org/stagemonitor/core/util/StringUtils.java +++ b/stagemonitor-core/src/main/java/org/stagemonitor/core/util/StringUtils.java @@ -41,7 +41,7 @@ public class StringUtils { } public static String slugify(String s) { - return s.toLowerCase().replaceAll("[^\\w ]+", "").replaceAll("\\s+", "-"); + return replaceWhitespacesWithDash(s.toLowerCase().replaceAll("[^\\w ]+", "")); } public static boolean isNotEmpty(String s) { @@ -65,4 +65,8 @@ public class StringUtils { dateFormat.setTimeZone(TimeZone.getTimeZone("UTC")); return dateFormat.format(new Date(time)); } + + public static String replaceWhitespacesWithDash(String s) { + return s.replaceAll("\\s", "-"); + } }
Replace whitespaces in measurement session properties with dash. As Grafana's templating feature in combination with Lucene queries does not like whitespaces.
stagemonitor_stagemonitor
train
8d0abbd1a1323e130a1f48565e025d2d12a5f634
diff --git a/src/preferences/PreferenceStorage.js b/src/preferences/PreferenceStorage.js index <HASH>..<HASH> 100644 --- a/src/preferences/PreferenceStorage.js +++ b/src/preferences/PreferenceStorage.js @@ -218,6 +218,13 @@ define(function (require, exports, module) { var rule = rules[key]; if (!rule && prefCheckCallback) { rule = prefCheckCallback(key); + } else if (prefCheckCallback) { + // Check whether we have a new preference key-value pair + // for an old preference. + var newRule = prefCheckCallback(key, prefs[key]); + if (newRule) { + rule = _.cloneDeep(newRule); + } } if (!rule) { console.warn("Preferences conversion for ", self._clientID, " has no rule for", key); @@ -238,6 +245,11 @@ define(function (require, exports, module) { manager.set(newKey, prefs[key], options); convertedKeys.push(key); } + } else if (_.isObject(rule)) { + Object.keys(rule).forEach(function (ruleKey) { + manager.set(ruleKey, rule[ruleKey]); + }); + convertedKeys.push(key); } else { complete = false; } diff --git a/src/view/ViewCommandHandlers.js b/src/view/ViewCommandHandlers.js index <HASH>..<HASH> 100644 --- a/src/view/ViewCommandHandlers.js +++ b/src/view/ViewCommandHandlers.js @@ -323,6 +323,23 @@ define(function (require, exports, module) { _scrollLine(1); } + /** + * @private + * Convert the old "fontSizeAdjustment" preference to the new view states. + * + * @param {string} key The key of the preference to be examined for migration + * of old preferences. Not used since we only have one in this module. + * @param {string} value The value of "fontSizeAdjustment" preference + * @return {Object} - JSON object for the new view states equivalent to + * the old "fontSizeAdjustment" preference. + */ + function _convertToNewViewStates(key, value) { + var fontSize = 12 + value, + newRule = { "fontSizeStyle": fontSize + "px", + "lineHeightStyle": Math.ceil(fontSize * LINE_HEIGHT) + "px" }; + + return newRule; + } // Register command handlers CommandManager.register(Strings.CMD_INCREASE_FONT_SIZE, Commands.VIEW_INCREASE_FONT_SIZE, _handleIncreaseFontSize); @@ -331,9 +348,7 @@ define(function (require, exports, module) { CommandManager.register(Strings.CMD_SCROLL_LINE_UP, Commands.VIEW_SCROLL_LINE_UP, _handleScrollLineUp); CommandManager.register(Strings.CMD_SCROLL_LINE_DOWN, Commands.VIEW_SCROLL_LINE_DOWN, _handleScrollLineDown); - // Initialize the default font size - PreferencesManager.stateManager.definePreference("fontSizeAdjustment", "number", 0); - PreferencesManager.convertPreferences(module, {"fontSizeAdjustment": "user"}, true); + PreferencesManager.convertPreferences(module, {"fontSizeAdjustment": "user"}, true, _convertToNewViewStates); // Update UI when opening or closing a document $(DocumentManager).on("currentDocumentChange", _updateUI);
Migrate "fontSizeAdjustment" to new view states.
adobe_brackets
train
0ed6f64b9a04d52454fd46409f1eb725d420478f
diff --git a/src/Service/MelisTreeService.php b/src/Service/MelisTreeService.php index <HASH>..<HASH> 100644 --- a/src/Service/MelisTreeService.php +++ b/src/Service/MelisTreeService.php @@ -23,23 +23,22 @@ class MelisTreeService extends MelisGeneralService implements MelisTreeServiceIn */ public function getPageChildren($idPage, $publishedOnly = 0) { - if (empty($idPage)) - return null; - // Retrieve cache version if front mode to avoid multiple calls - /* $cacheKey = 'getPageChildren_' . $idPage . '_' . $publishedOnly; + $cacheKey = 'getPageChildren_' . $idPage . '_' . $publishedOnly; $cacheConfig = 'engine_page_services'; $melisEngineCacheSystem = $this->getServiceManager()->get('MelisEngineCacheSystem'); $results = $melisEngineCacheSystem->getCacheByKey($cacheKey, $cacheConfig); - if (!empty($results)) return $results; */ - + + if (!is_null($results)) + return $results; + $tablePageTree = $this->getServiceManager()->get('MelisEngineTablePageTree'); - $datasPage = $tablePageTree->getPageChildrenByidPage($idPage, $publishedOnly); + $pages = $tablePageTree->getPageChildrenByidPage($idPage, $publishedOnly)->toArray(); // Save cache key - // $melisEngineCacheSystem->setCacheByKey($cacheKey, $cacheConfig, $pages); - - return $datasPage; + $melisEngineCacheSystem->setCacheByKey($cacheKey, $cacheConfig, $pages); + + return $pages; } public function getAllPages($idPage) @@ -47,6 +46,8 @@ class MelisTreeService extends MelisGeneralService implements MelisTreeServiceIn $pages = []; $children = $this->getPageChildren($idPage); + // print_r($children); + foreach($children as $idx => $child) { if($child['tree_father_page_id'] == '-1') {
Caching method return data revert
melisplatform_melis-engine
train
c1a48e9565d53d8a2a34985428b2599814a4134c
diff --git a/neat/population.py b/neat/population.py index <HASH>..<HASH> 100644 --- a/neat/population.py +++ b/neat/population.py @@ -1,9 +1,10 @@ +import gzip, random, math, time from config import Config import species import chromosome import cPickle as pickle import visualize -import random, math, time + class Population(object): """ Manages all the species """ @@ -35,14 +36,13 @@ class Population(object): species_log = property(lambda self: self.__species_log) def __resume_checkpoint(self, checkpoint): - - print 'Resuming from a previous point' + """ Resumes the simulation from a previous saved point. """ try: - file = open(checkpoint) + #file = open(checkpoint) + file = gzip.open(checkpoint) except IOError: raise - - print 'Loading previous population: %s' %checkpoint + print 'Resuming from a previous point: %s' %checkpoint # when unpickling __init__ is not called again previous_pop = pickle.load(file) self.__dict__ = previous_pop.__dict__ @@ -54,7 +54,7 @@ class Population(object): file.close() def __create_checkpoint(self, report): - + """ Saves the current simulation state. """ #from time import strftime # get current time #date = strftime("%Y_%m_%d_%Hh%Mm%Ss") @@ -62,7 +62,8 @@ class Population(object): print 'Creating checkpoint file at generation: %d' %self.__generation # dumps 'self' - file = open('checkpoint_'+str(self.__generation), 'w') + #file = open('checkpoint_'+str(self.__generation), 'w') + file = gzip.open('checkpoint_'+str(self.__generation), 'w', compresslevel = 5) # dumps the population pickle.dump(self, file, protocol=2) # dumps the current random state
Compressing checkpoint files with gzip.
CodeReclaimers_neat-python
train
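The commit above swaps plain file I/O for gzip when pickling checkpoints. Below is a minimal sketch of the same pattern in modern Python; the Population object and the checkpoint filename scheme are stand-ins, and pickle replaces the original cPickle import:

    import gzip
    import pickle

    def create_checkpoint(population, generation):
        # gzip.open returns a file object that compresses on write;
        # compresslevel=5 trades compression ratio for speed, as in the commit.
        with gzip.open('checkpoint_%d' % generation, 'wb', compresslevel=5) as f:
            pickle.dump(population, f, protocol=2)

    def resume_checkpoint(path):
        # gzip.open transparently decompresses on read, so unpickling
        # needs no further changes.
        with gzip.open(path, 'rb') as f:
            return pickle.load(f)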
9723e6ea816df8dcc37114081bddc2fada6e9bf5
diff --git a/src/parts/dropdown.js b/src/parts/dropdown.js index <HASH>..<HASH> 100644 --- a/src/parts/dropdown.js +++ b/src/parts/dropdown.js @@ -583,7 +583,7 @@ export default { list = [], exactMatchesList = [], whitelist = _s.whitelist, - suggestionsCount = _sd.maxItems || Infinity, + suggestionsCount = _sd.maxItems >= 0 ? _sd.maxItems : Infinity, searchKeys = _sd.searchKeys, whitelistItem, valueIsInWhitelist,
fixes #<I> - dropdownFooter shows "Refine your search" when maxItems = 0 (which shows all items)
yairEO_tagify
train
97b7f2972ccb695e9e97baaae416c79504eb63a8
diff --git a/sos/plugins/ds.py b/sos/plugins/ds.py index <HASH>..<HASH> 100644 --- a/sos/plugins/ds.py +++ b/sos/plugins/ds.py @@ -64,6 +64,7 @@ class DirectoryServer(Plugin, RedHatPlugin): "/etc/dirsrv/slapd*/dse.ldif.startOK", "/etc/dirsrv/slapd*/secmod.db", "/etc/dirsrv/slapd*/schema/*.ldif", + "/etc/dirsrv/admin-serv", "/var/log/dirsrv/*" ]) elif "ds7" in self.check_version():
[ds] collect /etc/dirsrv/admin-serv. Resolves #<I>, #<I>.
sosreport_sos
train
454776695ec4893a35373caa0676ff07bb477ae7
diff --git a/postgres_kernel/kernel.py b/postgres_kernel/kernel.py index <HASH>..<HASH> 100644 --- a/postgres_kernel/kernel.py +++ b/postgres_kernel/kernel.py @@ -80,9 +80,11 @@ class PostgresKernel(Kernel): log('fetching all from: \n' + query) with self._conn.cursor() as c: c.execute(query) - keys = [col[0] for col in c.description] - return keys, c.fetchall() - + desc = c.description + if c.description: + keys = [col[0] for col in c.description] + return keys, c.fetchall() + return None, None def do_execute(self, code, silent, store_history=True, user_expressions=None, allow_stdin=False): if not code.strip(): @@ -102,7 +104,8 @@ class PostgresKernel(Kernel): 'ename': 'ProgrammingError', 'evalue': str(e), 'traceback': []} else: - self.send_response(self.iopub_socket, 'display_data', display_data(header, rows)) + if header is not None: + self.send_response(self.iopub_socket, 'display_data', display_data(header, rows)) return {'status': 'ok', 'execution_count': self.execution_count, 'payload': [], 'user_expressions': {}}
Handle queries that don't produce output
bgschiller_postgres_kernel
train
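The guard added above leans on a DB-API detail: cursor.description is only populated when a statement returns rows. A short sketch of the distinction, assuming a psycopg2-style connection with a placeholder DSN:

    import psycopg2

    conn = psycopg2.connect('dbname=test')  # placeholder DSN
    with conn.cursor() as c:
        c.execute('CREATE TABLE IF NOT EXISTS t (x int)')
        # Statements with no result set leave description as None,
        # and calling fetchall() here would raise an error.
        assert c.description is None

        c.execute('SELECT 1 AS x')
        # Row-returning statements populate description with column metadata.
        keys = [col[0] for col in c.description]  # ['x']
        rows = c.fetchall()                       # [(1,)]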
0112125116b69f02414791246a1b27c18c303c9c
diff --git a/chilero/web/application.py b/chilero/web/application.py index <HASH>..<HASH> 100644 --- a/chilero/web/application.py +++ b/chilero/web/application.py @@ -94,8 +94,8 @@ class Application(web.Application): name = '{}_{}'.format( url_name, 'index' if pt == pattern else 'item' ) - if name in self.router: - name = None + + name = None if name in self.router else name self.router.add_route( method, pt, self.dispatcher(view, action), @@ -116,8 +116,7 @@ class Application(web.Application): name = url_name - if name in self.router: - name = None # pragma: no cover + name = None if name in self.router else url_name self.router.add_route( method,
reduce complexity of Application.register_routes
dmonroy_chilero
train
d2f6405184d3d155109100caf0eafc3b2917a435
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -38,9 +38,9 @@ if isWindows: os.mkdir(cmake_build) os.chdir(cmake_build) # -DBUILD_STATIC=on - cmds = [cmake + ' .. -DGLIB2_BASE_DIR=glib-2.0 -DBUILD_TESTS=off -DLENSFUN_INSTALL_PREFIX= ', + cmds = [cmake + ' .. -G "NMake Makefiles" -DGLIB2_BASE_DIR=glib-2.0 -DBUILD_TESTS=off -DLENSFUN_INSTALL_PREFIX= ', 'dir', - 'nmake /F ' + os.path.join(cmake_build, 'Makefile') + 'nmake' ] for cmd in cmds: print cmd
force cmake to generate NMake makefiles
letmaik_lensfunpy
train
9b627c19412c2a565e344947a6c8d590ae403d15
diff --git a/integration/experimental/experimental_suite_test.go b/integration/experimental/experimental_suite_test.go index <HASH>..<HASH> 100644 --- a/integration/experimental/experimental_suite_test.go +++ b/integration/experimental/experimental_suite_test.go @@ -1,14 +1,12 @@ package experimental import ( - "regexp" "testing" "time" "code.cloudfoundry.org/cli/integration/helpers" . "github.com/onsi/ginkgo" . "github.com/onsi/gomega" - . "github.com/onsi/gomega/gexec" ) const ( @@ -72,26 +70,6 @@ var _ = AfterEach(func() { helpers.DestroyHomeDir(homeDir) }) -var foundDefaultDomain string - -func defaultSharedDomain() string { - // TODO: Move this into helpers when other packages need it, figure out how - // to cache cuz this is a wacky call otherwise - if foundDefaultDomain == "" { - session := helpers.CF("domains") - Eventually(session).Should(Exit(0)) - - regex, err := regexp.Compile(`(.+?)\s+shared`) - Expect(err).ToNot(HaveOccurred()) - - matches := regex.FindStringSubmatch(string(session.Out.Contents())) - Expect(matches).To(HaveLen(2)) - - foundDefaultDomain = matches[1] - } - return foundDefaultDomain -} - func setupCF(org string, space string) { helpers.LoginCF() if org != ReadOnlyOrg && space != ReadOnlySpace { diff --git a/integration/experimental/unshare_service_command_test.go b/integration/experimental/unshare_service_command_test.go index <HASH>..<HASH> 100644 --- a/integration/experimental/unshare_service_command_test.go +++ b/integration/experimental/unshare_service_command_test.go @@ -125,7 +125,7 @@ var _ = Describe("unshare-service command", func() { helpers.CreateOrgAndSpace(sharedToOrgName, sharedToSpaceName) setupCF(sourceOrgName, sourceSpaceName) - domain = defaultSharedDomain() + domain = helpers.DefaultSharedDomain() }) AfterEach(func() { diff --git a/integration/experimental/v3_app_command_test.go b/integration/experimental/v3_app_command_test.go index <HASH>..<HASH> 100644 --- a/integration/experimental/v3_app_command_test.go +++ b/integration/experimental/v3_app_command_test.go @@ -171,7 +171,7 @@ var _ = Describe("v3-app command", func() { Eventually(helpers.CustomCF(helpers.CFEnv{WorkingDirectory: appDir}, "v3-push", appName)).Should(Exit(0)) }) - domainName = defaultSharedDomain() + domainName = helpers.DefaultSharedDomain() }) It("displays the app summary", func() { @@ -242,7 +242,7 @@ var _ = Describe("v3-app command", func() { BeforeEach(func() { Eventually(helpers.CF("v3-push", appName, "-o", PublicDockerImage)).Should(Exit(0)) - domainName = defaultSharedDomain() + domainName = helpers.DefaultSharedDomain() }) It("displays the app summary", func() { diff --git a/integration/experimental/v3_apps_command_test.go b/integration/experimental/v3_apps_command_test.go index <HASH>..<HASH> 100644 --- a/integration/experimental/v3_apps_command_test.go +++ b/integration/experimental/v3_apps_command_test.go @@ -170,7 +170,7 @@ var _ = Describe("v3-apps command", func() { Eventually(helpers.CustomCF(helpers.CFEnv{WorkingDirectory: appDir}, "v3-push", appName1)).Should(Exit(0)) }) - domainName = defaultSharedDomain() + domainName = helpers.DefaultSharedDomain() }) It("displays apps in the list", func() { diff --git a/integration/experimental/v3_push_command_test.go b/integration/experimental/v3_push_command_test.go index <HASH>..<HASH> 100644 --- a/integration/experimental/v3_push_command_test.go +++ b/integration/experimental/v3_push_command_test.go @@ -202,8 +202,7 @@ var _ = Describe("v3-push command", func() { BeforeEach(func() { setupCF(orgName, spaceName) - - domainName = defaultSharedDomain() + domainName = helpers.DefaultSharedDomain() }) AfterEach(func() { @@ -589,7 +588,7 @@ var _ = Describe("v3-push command", func() { }) It("successfully compiles and runs the app", func() { - resp, err := http.Get(fmt.Sprintf("http://%s.%s", appName, defaultSharedDomain())) + resp, err := http.Get(fmt.Sprintf("http://%s.%s", appName, helpers.DefaultSharedDomain())) Expect(err).ToNot(HaveOccurred()) Expect(resp.StatusCode).To(Equal(http.StatusOK)) }) diff --git a/integration/helpers/route.go b/integration/helpers/route.go index <HASH>..<HASH> 100644 --- a/integration/helpers/route.go +++ b/integration/helpers/route.go @@ -111,10 +111,11 @@ func NewDomain(org string, name string) Domain { } } +var foundDefaultDomain string + func DefaultSharedDomain() string { // TODO: Move this into helpers when other packages need it, figure out how // to cache cuz this is a wacky call otherwise - var foundDefaultDomain string if foundDefaultDomain == "" { session := CF("domains")
use DefaultSharedDomain helper in experimental integration tests - move foundDefaultDomain to global scope in helper suite [#<I>]
cloudfoundry_cli
train
257cde7c363efb3317bfb5c13975cca9154894e2
diff --git a/python/pyspark/mllib/regression.py b/python/pyspark/mllib/regression.py index <HASH>..<HASH> 100644 --- a/python/pyspark/mllib/regression.py +++ b/python/pyspark/mllib/regression.py @@ -163,7 +163,8 @@ def _regression_train_wrapper(train_func, modelClass, data, initial_weights): first = data.first() if not isinstance(first, LabeledPoint): raise ValueError("data should be an RDD of LabeledPoint, but got %s" % first) - initial_weights = initial_weights or [0.0] * len(data.first().features) + if initial_weights is None: + initial_weights = [0.0] * len(data.first().features) weights, intercept = train_func(data, _convert_to_vector(initial_weights)) return modelClass(weights, intercept) diff --git a/python/pyspark/mllib/tests.py b/python/pyspark/mllib/tests.py index <HASH>..<HASH> 100644 --- a/python/pyspark/mllib/tests.py +++ b/python/pyspark/mllib/tests.py @@ -323,6 +323,13 @@ class ListTests(PySparkTestCase): self.assertTrue(gbt_model.predict(features[2]) <= 0) self.assertTrue(gbt_model.predict(features[3]) > 0) + try: + LinearRegressionWithSGD.train(rdd, initialWeights=array([1.0, 1.0])) + LassoWithSGD.train(rdd, initialWeights=array([1.0, 1.0])) + RidgeRegressionWithSGD.train(rdd, initialWeights=array([1.0, 1.0])) + except ValueError: + self.fail() + class StatTests(PySparkTestCase): # SPARK-4023
[SPARK-<I>][MLLIB] _regression_train_wrapper does not test initialWeights correctly. Weight parameters must be initialized correctly even when a numpy array is passed as initial weights.
apache_spark
train
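The bug fixed above is a classic numpy pitfall: "initial_weights or default" implicitly calls bool() on the array, which is ambiguous for more than one element. A small illustration of why the explicit None check is required:

    import numpy as np

    initial_weights = np.array([1.0, 1.0])

    # bool() on a multi-element array raises:
    #   ValueError: The truth value of an array with more than one element
    #   is ambiguous. Use a.any() or a.all()
    try:
        weights = initial_weights or [0.0] * 2
    except ValueError as err:
        print(err)

    # The fix falls back to zero weights only when none were supplied at all:
    if initial_weights is None:
        initial_weights = [0.0] * 2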
b73c4603398b905abaddc820e809fa25787042b7
diff --git a/scout/server/links.py b/scout/server/links.py index <HASH>..<HASH> 100644 --- a/scout/server/links.py +++ b/scout/server/links.py @@ -372,7 +372,7 @@ def cosmic_link(variant_obj): else: cosmic_id = cosmic_ids[0] - if cosmic_id.startswith("COSM") or cosmic_id.startswith("COSV") or cosmic_id.startswith("COSN"): + if cosmic_id.startswith("COS"): url_template = "https://cancer.sanger.ac.uk/cosmic/search?q={}" else: url_template = "https://cancer.sanger.ac.uk/cosmic/mutation/overview?id={}"
merge all cosv, cosm, and cosn into cos
Clinical-Genomics_scout
train
7028753cc82ff6ba3e8b07d01f048479c953f870
diff --git a/test/renderer/epics/kernelLaunch_spec.js b/test/renderer/epics/kernelLaunch_spec.js index <HASH>..<HASH> 100644 --- a/test/renderer/epics/kernelLaunch_spec.js +++ b/test/renderer/epics/kernelLaunch_spec.js @@ -4,11 +4,14 @@ const Rx = require('rxjs/Rx'); const EventEmitter = require('events'); +import { ActionsObservable } from 'redux-observable'; + import * as constants from '../../../src/notebook/constants'; import { setLanguageInfo, acquireKernelInfo, + watchExecutionStateEpic, } from '../../../src/notebook/epics/kernelLaunch'; import { @@ -79,3 +82,10 @@ describe('acquireKernelInfo', () => { }) }) }) + +describe('watchExecutionStateEpic', () => { + it('returns an Observable with an initial state of idle', () => { + const action$ = new ActionsObservable(); + const obs = watchExecutionStateEpic(action$); + }) +})
pass an actions observable on through
nteract_nteract
train
06345570fa978fbb11f253b85692aa415f7dbf1b
diff --git a/src/Go/Core/GeneralAspectLoaderExtension.php b/src/Go/Core/GeneralAspectLoaderExtension.php index <HASH>..<HASH> 100644 --- a/src/Go/Core/GeneralAspectLoaderExtension.php +++ b/src/Go/Core/GeneralAspectLoaderExtension.php @@ -177,6 +177,22 @@ class GeneralAspectLoaderExtension implements AspectLoaderExtension return $pointcut; } + + // within(Go\Aspects\Blog\Package\*) : This will match all the methods in all classes of Go\Aspects\Blog\Package. + // within(Go\Aspects\Blog\Package\**) : This will match all the methods in all classes of Go\Aspects\Blog\Package and its sub packages. The only difference is the extra dot(.) after package. + // within(Go\Aspects\Blog\Package\DemoClass) : This will match all the methods in the DemoClass. + // within(DemoInterface+) : This will match all the methods which are in classes which implement DemoInterface. + static $withinReg = '/ + ^within\( + (?P<class>[\w\\\*]+) + (?P<children>\+?) + \)$/x'; + + if (preg_match($withinReg, $metaInformation->value, $matches)) { + $pointcut = new Support\WithinMethodPointcut($matches['class'], (bool) $matches['children']); + return $pointcut; + } + throw new \UnexpectedValueException("Unsupported pointcut: {$metaInformation->value}"); } } \ No newline at end of file
Added support for within annotation in GeneralAspectLoaderExtension
goaop_framework
train
afb292b2b44201a914a1b2f26309e647807b8bc2
diff --git a/Router.php b/Router.php index <HASH>..<HASH> 100644 --- a/Router.php +++ b/Router.php @@ -9,6 +9,10 @@ class Router */ protected $routes = []; /** + * Array storing defined states. + */ + protected $states = []; + /** * String to prefix to every URL. Defaults to the current domain. */ protected $prefix; @@ -27,6 +31,10 @@ class Router */ public function state($name, $verb, $url, callable $callback) { + $state = new State($callback); + $url = $this->fullUrl($verb, $url); + $this->routes[$url] = $state; + $this->states[$name] = $state; } /** @@ -63,5 +71,21 @@ class Router public function get($name) { } + + /** + * Return the URL associated with the state $name. + * + * @param string $name The state name to resolve. + * @return string The generated URL, with optional scheme/domain prefixed. + */ + public function url($name) + { + + } + + protected function fullUrl($verb, $url) + { + return "$url:$verb"; + } }
add some implementation, a lot to be done of course...
monolyth-php_reroute
train
3f8c6668b6a3cea1f9aa0728825d9974fe6f8373
diff --git a/pyproject.toml b/pyproject.toml index <HASH>..<HASH> 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -16,3 +16,17 @@ multi_line_output = 3 no_lines_before = "LOCALFOLDER" order_by_type = false reverse_relative = true + +[tool.mypy] +allow_redefinition = true +disallow_subclassing_any = true +disallow_untyped_defs = true +strict_equality = true +strict_optional = false +warn_redundant_casts = true +warn_unused_configs = true +warn_unused_ignores = true + +[[tool.mypy.overrides]] +module =["aioquic.*", "aioredis.*", "cryptography.*", "h11.*", "h2.*", "priority.*", "_pytest.*", "pytest.*", "trio.*", "uvloop.*"] +ignore_missing_imports = true diff --git a/setup.cfg b/setup.cfg index <HASH>..<HASH> 100644 --- a/setup.cfg +++ b/setup.cfg @@ -7,43 +7,6 @@ max_line_length = 100 min_version = 3.7 require_code = True -[mypy] -allow_redefinition = True -disallow_subclassing_any = True -disallow_untyped_defs = True -strict_equality = True -strict_optional = False -warn_redundant_casts = True -warn_unused_configs = True -warn_unused_ignores = True - -[mypy-_pytest.*] -ignore_missing_imports = True - -[mypy-aioquic.*] -ignore_missing_imports = True - -[mypy-cryptography.*] -ignore_missing_imports = True - -[mypy-h11.*] -ignore_missing_imports = True - -[mypy-h2.*] -ignore_missing_imports = True - -[mypy-priority.*] -ignore_missing_imports = True - -[mypy-pytest.*] -ignore_missing_imports = True - -[mypy-trio.*] -ignore_missing_imports = True - -[mypy-uvloop.*] -ignore_missing_imports = True - [tool:pytest] addopts = --no-cov-on-fail --showlocals testpaths = tests diff --git a/tests/protocol/test_h11.py b/tests/protocol/test_h11.py index <HASH>..<HASH> 100755 --- a/tests/protocol/test_h11.py +++ b/tests/protocol/test_h11.py @@ -21,7 +21,7 @@ try: from unittest.mock import AsyncMock except ImportError: # Python < 3.8 - from mock import AsyncMock + from mock import AsyncMock # type: ignore BASIC_HEADERS = [("Host", "hypercorn"), ("Connection", "close")] diff --git a/tests/protocol/test_h2.py b/tests/protocol/test_h2.py index <HASH>..<HASH> 100644 --- a/tests/protocol/test_h2.py +++ b/tests/protocol/test_h2.py @@ -17,7 +17,7 @@ try: from unittest.mock import AsyncMock except ImportError: # Python < 3.8 - from mock import AsyncMock + from mock import AsyncMock # type: ignore @pytest.mark.asyncio diff --git a/tests/protocol/test_http_stream.py b/tests/protocol/test_http_stream.py index <HASH>..<HASH> 100644 --- a/tests/protocol/test_http_stream.py +++ b/tests/protocol/test_http_stream.py @@ -16,7 +16,7 @@ try: from unittest.mock import AsyncMock except ImportError: # Python < 3.8 - from mock import AsyncMock + from mock import AsyncMock # type: ignore @pytest.fixture(name="stream") diff --git a/tests/protocol/test_ws_stream.py b/tests/protocol/test_ws_stream.py index <HASH>..<HASH> 100644 --- a/tests/protocol/test_ws_stream.py +++ b/tests/protocol/test_ws_stream.py @@ -32,7 +32,7 @@ try: from unittest.mock import AsyncMock except ImportError: # Python < 3.8 - from mock import AsyncMock + from mock import AsyncMock # type: ignore def test_buffer() -> None: diff --git a/tests/trio/test_sanity.py b/tests/trio/test_sanity.py index <HASH>..<HASH> 100644 --- a/tests/trio/test_sanity.py +++ b/tests/trio/test_sanity.py @@ -16,7 +16,7 @@ try: from unittest.mock import AsyncMock except ImportError: # Python < 3.8 - from mock import AsyncMock + from mock import AsyncMock # type: ignore @pytest.mark.trio diff --git a/tox.ini b/tox.ini index <HASH>..<HASH> 100644 --- a/tox.ini +++ b/tox.ini @@ -42,7 +42,9 @@ commands = flake8 src/hypercorn/ tests/ [testenv:mypy] basepython = python3.9 -deps = mypy +deps = + mypy + types-toml commands = mypy src/hypercorn/ tests/
Update to mypy <I>. This allows the configuration to be moved to pyproject.toml, leaving only flake8 and pytest.
pgjones_hypercorn
train
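Most of the diff above appends "# type: ignore" to a version-guarded import; the pattern exists because the mock backport used on Python < 3.8 ships without type stubs. The pattern in isolation:

    try:
        from unittest.mock import AsyncMock
    except ImportError:
        # Python < 3.8: fall back to the third-party mock backport, which has
        # no stubs, so mypy is told to ignore this import.
        from mock import AsyncMock  # type: ignore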
9178066dbb27a344d43dfd2e1987c3bc1a4501e7
diff --git a/rfc1869.js b/rfc1869.js index <HASH>..<HASH> 100644 --- a/rfc1869.js +++ b/rfc1869.js @@ -18,7 +18,7 @@ // inner-esmtp-cmd ::= ("MAIL FROM:" reverse-path) / // ("RCPT TO:" forward-path) -var chew_regexp = /\s+([A-Za-z0-9][A-Za-z0-9\-]*(=[^= \x00-\x1f]+)?)$/; +var chew_regexp = /\s+([A-Za-z0-9][A-Za-z0-9\-]*(?:=[^= \x00-\x1f]+)?)$/g; exports.parse = function(type, line) { var params = []; @@ -30,11 +30,11 @@ exports.parse = function(type, line) { line = line.replace(/to:/i, ""); } - var matches; - while (matches = chew_regexp.exec(line)) { - params.push(matches[1]); - line = line.slice(matches[0].length); - } + line = line.replace(chew_regexp, function repl(str, p1) { + params.push(p1); + return ''; + }); + params = params.reverse(); // the above will "fail" (i.e. all of the line in params) on
Fixed parsing of MAIL/RCPT lines
haraka_Haraka
train
a9a6719b6fd4f4065a48e3f9d0a6cdd7d8081675
diff --git a/lib/godot/net/client.js b/lib/godot/net/client.js index <HASH>..<HASH> 100644 --- a/lib/godot/net/client.js +++ b/lib/godot/net/client.js @@ -198,7 +198,7 @@ Client.prototype.connect = function (port, host, callback) { if (connectBackoff) { connectBackoff.reset(); } - return self.emit('connect'); + self.emit('connect'); }); } @@ -227,6 +227,8 @@ Client.prototype.connect = function (port, host, callback) { // Closes the underlying network connection for this client. // Client.prototype.close = function () { + var self = this; + if (this.type === 'tcp' || this.type === 'unix') { this.socket.destroy(); } @@ -240,5 +242,7 @@ Client.prototype.close = function () { self.remove(self.producers[id]); }); - return this.emit('close'); + this.socket.on('close', function () { + self.emit('close'); + }); };
[fix] don't return the 'connect' emit in case there is a callback and ensure the socket is closed before emitting close on the client
nodejitsu_godot
train
f2703b85acfee01e5ebc7b90b9f82116d3c72b27
diff --git a/src/Libraries/Builder/ThemeBuilder.php b/src/Libraries/Builder/ThemeBuilder.php index <HASH>..<HASH> 100644 --- a/src/Libraries/Builder/ThemeBuilder.php +++ b/src/Libraries/Builder/ThemeBuilder.php @@ -119,6 +119,8 @@ class ThemeBuilder } } + dd($blocksFound, $extraTemplates); + if (!empty($extraTemplates)) { $blocksCsv = fopen(base_path().'/resources/views/themes/'.self::$_theme->theme.'/import/blocks.csv', 'w'); fputcsv($blocksCsv, [ @@ -1116,6 +1118,10 @@ class ThemeBuilder self::$_fileTemplateBlocks[$template][] = $block_name; } + if (!empty($options['import_return_value'])) { + $output = $options['import_return_value']; + } + return $output; } diff --git a/src/Models/Theme.php b/src/Models/Theme.php index <HASH>..<HASH> 100644 --- a/src/Models/Theme.php +++ b/src/Models/Theme.php @@ -247,7 +247,6 @@ Class Theme extends Eloquent // install theme blocks and templates try { ThemeBuilder::updateTheme($newTheme->id); - ThemeBuilder::cleanOverwriteFile($newTheme->id); } catch (\Exception $e) { // ignore no blocks found ? } @@ -257,6 +256,12 @@ Class Theme extends Eloquent self::_pageImportData($newTheme); } + try { + ThemeBuilder::cleanOverwriteFile($newTheme->id); + } catch (\Exception $e) { + // ignore no blocks found ? + } + return 1; } return 0;
improvements to clearing out the blocks.csv on import
Web-Feet_coasterframework
train
c73883a0e8fbd6c79c76035edd51fa01665bb7d4
diff --git a/lib/capnotify/component.rb b/lib/capnotify/component.rb index <HASH>..<HASH> 100644 --- a/lib/capnotify/component.rb +++ b/lib/capnotify/component.rb @@ -27,7 +27,7 @@ module Capnotify end def build! - @builder.call(self) unless @builder.nil? + @builder.call(self) and @builder = nil unless @builder.nil? return self end diff --git a/spec/capnotify/component_spec.rb b/spec/capnotify/component_spec.rb index <HASH>..<HASH> 100644 --- a/spec/capnotify/component_spec.rb +++ b/spec/capnotify/component_spec.rb @@ -16,6 +16,22 @@ describe Capnotify::Component do Capnotify::Component.new('asdf', :css_class => 'great-component').css_class.should == 'great-component' end + it "should allow building with a block" do + c = Capnotify::Component.new(:test_component) do |c| + c.header = 'My Header' + + c.content = {} + c.content['this is'] = 'a test' + end + + c.builder.should_not be_nil + c.header.should be_nil + + c.build! + + c.header.should == 'My Header' + c.builder.should be_nil + end end context "#content=" do
Capnotify::Component#build! now clears @builder; also wrote tests around this.
spikegrobstein_capnotify
train
3de5d9ad410df95533ea7b7671997d8dcfffcfae
diff --git a/framework/Orm/Extensions/Tree.php b/framework/Orm/Extensions/Tree.php index <HASH>..<HASH> 100644 --- a/framework/Orm/Extensions/Tree.php +++ b/framework/Orm/Extensions/Tree.php @@ -356,7 +356,6 @@ class Tree public function beforeSave(Model &$model) { - if ($model->isNew()) { if (empty($model->parent)) { $this->insertModelAsLastRoot($model); @@ -371,17 +370,19 @@ class Tree if ($oldParent != $model->parent) { $model->refreshTreeColumns(); - /** @var \T4\Dbal\Connection $connection */ - $connection = $class::getDbConnection(); - $query = new Query(); - $query - ->select('count(*)') - ->from($class::getTableName()) - ->where('__id=:id AND __lft>:lft AND __rgt<:rgt') - ->params([':id' => $model->parent->getPk(), ':lft'=>$model->__lft, ':rgt'=>$model->__rgt]); - - if ($connection->query($query)->fetchScalar() > 0) { - throw new Exception('Parent must not be in children!'); + if (!empty($model->parent)) { + /** @var \T4\Dbal\Connection $connection */ + $connection = $class::getDbConnection(); + $query = new Query(); + $query + ->select('count(*)') + ->from($class::getTableName()) + ->where('__id=:id AND __lft>:lft AND __rgt<:rgt') + ->params([':id' => $model->parent->getPk(), ':lft'=>$model->__lft, ':rgt'=>$model->__rgt]); + + if ($connection->query($query)->fetchScalar() > 0) { + throw new Exception('Parent must not be in children!'); + } } if (empty($model->parent)) {
ONCE-<I>: edited nested tree beforeSave method
pr-of-it_t4
train
5d21f52205e51039b3a65c7102ce672810c3ad80
diff --git a/spec/logger_pipe_spec.rb b/spec/logger_pipe_spec.rb index <HASH>..<HASH> 100644 --- a/spec/logger_pipe_spec.rb +++ b/spec/logger_pipe_spec.rb @@ -56,8 +56,9 @@ describe LoggerPipe do end end - context ":return and :logging" do + context ":returns and :logging" do { + # [:returns, :logging] => [return of LoggerPipe.run, logging expectations] [:nil , :nil ] => [nil , {foo: false, bar: false, baz: false}], # OK [:nil , :stdout] => [nil , {foo: true , bar: false, baz: true }], # OK [:nil , :stderr] => [nil , {foo: false, bar: true , baz: false}], # OK
add description about examples for :returns and :logging
groovenauts_logger_pipe
train
492188a17668be0b2a07e1a75bcd208fdbd79540
diff --git a/project/library/CM/Request/Abstract.php b/project/library/CM/Request/Abstract.php index <HASH>..<HASH> 100644 --- a/project/library/CM/Request/Abstract.php +++ b/project/library/CM/Request/Abstract.php @@ -131,6 +131,23 @@ abstract class CM_Request_Abstract { } /** + * @return CM_Session + */ + public function getSession() { + if (!$this->hasSession()) { + $this->_session = CM_Session::getInstance($this->_sessionId); + } + return $this->_session; + } + + /** + * @return boolean + */ + public function hasSession() { + return isset($this->_session); + } + + /** * @param string $name * @return bool */
t<I>: made Session instantiation lazy
cargomedia_cm
train
3812a6a8466f639cfd538c05c3ff2730a70b1440
diff --git a/integration-cli/docker_api_inspect_test.go b/integration-cli/docker_api_inspect_test.go index <HASH>..<HASH> 100644 --- a/integration-cli/docker_api_inspect_test.go +++ b/integration-cli/docker_api_inspect_test.go @@ -7,7 +7,7 @@ import ( "testing" ) -func TestInspectContainerResponse(t *testing.T) { +func TestInspectApiContainerResponse(t *testing.T) { runCmd := exec.Command(dockerBinary, "run", "-d", "busybox", "true") out, _, err := runCommandWithOutput(runCmd) errorOut(err, t, fmt.Sprintf("failed to create a container: %v %v", out, err))
Use prefix naming for inspect tests
containers_storage
train
42c7c3916b279873d3c26611e41dbf3b8243b312
diff --git a/nifty-core/src/main/java/com/facebook/nifty/core/NiftyDispatcher.java b/nifty-core/src/main/java/com/facebook/nifty/core/NiftyDispatcher.java index <HASH>..<HASH> 100644 --- a/nifty-core/src/main/java/com/facebook/nifty/core/NiftyDispatcher.java +++ b/nifty-core/src/main/java/com/facebook/nifty/core/NiftyDispatcher.java @@ -118,8 +118,11 @@ public class NiftyDispatcher extends SimpleChannelUpstreamHandler RequestContext.clearCurrentContext(); } - ThriftMessage response = message.getMessageFactory().create(messageTransport.getOutputBuffer()); - writeResponse(ctx, response, requestSequenceId, message.isOrderedResponsesRequired()); + // Only write response if the client is still there + if (ctx.getChannel().isConnected()) { + ThriftMessage response = message.getMessageFactory().create(messageTransport.getOutputBuffer()); + writeResponse(ctx, response, requestSequenceId, message.isOrderedResponsesRequired()); + } } catch (TException e) { Channels.fireExceptionCaught(ctx, e);
Skip writing responses if client disconnected while the request was running
facebookarchive_nifty
train
0fbe2fac22851fd7decbc0da8c548cd3c364f5ce
diff --git a/lib/elasticsearch/transport/http.rb b/lib/elasticsearch/transport/http.rb index <HASH>..<HASH> 100644 --- a/lib/elasticsearch/transport/http.rb +++ b/lib/elasticsearch/transport/http.rb @@ -89,7 +89,6 @@ module ElasticSearch def count(index, type, query, options={}) if query.is_a?(Hash) # patron cannot submit get requests with content, so if query is a hash, post it instead (assume a query hash is using the query dsl) - query = {:query => query} unless query[:query] # if there is no query element, wrap query in one response = request(:post, generate_path(:index => index, :type => type, :id => "_count", :params => options), encoder.encode(query)) else response = request(:get, generate_path(:index => index, :type => type, :id => "_count", :params => options.merge(:q => query)))
count does not wrap queries with a "query" key like search
grantr_rubberband
train
ce5a361ff0bf375b78464fb0a393d6c9a9994e53
diff --git a/src/main/java/ninja/S3Dispatcher.java b/src/main/java/ninja/S3Dispatcher.java index <HASH>..<HASH> 100644 --- a/src/main/java/ninja/S3Dispatcher.java +++ b/src/main/java/ninja/S3Dispatcher.java @@ -523,7 +523,7 @@ public class S3Dispatcher implements WebDispatcher { return; } - String etag = BaseEncoding.base16().encode(hash.asBytes()); + String etag = BaseEncoding.base16().encode(hash.asBytes()).toLowerCase(); properties.put(HTTP_HEADER_NAME_ETAG, etag); object.storeProperties(properties); Response response = ctx.respondWith(); @@ -578,7 +578,7 @@ public class S3Dispatcher implements WebDispatcher { Files.copy(src.getPropertiesFile(), object.getPropertiesFile()); } HashCode hash = Files.hash(object.getFile(), Hashing.md5()); - String etag = BaseEncoding.base16().encode(hash.asBytes()); + String etag = BaseEncoding.base16().encode(hash.asBytes()).toLowerCase(); XMLStructuredOutput structuredOutput = ctx.respondWith().addHeader(HTTP_HEADER_NAME_ETAG, etag(etag)).xml(); structuredOutput.beginOutput("CopyObjectResult"); @@ -617,7 +617,7 @@ public class S3Dispatcher implements WebDispatcher { String etag = properties.getProperty(HTTP_HEADER_NAME_ETAG); if (Strings.isEmpty(etag)) { HashCode hash = Files.hash(object.getFile(), Hashing.md5()); - etag = BaseEncoding.base16().encode(hash.asBytes()); + etag = BaseEncoding.base16().encode(hash.asBytes()).toLowerCase(); Map<String, String> data = new HashMap<>(); properties.forEach((key, value) -> data.put(key.toString(), String.valueOf(value))); data.put(HTTP_HEADER_NAME_ETAG, etag); @@ -697,7 +697,7 @@ public class S3Dispatcher implements WebDispatcher { } part.close(); - String etag = BaseEncoding.base16().encode(Files.hash(partFile, Hashing.md5()).asBytes()); + String etag = BaseEncoding.base16().encode(Files.hash(partFile, Hashing.md5()).asBytes()).toLowerCase(); ctx.respondWith() .setHeader(HTTP_HEADER_NAME_ETAG, etag) .addHeader(HttpHeaderNames.ACCESS_CONTROL_EXPOSE_HEADERS, HTTP_HEADER_NAME_ETAG)
Transfers etag hashes in lower case ⌨️
scireum_s3ninja
train
47d833854cf273b601f03834bd711bf4e3f0647a
diff --git a/openhtf/plugs/usb/__init__.py b/openhtf/plugs/usb/__init__.py index <HASH>..<HASH> 100644 --- a/openhtf/plugs/usb/__init__.py +++ b/openhtf/plugs/usb/__init__.py @@ -26,6 +26,8 @@ To use these plugs: def MyPhase(test, adb): adb.Shell('ls') """ +import commands +import logging import openhtf.plugs as plugs from openhtf import conf @@ -36,9 +38,10 @@ from openhtf.plugs.usb import local_usb conf.Declare('usb_server', 'USB Server IP/Hostname') conf.Declare('usb_server_port', 'USB Server Port', default_value=10000) - conf.Declare('libusb_rsa_key', 'A private key file for use by libusb auth.') +conf.Declare('unit_name', 'cambrionix unit name') +_LOG = logging.getLogger(__name__) def _open_usb_handle(**kwargs): """Open a UsbHandle subclass, based on configuration. @@ -57,17 +60,35 @@ def _open_usb_handle(**kwargs): else: return local_usb.LibUsbHandle.Open(**kwargs) +def _get_usb_serial(unit_name): + """Get a usb serial based on the Cambrionix unit mac address in configuration.""" + cmd = '/usr/local/google/home/amyxchen/esuit64 -t "LIST, %s"' % unit_name + serial = commands.getstatusoutput(cmd)[1] + + serial = 'LWP1A02A15110225' + _LOG.error('get serial:%s on unit:%s' % (serial, unit_name)) + + return serial + # pylint: disable=too-few-public-methods class FastbootPlug(plugs.BasePlug): """Plug that provides fastboot.""" def __new__(cls): + serial = None + unit_name = conf.Config().unit_name + + if unit_name: + serial = _get_usb_serial(unit_name) + device = fastboot_device.FastbootDevice.Connect( _open_usb_handle( interface_class=fastboot_device.CLASS, interface_subclass=fastboot_device.SUBCLASS, - interface_protocol=fastboot_device.PROTOCOL)) + interface_protocol=fastboot_device.PROTOCOL, + serial_number = serial + )) device.TearDown = device.Close # pylint: disable=invalid-name return device @@ -81,11 +102,19 @@ class AdbPlug(plugs.BasePlug): kwargs['rsa_keys'] = [adb_device.M2CryptoSigner( conf.Config().libusb_rsa_key)] + serial = None + unit_name = conf.Config().unit_name + + if unit_name: + serial = _get_usb_serial(unit_name) + device = adb_device.AdbDevice.Connect( _open_usb_handle( interface_class=adb_device.CLASS, interface_subclass=adb_device.SUBCLASS, - interface_protocol=adb_device.PROTOCOL), + interface_protocol=adb_device.PROTOCOL, + serial_number=serial + ), **kwargs) device.TearDown = device.Close # pylint: disable=invalid-name - return device + return device \ No newline at end of file
change for AdbPlug, adding serial number
google_openhtf
train
9be39b3dd4af20f277a0781642bf0270b7125f49
diff --git a/test/eventlog.test.js b/test/eventlog.test.js index <HASH>..<HASH> 100644 --- a/test/eventlog.test.js +++ b/test/eventlog.test.js @@ -174,7 +174,7 @@ Object.keys(testAPIs).forEach(API => { describe('Options: limit', function() { it('returns 1 item when limit is 0', () => { - const iter = db.iterator({ limit: 1 }) + const iter = db.iterator({ limit: 0 }) const first = iter.next().value const second = iter.next().value assert.equal(first.hash, last(items)) diff --git a/test/feed.test.js b/test/feed.test.js index <HASH>..<HASH> 100644 --- a/test/feed.test.js +++ b/test/feed.test.js @@ -214,7 +214,7 @@ Object.keys(testAPIs).forEach(API => { describe('Options: limit', function() { it('returns 1 item when limit is 0', () => { - const iter = db.iterator({ limit: 1 }) + const iter = db.iterator({ limit: 0 }) const first = iter.next().value const second = iter.next().value assert.equal(first.hash, last(items))
Fix: tests weren't actually testing limit 0 cases
orbitdb_orbit-db
train
1425e7f0f6b867e922c592b0138fc0de38a7d1b2
diff --git a/aeron-driver/src/main/java/io/aeron/driver/DirectPublication.java b/aeron-driver/src/main/java/io/aeron/driver/DirectPublication.java index <HASH>..<HASH> 100644 --- a/aeron-driver/src/main/java/io/aeron/driver/DirectPublication.java +++ b/aeron-driver/src/main/java/io/aeron/driver/DirectPublication.java @@ -63,10 +63,10 @@ public class DirectPublication implements DriverManagedResource final int termLength = rawLog.termLength(); this.termWindowLength = Configuration.ipcPublicationTermWindowLength(termLength); + this.tripGain = termWindowLength / 8; this.positionBitsToShift = Integer.numberOfTrailingZeros(termLength); - this.rawLog = rawLog; this.publisherLimit = publisherLimit; - this.tripGain = this.termWindowLength / 8; + this.rawLog = rawLog; } public int sessionId() diff --git a/aeron-driver/src/main/java/io/aeron/driver/NetworkPublication.java b/aeron-driver/src/main/java/io/aeron/driver/NetworkPublication.java index <HASH>..<HASH> 100644 --- a/aeron-driver/src/main/java/io/aeron/driver/NetworkPublication.java +++ b/aeron-driver/src/main/java/io/aeron/driver/NetworkPublication.java @@ -33,6 +33,8 @@ import org.agrona.concurrent.status.Position; import java.net.InetSocketAddress; import java.nio.ByteBuffer; +import static io.aeron.driver.Configuration.PUBLICATION_HEARTBEAT_TIMEOUT_NS; +import static io.aeron.driver.Configuration.PUBLICATION_SETUP_TIMEOUT_NS; import static io.aeron.driver.status.SystemCounterDescriptor.*; import static io.aeron.logbuffer.LogBufferDescriptor.*; import static io.aeron.logbuffer.TermScanner.*; @@ -144,8 +146,8 @@ public class NetworkPublication termLengthMask = termLength - 1; flowControl.initialize(initialTermId, termLength); - timeOfLastSendOrHeartbeat = nanoClock.nanoTime() - Configuration.PUBLICATION_HEARTBEAT_TIMEOUT_NS - 1; - timeOfLastSetup = nanoClock.nanoTime() - Configuration.PUBLICATION_SETUP_TIMEOUT_NS - 1; + timeOfLastSendOrHeartbeat = nanoClock.nanoTime() - PUBLICATION_HEARTBEAT_TIMEOUT_NS - 1; + timeOfLastSetup = nanoClock.nanoTime() - PUBLICATION_SETUP_TIMEOUT_NS - 1; positionBitsToShift = Integer.numberOfTrailingZeros(termLength); termWindowLength = Configuration.publicationTermWindowLength(termLength); @@ -363,7 +365,7 @@ public class NetworkPublication private void setupMessageCheck(final long now, final int activeTermId, final int termOffset) { - if (now > (timeOfLastSetup + Configuration.PUBLICATION_SETUP_TIMEOUT_NS)) + if (now > (timeOfLastSetup + PUBLICATION_SETUP_TIMEOUT_NS)) { setupFrameBuffer.clear(); setupHeader.activeTermId(activeTermId).termOffset(termOffset); @@ -386,7 +388,7 @@ public class NetworkPublication private void heartbeatMessageCheck(final long now, final int activeTermId, final int termOffset) { - if (now > (timeOfLastSendOrHeartbeat + Configuration.PUBLICATION_HEARTBEAT_TIMEOUT_NS)) + if (now > (timeOfLastSendOrHeartbeat + PUBLICATION_HEARTBEAT_TIMEOUT_NS)) { heartbeatFrameBuffer.clear(); dataHeader.termId(activeTermId).termOffset(termOffset);
[Java] Minor code cleanup.
real-logic_aeron
train
9d065e72ded773b2f9cac2380f55991ace10133f
diff --git a/lib/core/src/server/manager/manager-webpack.config.js b/lib/core/src/server/manager/manager-webpack.config.js index <HASH>..<HASH> 100644 --- a/lib/core/src/server/manager/manager-webpack.config.js +++ b/lib/core/src/server/manager/manager-webpack.config.js @@ -55,12 +55,11 @@ export default ({ configDir, configType, entries, dll, outputDir, cache, babelOp }), template: require.resolve(`../templates/index.ejs`), }), - new webpack.DefinePlugin({ 'process.env': stringified }), new CaseSensitivePathsPlugin(), new Dotenv({ silent: true }), // graphql sources check process variable new DefinePlugin({ - process: JSON.stringify(true), + process: { browser: true, env: stringified }, NODE_ENV: JSON.stringify(process.env.NODE_ENV), }), // See https://github.com/graphql/graphql-language-service/issues/111#issuecomment-306723400
#<I> set a better value for process in the manager webpack config
storybooks_storybook
train
67571cfa8530a03cacb86b46cc174c12b1797c63
diff --git a/Cake/Test/TestCase/ORM/TableTest.php b/Cake/Test/TestCase/ORM/TableTest.php index <HASH>..<HASH> 100644 --- a/Cake/Test/TestCase/ORM/TableTest.php +++ b/Cake/Test/TestCase/ORM/TableTest.php @@ -884,7 +884,37 @@ class TableTest extends \Cake\TestSuite\TestCase { ]); $result = $table->find('all')->contain(['tag'])->first(); $this->assertInstanceOf('TestApp\Model\Entity\Tag', $result->tags[0]); - $this->assertInstanceOf('TestApp\Model\Entity\ArticlesTag', $result->tags[0]->extraInfo); + $this->assertInstanceOf( + 'TestApp\Model\Entity\ArticlesTag', + $result->tags[0]->extraInfo + ); + } + +/** + * Tests that recently fetched entities are always clean + * + * @return void + */ + public function testFindCleanEntities() { + $table = new \TestApp\Model\Repository\ArticleTable([ + 'connection' => $this->connection, + ]); + $results = $table->find('all')->contain(['tag', 'author'])->toArray(); + $this->assertCount(3, $results); + foreach ($results as $article) { + $this->assertFalse($article->dirty('id')); + $this->assertFalse($article->dirty('title')); + $this->assertFalse($article->dirty('author_id')); + $this->assertFalse($article->dirty('body')); + $this->assertFalse($article->dirty('published')); + $this->assertFalse($article->dirty('author')); + $this->assertFalse($article->author->dirty('id')); + $this->assertFalse($article->author->dirty('name')); + $this->assertFalse($article->dirty('tag')); + if ($article->tag) { + $this->assertFalse($article->tag[0]->extraInfo->dirty('tag_id')); + } + } } /**
Adding test to prove that fetched entities are marked as clean
cakephp_cakephp
train
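The test above pins down a dirty-tracking invariant: fields populated while hydrating an entity from the database must not be marked dirty; only later writes may. A toy Python model of that invariant (not CakePHP's actual Entity API):

class Entity:
    def __init__(self, data):
        self._fields = dict(data)  # hydration from the DB ...
        self._dirty = set()        # ... leaves every field clean

    def set(self, name, value):
        self._fields[name] = value
        self._dirty.add(name)      # only explicit writes mark a field dirty

    def dirty(self, name):
        return name in self._dirty

row = Entity({"id": 1, "title": "First Post"})
assert not row.dirty("title")      # freshly fetched => clean
row.set("title", "Edited")
assert row.dirty("title")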
de95d196a0bc97ed7d0994d54b3ac60c83448232
diff --git a/bundles/org.eclipse.orion.client.ui/web/orion/widgets/nav/mini-nav.js b/bundles/org.eclipse.orion.client.ui/web/orion/widgets/nav/mini-nav.js index <HASH>..<HASH> 100644 --- a/bundles/org.eclipse.orion.client.ui/web/orion/widgets/nav/mini-nav.js +++ b/bundles/org.eclipse.orion.client.ui/web/orion/widgets/nav/mini-nav.js @@ -253,9 +253,10 @@ define(['require', 'i18n!orion/edit/nls/messages', 'orion/objects', 'orion/webui this.createActionSections(); var selectionTools = this.selectionActionsScope; var treeRoot = this.treeRoot, commandRegistry = this.commandRegistry; - FileCommands.updateNavTools(this.registry, commandRegistry, this, this.newActionsScope, selectionTools, treeRoot, true); + var modelItems = selections || treeRoot; + FileCommands.updateNavTools(this.registry, commandRegistry, this, this.newActionsScope, selectionTools, modelItems, true); commandRegistry.destroy(this.folderNavActionsScope); - commandRegistry.renderCommands(this.folderNavActionsScope, this.folderNavActionsScope, this.treeRoot, this, "tool"); //$NON-NLS-0$ + commandRegistry.renderCommands(this.folderNavActionsScope, this.folderNavActionsScope, modelItems, this, "tool"); //$NON-NLS-0$ } });
[Bug <I>] New File command is missing when a folder is selected at workspace root
eclipse_orion.client
train
9eda78a179ad4d5477fa9d5ebe8bd760fa925c95
diff --git a/modules/pubmaticBidAdapter.js b/modules/pubmaticBidAdapter.js
index <HASH>..<HASH> 100644
--- a/modules/pubmaticBidAdapter.js
+++ b/modules/pubmaticBidAdapter.js
@@ -185,7 +185,7 @@ _each(NATIVE_ASSETS, anAsset => { NATIVE_ASSET_ID_TO_KEY_MAP[anAsset.ID] = anAss
 // loading NATIVE_ASSET_KEY_TO_ASSET_MAP
 _each(NATIVE_ASSETS, anAsset => { NATIVE_ASSET_KEY_TO_ASSET_MAP[anAsset.KEY] = anAsset });
 
-function _getDomainFromURL(url) {
+export function _getDomainFromURL(url) {
   let anchor = document.createElement('a');
   anchor.href = url;
   return anchor.hostname;
@@ -1162,7 +1162,11 @@ export const spec = {
     // First Party Data
     const commonFpd = (bidderRequest && bidderRequest.ortb2) || {};
     if (commonFpd.site) {
+      const { page, domain, ref } = payload.site;
       mergeDeep(payload, {site: commonFpd.site});
+      payload.site.page = page;
+      payload.site.domain = domain;
+      payload.site.ref = ref;
     }
     if (commonFpd.user) {
       mergeDeep(payload, {user: commonFpd.user});
diff --git a/test/spec/modules/pubmaticBidAdapter_spec.js b/test/spec/modules/pubmaticBidAdapter_spec.js
index <HASH>..<HASH> 100644
--- a/test/spec/modules/pubmaticBidAdapter_spec.js
+++ b/test/spec/modules/pubmaticBidAdapter_spec.js
@@ -1,5 +1,5 @@
 import {expect} from 'chai';
-import {spec, checkVideoPlacement} from 'modules/pubmaticBidAdapter.js';
+import {spec, checkVideoPlacement, _getDomainFromURL} from 'modules/pubmaticBidAdapter.js';
 import * as utils from 'src/utils.js';
 import {config} from 'src/config.js';
 import { createEidsArray } from 'modules/userId/eids.js';
@@ -1688,17 +1688,66 @@ describe('PubMatic adapter', function () {
 
   describe('FPD', function() {
     let newRequest;
 
-    it('ortb2.site should be merged in the request', function() {
+    describe('ortb2.site should not override page, domain & ref values', function() {
+      it('When above properties are present in ortb2.site', function() {
+        const ortb2 = {
+          site: {
+            domain: 'page.example.com',
+            page: 'https://page.example.com/here.html',
+            ref: 'https://page.example.com/here.html'
+          }
+        };
+        const request = spec.buildRequests(bidRequests, {ortb2});
+        let data = JSON.parse(request.data);
+        expect(data.site.domain).not.equal('page.example.com');
+        expect(data.site.page).not.equal('https://page.example.com/here.html');
+        expect(data.site.ref).not.equal('https://page.example.com/here.html');
+      });
+
+      it('When above properties are absent in ortb2.site', function () {
+        const ortb2 = {
+          site: {}
+        };
+        let request = spec.buildRequests(bidRequests, {
+          auctionId: 'new-auction-id',
+          ortb2
+        });
+        let data = JSON.parse(request.data);
+        let response = spec.interpretResponse(bidResponses, request);
+        expect(data.site.page).to.equal(bidRequests[0].params.kadpageurl);
+        expect(data.site.domain).to.equal(_getDomainFromURL(data.site.page));
+        expect(response[0].referrer).to.equal(data.site.ref);
+      });
+
+      it('With some extra properties in ortb2.site', function() {
+        const ortb2 = {
+          site: {
+            domain: 'page.example.com',
+            page: 'https://page.example.com/here.html',
+            ref: 'https://page.example.com/here.html',
+            cat: ['IAB2'],
+            sectioncat: ['IAB2-2']
+          }
+        };
+        const request = spec.buildRequests(bidRequests, {ortb2});
+        let data = JSON.parse(request.data);
+        expect(data.site.domain).not.equal('page.example.com');
+        expect(data.site.page).not.equal('https://page.example.com/here.html');
+        expect(data.site.ref).not.equal('https://page.example.com/here.html');
+        expect(data.site.cat).to.deep.equal(['IAB2']);
+        expect(data.site.sectioncat).to.deep.equal(['IAB2-2']);
+      });
+    });
+
+    it('ortb2.site should be merged except page, domain & ref in the request', function() {
       const ortb2 = {
         site: {
-          domain: 'page.example.com',
           cat: ['IAB2'],
           sectioncat: ['IAB2-2']
         }
      };
       const request = spec.buildRequests(bidRequests, {ortb2});
       let data = JSON.parse(request.data);
-      expect(data.site.domain).to.equal('page.example.com');
       expect(data.site.cat).to.deep.equal(['IAB2']);
       expect(data.site.sectioncat).to.deep.equal(['IAB2-2']);
     });
Pubmatic bid adapter: improved site object handling (#<I>) * UOE-<I>: Vanilla JS: Stop overwriting site.page, site.ref and site.domain * UOE-<I>: Vanilla JS: Stop overwriting site.page, site.ref and site.domain in PrebidServerBidAdapter * Rename variable name * Revert changes from PrebidServerBidAdapter file * Test cases written for the ticket UOE-<I> - stop overriding page, domain, site
prebid_Prebid.js
train
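A hedged Python sketch of the pattern in the Prebid commit above: merge first-party data into the payload, but snapshot and restore the fields the adapter computed itself. dict.update stands in for mergeDeep here, so nested merging is simplified:

def merge_site(payload_site, fpd_site, protected=("page", "domain", "ref")):
    keep = {k: payload_site[k] for k in protected if k in payload_site}
    merged = dict(payload_site)
    merged.update(fpd_site)   # shallow stand-in for mergeDeep
    merged.update(keep)       # restore the adapter's own values
    return merged

site = merge_site(
    {"page": "https://pub.example/x", "domain": "pub.example"},
    {"page": "https://page.example.com/here.html", "cat": ["IAB2"]},
)
assert site["page"] == "https://pub.example/x"   # not overridden by FPD
assert site["cat"] == ["IAB2"]                   # extra FPD fields kept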
c45a43580595ae36cfa177bbfc3c4130b3176920
diff --git a/src/Symfony/Component/Form/Tests/Extension/Core/Type/ChoiceTypeTest.php b/src/Symfony/Component/Form/Tests/Extension/Core/Type/ChoiceTypeTest.php index <HASH>..<HASH> 100644 --- a/src/Symfony/Component/Form/Tests/Extension/Core/Type/ChoiceTypeTest.php +++ b/src/Symfony/Component/Form/Tests/Extension/Core/Type/ChoiceTypeTest.php @@ -153,7 +153,6 @@ class ChoiceTypeTest extends \Symfony\Component\Form\Test\TypeTestCase { $view = $this->factory->create('Symfony\Component\Form\Extension\Core\Type\ChoiceType', null, array( 'choices' => $this->booleanChoicesWithNull, - 'choices_as_values' => true, 'expanded' => true, ))->createView(); @@ -179,7 +178,6 @@ class ChoiceTypeTest extends \Symfony\Component\Form\Test\TypeTestCase { $view = $this->factory->create('Symfony\Component\Form\Extension\Core\Type\ChoiceType', false, array( 'choices' => $this->booleanChoicesWithNull, - 'choices_as_values' => true, 'expanded' => true, ))->createView();
[Form] Fix tests added in #<I>
symfony_symfony
train
78fab728e46b2a0e917c88f6ee9d8477b2e68786
diff --git a/dependency-check-maven/src/main/java/org/owasp/dependencycheck/maven/ReportAggregationMojo.java b/dependency-check-maven/src/main/java/org/owasp/dependencycheck/maven/ReportAggregationMojo.java index <HASH>..<HASH> 100644 --- a/dependency-check-maven/src/main/java/org/owasp/dependencycheck/maven/ReportAggregationMojo.java +++ b/dependency-check-maven/src/main/java/org/owasp/dependencycheck/maven/ReportAggregationMojo.java @@ -150,7 +150,9 @@ public abstract class ReportAggregationMojo extends AbstractMojo implements Mave * @param sink the sink to write the report to * @param locale the locale to use when generating the report * @throws MavenReportException if a maven report exception occurs + * @deprecated use {@link #generate(org.apache.maven.doxia.sink.Sink, org.apache.maven.doxia.sink.SinkFactory, java.util.Locale) instead. */ + @Deprecated public final void generate(@SuppressWarnings("deprecation") org.codehaus.doxia.sink.Sink sink, Locale locale) throws MavenReportException { generate((Sink) sink, null, locale); } @@ -159,6 +161,19 @@ public abstract class ReportAggregationMojo extends AbstractMojo implements Mave * Generates the Dependency-Check Site Report. * * @param sink the sink to write the report to + * @param locale the locale to use when generating the report + * @throws MavenReportException if a maven report exception occurs + * @deprecated use {@link #generate(org.apache.maven.doxia.sink.Sink, org.apache.maven.doxia.sink.SinkFactory, java.util.Locale) instead. + */ + @Deprecated + public final void generate(Sink sink, Locale locale) throws MavenReportException { + generate(sink, null, locale); + } + + /** + * Generates the Dependency-Check Site Report. + * + * @param sink the sink to write the report to * @param sinkFactory the sink factory * @param locale the locale to use when generating the report * @throws MavenReportException if a maven report exception occurs
added an additional generate method. Former-commit-id: <I>b<I>b<I>da<I>f3b4bc<I>b<I>f<I>e<I>c<I>
jeremylong_DependencyCheck
train
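The commit above keeps the old report-generation signature alive by delegating to the new one and marking it deprecated. The same shim pattern in Python (the function names are illustrative, not the Maven reporting API):

import warnings

def generate(sink, sink_factory, locale):
    # new preferred entry point
    print(f"rendering report for {locale} via {sink}")

def generate_legacy(sink, locale):
    # deprecated overload: warn, then forward to the new signature
    warnings.warn("use generate(sink, sink_factory, locale) instead",
                  DeprecationWarning, stacklevel=2)
    generate(sink, None, locale)

generate_legacy("stdout-sink", "en_US")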
d47a4529c5c838b340297a48a73d743da77cb4ec
diff --git a/tests/searchcommands/__init__.py b/tests/searchcommands/__init__.py index <HASH>..<HASH> 100644 --- a/tests/searchcommands/__init__.py +++ b/tests/searchcommands/__init__.py @@ -29,7 +29,7 @@ else: from splunklib.searchcommands import environment from splunklib import searchcommands - package_directory = path.dirname(path.abspath(__file__)) + package_directory = path.dirname(path.realpath(__file__)) project_root = path.dirname(path.dirname(package_directory))
Fixed a test break related to running tests from a symbolically linked directory
splunk_splunk-sdk-python
train
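This fix is in Python, so the point can be shown directly: os.path.abspath only normalizes the path string, while os.path.realpath also resolves symlinks, which is why tests run from a symbolically linked directory now locate their real package directory. A self-contained POSIX demonstration:

import os
import tempfile

target = tempfile.mkdtemp()
link = target + "-link"
os.symlink(target, link)

nested = os.path.join(link, "tests")
print(os.path.abspath(nested))   # keeps the '-link' component
print(os.path.realpath(nested))  # resolves to the real directory

os.unlink(link)
os.rmdir(target)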
b9cc9fd683ef2023cd4be249dd0b314134409d0c
diff --git a/src/JonnyW/PhantomJs/Client.php b/src/JonnyW/PhantomJs/Client.php index <HASH>..<HASH> 100644 --- a/src/JonnyW/PhantomJs/Client.php +++ b/src/JonnyW/PhantomJs/Client.php @@ -197,4 +197,15 @@ class Client implements ClientInterface { return $this->procedure; } + + /** + * Get procedure compiler. + * + * @access public + * @return \JonnyW\PhantomJs\Procedure\ProcedureCompilerInterface + */ + public function getProcedureCompiler() + { + return $this->procedureCompiler; + } }
Added the procedure compiler to the Client class to match instructions in docs. <URL>
jonnnnyw_php-phantomjs
train
5ff00fcd3d3c4229246bf8ef749b6bc27a5cdb13
diff --git a/activesupport/lib/active_support/core_ext/array/conversions.rb b/activesupport/lib/active_support/core_ext/array/conversions.rb index <HASH>..<HASH> 100644 --- a/activesupport/lib/active_support/core_ext/array/conversions.rb +++ b/activesupport/lib/active_support/core_ext/array/conversions.rb @@ -82,23 +82,8 @@ class Array end end - # Converts a collection of elements into a formatted string by calling - # <tt>to_s</tt> on all elements and joining them. Having this model: - # - # class Blog < ActiveRecord::Base - # def to_s - # title - # end - # end - # - # Blog.all.map(&:title) #=> ["First Post", "Second Post", "Third post"] - # - # <tt>to_formatted_s</tt> shows us: - # - # Blog.all.to_formatted_s # => "First PostSecond PostThird Post" - # - # Adding in the <tt>:db</tt> argument as the format yields a comma separated - # id list: + # Extends <tt>Array#to_s</tt> to convert a collection of elements into a + # comma separated id list if <tt>:db</tt> argument is given as the format. # # Blog.all.to_formatted_s(:db) # => "1,2,3" def to_formatted_s(format = :default)
Array#to_formatted_s does not call each element's to_s anymore. Array#to_s calls each element's inspect since ruby <I>
rails_rails
train
4cbd6d7d935f6bcb0197378add4efff3d2a7cda0
diff --git a/lib/egon/version.rb b/lib/egon/version.rb index <HASH>..<HASH> 100644 --- a/lib/egon/version.rb +++ b/lib/egon/version.rb @@ -1,3 +1,3 @@ module Egon - VERSION = "0.4.7" + VERSION = "0.4.8" end
Bump to <I> Fix OSP <I> compatibility Fix for nil hardware introspection Fix for ruby script prepend
fusor_egon
train
3f857351d52db88888630dc590903ad33b9bb124
diff --git a/aws/awserr/types.go b/aws/awserr/types.go index <HASH>..<HASH> 100644 --- a/aws/awserr/types.go +++ b/aws/awserr/types.go @@ -113,7 +113,7 @@ func newRequestError(err Error, statusCode int, requestID string) *requestError // Error returns the string representation of the error. // Satisfies the error interface. func (r requestError) Error() string { - extra := fmt.Sprintf("status code: %d, request id: [%s]", + extra := fmt.Sprintf("status code: %d, request id: %s", r.statusCode, r.requestID) return SprintError(r.Code(), r.Message(), extra, r.OrigErr()) } diff --git a/internal/protocol/jsonrpc/jsonrpc.go b/internal/protocol/jsonrpc/jsonrpc.go index <HASH>..<HASH> 100644 --- a/internal/protocol/jsonrpc/jsonrpc.go +++ b/internal/protocol/jsonrpc/jsonrpc.go @@ -13,6 +13,7 @@ import ( "github.com/aws/aws-sdk-go/aws/awserr" "github.com/aws/aws-sdk-go/aws/request" "github.com/aws/aws-sdk-go/internal/protocol/json/jsonutil" + "github.com/aws/aws-sdk-go/internal/protocol/rest" ) var emptyJSON = []byte("{}") @@ -59,7 +60,7 @@ func Unmarshal(req *request.Request) { // UnmarshalMeta unmarshals headers from a response for a JSON RPC service. func UnmarshalMeta(req *request.Request) { - req.RequestID = req.HTTPResponse.Header.Get("x-amzn-requestid") + rest.UnmarshalMeta(req) } // UnmarshalError unmarshals an error response for a JSON RPC service. @@ -88,7 +89,7 @@ func UnmarshalError(req *request.Request) { req.Error = awserr.NewRequestFailure( awserr.New(codes[len(codes)-1], jsonErr.Message, nil), req.HTTPResponse.StatusCode, - "", + req.RequestID, ) } diff --git a/internal/protocol/rest/unmarshal.go b/internal/protocol/rest/unmarshal.go index <HASH>..<HASH> 100644 --- a/internal/protocol/rest/unmarshal.go +++ b/internal/protocol/rest/unmarshal.go @@ -25,6 +25,7 @@ func Unmarshal(r *request.Request) { // UnmarshalMeta unmarshals the REST metadata of a response in a REST service func UnmarshalMeta(r *request.Request) { + r.RequestID = r.HTTPResponse.Header.Get("X-Amzn-Requestid") if r.DataFilled() { v := reflect.Indirect(reflect.ValueOf(r.Data)) unmarshalLocationElements(r, v) diff --git a/internal/protocol/restjson/restjson.go b/internal/protocol/restjson/restjson.go index <HASH>..<HASH> 100644 --- a/internal/protocol/restjson/restjson.go +++ b/internal/protocol/restjson/restjson.go @@ -69,7 +69,7 @@ func UnmarshalError(r *request.Request) { r.Error = awserr.NewRequestFailure( awserr.New(codes[0], jsonErr.Message, nil), r.HTTPResponse.StatusCode, - "", + r.RequestID, ) }
Add RequestID to be set for REST request responses, and errors. Addresses #<I>. Addresses #<I>
aws_aws-sdk-go
train
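The change above threads the X-Amzn-Requestid response header into every unmarshaled request and error message. A minimal Python rendering of the same idea, with a stand-in response object instead of a real AWS call:

class FakeResponse:
    def __init__(self, headers, status):
        self.headers, self.status = headers, status

def failure_message(code, msg, resp):
    # mirror the Go change: failures always carry status code and request id
    rid = resp.headers.get("X-Amzn-Requestid", "")
    return f"{code}: {msg}, status code: {resp.status}, request id: {rid}"

resp = FakeResponse({"X-Amzn-Requestid": "abc-123"}, 400)
print(failure_message("Throttling", "rate exceeded", resp))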
b74769d5f6ea96c37dd966872226ce0af180dc73
diff --git a/projects/samskivert/src/java/com/samskivert/util/StringUtil.java b/projects/samskivert/src/java/com/samskivert/util/StringUtil.java index <HASH>..<HASH> 100644 --- a/projects/samskivert/src/java/com/samskivert/util/StringUtil.java +++ b/projects/samskivert/src/java/com/samskivert/util/StringUtil.java @@ -1,5 +1,5 @@ // -// $Id: StringUtil.java,v 1.1 2000/10/31 00:04:15 mdb Exp $ +// $Id: StringUtil.java,v 1.2 2000/10/31 00:51:13 mdb Exp $ package com.samskivert.util; @@ -15,4 +15,84 @@ public class StringUtil { return (value == null || value.trim().length() == 0); } + + /** + * Converts the supplied object to a string. Normally this is + * accomplished via the object's built in <code>toString()</code> + * method, but in the case of arrays, <code>toString()</code> is + * called on each element and the contents are listed like so: + * + * <pre> + * (value, value, value) + * </pre> + * + * Arrays of ints, longs, floats and doubles are also handled for + * convenience. Also note that passing null will result in the string + * "null" being returned. + */ + public static String toString (Object val) + { + StringBuffer buf = new StringBuffer(); + + if (val instanceof int[]) { + buf.append("("); + int[] v = (int[])val; + for (int i = 0; i < v.length; i++) { + if (i > 0) { + buf.append(", "); + } + buf.append(v[i]); + } + buf.append(")"); + + } else if (val instanceof long[]) { + buf.append("("); + long[] v = (long[])val; + for (int i = 0; i < v.length; i++) { + if (i > 0) { + buf.append(", "); + } + buf.append(v[i]); + } + buf.append(")"); + + } else if (val instanceof float[]) { + buf.append("("); + float[] v = (float[])val; + for (int i = 0; i < v.length; i++) { + if (i > 0) { + buf.append(", "); + } + buf.append(v[i]); + } + buf.append(")"); + + } else if (val instanceof double[]) { + buf.append("("); + double[] v = (double[])val; + for (int i = 0; i < v.length; i++) { + if (i > 0) { + buf.append(", "); + } + buf.append(v[i]); + } + buf.append(")"); + + } else if (val instanceof Object[]) { + buf.append("("); + Object[] v = (Object[])val; + for (int i = 0; i < v.length; i++) { + if (i > 0) { + buf.append(", "); + } + buf.append(v[i]); + } + buf.append(")"); + + } else { + buf.append(val); + } + + return buf.toString(); + } }
Added a toString() that does something nice with arrays. git-svn-id: <URL>
samskivert_samskivert
train
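A Python analogue of the StringUtil.toString added above: sequences render as "(value, value, value)", everything else through its plain string form:

def to_string(val):
    if isinstance(val, (list, tuple)):
        return "(" + ", ".join(str(v) for v in val) + ")"
    return str(val)   # the Java original renders null as "null"

assert to_string([1, 2, 3]) == "(1, 2, 3)"
assert to_string("plain") == "plain"
print(to_string((1.5, 2.5)))  # -> (1.5, 2.5)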
10a304d733e7f963f08ed16920c5eff5f6295377
diff --git a/public/js/modules/datetimepicker.js b/public/js/modules/datetimepicker.js index <HASH>..<HASH> 100644 --- a/public/js/modules/datetimepicker.js +++ b/public/js/modules/datetimepicker.js @@ -33,7 +33,7 @@ define(function (require) { // Selectors var $controls = this.$('.date-field .controls') - , $time = this.$('.time-field :text').parent() + , $time = this.$('.time-field .input-append') , $time_control_group = $time.closest('.control-group') , $date_help = $controls.find('.help-block') , $time_help = $time_control_group.find('.help-block')
Fixing issue with datetime selector. The :text was getting the tooltip inputs of the widget, but only when compiled
BKWLD_decoy
train
1a09a6b529e8c31b02e635a16636da8c186070de
diff --git a/lib/site_prism/element_container.rb b/lib/site_prism/element_container.rb index <HASH>..<HASH> 100644 --- a/lib/site_prism/element_container.rb +++ b/lib/site_prism/element_container.rb @@ -23,6 +23,7 @@ module SitePrism::ElementContainer # home.should_not have_search_link def element element_name, element_locator create_existence_checker element_name, element_locator + create_waiter element_name, element_locator define_method element_name.to_s do find_one element_locator end @@ -101,4 +102,11 @@ module SitePrism::ElementContainer element_exists? element_locator end end + + # Creates a method used to wait for an element to appear - uses the default capybara wait time + def create_waiter element_name, element_locator + define_method "wait_for_#{element_name.to_s}" do + element_waiter element_locator + end + end end \ No newline at end of file diff --git a/lib/site_prism/page.rb b/lib/site_prism/page.rb index <HASH>..<HASH> 100644 --- a/lib/site_prism/page.rb +++ b/lib/site_prism/page.rb @@ -105,5 +105,10 @@ module SitePrism def element_exists? locator has_selector? locator end + + # Page specific element waiter + def element_waiter locator + wait_until { element_exists? locator } + end end end \ No newline at end of file diff --git a/lib/site_prism/section.rb b/lib/site_prism/section.rb index <HASH>..<HASH> 100644 --- a/lib/site_prism/section.rb +++ b/lib/site_prism/section.rb @@ -28,5 +28,10 @@ module SitePrism def element_exists? locator @root_element.has_selector? locator end + + # Section specific element waiter + def element_waiter locator + wait_until { element_exists? locator } + end end end
adding implementation of wait_for_<element> method
natritmeyer_site_prism
train
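The commit above metaprograms one wait_for_<element> method per declared element. A hedged Python equivalent — has_selector is a stub standing in for Capybara's DOM query, and the timings are arbitrary:

import time

class Page:
    def has_selector(self, locator):
        return True  # stub: a real page object would query the DOM

def add_waiter(cls, element_name, locator, timeout=5.0, poll=0.1):
    def waiter(self):
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            if self.has_selector(locator):
                return True
            time.sleep(poll)
        raise TimeoutError(f"{element_name} did not appear")
    # one generated method per declared element, as in the Ruby original
    setattr(cls, f"wait_for_{element_name}", waiter)

add_waiter(Page, "search_link", "a.search")
Page().wait_for_search_link()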
0479559b2fa88881dc5386bf41ab3313acce6cd6
diff --git a/byte-buddy-dep/src/main/java/net/bytebuddy/dynamic/scaffold/MethodGraph.java b/byte-buddy-dep/src/main/java/net/bytebuddy/dynamic/scaffold/MethodGraph.java index <HASH>..<HASH> 100644 --- a/byte-buddy-dep/src/main/java/net/bytebuddy/dynamic/scaffold/MethodGraph.java +++ b/byte-buddy-dep/src/main/java/net/bytebuddy/dynamic/scaffold/MethodGraph.java @@ -1028,8 +1028,7 @@ public interface MethodGraph { protected Store<V> registerTopLevel(MethodDescription methodDescription, Harmonizer<V> harmonizer) { Harmonized<V> key = Harmonized.of(methodDescription, harmonizer); LinkedHashMap<Harmonized<V>, Entry<V>> entries = new LinkedHashMap<Harmonized<V>, Entry<V>>(this.entries); - Entry<V> currentEntry = entries.remove(key); - Entry<V> extendedEntry = (currentEntry == null + Entry<V> currentEntry = entries.remove(key), extendedEntry = (currentEntry == null ? new Entry.Initial<V>(key) : currentEntry).extendBy(methodDescription, harmonizer); entries.put(extendedEntry.getKey(), extendedEntry); @@ -1043,26 +1042,13 @@ public interface MethodGraph { * @return A store representing a combination of this store and the given store. */ protected Store<V> combineWith(Store<V> store) { - Store<V> combinedStore = this; + LinkedHashMap<Harmonized<V>, Entry<V>> entries = new LinkedHashMap<Harmonized<V>, Entry<V>>(this.entries); for (Entry<V> entry : store.entries.values()) { - combinedStore = combinedStore.combineWith(entry); + Entry<V> previousEntry = entries.remove(entry.getKey()), injectedEntry = previousEntry == null + ? entry + : combine(previousEntry, entry); + entries.put(injectedEntry.getKey(), injectedEntry); } - return combinedStore; - } - - /** - * Combines this store with the given entry. - * - * @param entry The entry to combine with this store. - * @return A store representing a combination of this store and the given entry. - */ - protected Store<V> combineWith(Entry<V> entry) { - LinkedHashMap<Harmonized<V>, Entry<V>> entries = new LinkedHashMap<Harmonized<V>, Entry<V>>(this.entries); - Entry<V> previousEntry = entries.remove(entry.getKey()); - Entry<V> injectedEntry = previousEntry == null - ? entry - : combine(previousEntry, entry); - entries.put(injectedEntry.getKey(), injectedEntry); return new Store<V>(entries); } @@ -1073,26 +1059,13 @@ public interface MethodGraph { * @return A store that represents this store with the given store injected. */ protected Store<V> inject(Store<V> store) { - Store<V> injectedStore = this; + LinkedHashMap<Harmonized<V>, Entry<V>> entries = new LinkedHashMap<Harmonized<V>, Entry<V>>(this.entries); for (Entry<V> entry : store.entries.values()) { - injectedStore = injectedStore.inject(entry); + Entry<V> dominantEntry = entries.remove(entry.getKey()), injectedEntry = dominantEntry == null + ? entry + : dominantEntry.inject(entry.getKey(), entry.getVisibility()); + entries.put(injectedEntry.getKey(), injectedEntry); } - return injectedStore; - } - - /** - * Injects the given entry into this store. - * - * @param entry The entry to be injected into this store. - * @return A store that represents this store with the given entry injected. - */ - protected Store<V> inject(Entry<V> entry) { - LinkedHashMap<Harmonized<V>, Entry<V>> entries = new LinkedHashMap<Harmonized<V>, Entry<V>>(this.entries); - Entry<V> dominantEntry = entries.remove(entry.getKey()); - Entry<V> injectedEntry = dominantEntry == null - ? entry - : dominantEntry.inject(entry.getKey(), entry.getVisibility()); - entries.put(injectedEntry.getKey(), injectedEntry); return new Store<V>(entries); }
Avoid unnecessary copies during graph computation.
raphw_byte-buddy
train
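The refactoring above replaces "copy the whole store once per entry" with "copy once, then fold every entry into that copy". The complexity difference, sketched with Python dicts:

def combine_quadratic(entries, updates):
    store = dict(entries)
    for key, value in updates.items():
        store = dict(store)      # O(n) copy per update -> O(n * m) overall
        store[key] = value
    return store

def combine_linear(entries, updates):
    store = dict(entries)        # one copy up front
    for key, value in updates.items():
        store[key] = value       # fold in place -> O(n + m) overall
    return store

assert combine_quadratic({"a": 1}, {"b": 2}) == combine_linear({"a": 1}, {"b": 2})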
3a81d2cf69fbfe8d087f5f618f6ff40d61462310
diff --git a/uploadservice/src/main/java/net/gotev/uploadservice/UploadService.java b/uploadservice/src/main/java/net/gotev/uploadservice/UploadService.java index <HASH>..<HASH> 100644 --- a/uploadservice/src/main/java/net/gotev/uploadservice/UploadService.java +++ b/uploadservice/src/main/java/net/gotev/uploadservice/UploadService.java @@ -288,7 +288,7 @@ public final class UploadService extends Service { return START_STICKY; } - private void clearIdleTimer() { + synchronized private void clearIdleTimer() { if (idleTimer != null) { Logger.info(TAG, "Clearing idle timer"); idleTimer.cancel(); @@ -296,7 +296,7 @@ public final class UploadService extends Service { } } - private int shutdownIfThereArentAnyActiveTasks() { + synchronized private int shutdownIfThereArentAnyActiveTasks() { if (uploadTasksMap.isEmpty()) { clearIdleTimer();
#<I> methods which interact with the idle timer are now synchronized
gotev_android-upload-service
train
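Java's synchronized methods take a reentrant monitor, so one guarded method may call another. A hedged Python sketch of the same fix using threading.RLock (class shape simplified from the commit above):

import threading

class UploadService:
    def __init__(self):
        self._lock = threading.RLock()  # reentrant, like Java's monitor
        self._idle_timer = None

    def clear_idle_timer(self):
        with self._lock:
            if self._idle_timer is not None:
                self._idle_timer.cancel()
                self._idle_timer = None

    def shutdown_if_no_active_tasks(self, upload_tasks):
        with self._lock:
            if not upload_tasks:
                self.clear_idle_timer()  # safe only because the lock is reentrant
                return True
            return False

svc = UploadService()
svc._idle_timer = threading.Timer(10.0, lambda: None)
svc._idle_timer.start()
assert svc.shutdown_if_no_active_tasks({})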
c71089d05be10fafa2b6685536e66731f5ffbd03
diff --git a/index.js b/index.js index <HASH>..<HASH> 100755 --- a/index.js +++ b/index.js @@ -3,6 +3,7 @@ var fs = require('fs') var path = require('path') var os = require('osenv') +var crypto = require('crypto') var mkdirp = require('mkdirp') var pull = require('pull-stream') @@ -10,9 +11,41 @@ var pt = require('pull-traverse') var paramap = require('pull-paramap') var cont = require('continuable') var cpara = require('continuable-hash') -var unpack = require('npmd-unpack') + +//var _unpack = require('npmd-unpack').unpack var deps = require('get-deps') var clone = require('clone') +var tarfs = require('tar-fs') +var zlib = require('zlib') + +var get = require('npmd-cache')({path: path.join(process.env.HOME, '.npmd')}) + +function unpack (pkg, opts, cb) { + var start = Date.now() + + //get from the hash if it's already been downloaded! + //else download from the registry or the url. + var query = { + key: /\//.test(pkg.from) ? pkg.from : pkg.name + '@' + pkg.version, + hash: pkg.shasum + } + + return get.createStream(query, function (err, stream) { + if(err) return cb(err) + if(!stream) throw new Error('did not return stream') + var hash = crypto.createHash(opts.alg || 'sha1') + + stream + .on('data', function (d) { hash.update(d) }) + .on('error', cb) + .pipe(zlib.createGunzip()) + .pipe(tarfs.extract(opts.target)) + .on('finish', function () { + cb(null, hash.digest('hex')) + }) + }) +} + var EventEmitter = require('events').EventEmitter @@ -73,42 +106,14 @@ var installTree = cont.to(function(tree, opts, cb) { }) ) }), - //unpack every file, so that it can be moved into place. - //possibe optimization: if a module has no deps, - //just link it. paramap(function (pkg, cb) { - var target = randDir('npmd-unpack-') - unpack.unpack(pkg, - merge({target: target, cache: opts.cache}, opts), - function (err, shasum) { - pkg.tmp = path.join(target, 'package') + unpack(pkg, {target: pkg.path, alg: config.alg}, function (err, hash) { + if(hash !== pkg.shasum) return cb(new Error( + 'expected ' + pkg.name +'@' + pkg.version +'\n' + + 'to have shasum=' + pkg.shasum + ' but was='+hash)) cb(err, pkg) - }) - }, 64), - pull.asyncMap(function (pkg, cb) { - if(!pkg.tmp) - return cb(new Error('no path for:'+ pkg.name), null) - - var source = pkg.tmp - var dest = path.join(pkg.path, pkg.name) - mkdirp(pkg.path, function () { - fs.lstat(dest, function (err, stat) { - if(stat) fs.rename(dest, randDir('npmd-gc-') , next) - else next() - - function next (err) { - if(err) return cb(err) - fs.rename(source, dest, function (err) { - if(err) - err.stack = err.message - + '\n(mv ' + source + ' ' + dest + ')' - + '\n' + err.stack - cb(err, null) - }) - } - }) }) - }), + }, 32), pull.drain(null, cb) )
streaming unpack, and unpack directly to dir
dominictarr_npmd-install
train
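The commit above streams the tarball through gunzip and tar extraction while hashing the raw bytes in flight, instead of unpacking to a temp dir and renaming. A hedged Python sketch of the same pipeline (trusted archives assumed):

import hashlib
import tarfile

class HashingReader:
    # file-like wrapper that feeds every chunk it reads into a digest
    def __init__(self, fileobj, digest):
        self._f, self._digest = fileobj, digest

    def read(self, n=-1):
        chunk = self._f.read(n)
        self._digest.update(chunk)
        return chunk

def unpack_and_verify(tgz_path, target_dir, expected_sha1):
    digest = hashlib.sha1()
    with open(tgz_path, "rb") as raw:
        # "r|gz" is tarfile's streaming mode: single pass, no seeking,
        # so the digest sees exactly the bytes that were extracted
        with tarfile.open(fileobj=HashingReader(raw, digest), mode="r|gz") as tar:
            tar.extractall(target_dir)
    actual = digest.hexdigest()
    if actual != expected_sha1:
        raise ValueError(f"expected shasum={expected_sha1} but was={actual}")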
678fe34d2c6163f3c6ee7e7904df95f9ae83129e
diff --git a/example.py b/example.py index <HASH>..<HASH> 100644 --- a/example.py +++ b/example.py @@ -11,7 +11,7 @@ args = parser.parse_args() args.password = getpass("Please enter your GitHub password: ") -browser = mechanicalsoup.StatefulBrowser(soup_config={'features': 'lxml'}) +browser = mechanicalsoup.StatefulBrowser(soup_config={'features': 'lxml'}, raise_on_404=True) # Uncomment for a more verbose output: # browser.set_verbose(2)
use raise_on_<I> in example.py. It's a good idea to use it (but we can't make it the default without breaking backward compatibility).
MechanicalSoup_MechanicalSoup
train
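Since MechanicalSoup is a Python library, the changed example runs as-is. With raise_on_404=True the browser raises mechanicalsoup.LinkNotFoundError on 404 responses instead of handing them back silently (needs the mechanicalsoup and lxml packages plus network access; the URL below is just a test endpoint):

import mechanicalsoup

browser = mechanicalsoup.StatefulBrowser(
    soup_config={'features': 'lxml'},
    raise_on_404=True,  # opt-in: flipping the default would break callers
)
try:
    browser.open("https://httpbin.org/status/404")
except mechanicalsoup.LinkNotFoundError:
    print("404 surfaced as an exception, as configured")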
7cda157cc23305a57e8f4e6c24bbb0312a348ebc
diff --git a/core/src/Entity/Site.php b/core/src/Entity/Site.php index <HASH>..<HASH> 100644 --- a/core/src/Entity/Site.php +++ b/core/src/Entity/Site.php @@ -730,9 +730,9 @@ class Site extends ApiModelTrackingAbstract implements \IteratorAggregate, Track } /** - * Get all the page entities for the site. + * Get all the containers for the site. * - * @return ArrayCollection Array of page entities + * @return ArrayCollection */ public function getContainers() { @@ -740,7 +740,7 @@ class Site extends ApiModelTrackingAbstract implements \IteratorAggregate, Track } /** - * Get all the page entities for the site. + * Get a container for this site * * @param string $name Name of container * @@ -758,9 +758,9 @@ class Site extends ApiModelTrackingAbstract implements \IteratorAggregate, Track } /** - * Set up a page + * Add a container * - * @param Container $container Page Entity to add. + * @param Container $container * * @return void */ diff --git a/core/src/SecureRepo/PageSecureRepo.php b/core/src/SecureRepo/PageSecureRepo.php index <HASH>..<HASH> 100644 --- a/core/src/SecureRepo/PageSecureRepo.php +++ b/core/src/SecureRepo/PageSecureRepo.php @@ -422,13 +422,11 @@ class PageSecureRepo ); } - /** - * @todo Load all known containers for the target page's site, - * include empty entries for those not included in $result */ foreach ($result['modifiedSiteWideContainers'] as $revisionId => $container) { /** * @var Container $container */ + $modifiedContainerNames[] = $container->getName(); $this->immutableSiteWideContainerRepo->publish( new SiteWideContainerLocator($container->getSiteId(), $container->getName()), new ContainerContent( @@ -441,6 +439,28 @@ class PageSecureRepo ); } + // FIXME: This is dangerous, as it may delete the content of site containers that are not on + // the current page. + // $modifiedContainerNames = array_keys($data['containers']); + + // // Clear containers that were not present in the result + // $unmodifiedContainerNames = $site->getContainers()->map(function (Container $container) use ( + // $modifiedContainerNames, + // $user + // ) { + // // Skip containers that were modified + // if (in_array($container->getName(), $modifiedContainerNames)) { + // return null; + // } + // $this->immutableSiteWideContainerRepo->publish( + // new SiteWideContainerLocator($container->getSiteId(), $container->getName()), + // new ContainerContent([]), + // $user->getId(), + // __CLASS__ . '::' . __FUNCTION__ + // ); + // return $container->getName(); + // })->filter(function ($name) { return $name; }); + if ($savedANewVersion) { $return['redirect'] = $urlToPageFunction( $pageName,
provisional code to delete content from empty site containers
reliv_Rcm
train
459694707a2662a6e45c9c9f59751ff915ea817f
diff --git a/lib/adb.js b/lib/adb.js index <HASH>..<HASH> 100644 --- a/lib/adb.js +++ b/lib/adb.js @@ -1081,6 +1081,30 @@ ADB.prototype.killProcessByPID = function (pid, cb) { this.shell("kill " + pid, cb); }; +var _buildStartCmd = function (startAppOptions, apiLevel) { + var cmd = "am start "; + + cmd += startAppOptions.stopApp && apiLevel >= 15 ? "-S" : ""; + + if (startAppOptions.action) { + cmd += " -a " + startAppOptions.action; + } + + if (startAppOptions.category) { + cmd += " -c " + startAppOptions.category; + } + + if (startAppOptions.flags) { + cmd += " -f " + startAppOptions.flags; + } + + if (startAppOptions.pkg) { + cmd += " -n " + startAppOptions.pkg + "/" + startAppOptions.activity + startAppOptions.optionalIntentArguments; + } + + return cmd; +}; + ADB.prototype.startApp = function (startAppOptions, cb) { startAppOptions = _.clone(startAppOptions); // initializing defaults @@ -1096,12 +1120,9 @@ ADB.prototype.startApp = function (startAppOptions, cb) { startAppOptions.optionalIntentArguments = startAppOptions.optionalIntentArguments ? " " + startAppOptions.optionalIntentArguments : ""; this.getApiLevel(function (err, apiLevel) { if (err) return cb(err); - var stop = startAppOptions.stopApp && apiLevel >= 15 ? "-S" : ""; - var cmd = "am start " + stop + - " -a " + startAppOptions.action + - " -c " + startAppOptions.category + - " -f " + startAppOptions.flags + - " -n " + startAppOptions.pkg + "/" + startAppOptions.activity + startAppOptions.optionalIntentArguments; + + var cmd = _buildStartCmd(startAppOptions, apiLevel); + this.shell(cmd, function (err, stdout) { if (err) return cb(err); if (stdout.indexOf("Error: Activity class") !== -1 &&
Changed adb start cmd line construction to be more flexible.
appium_appium-adb
train
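A Python transcription of the _buildStartCmd helper introduced above — assemble only the flags that were actually supplied, instead of interpolating empty defaults:

def build_start_cmd(opts, api_level):
    cmd = "am start"
    if opts.get("stopApp") and api_level >= 15:
        cmd += " -S"
    if opts.get("action"):
        cmd += " -a " + opts["action"]
    if opts.get("category"):
        cmd += " -c " + opts["category"]
    if opts.get("flags"):
        cmd += " -f " + opts["flags"]
    if opts.get("pkg"):
        cmd += " -n {}/{}{}".format(opts["pkg"], opts["activity"],
                                    opts.get("optionalIntentArguments", ""))
    return cmd

print(build_start_cmd({"pkg": "io.appium.test", "activity": ".Main",
                       "stopApp": True}, 19))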
aca22a5306e147f7273d3ec1ae2ab60567a8a34d
diff --git a/barnard/client.go b/barnard/client.go index <HASH>..<HASH> 100644 --- a/barnard/client.go +++ b/barnard/client.go @@ -32,6 +32,8 @@ func (b *Barnard) OnUserChange(e *gumble.UserChangeEvent) { if e.ChannelChanged && e.User == b.Client.Self() { b.UpdateInputStatus(fmt.Sprintf("To: %s", e.User.Channel().Name())) } + b.UiTree.Rebuild() + b.Ui.Refresh() } func (b *Barnard) OnChannelChange(e *gumble.ChannelChangeEvent) {
barnard: refresh UI on user change
layeh_gumble
train
974b73b60793342d9407d325f35a16d22014089d
diff --git a/src/main/java/com/linecorp/armeria/server/DefaultServiceRequestContext.java b/src/main/java/com/linecorp/armeria/server/DefaultServiceRequestContext.java index <HASH>..<HASH> 100644 --- a/src/main/java/com/linecorp/armeria/server/DefaultServiceRequestContext.java +++ b/src/main/java/com/linecorp/armeria/server/DefaultServiceRequestContext.java @@ -78,6 +78,10 @@ public final class DefaultServiceRequestContext extends AbstractRequestContext i requestLog = new DefaultRequestLog(); requestLog.start(ch, sessionProtocol, cfg.virtualHost().defaultHostname(), method, path); responseLog = new DefaultResponseLog(requestLog); + + final ServerConfig serverCfg = cfg.server().config(); + requestTimeoutMillis = serverCfg.defaultRequestTimeoutMillis(); + maxRequestLength = serverCfg.defaultMaxRequestLength(); } @Override
Fix a bug where some server-side default settings are not applied. Motivation: DefaultServiceRequestContext does not set maxRequestLength and requestTimeoutMillis to their default values retrieved from ServerConfig. Modifications: - Initialize maxRequestLength and requestTimeoutMillis properly. Result: Configured default settings are respected.
line_armeria
train
a2541370b95a7442dff2c95a9186d2fbc4b42496
diff --git a/modules/telemetry/class-kirki-modules-telemetry.php b/modules/telemetry/class-kirki-modules-telemetry.php index <HASH>..<HASH> 100644 --- a/modules/telemetry/class-kirki-modules-telemetry.php +++ b/modules/telemetry/class-kirki-modules-telemetry.php @@ -92,11 +92,13 @@ final class Kirki_Modules_Telemetry { return; } - // Only send data once/month. - $sent = get_site_transient( 'kirki_telemetry_sent' ); - if ( ! $sent ) { + // Only send data once/month. We use an option instead of a transient + // because transients in some managed hosting environments don't properly update + // due to their caching implementations. + $sent = get_option( 'kirki_telemetry_sent' ); + if ( ! $sent || $sent < time() - MONTH_IN_SECONDS ) { $this->send_data(); - set_site_transient( 'kirki_telemetry_sent', time(), MONTH_IN_SECONDS ); + update_option( 'kirki_telemetry_sent', time() ); } }
managed hosts have irrational caching implementations for transients
aristath_kirki
train
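The kirki change swaps a transient for a plain option plus an explicit timestamp check, since some managed hosts cache transients unreliably. The throttle logic, sketched in Python with an in-memory dict standing in for the options table:

import time

MONTH_IN_SECONDS = 30 * 24 * 60 * 60
_options = {}  # stand-in for WordPress's persistent options table

def maybe_send_telemetry(send):
    sent = _options.get("kirki_telemetry_sent")
    if not sent or sent < time.time() - MONTH_IN_SECONDS:
        send()
        _options["kirki_telemetry_sent"] = time.time()

maybe_send_telemetry(lambda: print("payload sent"))  # first run: sends
maybe_send_telemetry(lambda: print("never printed")) # within a month: throttled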
e3350fd724c30bb3695f755316f9b840445a0af6
diff --git a/doc/whats-new.rst b/doc/whats-new.rst index <HASH>..<HASH> 100644 --- a/doc/whats-new.rst +++ b/doc/whats-new.rst @@ -61,10 +61,16 @@ Bug fixes attribute being set. (:issue:`2201`) By `Thomas Voigt <https://github.com/tv3141>`_. + - Tests can be run in parallel with pytest-xdist -- Follow up the renamings in dask; from dask.ghost to dask.overlap + By `Tony Tung <https://github.com/ttung>`_. + +- Now raises a ValueError when there is a conflict between dimension names and + level names of MultiIndex. (:issue:`2299`) By `Keisuke Fujii <https://github.com/fujiisoup>`_. +- Follow up the renamings in dask; from dask.ghost to dask.overlap + By `Keisuke Fujii <https://github.com/fujiisoup>`_. - Now :py:func:`xr.apply_ufunc` raises a ValueError when the size of ``input_core_dims`` is inconsistent with the number of arguments. diff --git a/xarray/core/variable.py b/xarray/core/variable.py index <HASH>..<HASH> 100644 --- a/xarray/core/variable.py +++ b/xarray/core/variable.py @@ -1876,12 +1876,15 @@ def assert_unique_multiindex_level_names(variables): objects. """ level_names = defaultdict(list) + all_level_names = set() for var_name, var in variables.items(): if isinstance(var._data, PandasIndexAdapter): idx_level_names = var.to_index_variable().level_names if idx_level_names is not None: for n in idx_level_names: level_names[n].append('%r (%s)' % (n, var_name)) + if idx_level_names: + all_level_names.update(idx_level_names) for k, v in level_names.items(): if k in variables: @@ -1892,3 +1895,9 @@ def assert_unique_multiindex_level_names(variables): conflict_str = '\n'.join([', '.join(v) for v in duplicate_names]) raise ValueError('conflicting MultiIndex level name(s):\n%s' % conflict_str) + # Check confliction between level names and dimensions GH:2299 + for k, v in variables.items(): + for d in v.dims: + if d in all_level_names: + raise ValueError('conflicting level / dimension names. {} ' + 'already exists as a level name.'.format(d)) diff --git a/xarray/tests/test_dataset.py b/xarray/tests/test_dataset.py index <HASH>..<HASH> 100644 --- a/xarray/tests/test_dataset.py +++ b/xarray/tests/test_dataset.py @@ -2456,6 +2456,18 @@ class TestDataset(TestCase): with raises_regex(ValueError, 'conflicting MultiIndex'): data.assign(level_1=range(4)) data.assign_coords(level_1=range(4)) + # raise an Error when any level name is used as dimension GH:2299 + with pytest.raises(ValueError): + data['y'] = ('level_1', [0, 1]) + + def test_merge_multiindex_level(self): + data = create_test_multiindex() + other = Dataset({'z': ('level_1', [0, 1])}) # conflict dimension + with pytest.raises(ValueError): + data.merge(other) + other = Dataset({'level_1': ('x', [0, 1])}) # conflict variable name + with pytest.raises(ValueError): + data.merge(other) def test_setitem_original_non_unique_index(self): # regression test for GH943
Raises a ValueError for a conflict between dimension names and level names (#<I>) * Raises a ValueError for a conflict between dimension names and level names * Clean up whatsnew.
pydata_xarray
train
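The guard added above rejects any dimension name that shadows a MultiIndex level name. The check itself is simple enough to restate generically in Python:

def assert_no_dim_level_conflicts(dims, level_names):
    for d in dims:
        if d in level_names:
            raise ValueError(
                f"conflicting level / dimension names. {d} "
                "already exists as a level name."
            )

assert_no_dim_level_conflicts(["x", "y"], {"level_1", "level_2"})  # fine
try:
    assert_no_dim_level_conflicts(["level_1"], {"level_1"})
except ValueError as err:
    print(err)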
0474357876c6cd87109f4b3ea555be4ae4e06967
diff --git a/test/unit/Cursor.test.js b/test/unit/Cursor.test.js
index <HASH>..<HASH> 100644
--- a/test/unit/Cursor.test.js
+++ b/test/unit/Cursor.test.js
@@ -7,51 +7,79 @@ describe('Cursor', () => {
     const cursor = new Cursor();
 
     assert.instanceOf(cursor, Cursor);
-    assert.equal(cursor._buffer, '');
-    assert.deepEqual(cursor._pointer, {x: 1, y: 1});
+    assert.equal(cursor._width, process.stdout.columns);
+    assert.equal(cursor._height, process.stdout.rows);
+    assert.equal(cursor._x, 0);
+    assert.equal(cursor._y, 0);
+    assert.equal(cursor._buffer.length, cursor._width * cursor._height);
   });
 
-  it('Should properly initialize with custom stdout', () => {
+  it('Should properly calculate buffer pointer', () => {
     const cursor = new Cursor();
 
-    assert.instanceOf(cursor, Cursor);
-    assert.equal(cursor._buffer, '');
-    assert.deepEqual(cursor._pointer, {x: 1, y: 1});
+    assert.equal(cursor.getBufferPointer(), 0);
+    assert.instanceOf(cursor.moveTo(10, 10), Cursor);
+    assert.equal(cursor.getBufferPointer(), 10 * process.stdout.columns + 10);
+    assert.equal(cursor.getBufferPointer(20, 20), 20 * process.stdout.columns + 20);
+  });
+
+  it('Should properly calculate coordinates from buffer pointer', () => {
+    const cursor = new Cursor();
+
+    assert.deepEqual(cursor.getXYFromPointer(0), [0, 0]);
+    assert.deepEqual(cursor.getXYFromPointer(1), [1, 0]);
+    assert.deepEqual(cursor.getXYFromPointer(200), [200 - (Math.floor(200 / cursor._width) * cursor._width), Math.floor(200 / cursor._width)]);
   });
 
   it('Should properly write to the cursor', () => {
     const cursor = new Cursor();
 
-    assert.equal(cursor._buffer, '');
+    assert.equal(cursor._buffer[0], ' ');
+
     cursor.write('test');
-    assert.equal(cursor._buffer, 'test');
+    assert.equal(cursor._buffer[0], 't');
+    assert.equal(cursor._buffer[1], 'e');
+    assert.equal(cursor._buffer[2], 's');
+    assert.equal(cursor._buffer[3], 't');
+
     cursor.write(new Buffer('another'));
-    assert.equal(cursor._buffer, 'testanother');
+    assert.equal(cursor._buffer[4], ' a');
+    assert.equal(cursor._buffer[5], ' n');
+    assert.equal(cursor._buffer[6], ' o');
+    assert.equal(cursor._buffer[7], ' t');
+    assert.equal(cursor._buffer[8], ' h');
+    assert.equal(cursor._buffer[9], ' e');
+    assert.equal(cursor._buffer[10], ' r');
   });
 
   it('Should properly ignore write if out of the bounding box', () => {
     const cursor = new Cursor();
 
-    assert.equal(cursor._buffer, '');
+    assert.equal(cursor._buffer[0], ' ');
+
     cursor.write('test');
-    assert.equal(cursor._buffer, 'test');
-    cursor.write(new Buffer('another'));
-    assert.equal(cursor._buffer, 'testanother');
+    assert.equal(cursor._buffer[0], 't');
+    assert.equal(cursor._buffer[1], 'e');
+    assert.equal(cursor._buffer[2], 's');
+    assert.equal(cursor._buffer[3], 't');
+
     cursor.moveTo(-5, -5).write('do not print');
-    assert.equal(cursor._buffer, 'testanother\u001b[1;1f');
+    assert.equal(cursor._buffer[0], 't');
+    assert.equal(cursor._buffer[1], 'e');
+    assert.equal(cursor._buffer[2], 's');
+    assert.equal(cursor._buffer[3], 't');
+    assert.equal(cursor._buffer[4], ' ');
   });
 
   it('Should properly flush the buffer into the stream', () => {
     const cursor = new Cursor();
     const mock = sinon.mock(process.stdout);
 
-    mock.expects('write').once().withArgs('testanother');
+    mock.expects('write').twice();
 
     cursor.write('test');
-    cursor.write('another');
     cursor.flush();
 
-    assert.equal(cursor._buffer, '');
     mock.verify();
   });
 
@@ -84,14 +112,10 @@ describe('Cursor', () => {
 
   it('Should properly move cursor up with default arguments', () => {
     const cursor = new Cursor();
-    const mock = sinon.mock(cursor);
-
-    mock.expects('write').once().withArgs(new Buffer('\u001b[1A'));
-    cursor.up();
-
-    assert.equal(cursor._pointer.y, 0);
-    mock.verify();
+    assert.equal(cursor._y, 0);
+    assert.instanceOf(cursor.up(), Cursor);
+    assert.equal(cursor._y, -1);
   });
 
   it('Should properly move cursor up with custom arguments', () => {
test(cursor): Rewrite a few tests for new rendering system
kittikjs_cursor
train
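The new cursor tests above encode the flat-buffer math: a cell at (x, y) lives at index y * width + x, and the inverse recovers (x, y) by division and remainder. Worked out in Python with an assumed 80x24 screen:

WIDTH, HEIGHT = 80, 24

def pointer(x, y):
    # getBufferPointer: flatten (x, y) into the 1-D screen buffer
    return y * WIDTH + x

def xy(p):
    # getXYFromPointer: invert the mapping
    return p - (p // WIDTH) * WIDTH, p // WIDTH

assert pointer(10, 10) == 10 * WIDTH + 10
assert xy(pointer(10, 10)) == (10, 10)
assert xy(200) == (200 % WIDTH, 200 // WIDTH)  # same formula as the test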
bfa82499d633d586020af2333e68b2d85df190b2
diff --git a/activerecord/lib/active_record/associations/builder/singular_association.rb b/activerecord/lib/active_record/associations/builder/singular_association.rb index <HASH>..<HASH> 100644 --- a/activerecord/lib/active_record/associations/builder/singular_association.rb +++ b/activerecord/lib/active_record/associations/builder/singular_association.rb @@ -8,12 +8,11 @@ module ActiveRecord::Associations::Builder def define_accessors(model, reflection) super - define_constructors(model.generated_feature_methods) if reflection.constructable? + self.class.define_constructors(model.generated_feature_methods, name) if reflection.constructable? end # Defines the (build|create)_association methods for belongs_to or has_one association - - def define_constructors(mixin) + def self.define_constructors(mixin, name) mixin.class_eval <<-CODE, __FILE__, __LINE__ + 1 def build_#{name}(*args, &block) association(:#{name}).build(*args, &block)
Move define_constructors to class level
rails_rails
train
ee9ecbdca4d80e0daaec7146e8aca4c079d32775
diff --git a/intlogger.go b/intlogger.go
index <HASH>..<HASH> 100644
--- a/intlogger.go
+++ b/intlogger.go
@@ -124,7 +124,7 @@ func newLogger(opts *LoggerOptions) *intLogger {
 		independentLevels: opts.IndependentLevels,
 	}
 	if opts.IncludeLocation {
-		l.callerOffset = offsetIntLogger
+		l.callerOffset = offsetIntLogger + opts.AdditionalLocationOffset
 	}
 
 	if l.json {
diff --git a/logger.go b/logger.go
index <HASH>..<HASH> 100644
--- a/logger.go
+++ b/logger.go
@@ -235,6 +235,10 @@ type LoggerOptions struct {
 	// Include file and line information in each log line
 	IncludeLocation bool
 
+	// AdditionalLocationOffset is the number of additional stack levels to skip
+	// when finding the file and line information for the log line
+	AdditionalLocationOffset int
+
 	// The time format to use instead of the default
 	TimeFormat string
 
diff --git a/logger_test.go b/logger_test.go
index <HASH>..<HASH> 100644
--- a/logger_test.go
+++ b/logger_test.go
@@ -176,6 +176,30 @@ func TestLogger(t *testing.T) {
 		assert.Equal(t, "[INFO] go-hclog/logger_test.go:169: test: this is test: who=programmer why=\"testing is fun\"\n", rest)
 	})
 
+	t.Run("includes the caller location excluding helper functions", func(t *testing.T) {
+		var buf bytes.Buffer
+
+		logMe := func(l Logger) {
+			l.Info("this is test", "who", "programmer", "why", "testing is fun")
+		}
+
+		logger := New(&LoggerOptions{
+			Name: "test",
+			Output: &buf,
+			IncludeLocation: true,
+			AdditionalLocationOffset: 1,
+		})
+
+		logMe(logger)
+
+		str := buf.String()
+		dataIdx := strings.IndexByte(str, ' ')
+		rest := str[dataIdx+1:]
+
+		// This test will break if you move this around, it's line dependent, just fyi
+		assert.Equal(t, "[INFO] go-hclog/logger_test.go:193: test: this is test: who=programmer why=\"testing is fun\"\n", rest)
+	})
+
 	t.Run("prefixes the name", func(t *testing.T) {
 		var buf bytes.Buffer
 
@@ -805,6 +829,36 @@ func TestLogger_JSON(t *testing.T) {
 		assert.Equal(t, fmt.Sprintf("%v:%d", file, line-1), raw["@caller"])
 	})
 
+	t.Run("includes the caller location excluding helper functions", func(t *testing.T) {
+		var buf bytes.Buffer
+
+		logMe := func(l Logger) {
+			l.Info("this is test", "who", "programmer", "why", "testing is fun")
+		}
+
+		logger := New(&LoggerOptions{
+			Name: "test",
+			Output: &buf,
+			JSONFormat: true,
+			IncludeLocation: true,
+			AdditionalLocationOffset: 1,
+		})
+
+		logMe(logger)
+		_, file, line, ok := runtime.Caller(0)
+		require.True(t, ok)
+
+		b := buf.Bytes()
+
+		var raw map[string]interface{}
+		if err := json.Unmarshal(b, &raw); err != nil {
+			t.Fatal(err)
+		}
+
+		assert.Equal(t, "this is test", raw["@message"])
+		assert.Equal(t, fmt.Sprintf("%v:%d", file, line-1), raw["@caller"])
+	})
+
 	t.Run("handles non-serializable entries", func(t *testing.T) {
 		var buf bytes.Buffer
 
diff --git a/stdlog_test.go b/stdlog_test.go
index <HASH>..<HASH> 100644
--- a/stdlog_test.go
+++ b/stdlog_test.go
@@ -181,3 +181,32 @@ func TestFromStandardLogger(t *testing.T) {
 	prefix := "test-stdlib-log "
 	require.Equal(t, prefix, actual[:16])
 }
+
+func TestFromStandardLogger_helper(t *testing.T) {
+	var buf bytes.Buffer
+
+	sl := log.New(&buf, "test-stdlib-log ", log.Ltime)
+
+	hl := FromStandardLogger(sl, &LoggerOptions{
+		Name: "hclog-inner",
+		IncludeLocation: true,
+		AdditionalLocationOffset: 1,
+	})
+
+	helper := func() {
+		hl.Info("this is a test", "name", "tester", "count", 1)
+	}
+
+	helper()
+	_, file, line, ok := runtime.Caller(0)
+	require.True(t, ok)
+
+	actual := buf.String()
+	suffix := fmt.Sprintf(
+		"[INFO] go-hclog/%s:%d: hclog-inner: this is a test: name=tester count=1\n",
+		filepath.Base(file), line-1)
+	require.Equal(t, suffix, actual[25:])
+
+	prefix := "test-stdlib-log "
+	require.Equal(t, prefix, actual[:16])
+}
Add an AdditionalLocationOffset option to LoggerOptions. Add the ability for wrappers of hclog to have the file and line number point to their caller, instead of the wrappers, by specifying additional offsets for the callstack when creating a logger.
hashicorp_go-hclog
train
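Python's standard logging has the same knob as hclog's AdditionalLocationOffset: a stack offset so wrapper functions attribute log records to their caller. A runnable sketch (stacklevel requires Python 3.8+):

import logging

logging.basicConfig(format="%(filename)s:%(lineno)d %(message)s",
                    level=logging.INFO)
log = logging.getLogger("demo")

def helper(msg):
    # stacklevel=2 skips this wrapper frame, so the record carries the
    # caller's file:line -- the same effect as bumping callerOffset above
    log.info(msg, stacklevel=2)

helper("attributed to the call site, not to helper()")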
519f8e051b3ffbcacf40c4980d962687f87b7cbc
diff --git a/index.js b/index.js index <HASH>..<HASH> 100644 --- a/index.js +++ b/index.js @@ -3,8 +3,13 @@ * Created by jazarja, 4ossiblellc on 9/20/16. */ -var merge = require('deepmerge').default; var _ = require('lodash'); +var merge = require('deepmerge'); + +// Webpack specific case +if (typeof merge !== 'function') { + merge = merge.default +} var isEmpty = function (map) { for(var key in map) {
Make the module usable with Webpack
4ossiblellc_dynamodb-update-expression
train
14845223e489480096572b1db30e9f5a34ad0aad
diff --git a/armstrong/apps/images/tests/views.py b/armstrong/apps/images/tests/views.py index <HASH>..<HASH> 100644 --- a/armstrong/apps/images/tests/views.py +++ b/armstrong/apps/images/tests/views.py @@ -58,21 +58,21 @@ class ImageAdminTestCase(TestCase): def test_upload_image(self): self.assertTrue(not Image.objects.filter(title='uploaded').exists()) - with open(LOCAL_IMAGE_PATH) as image: - url = reverse('admin:images_admin_upload') - response = self.client.post(url, { - 'image': image, - 'title': 'uploaded', - 'slug': 'uploaded', - 'summary': 'uploaded image', - 'authors_override': 'bob marley', - 'pub_date': '2011-08-15', - 'pub_status': 'D', - 'tags': 'test tags', - 'primary_section': self.section.id, - 'sites': Site.objects.get_current().id, - }, follow=True) - + f = open(LOCAL_IMAGE_PATH) + url = reverse('admin:images_admin_upload') + response = self.client.post(url, { + 'image': f, + 'title': 'uploaded', + 'slug': 'uploaded', + 'summary': 'uploaded image', + 'authors_override': 'bob marley', + 'pub_date': '2011-08-15', + 'pub_status': 'D', + 'tags': 'test tags', + 'primary_section': self.section.id, + 'sites': Site.objects.get_current().id, + }, follow=True) + f.close() self.assertEqual(response.status_code, 200) self.assertTrue(Image.objects.filter(title='uploaded').exists()) self.assertTrue(os.path.exists(SERVER_IMAGE_PATH))
Remove with statement for <I>% test coverage
armstrong_armstrong.apps.images
train
224665105ecb2a703d221da136bc76bed8f16d47
diff --git a/tests/test_git.py b/tests/test_git.py index <HASH>..<HASH> 100644 --- a/tests/test_git.py +++ b/tests/test_git.py @@ -8,7 +8,7 @@ class Test(BaseTest): def setUp(self): super(Test, self).setUp() - self.git = git.bake(_cwd=self.path) + self.git = git.bake(_cwd=self.path, _tty_out=False) self.notebook_name = 'git_tests' # make notebook
Fix git hanging in tests when it wants to show a pager
charlesthomas_magpie
train
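The didauth change converts key lookup to a coroutine so networked finders (e.g. a DID resolver) can await I/O. A minimal asyncio sketch of the new contract, with a static in-memory finder:

import asyncio

class StaticKeyFinder:
    def __init__(self):
        self._keys = {}

    def add_key(self, key_id, key_type, key):
        self._keys[key_id] = (key_type, key)

    async def find_key(self, key_id, key_type):
        key = self._keys.get(key_id)
        if not key:
            return None
        # a networked finder would await an HTTP or ledger call here
        return key[1]

async def main():
    finder = StaticKeyFinder()
    finder.add_key("did:sov:example", "ed25519", b"public-key-bytes")
    print(await finder.find_key("did:sov:example", "ed25519"))

asyncio.run(main())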
0be63749c039e16aa1fcc64cfd8227b50829254e
diff --git a/pyvisa/__init__.py b/pyvisa/__init__.py index <HASH>..<HASH> 100644 --- a/pyvisa/__init__.py +++ b/pyvisa/__init__.py @@ -34,5 +34,15 @@ except: # on any error just try to grab the version that is installed on the sy pass # we seem to have a local copy without any repository control or installed without setuptools # so the reported version will be __unknown__ +import wrapper from .visa import instrument, ResourceManager, Instrument, SerialInstrument, from .errors import * + + +from .library import read_user_settings +_user_lib = read_user_settings() + +if _user_lib: + from . import vpp43 + vpp43.visa_library.load_library(_user_lib) +
Load legacy visa_library taking user settings into account. See #7
pyvisa_pyvisa
train
ebde13927fe22494c60972ad7a73a7e486faf7b2
diff --git a/didauth/__init__.py b/didauth/__init__.py index <HASH>..<HASH> 100644 --- a/didauth/__init__.py +++ b/didauth/__init__.py @@ -1 +1 @@ -__version__ = '1.1.0' +__version__ = '1.2.0' diff --git a/didauth/base.py b/didauth/base.py index <HASH>..<HASH> 100644 --- a/didauth/base.py +++ b/didauth/base.py @@ -49,7 +49,7 @@ class VerifierBase: class KeyFinderBase: - def find_key(self, _key_id: str, _key_type: str) -> bytes: + async def find_key(self, _key_id: str, _key_type: str) -> bytes: raise LookupError('Key lookup not implemented') @@ -60,7 +60,7 @@ class StaticKeyFinder(KeyFinderBase): def add_key(self, key_id, key_type, key): self._keys[key_id] = (key_type, key) - def find_key(self, key_id: str, key_type: str) -> bytes: + async def find_key(self, key_id: str, key_type: str) -> bytes: key = self._keys.get(key_id) if not key: return None diff --git a/didauth/headers.py b/didauth/headers.py index <HASH>..<HASH> 100644 --- a/didauth/headers.py +++ b/didauth/headers.py @@ -86,7 +86,7 @@ class HeaderVerifier: required_headers = [] # implementors should require (request-target) and date self._required_headers = [h.lower() for h in required_headers] - def verify(self, headers: Mapping, method=None, path=None): + async def verify(self, headers: Mapping, method=None, path=None): """ Parse Signature Authorization header and verify signature @@ -121,7 +121,7 @@ class HeaderVerifier: if not self._handlers.supports(algo): raise VerifierException("Unsupported HTTP signature algorithm '{}'".format(algo)) - pubkey = self._key_finder.find_key(key_id, algo) + pubkey = await self._key_finder.find_key(key_id, algo) if not pubkey: raise VerifierException("Cannot locate public key for '{}'".format(key_id)) diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -3,7 +3,7 @@ from setuptools import setup pkg_name = 'didauth' -version = '1.1.0' +version = '1.2.0' setup( name=pkg_name, diff --git a/test/test_headers.py b/test/test_headers.py index <HASH>..<HASH> 100644 --- a/test/test_headers.py +++ b/test/test_headers.py @@ -1,3 +1,6 @@ +import asyncio +import pytest + from didauth.base import StaticKeyFinder from didauth.headers import HeaderSigner, HeaderVerifier from didauth.registry import ALL @@ -11,7 +14,8 @@ TEST_DID = 'did:sov:47MC9bBzTfrsdETN6aSBAT' TEST_SECRET = b'test-key-00000000000000000000000' -def test_headers(): +@pytest.mark.asyncio +async def test_headers(): headers = { 'Date': 'Thu, 01 May 2018 00:00:00 -0000', @@ -37,7 +41,7 @@ def test_headers(): key_finder.add_key(TEST_DID, ALG_ED25519, signer.public_key) verifier = HeaderVerifier(key_finder) - verified = verifier.verify(signed_headers, method, path) + verified = await verifier.verify(signed_headers, method, path) print('Verify result: {}'.format(verified)) @@ -48,3 +52,8 @@ def test_headers(): assert verified['key'] == signer.public_key assert verified['signature'] == \ '+lqX6t0Jq2nELAzFMuVDsyuz2PJmMMSF1eiXuNg7dNyD0r+t9VwGDpMlxvtrI1DdfI0yQHtsRZiO2BRz4YNXAQ==' + + +if __name__ == '__main__': + loop = asyncio.get_event_loop() + loop.run_until_complete(test_headers())
use async method for header verification
PSPC-SPAC-buyandsell_didauth
train
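Because verify() and find_key() become coroutines in this commit, every call site has to await them. A minimal sketch of the resulting usage, with the classes taken from the diff; the headers, method, and path values are stand-ins supplied by the caller, and key_finder is assumed to be a populated StaticKeyFinder (or any KeyFinderBase subclass with an async find_key):

    import asyncio

    from didauth.base import StaticKeyFinder
    from didauth.headers import HeaderVerifier

    async def check_request(headers, method, path, key_finder):
        verifier = HeaderVerifier(key_finder)
        # verify() now awaits the key lookup internally, so it must be awaited
        return await verifier.verify(headers, method, path)

    # e.g. asyncio.get_event_loop().run_until_complete(
    #          check_request(signed_headers, 'GET', '/', key_finder))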
bff7c9e348242b0d680427850ac72f21fa49cabd
diff --git a/lxd/container_lxc.go b/lxd/container_lxc.go index <HASH>..<HASH> 100644 --- a/lxd/container_lxc.go +++ b/lxd/container_lxc.go @@ -8698,7 +8698,7 @@ func (c *containerLXC) generateNetworkFilterEbtablesRules(m types.Device, IPv4 n } // generateNetworkFilterIptablesRules returns a customised set of iptables filter rules based on the device. -func (c *containerLXC) generateNetworkFilterIptablesRules(m types.Device) (rules [][]string, err error) { +func (c *containerLXC) generateNetworkFilterIptablesRules(m types.Device, IPv6 net.IP) (rules [][]string, err error) { mac, err := net.ParseMAC(m["hwaddr"]) if err != nil { return @@ -8714,14 +8714,8 @@ func (c *containerLXC) generateNetworkFilterIptablesRules(m types.Device) (rules // not assigned to the container by sending a specially crafted gratuitous NDP packet with // correct source address and MAC at the IP & ethernet layers, but a fraudulent IP or MAC // inside the ICMPv6 NDP packet. - if shared.IsTrue(m["security.ipv6_filtering"]) && m["ipv6.address"] != "" { - ipv6 := net.ParseIP(m["ipv6.address"]) - if ipv6 == nil { - err = fmt.Errorf("Invalid IPv6 address") - return - } - - ipv6Hex := hex.EncodeToString(ipv6) + if shared.IsTrue(m["security.ipv6_filtering"]) && IPv6 != nil { + ipv6Hex := hex.EncodeToString(IPv6) rules = append(rules, // Prevent Neighbor Advertisement IP spoofing (prevents the container redirecting traffic for IPs that are not its own).
container/lxc: generateNetworkFilterIptablesRules to accept IP info as arguments
lxc_lxd
train
bd1d28cc466d56bc97895cc338be4f18eada6d18
diff --git a/test/__init__.py b/test/__init__.py index <HASH>..<HASH> 100644 --- a/test/__init__.py +++ b/test/__init__.py @@ -97,11 +97,15 @@ or: if io_loop: io_loop.install() + start = time.time() + def on_timeout(): + print '%s timed out after %.2f seconds' % ( + func, time.time() - start) loop.stop() raise AssertionError("%s timed out" % func) - timeout = loop.add_timeout(time.time() + timeout_sec, on_timeout) + timeout = loop.add_timeout(start + timeout_sec, on_timeout) try: generator = func(self, done) diff --git a/test/test_motor_tail.py b/test/test_motor_tail.py index <HASH>..<HASH> 100644 --- a/test/test_motor_tail.py +++ b/test/test_motor_tail.py @@ -74,7 +74,7 @@ class MotorTailTest(MotorTest): tail_pauses = ( 1, 0, 1, 0, 0, 0, 5, 0, 0, 0, 0, 0, 0, 0, 0.1, 0.1, 0, 0) - @async_test_engine(timeout_sec=sum(tail_pauses) + 1) + @async_test_engine(timeout_sec=sum(tail_pauses) + 30) def test_tail(self, done): t = self.start_insertion_thread(self.tail_pauses) results = [] @@ -96,7 +96,7 @@ class MotorTailTest(MotorTest): t.join() done() - @async_test_engine() + @async_test_engine(timeout_sec=30) def test_tail_empty(self, done): pauses = (0, 1) results = [] @@ -122,7 +122,7 @@ class MotorTailTest(MotorTest): drop_collection_pauses = (0, 0, 1, 'drop', 1, 0, 0) - @async_test_engine(timeout_sec=10) + @async_test_engine(timeout_sec=30) def test_tail_drop_collection(self, done): # Ensure tail() throws error when its collection is dropped t = self.start_insertion_thread(self.drop_collection_pauses) @@ -156,7 +156,7 @@ class MotorTailTest(MotorTest): done() - @async_test_engine() + @async_test_engine(timeout_sec=30) def test_tail_nonempty_collection(self, done): self.sync_db.capped.insert([{'_id': -2}, {'_id': -1}], safe=True) @@ -178,7 +178,7 @@ class MotorTailTest(MotorTest): t.join() done() - @async_test_engine() + @async_test_engine(timeout_sec=30) def test_tail_gen(self, done): pauses = (1, 0.5, 1, 0, 0) t = self.start_insertion_thread(pauses)
Debug test timeout issues in Jenkins
mongodb_motor
train
5dfa78ebf55103e9fcf0c8f7cc46c4cde683bf41
diff --git a/src/ZipStream.php b/src/ZipStream.php index <HASH>..<HASH> 100644 --- a/src/ZipStream.php +++ b/src/ZipStream.php @@ -498,14 +498,14 @@ class ZipStream { } rewind($fh); - stream_filter_append($fh, 'zlib.deflate', STREAM_FILTER_READ, 6); + $filter = stream_filter_append($fh, 'zlib.deflate', STREAM_FILTER_READ, 6); while (!feof($fh)) { $data = fread($fh, $block_size); $zlen += strlen($data); } - stream_filter_remove($fh); + stream_filter_remove($filter); // close file and finalize crc fclose($fh); @@ -522,7 +522,7 @@ class ZipStream { $fh = fopen($path, 'rb'); if ($meth_str == self::METHOD_DEFLATE) { - stream_filter_append($fh, 'zlib.deflate', STREAM_FILTER_READ, 6); + $filter = stream_filter_append($fh, 'zlib.deflate', STREAM_FILTER_READ, 6); } // send file blocks @@ -533,7 +533,9 @@ class ZipStream { $this->send($data); } - stream_filter_remove($fh); + if (isset($filter) && is_resource($filter)) { + stream_filter_remove($filter); + } // close input file fclose($fh); @@ -793,8 +795,9 @@ class ZipStream { * @return void */ protected function send($str) { - if ($this->need_headers) + if ($this->need_headers) { $this->sendHttpHeaders(); + } $this->need_headers = false; fwrite($this->opt[self::OPTION_OUTPUT_STREAM], $str);
Correct stream_filter_remove calls
maennchen_ZipStream-PHP
train
b0c4df63d5aa71ddf654fc60e89fbf8b9a6b73dd
diff --git a/scio-examples/src/test/java/org/apache/beam/examples/complete/game/LeaderBoardTest.java b/scio-examples/src/test/java/org/apache/beam/examples/complete/game/LeaderBoardTest.java index <HASH>..<HASH> 100644 --- a/scio-examples/src/test/java/org/apache/beam/examples/complete/game/LeaderBoardTest.java +++ b/scio-examples/src/test/java/org/apache/beam/examples/complete/game/LeaderBoardTest.java @@ -276,6 +276,8 @@ public class LeaderBoardTest implements Serializable { .addElements(event(TestUser.RED_ONE, 4, Duration.standardMinutes(2)), event(TestUser.BLUE_TWO, 3, Duration.ZERO), event(TestUser.BLUE_ONE, 3, Duration.standardMinutes(3))) + // Move the watermark to the end of the window to output on time + .advanceWatermarkTo(baseTime.plus(TEAM_WINDOW_DURATION)) // Move the watermark past the end of the allowed lateness plus the end of the window .advanceWatermarkTo(baseTime.plus(ALLOWED_LATENESS) .plus(TEAM_WINDOW_DURATION).plus(Duration.standardMinutes(1)))
Beam <I>: update LeaderBoardTest.java from upstream
spotify_scio
train
5e94cda0ee90c409f632372eec5045083b607003
diff --git a/Controller/WidgetController.php b/Controller/WidgetController.php index <HASH>..<HASH> 100644 --- a/Controller/WidgetController.php +++ b/Controller/WidgetController.php @@ -21,6 +21,26 @@ use Symfony\Bundle\FrameworkBundle\Controller\Controller; class WidgetController extends Controller { /** + * Renders all paths for a user + * @Route( + * "/path", + * name="claro_desktop_paths", + * options={"expose"=true} + * ) + * @Method("GET") + * @Template("InnovaPathBundle::Widget/listWidget.html.twig") + */ + public function pathsDesktopWidgetAction() + { + $paths = $this->container->get('innova_path.manager.path')->findAccessibleByUser(); + + return array ( + 'widgetType' => 'desktop', + 'paths' => $paths, + ); + } + + /** * Renders all paths from a workspace * @param Workspace $workspace * @return array @@ -48,24 +68,4 @@ class WidgetController extends Controller 'paths' => $paths, ); } - - /** - * Renders all paths for a user - * @Route( - * "/path/my-paths", - * name="my_paths", - * options={"expose"=true} - * ) - * @Method("GET") - * @Template("InnovaPathBundle::Widget/listWidget.html.twig") - */ - public function myPathsWidgetAction() - { - $paths = $this->container->get('innova_path.manager.path')->findAccessibleByUser(); - - return array ( - 'widgetType' => 'desktop', - 'paths' => $paths, - ); - } } \ No newline at end of file diff --git a/Listener/PathWidgetListener.php b/Listener/PathWidgetListener.php index <HASH>..<HASH> 100644 --- a/Listener/PathWidgetListener.php +++ b/Listener/PathWidgetListener.php @@ -34,7 +34,7 @@ class PathWidgetListener * @param DisplayWidgetEvent $event * @throws \Claroline\CoreBundle\Listener\NoHttpRequestException */ - public function onDisplayPathsWorkspace(DisplayWidgetEvent $event) + public function onDisplay(DisplayWidgetEvent $event) { if (!$this->request) { throw new NoHttpRequestException(); @@ -44,37 +44,13 @@ class PathWidgetListener $workspace = $widgetInstance->getWorkspace(); $params = array(); - $params['_controller'] = 'InnovaPathBundle:Widget:pathsWorkspaceWidget'; - $params['workspaceId'] = $workspace->getId(); - - - $subRequest = $this->request->duplicate( - array(), - null, - $params - ); - $response = $this->httpKernel->handle($subRequest, HttpKernelInterface::SUB_REQUEST); - - $event->setContent($response->getContent()); - $event->stopPropagation(); - } - - - /** - * @DI\Observe("widget_innova_my_paths_widget") - * - * @param DisplayWidgetEvent $event - * @throws \Claroline\CoreBundle\Listener\NoHttpRequestException - */ - public function onDisplayMyPaths(DisplayWidgetEvent $event) - { - if (!$this->request) { - throw new NoHttpRequestException(); + if (is_null($workspace)) { + $params['_controller'] = 'InnovaPathBundle:Widget:pathsDesktopWidget'; + } else { + $params['_controller'] = 'InnovaPathBundle:Widget:pathsWorkspaceWidget'; + $params['workspaceId'] = $workspace->getId(); } - $params = array(); - $params['_controller'] = 'InnovaPathBundle:Widget:myPathsWidget'; - $subRequest = $this->request->duplicate(array(), null, $params); $response = $this->httpKernel->handle($subRequest, HttpKernelInterface::SUB_REQUEST);
[PathBundle] fix widgets
claroline_Distribution
train
5f283d3b00151cf7d839c37daa9e3fbc80ca3e59
diff --git a/pact/pact-runtime/src/main/java/eu/stratosphere/pact/runtime/task/DataSinkTask.java b/pact/pact-runtime/src/main/java/eu/stratosphere/pact/runtime/task/DataSinkTask.java index <HASH>..<HASH> 100644 --- a/pact/pact-runtime/src/main/java/eu/stratosphere/pact/runtime/task/DataSinkTask.java +++ b/pact/pact-runtime/src/main/java/eu/stratosphere/pact/runtime/task/DataSinkTask.java @@ -317,7 +317,16 @@ public class DataSinkTask<IT> extends AbstractOutputTask // path points to an existing file. delete it to be able to replace the // file with a directory fs.delete(path, false); - return -1; + int dop = getTaskConfiguration().getInteger(DEGREE_OF_PARALLELISM_KEY, -1); + if (dop == 1) { + // a none existing file and a degree of parallelism that is one + return 1; + } else { + // a degree of parallelism greater one, or an unspecified one. in all cases, create a directory + // the output + fs.mkdirs(path); + return -1; + } } } catch (FileNotFoundException fnfex) {
Fixed bug with incorrect output directory preparation.
stratosphere_stratosphere
train
316551bb5d2370e2d515728e4d438ce0546eaa1d
diff --git a/lib/ruby_speech/nlsml/document.rb b/lib/ruby_speech/nlsml/document.rb index <HASH>..<HASH> 100644 --- a/lib/ruby_speech/nlsml/document.rb +++ b/lib/ruby_speech/nlsml/document.rb @@ -41,6 +41,7 @@ module RubySpeech def instance_hash_for_interpretation(interpretation) instance_element = interpretation.at_xpath 'xf:instance' + return unless instance_element element_children_key_value instance_element end diff --git a/spec/ruby_speech/nlsml_spec.rb b/spec/ruby_speech/nlsml_spec.rb index <HASH>..<HASH> 100644 --- a/spec/ruby_speech/nlsml_spec.rb +++ b/spec/ruby_speech/nlsml_spec.rb @@ -119,6 +119,43 @@ describe RubySpeech::NLSML do subject.should_not be == RubySpeech.parse(empty_result) end + context "with an interpretation that has no model/instance" do + let :example_document do + ''' +<result xmlns:xf="http://www.w3.org/2000/xforms" grammar="http://flight"> + <interpretation confidence="60"> + <input mode="speech">I want to go to Pittsburgh</input> + </interpretation> + <interpretation confidence="40"> + <input>I want to go to Stockholm</input> + </interpretation> +</result> + ''' + end + + let(:expected_best_interpretation) do + { + confidence: 0.6, + input: { mode: :speech, content: 'I want to go to Pittsburgh' }, + instance: nil + } + end + + let(:expected_interpretations) do + [ + expected_best_interpretation, + { + confidence: 0.4, + input: { content: 'I want to go to Stockholm' }, + instance: nil + } + ] + end + + its(:interpretations) { should == expected_interpretations } + its(:best_interpretation) { should == expected_best_interpretation } + end + context "without any interpretations" do subject do RubySpeech.parse empty_result
[BUGFIX] Handle parsing NLSML documents with no model/instances
adhearsion_ruby_speech
train
e8830a730f8633df8e459256163360545679144f
diff --git a/releaf-permissions/app/controllers/releaf/permissions/home_controller.rb b/releaf-permissions/app/controllers/releaf/permissions/home_controller.rb index <HASH>..<HASH> 100644 --- a/releaf-permissions/app/controllers/releaf/permissions/home_controller.rb +++ b/releaf-permissions/app/controllers/releaf/permissions/home_controller.rb @@ -20,7 +20,13 @@ module Releaf::Permissions end def controllers_to_try - [access_control.user.role.default_controller, access_control.user.role.allowed_controllers].flatten.uniq + [access_control.user.role.default_controller, allowed_controllers].flatten.uniq + end + + def allowed_controllers + # Note: This basically sorts allowed controllers in order specified by + # Releaf.available_controllers + Releaf.available_controllers & access_control.user.role.allowed_controllers end end end
Fix randomness in picking fallback controller
cubesystems_releaf
train
187a2c3835fb4ae9f3422046ee6650e2ace063e4
diff --git a/lib/device_curator.js b/lib/device_curator.js index <HASH>..<HASH> 100644 --- a/lib/device_curator.js +++ b/lib/device_curator.js @@ -154,10 +154,15 @@ function device(useMockDevice) { var parsedResult = data_parser.parseResult('HARDWARE_INSTALLED', res, dt); // Save results - retResult.highResADC = parsedResult.highResADC; - retResult.wifi = parsedResult.wifi; - retResult.rtc = parsedResult.rtc; - retResult.sdCard = parsedResult.sdCard; + var parsedResultKeys = Object.keys(parsedResult); + parsedResultKeys.forEach(function(key) { + retResult[key] = parsedResult[key]; + }); + // Save results + // retResult.highResADC = parsedResult.highResADC; + // retResult.wifi = parsedResult.wifi; + // retResult.rtc = parsedResult.rtc; + // retResult.sdCard = parsedResult.sdCard; self.savedAttributes.subclass = parsedResult.subclass; self.savedAttributes.isPro = parsedResult.isPro; diff --git a/package.json b/package.json index <HASH>..<HASH> 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "ljswitchboard-ljm_device_curator", - "version": "0.0.20", + "version": "0.0.21", "description": "A promise based ljm wrapper with expanded device functionality for the ljswitchboard project", "main": "./lib/device_curator.js", "scripts": { diff --git a/test/mock_device_attrs_test.js b/test/mock_device_attrs_test.js index <HASH>..<HASH> 100644 --- a/test/mock_device_attrs_test.js +++ b/test/mock_device_attrs_test.js @@ -203,6 +203,7 @@ exports.tests = { test.strictEqual(res.connectionTypeString, 'LJM_ctWIFI'); test.strictEqual(res.serialNumber, deviceInfo.serialNumber); test.strictEqual(res.ip, deviceInfo.ipAddress); + // console.log('device attributes', res); test.done(); }); },
Added some more information to the device's saved attributes.
chrisJohn404_ljswitchboard-ljm_device_curator
train
0d149932ff52110a2870dd6256b8d6ce1b511fd9
diff --git a/tests/test_funcs.py b/tests/test_funcs.py index <HASH>..<HASH> 100644 --- a/tests/test_funcs.py +++ b/tests/test_funcs.py @@ -3,6 +3,7 @@ from __future__ import print_function # python 2/3 compatibility from sc2common import commonUtilFuncs as cu from sc2common import containers as cn +from sc2common import types as t def test_getName(): @@ -174,3 +175,19 @@ def test_quadraticSolver(): assert cu.quadraticSolver(-4, 8, 0) == [0.0, 2.0] assert cu.quadraticSolver(-5, 0, 5) == [-1.0, 1.0] +def test_determineRace(): + race = ActualRaces(c.PROTOSS) + assert cu.determineRace(race) == c.PROTOSS + assert cu.determineRace(1) == c.PROTOSS + assert cu.determineRace(2) == c.TERRAN + assert cu.determineRace(3) == c.ZERG + assert cu.determineRace(4) == c.RANDOM + assert cu.determineRace("prot") == c.PROTOSS + assert cu.determineRace("terr") == c.TERRAN + assert cu.determineRace("zerg") == c.ZERG + assert cu.determineRace("rand") == c.RANDOM + try: + cu.determineRace(1) + assert False + except ValueError: + assert True
- added testing for determineRace() function
ttinies_sc2common
train
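The assertions above pin down determineRace's contract even though its implementation is not part of the diff. A hypothetical reconstruction of just that contract (integer codes 1 through 4 and case-insensitive name prefixes); every name below is inferred from the test, not sourced from sc2common:

    RACES = {1: 'Protoss', 2: 'Terran', 3: 'Zerg', 4: 'Random'}  # ordering inferred from the test

    def determine_race(value):
        # Hypothetical sketch: only the behavior the assertions exercise
        # (int codes and lowercase name prefixes) is grounded in the diff.
        if isinstance(value, int):
            if value in RACES:
                return RACES[value]
            raise ValueError('unknown race code: %r' % value)
        text = str(value).lower()
        for name in RACES.values():
            if name.lower().startswith(text):
                return name
        raise ValueError('unknown race: %r' % value)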
bfe22dfde8c14b36f7ccb469404667830cad4637
diff --git a/app/controllers/resource_import_files_controller.rb b/app/controllers/resource_import_files_controller.rb index <HASH>..<HASH> 100644 --- a/app/controllers/resource_import_files_controller.rb +++ b/app/controllers/resource_import_files_controller.rb @@ -40,7 +40,6 @@ class ResourceImportFilesController < ApplicationController def new @resource_import_file = ResourceImportFile.new @resource_import_file.library_id = current_user.profile.library_id - @shelves = Library.find(@resource_import_file.library_id).shelves respond_to do |format| format.html # new.html.erb
fix getting shelf list (next-l/enju_leaf#<I>)
next-l_enju_biblio
train
ea02213839f44950f19e13bef48af1a0f009bdc0
diff --git a/shared/profile/post-proof.desktop.js b/shared/profile/post-proof.desktop.js index <HASH>..<HASH> 100644 --- a/shared/profile/post-proof.desktop.js +++ b/shared/profile/post-proof.desktop.js @@ -179,11 +179,14 @@ const styleNoteText = { const styleProofAction = { marginTop: globalMargins.small, + flexShrink: 0, } const styleButtonsContainer = { ...globalStyles.flexBoxRow, + flexShrink: 0, marginTop: globalMargins.medium, + marginBottom: globalMargins.medium, } export default PostProof
don't let buttons overlap (#<I>)
keybase_client
train
530cc950a4cb2b0391b847d750d77e6b13b6726e
diff --git a/cmd/watch-termination/main.go b/cmd/watch-termination/main.go index <HASH>..<HASH> 100644 --- a/cmd/watch-termination/main.go +++ b/cmd/watch-termination/main.go @@ -4,6 +4,7 @@ import ( "flag" "fmt" "io" + "io/ioutil" "os" "os/exec" "os/signal" @@ -20,8 +21,11 @@ func main() { terminationLock := flag.String("termination-touch-file", "", "Touch this file on SIGTERM and delete on termination") klog.InitFlags(nil) - flag.Set("logtostderr", "true") - flag.Set("v", "2") + flag.Set("v", "9") + + // never log to stderr, only through our termination log writer (which sends it also to stderr) + flag.Set("logtostderr", "false") + flag.Set("stderrthreshold", "99") flag.Parse() args := flag.CommandLine.Args() @@ -40,7 +44,12 @@ func main() { fn: *terminationLog, startFileLoggingCh: termCh, } - klog.SetOutput(stderr) + + // do the klog file writer dance: klog writes to all outputs of lower + // severity. No idea why. So we discard for anything other than info. + // Otherwise, we would see errors multiple times. + klog.SetOutput(ioutil.Discard) + klog.SetOutputBySeverity("INFO", stderr) } cmd := exec.Command(args[0], args[1:]...) @@ -123,7 +132,7 @@ func (w *terminationFileWriter) Write(bs []byte) (int, error) { Compress: false, } w.logger = l - klog.Infof("Redirecting termination logs to %q", w.fn) + fmt.Fprintf(os.Stderr, "Copying termination logs to %q\n", w.fn) } if n, err := w.logger.Write(bs); err != nil { return n, err
watch-termination: fix klog redirection
openshift_origin
train
53a9527875e5e9aca59946937fab8291085d4b3a
diff --git a/pysat/instruments/pysat_testing.py b/pysat/instruments/pysat_testing.py index <HASH>..<HASH> 100644 --- a/pysat/instruments/pysat_testing.py +++ b/pysat/instruments/pysat_testing.py @@ -20,6 +20,20 @@ meta = pysat.Meta() meta['uts'] = {'units':'s', 'long_name':'Universal Time', 'custom':False} meta['mlt'] = {'units':'hours', 'long_name':'Magnetic Local Time'} meta['slt'] = {'units':'hours', 'long_name':'Solar Local Time'} +meta['orbit_num'] = {'units':'', 'long_name':'Orbit Number'} +meta['longitude'] = {'units':'degrees', 'long_name':'Longitude'} +meta['latitude'] = {'units':'degrees', 'long_name':'Latitude'} +meta['dummy1'] = {'units':'', 'long_name':'dummy1'} +meta['dummy2'] = {'units':'', 'long_name':'dummy2'} +meta['dummy3'] = {'units':'', 'long_name':'dummy3'} +meta['dummy4'] = {'units':'', 'long_name':'dummy4'} +meta['string_dummy'] = {'units':'', 'long_name':'string_dummy'} +meta['unicode_dummy'] = {'units':'', 'long_name':'unicode_dummy'} +meta['int8_dummy'] = {'units':'', 'long_name':'int8_dummy'} +meta['int16_dummy'] = {'units':'', 'long_name':'int16_dummy'} +meta['int32_dummy'] = {'units':'', 'long_name':'int32_dummy'} +meta['int64_dummy'] = {'units':'', 'long_name':'int64_dummy'} + def init(self):
Added missing metadata for 1D test instrument
rstoneback_pysat
train
929b68ae4d73f9dfc746a0781bf5bd76b58b9431
diff --git a/lxd/db/networks_test.go b/lxd/db/networks_test.go index <HASH>..<HASH> 100644 --- a/lxd/db/networks_test.go +++ b/lxd/db/networks_test.go @@ -15,7 +15,7 @@ func TestGetNetworksLocalConfigs(t *testing.T) { cluster, cleanup := db.NewTestCluster(t) defer cleanup() - _, err := cluster.CreateNetwork("lxdbr0", "", map[string]string{ + _, err := cluster.CreateNetwork("lxdbr0", "", db.NetworkTypeBridge, map[string]string{ "dns.mode": "none", "bridge.external_interfaces": "vlan0", }) @@ -45,7 +45,7 @@ func TestCreatePendingNetwork(t *testing.T) { require.NoError(t, err) config := map[string]string{"bridge.external_interfaces": "foo"} - err = tx.CreatePendingNetwork("buzz", "network1", config) + err = tx.CreatePendingNetwork("buzz", "network1", db.NetworkTypeBridge, config) require.NoError(t, err) networkID, err := tx.GetNetworkID("network1") @@ -53,7 +53,7 @@ func TestCreatePendingNetwork(t *testing.T) { assert.True(t, networkID > 0) config = map[string]string{"bridge.external_interfaces": "bar"} - err = tx.CreatePendingNetwork("rusp", "network1", config) + err = tx.CreatePendingNetwork("rusp", "network1", db.NetworkTypeBridge, config) require.NoError(t, err) // The initial node (whose name is 'none' by default) is missing. @@ -61,7 +61,7 @@ func TestCreatePendingNetwork(t *testing.T) { require.EqualError(t, err, "Network not defined on nodes: none") config = map[string]string{"bridge.external_interfaces": "egg"} - err = tx.CreatePendingNetwork("none", "network1", config) + err = tx.CreatePendingNetwork("none", "network1", db.NetworkTypeBridge, config) require.NoError(t, err) // Now the storage is defined on all nodes. @@ -82,10 +82,10 @@ func TestNetworksCreatePending_AlreadyDefined(t *testing.T) { _, err := tx.CreateNode("buzz", "1.2.3.4:666") require.NoError(t, err) - err = tx.CreatePendingNetwork("buzz", "network1", map[string]string{}) + err = tx.CreatePendingNetwork("buzz", "network1", db.NetworkTypeBridge, map[string]string{}) require.NoError(t, err) - err = tx.CreatePendingNetwork("buzz", "network1", map[string]string{}) + err = tx.CreatePendingNetwork("buzz", "network1", db.NetworkTypeBridge, map[string]string{}) require.Equal(t, db.ErrAlreadyDefined, err) } @@ -94,6 +94,6 @@ func TestNetworksCreatePending_NonExistingNode(t *testing.T) { tx, cleanup := db.NewTestClusterTx(t) defer cleanup() - err := tx.CreatePendingNetwork("buzz", "network1", map[string]string{}) + err := tx.CreatePendingNetwork("buzz", "network1", db.NetworkTypeBridge, map[string]string{}) require.Equal(t, db.ErrNoSuchObject, err) } diff --git a/lxd/instance_test.go b/lxd/instance_test.go index <HASH>..<HASH> 100644 --- a/lxd/instance_test.go +++ b/lxd/instance_test.go @@ -99,7 +99,7 @@ func (suite *containerTestSuite) TestContainer_ProfilesOverwriteDefaultNic() { Name: "testFoo", } - _, err := suite.d.State().Cluster.CreateNetwork("unknownbr0", "", nil) + _, err := suite.d.State().Cluster.CreateNetwork("unknownbr0", "", db.NetworkTypeBridge, nil) suite.Req.Nil(err) c, err := instanceCreateInternal(suite.d.State(), args) @@ -133,7 +133,7 @@ func (suite *containerTestSuite) TestContainer_LoadFromDB() { } state := suite.d.State() - _, err := state.Cluster.CreateNetwork("unknownbr0", "", nil) + _, err := state.Cluster.CreateNetwork("unknownbr0", "", db.NetworkTypeBridge, nil) suite.Req.Nil(err) // Create the container
lxd: Updates network tests to pass netType
lxc_lxd
train
0bf3d61cd01e99f34f852c0b819ffe6d650c185e
diff --git a/lib/jsonapi/resource.rb b/lib/jsonapi/resource.rb index <HASH>..<HASH> 100644 --- a/lib/jsonapi/resource.rb +++ b/lib/jsonapi/resource.rb @@ -525,9 +525,9 @@ module JSONAPI if field.to_s.include?(".") *model_names, terminus, column_name = field.split(".") records = records.includes(build_includes_path(model_names, terminus)) - - table_name = resolve_terminus_classname([records.model.to_s, *model_names, terminus]) - records = records.order("#{table_name}.#{column_name} #{direction}") + record_class = resolve_terminus_classname([records.model.to_s, *model_names, terminus]) + order_by = "#{record_class.table_name}.#{column_name} #{direction}" + records = records.order(order_by) else records = records.order(field => direction) end @@ -550,7 +550,7 @@ module JSONAPI prev.classify.constantize.reflect_on_all_associations.detect do |assoc| assoc.name.to_s.downcase == current.downcase end - end.table_name + end end def apply_filter(records, filter, value, options = {}) diff --git a/test/controllers/controller_test.rb b/test/controllers/controller_test.rb index <HASH>..<HASH> 100644 --- a/test/controllers/controller_test.rb +++ b/test/controllers/controller_test.rb @@ -299,9 +299,18 @@ class PostsControllerTest < ActionController::TestCase get :index, {sort: 'author.name'} assert_response :success + assert json_response['data'].length > 10 assert_equal '17', json_response['data'][0]['id'] end + def test_desc_sorting_by_relationship_field + get :index, {sort: '-author.name'} + + assert_response :success + assert json_response['data'].length > 10 + assert_equal '17', json_response['data'][-1]['id'] + end + def test_invalid_sort_param get :index, {sort: 'asdfg'}
Sorting by relationships - increase test coverage
cerebris_jsonapi-resources
train
c54de985421c8ce4bb3f4000d0f4b33845a96253
diff --git a/azurerm/data_source_snapshot.go b/azurerm/data_source_snapshot.go index <HASH>..<HASH> 100644 --- a/azurerm/data_source_snapshot.go +++ b/azurerm/data_source_snapshot.go @@ -14,10 +14,8 @@ func dataSourceArmSnapshot() *schema.Resource { Type: schema.TypeString, Required: true, }, - "resource_group_name": { - Type: schema.TypeString, - Required: true, - }, + + "resource_group_name": resourceGroupNameForDataSourceSchema(), // Computed "os_type": { diff --git a/azurerm/resource_arm_container_group.go b/azurerm/resource_arm_container_group.go index <HASH>..<HASH> 100644 --- a/azurerm/resource_arm_container_group.go +++ b/azurerm/resource_arm_container_group.go @@ -25,11 +25,7 @@ func resourceArmContainerGroup() *schema.Resource { "location": locationSchema(), - "resource_group_name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - }, + "resource_group_name": resourceGroupNameSchema(), "ip_address_type": { Type: schema.TypeString, diff --git a/azurerm/resource_arm_container_service.go b/azurerm/resource_arm_container_service.go index <HASH>..<HASH> 100644 --- a/azurerm/resource_arm_container_service.go +++ b/azurerm/resource_arm_container_service.go @@ -126,8 +126,9 @@ func resourceArmContainerService() *schema.Resource { }, "vm_size": { - Type: schema.TypeString, - Required: true, + Type: schema.TypeString, + Required: true, + DiffSuppressFunc: ignoreCaseDiffSuppressFunc, }, }, }, diff --git a/azurerm/resource_arm_kubernetes_cluster.go b/azurerm/resource_arm_kubernetes_cluster.go index <HASH>..<HASH> 100644 --- a/azurerm/resource_arm_kubernetes_cluster.go +++ b/azurerm/resource_arm_kubernetes_cluster.go @@ -111,9 +111,10 @@ func resourceArmKubernetesCluster() *schema.Resource { }, "vm_size": { - Type: schema.TypeString, - Required: true, - ForceNew: true, + Type: schema.TypeString, + Required: true, + ForceNew: true, + DiffSuppressFunc: ignoreCaseDiffSuppressFunc, }, "os_disk_size_gb": { diff --git a/azurerm/resource_arm_virtual_machine.go b/azurerm/resource_arm_virtual_machine.go index <HASH>..<HASH> 100644 --- a/azurerm/resource_arm_virtual_machine.go +++ b/azurerm/resource_arm_virtual_machine.go @@ -107,8 +107,9 @@ func resourceArmVirtualMachine() *schema.Resource { }, "vm_size": { - Type: schema.TypeString, - Required: true, + Type: schema.TypeString, + Required: true, + DiffSuppressFunc: ignoreCaseDiffSuppressFunc, }, "storage_image_reference": { diff --git a/azurerm/resource_arm_virtual_network_gateway_connection.go b/azurerm/resource_arm_virtual_network_gateway_connection.go index <HASH>..<HASH> 100644 --- a/azurerm/resource_arm_virtual_network_gateway_connection.go +++ b/azurerm/resource_arm_virtual_network_gateway_connection.go @@ -28,11 +28,7 @@ func resourceArmVirtualNetworkGatewayConnection() *schema.Resource { ForceNew: true, }, - "resource_group_name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - }, + "resource_group_name": resourceGroupNameSchema(), "location": locationSchema(),
Made the vm_size property case insensitive and updated resource_group_name fields to use shared schema (#<I>)
terraform-providers_terraform-provider-azurerm
train
031d7e7e7f82b9fa3d9841bb2a5f797c7d251225
diff --git a/Gruntfile.js b/Gruntfile.js index <HASH>..<HASH> 100644 --- a/Gruntfile.js +++ b/Gruntfile.js @@ -63,6 +63,9 @@ module.exports = function(grunt) { }, watch: { // scss: { //scss can be watched if you like + // options: { + // livereload: true + // }, // files: ['source/css/**/*.scss', 'public/styleguide/css/*.scss'], // tasks: ['default'] // },
added livereload option to the sass watch. Added livereload so that if someone uses this, livereload will also work.
pattern-lab_patternengine-node-underscore
train
92d92bb4b3ab36708f4d65fbfdb37b16f519367c
diff --git a/tests/conftest.py b/tests/conftest.py index <HASH>..<HASH> 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -100,6 +100,15 @@ def config(config_source, auth_config_source, mocker): @pytest.fixture(autouse=True) +def mock_user_config_dir(mocker): + config_dir = tempfile.mkdtemp(prefix="poetry_config_") + mocker.patch("poetry.locations.CONFIG_DIR", new=config_dir) + mocker.patch("poetry.factory.CONFIG_DIR", new=config_dir) + yield + shutil.rmtree(config_dir, ignore_errors=True) + + +@pytest.fixture(autouse=True) def download_mock(mocker): # Patch download to not download anything but to just copy from fixtures mocker.patch("poetry.utils.helpers.download_file", new=mock_download)
tests: ensure ephemeral config usage. Previously, pytest execution was influenced by poetry user configuration. This change ensures that a new config.toml is used for each test case.
sdispater_poetry
train
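The fixture pattern this commit leans on, restated generically: autouse=True runs it around every test, and patching the config-directory constants is what keeps the developer's real settings out of the run. The module paths below are placeholders, not poetry's, and the mocker fixture comes from the pytest-mock plugin:

    import shutil
    import tempfile

    import pytest

    @pytest.fixture(autouse=True)
    def isolated_config_dir(mocker):
        # Each test gets a throwaway config directory, deleted afterwards,
        # so machine-local configuration can never influence a test case.
        config_dir = tempfile.mkdtemp(prefix="myapp_config_")  # hypothetical prefix
        mocker.patch("myapp.locations.CONFIG_DIR", new=config_dir)  # placeholder module path
        yield
        shutil.rmtree(config_dir, ignore_errors=True)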
cc496c0dba906503f51f4c317ba8683b2ef324ac
diff --git a/switches/radio/radio-group.js b/switches/radio/radio-group.js index <HASH>..<HASH> 100644 --- a/switches/radio/radio-group.js +++ b/switches/radio/radio-group.js @@ -42,7 +42,7 @@ class Group extends React.PureComponent { <DirectionWrapper scale="m"> {React.Children.map(this.props.children, child => { // NOTE: Allowing to intersperse other elements than `Option`. - if (child.type.displayName === Option.displayName) + if (child && child.type.displayName === Option.displayName) return React.cloneElement(child, { isChecked: this.props.value === child.props.value, name: this.props.name,
fix(uikit/switches): allow mapping over null elements (e.g. produced by conditionals)
commercetools_ui-kit
train
d309622a27d5576e1a6a7fcff306d52336dd8e7f
diff --git a/spacy/tests/parser/test_ner.py b/spacy/tests/parser/test_ner.py index <HASH>..<HASH> 100644 --- a/spacy/tests/parser/test_ner.py +++ b/spacy/tests/parser/test_ner.py @@ -46,7 +46,7 @@ def test_unit_end_gazetteer(EN): ents = matcher(doc) assert len(ents) == 1 EN.entity(doc) - assert list(nlp.ents)[0].text == 'cal' + assert list(EN.ents)[0].text == 'cal'
* Add test for matcher end-point problem
explosion_spaCy
train
54ca0e042ae0d2f058911da1e6d9905d576f5224
diff --git a/nephele/nephele-common/src/main/java/eu/stratosphere/nephele/util/FileUtils.java b/nephele/nephele-common/src/main/java/eu/stratosphere/nephele/util/FileUtils.java index <HASH>..<HASH> 100644 --- a/nephele/nephele-common/src/main/java/eu/stratosphere/nephele/util/FileUtils.java +++ b/nephele/nephele-common/src/main/java/eu/stratosphere/nephele/util/FileUtils.java @@ -46,7 +46,7 @@ public class FileUtils { final StringBuilder stringBuilder = new StringBuilder(prefix); for (int i = 0; i < LENGTH; i++) { - stringBuilder.append(ALPHABET[(int) Math.floor(Math.random() * ALPHABET.length)]); + stringBuilder.append(ALPHABET[(int) Math.floor(Math.random() * (double)ALPHABET.length)]); } return stringBuilder.toString();
Minor modification to generation of random filenames
stratosphere_stratosphere
train
8c90995b8e1e7e38cdd2f8893db9be85a78b2480
diff --git a/py/dynesty/dynesty.py b/py/dynesty/dynesty.py index <HASH>..<HASH> 100644 --- a/py/dynesty/dynesty.py +++ b/py/dynesty/dynesty.py @@ -485,7 +485,7 @@ def NestedSampler(loglikelihood, prior_transform, ndim, nlive=500, if queue_size is None: try: queue_size = pool.size - except: + except AttributeError: raise ValueError("Cannot initialize `queue_size` because " "`pool.size` has not been provided. Please" "define `pool.size` or specify `queue_size` " @@ -937,7 +937,7 @@ def DynamicNestedSampler(loglikelihood, prior_transform, ndim, if queue_size is None: try: queue_size = pool.size - except: + except AttributeError: raise ValueError("Cannot initialize `queue_size` because " "`pool.size` has not been provided. Please " "define `pool.size` or specify `queue_size` " diff --git a/tests/test_pool.py b/tests/test_pool.py index <HASH>..<HASH> 100644 --- a/tests/test_pool.py +++ b/tests/test_pool.py @@ -35,8 +35,27 @@ def test_pool(): nlive=nlive, bound='multi', sample='unif', - pool=pool) + pool=pool, + queue_size=2) sampler.run_nested(dlogz=0.1, print_progress=printing) logz_truth = 235.856 assert (abs(logz_truth - sampler.results.logz[-1]) < 5. * sampler.results.logzerr[-1]) + + +def test_pool2(): + # test pool + ndim = 2 + pool = mp.Pool(2) + sampler = dynesty.DynamicNestedSampler(loglike_egg, + prior_transform_egg, + ndim, + nlive=nlive, + bound='multi', + sample='unif', + pool=pool, + queue_size=2) + sampler.run_nested(dlogz_init=0.1, print_progress=printing) + logz_truth = 235.856 + assert (abs(logz_truth - sampler.results.logz[-1]) < + 5. * sampler.results.logzerr[-1])
fix the test of the pool functionality; get rid of the unqualified try/except related to that
joshspeagle_dynesty
train
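The second half of the message is the standard exception-narrowing fix: catch only the error that means "the pool has no size attribute", so that unrelated failures (including a KeyboardInterrupt under a bare except) are no longer swallowed. The shape of the fix, abstracted from the diff into a free-standing helper:

    def resolve_queue_size(pool, queue_size=None):
        if queue_size is not None:
            return queue_size
        try:
            # Only AttributeError signals "this pool does not advertise its
            # size"; a bare except: would also hide unrelated failures.
            return pool.size
        except AttributeError:
            raise ValueError("Cannot infer queue_size because pool.size is "
                             "not provided; pass queue_size explicitly")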
740305cc80b18e0fa4402a3d891e6bcdc3ea4f5c
diff --git a/authcode/models.py b/authcode/models.py index <HASH>..<HASH> 100644 --- a/authcode/models.py +++ b/authcode/models.py @@ -45,7 +45,7 @@ def get_auth_user_mixin(auth, roles=False): class AuthUserMixin(object): id = Column(Integer, primary_key=True) - login = Column(Unicode, nullable=False, unique=True, index=True) + login = Column(Unicode(255), nullable=False, unique=True, index=True) password = Column(String(255), nullable=True) last_sign_in = Column(DateTime, nullable=True) deleted = Column(Boolean, default=False)
Add a length to the login column to be usable in lesser db dialects
jpscaletti_authcode
train
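Why the length matters: some dialects, MySQL and MariaDB notably, refuse a VARCHAR without an explicit length and in particular cannot build the unique index this column asks for, while SQLite and PostgreSQL accept an unbounded Unicode. A self-contained sketch of the fixed definition (an illustrative model, not authcode's mixin machinery; declarative_base is imported from its SQLAlchemy 1.4+ location):

    from sqlalchemy import Column, Integer, Unicode
    from sqlalchemy.orm import declarative_base

    Base = declarative_base()

    class User(Base):
        __tablename__ = 'users'
        id = Column(Integer, primary_key=True)
        # The explicit length lets length-requiring dialects create the
        # column and its unique index.
        login = Column(Unicode(255), nullable=False, unique=True, index=True)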
73bbcef1eee90dcf72b4364b1d988a60dca4d005
diff --git a/spikeextractors/extractorlist.py b/spikeextractors/extractorlist.py index <HASH>..<HASH> 100644 --- a/spikeextractors/extractorlist.py +++ b/spikeextractors/extractorlist.py @@ -7,7 +7,7 @@ from .extractors.hs2sortingextractor.hs2sortingextractor import HS2SortingExtrac from .extractors.klustaextractors.klustaextractors import KlustaSortingExtractor, KlustaRecordingExtractor from .extractors.kilosortextractors.kilosortextractors import KiloSortSortingExtractor, KiloSortRecordingExtractor from .extractors.numpyextractors.numpyextractors import NumpyRecordingExtractor, NumpySortingExtractor -from .extractors.nwbextractors.nwbextractors import NwbRecordingExtractor +from .extractors.nwbextractors.nwbextractors import NwbRecordingExtractor, NwbSortingExtractor from .extractors.maxonerecordingextractor import MaxOneRecordingExtractor from .extractors.openephysextractors.openephysextractors import OpenEphysRecordingExtractor, OpenEphysSortingExtractor from .extractors.phyextractors.phyextractors import PhyRecordingExtractor, PhySortingExtractor diff --git a/spikeextractors/extractors/nwbextractors/__init__.py b/spikeextractors/extractors/nwbextractors/__init__.py index <HASH>..<HASH> 100644 --- a/spikeextractors/extractors/nwbextractors/__init__.py +++ b/spikeextractors/extractors/nwbextractors/__init__.py @@ -1 +1 @@ -from .nwbextractors import NwbRecordingExtractor +from .nwbextractors import NwbRecordingExtractor, NwbSortingExtractor diff --git a/spikeextractors/extractors/nwbextractors/nwbextractors.py b/spikeextractors/extractors/nwbextractors/nwbextractors.py index <HASH>..<HASH> 100644 --- a/spikeextractors/extractors/nwbextractors/nwbextractors.py +++ b/spikeextractors/extractors/nwbextractors/nwbextractors.py @@ -102,20 +102,19 @@ class NwbRecordingExtractor(CopyRecordingExtractor): while len(location) < 3: location = np.append(location, [0]) nwbfile.add_electrode( - id, + id=id, x=float(location[0]), y=float(location[1]), z=float(location[2]), imp=impedence, location='electrode_location', filtering='none', group=electrode_group, - description='electrode_description' ) electrode_table_region = nwbfile.create_electrode_table_region( list(range(M)), 'electrode_table_region' ) - rate = recording.get_sampling_frequency() / 1000 + rate = recording.get_sampling_frequency() #/ 1000 ephys_data = recording.get_traces().T ephys_ts = ElectricalSeries( @@ -129,7 +128,88 @@ class NwbRecordingExtractor(CopyRecordingExtractor): description='acquisition_description' ) nwbfile.add_acquisition(ephys_ts) + if os.path.exists(save_path): os.remove(save_path) with NWBHDF5IO(save_path, 'w') as io: io.write(nwbfile) + + + + +class NwbSortingExtractor(se.SortingExtractor): + def __init__(self, path): + try: + from pynwb import NWBHDF5IO + from pynwb import NWBFile + from pynwb.ecephys import ElectricalSeries + except ModuleNotFoundError: + raise ModuleNotFoundError("To use the Nwb extractors, install pynwb: \n\n" + "pip install pynwb\n\n") + #self._path = path + #with NWBHDF5IO(path, 'r') as io: + # nwbfile = io.read() + # NSX = se.NumpySortingExtractor() + se.RecordingExtractor.__init__() + + @staticmethod + def write_sorting(sorting, save_path): + try: + from pynwb import NWBHDF5IO + from pynwb import NWBFile + from pynwb.ecephys import ElectricalSeries + except ModuleNotFoundError: + raise ModuleNotFoundError("To use the Nwb extractors, install pynwb: \n\n" + "pip install pynwb\n\n") + M = len(sorting.get_unit_ids()) + fs = sorting.get_sampling_frequency() + + #if NWB file already exists, just adds sorting data to it + if os.path.exists(save_path): + with NWBHDF5IO(save_path, 'r+') as io: + nwbfile = io.read() + + #Stores spike times for each detected cell (unit) + for id in range(M): + spkt = sorting.get_unit_spike_train(unit_id=id+1) / fs + print('ID: ',id) + print('Spkt: ', spkt) + nwbfile.add_unit( + id=id, + spike_times=spkt, + obs_intervals=None, + electrodes=None, + electrode_group=None, + waveform_mean=None, + waveform_sd=None + ) + + io.write(nwbfile) + + #if new NWB file does not exist, create it and add sorting data + else: + nwbfile = NWBFile( + session_description='', + identifier='', + session_start_time=datetime.now(), + experimenter='', + lab='', + institution='', + experiment_description='', + session_id='' + ) + + for id in range(M): + spkt = sorting.get_unit_spike_train(unit_id=id) / fs + nwbfile.add_unit( + id=id, + spike_times=spkt, + obs_intervals=None, + electrodes=None, + electrode_group=None, + waveform_mean=None, + waveform_sd=None + ) + + with NWBHDF5IO(save_path, 'w') as io: + io.write(nwbfile)
created NwbSortingExtractor with write_sorting method
SpikeInterface_spikeextractors
train
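A sketch of using the new writer. Per the diff, write_sorting divides each unit's spike frames by the sampling frequency to store times in seconds, creates the file when it is absent, and appends units through the 'r+' branch when it already exists; the sorting object is assumed to be populated elsewhere:

    import spikeextractors as se

    def export_to_nwb(sorting, path='sorting_results.nwb'):
        # `sorting` is any SortingExtractor with unit ids, spike trains and
        # a sampling frequency already set; nothing here validates that.
        se.NwbSortingExtractor.write_sorting(sorting, path)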
7834cb9ca210bed3ef0fdb0e8156e3471121ce73
diff --git a/lib/httparty.rb b/lib/httparty.rb index <HASH>..<HASH> 100644 --- a/lib/httparty.rb +++ b/lib/httparty.rb @@ -1,12 +1,11 @@ require 'net/http' require 'net/https' require 'uri' -require 'ostruct' require 'rubygems' require 'active_support' -directory = File.dirname(__FILE__) -$:.unshift(directory) unless $:.include?(directory) || $:.include?(File.expand_path(directory)) +dir = File.dirname(__FILE__) +$:.unshift(dir) unless $:.include?(dir) || $:.include?(File.expand_path(dir)) require 'httparty/request'
shortened directory to dir. why not...
jnunemaker_httparty
train
23c3f741ae15fb38aec416cef1bc2d7ff4e558de
diff --git a/pyamg/aggregation/smooth.py b/pyamg/aggregation/smooth.py index <HASH>..<HASH> 100644 --- a/pyamg/aggregation/smooth.py +++ b/pyamg/aggregation/smooth.py @@ -7,7 +7,7 @@ from pyamg.utils import approximate_spectral_radius, scale_rows __all__ = ['jacobi_prolongation_smoother', 'energy_prolongation_smoother'] -def jacobi_prolongation_smoother(S, T, omega=4.0/3.0): +def jacobi_prolongation_smoother(S, T, omega=4.0/3.0, degree=1): """Jacobi prolongation smoother Parameters @@ -36,7 +36,9 @@ def jacobi_prolongation_smoother(S, T, omega=4.0/3.0): D_inv_S = scale_rows(S, D_inv, copy=True) D_inv_S *= omega/approximate_spectral_radius(D_inv_S) - P = T - (D_inv_S*T) + P = T + for i in range(degree): + P = P - (D_inv_S*P) return P @@ -78,13 +80,15 @@ def Satisfy_Constraints(U, Sparsity_Pattern, B, BtBinv): num_block_rows = U.shape[0]/RowsPerBlock UB = U*B - B = ravel(asarray(B).reshape(-1,ColsPerBlock,B.shape[1])) UB = ravel(asarray(UB).reshape(-1,RowsPerBlock,UB.shape[1])) #Apply constraints - pyamg.multigridtools.satisfy_constraints_helper(RowsPerBlock,ColsPerBlock,num_blocks,num_block_rows,B,UB,ravel(BtBinv),U.indptr,U.indices,ravel(U.data)) + pyamg.multigridtools.satisfy_constraints_helper(RowsPerBlock, ColsPerBlock, + num_blocks, num_block_rows, + B, UB, ravel(BtBinv), + U.indptr, U.indices, ravel(U.data)) return U diff --git a/pyamg/multigridtools/setup.py b/pyamg/multigridtools/setup.py index <HASH>..<HASH> 100644 --- a/pyamg/multigridtools/setup.py +++ b/pyamg/multigridtools/setup.py @@ -1,12 +1,15 @@ #!/usr/bin/env python def configuration(parent_package='',top_path=None): - import numpy + from numpy.distutils.system_info import get_info, NotFoundError from numpy.distutils.misc_util import Configuration + lapack_opt = get_info('lapack_opt') config = Configuration('multigridtools', parent_package, top_path) - config.add_extension('_multigridtools', sources=['multigridtools_wrap.cxx']) + config.add_extension('_multigridtools', + sources=['multigridtools_wrap.cxx'], + extra_info = lapack_opt) return config
added degree parameter to jacobi prolongation smoother; added LAPACK opts to setup.py
pyamg_pyamg
train
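What the new parameter computes: the damped-Jacobi step is applied degree times, so P = (I - omega * Dinv * S)^degree * T, and degree=1 reproduces the old single-sweep behavior. A usage sketch in which S (the smoothing operator) and T (the tentative prolongator) are assumed to be assembled upstream:

    from pyamg.aggregation.smooth import jacobi_prolongation_smoother

    # Higher degrees apply the damped-Jacobi polynomial repeatedly,
    # yielding a smoother (but denser) prolongator P.
    P = jacobi_prolongation_smoother(S, T, omega=4.0/3.0, degree=2)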