Columns:
  hash: string, length 40
  diff: string, length 131 to 114k
  message: string, length 7 to 980
  project: string, length 5 to 67
  split: 1 class
1127ae24b365f026d7960b77b51464a816872fbe
diff --git a/app/assets/javascripts/rails_admin/ra.filtering-multiselect.js b/app/assets/javascripts/rails_admin/ra.filtering-multiselect.js index <HASH>..<HASH> 100644 --- a/app/assets/javascripts/rails_admin/ra.filtering-multiselect.js +++ b/app/assets/javascripts/rails_admin/ra.filtering-multiselect.js @@ -159,7 +159,7 @@ var i, html = ""; for (i in matches) { if (matches.hasOwnProperty(i) && !widget.selected(matches[i].id)) { - html += '<option value="' + matches[i].id + '">' + matches[i].label + '</option>'; + html += '<option value="' + matches[i].id + '" title="' + matches[i].label + '">' + matches[i].label + '</option>'; } } @@ -173,10 +173,10 @@ this.element.find("option").each(function(i, option) { if (option.selected) { widget._cache[option.value] = option.innerHTML; - $(option).clone().appendTo(widget.selection).attr("selected", false); + $(option).clone().appendTo(widget.selection).attr("selected", false).attr("title", $(option).text()); } else { widget._cache[option.value] = option.innerHTML; - $(option).clone().appendTo(widget.collection).attr("selected", false); + $(option).clone().appendTo(widget.collection).attr("selected", false).attr("title", $(option).text()); } }); },
Add title for longer options to multiselect widget
sferik_rails_admin
train
dcf100185775abad71606e93cb00663e5570b561
diff --git a/lib/axlsx.rb b/lib/axlsx.rb index <HASH>..<HASH> 100644 --- a/lib/axlsx.rb +++ b/lib/axlsx.rb @@ -87,8 +87,9 @@ module Axlsx # performs the increadible feat of changing snake_case to CamelCase # @param [String] s The snake case string to camelize # @return [String] - def self.camel(s="") - s = s.capitalize.gsub(/_(.)/){ $1.upcase } + def self.camel(s="", all_caps = true) + s = s.capitalize if all_caps + s.gsub(/_(.)/){ $1.upcase } end end
update camel to allow lower case first letter optionally
randym_axlsx
train
dc634bf44c76fe1b4c96c3825722c6ec8c75341b
diff --git a/EntityContext.js b/EntityContext.js index <HASH>..<HASH> 100644 --- a/EntityContext.js +++ b/EntityContext.js @@ -684,10 +684,23 @@ var noop = function(){}; var def = store.ec.definition(ent._type); //TODO make this smarter $.each(def.properties, function(){ - ent[this.name] = + var value = self[this.name] instanceof Function ? self[this.name]() : self[this.name]; + if(value instanceof String) + { + value = value + .replace(/\\n/g, "\\n") + .replace(/\\'/g, "\\'") + .replace(/\\"/g, '\"') + .replace(/\\&/g, "\\&") + .replace(/\\r/g, "\\r") + .replace(/\\t/g, "\\t") + .replace(/\\b/g, "\\b") + .replace(/\\f/g, "\\f"); + } + ent[this.name] = value; }); $.each(def.relationships, function(){ if(self[this.name])
Modified handling of special characters that were breaking JSON output in instances when the value of an Entity's field contained certain special characters.
DavidSouther_JEFRi
train
2e9e940e22c2231272e979bdd72a71764dec941f
diff --git a/activemodel/lib/active_model/attribute_assignment.rb b/activemodel/lib/active_model/attribute_assignment.rb index <HASH>..<HASH> 100644 --- a/activemodel/lib/active_model/attribute_assignment.rb +++ b/activemodel/lib/active_model/attribute_assignment.rb @@ -26,7 +26,7 @@ module ActiveModel # cat.name # => 'Gorby' # cat.status # => 'sleeping' def assign_attributes(new_attributes) - unless new_attributes.respond_to?(:stringify_keys) + unless new_attributes.respond_to?(:each_pair) raise ArgumentError, "When assigning attributes, you must pass a hash as an argument, #{new_attributes.class} passed." end return if new_attributes.empty? diff --git a/activemodel/test/cases/attribute_assignment_test.rb b/activemodel/test/cases/attribute_assignment_test.rb index <HASH>..<HASH> 100644 --- a/activemodel/test/cases/attribute_assignment_test.rb +++ b/activemodel/test/cases/attribute_assignment_test.rb @@ -49,8 +49,8 @@ class AttributeAssignmentTest < ActiveModel::TestCase @parameters end - def stringify_keys - dup + def each_pair(&block) + @parameters.each_pair(&block) end def dup diff --git a/activerecord/test/support/stubs/strong_parameters.rb b/activerecord/test/support/stubs/strong_parameters.rb index <HASH>..<HASH> 100644 --- a/activerecord/test/support/stubs/strong_parameters.rb +++ b/activerecord/test/support/stubs/strong_parameters.rb @@ -28,8 +28,8 @@ class ProtectedParams end alias to_unsafe_h to_h - def stringify_keys - dup + def each_pair(&block) + @parameters.each_pair(&block) end def dup
Change safe guard to check for each_pair instead of stringify_keys
rails_rails
train
160e2f436f9e9d62ef38ac8f667ce97745711aec
diff --git a/src/Interactable.js b/src/Interactable.js index <HASH>..<HASH> 100644 --- a/src/Interactable.js +++ b/src/Interactable.js @@ -643,8 +643,8 @@ Interactable.prototype = { } else { // remove delegated events - for (var type in scope.delegatedEvents) { - var delegated = scope.delegatedEvents[type]; + for (var type in events.delegatedEvents) { + var delegated = events.delegatedEvents[type]; for (var i = 0; i < delegated.selectors.length; i++) { if (delegated.selectors[i] === this.selector @@ -656,7 +656,7 @@ Interactable.prototype = { // remove the arrays if they are empty if (!delegated.selectors.length) { - scope.delegatedEvents[type] = null; + delegated[type] = null; } } diff --git a/src/utils/events.js b/src/utils/events.js index <HASH>..<HASH> 100644 --- a/src/utils/events.js +++ b/src/utils/events.js @@ -332,6 +332,7 @@ module.exports = { delegateListener: delegateListener, delegateUseCapture: delegateUseCapture, + delegatedEvents: delegatedEvents, documents: documents, useAttachEvent: useAttachEvent,
Fix Interactable#unset delegated event removal
taye_interact.js
train
dc198b57c3ce19e820a05880076bf9eded67afa7
diff --git a/src/Heyday/QueryBuilder/QueryBuilder.php b/src/Heyday/QueryBuilder/QueryBuilder.php index <HASH>..<HASH> 100644 --- a/src/Heyday/QueryBuilder/QueryBuilder.php +++ b/src/Heyday/QueryBuilder/QueryBuilder.php @@ -94,7 +94,7 @@ class QueryBuilder implements QueryBuilderInterface $results[] = $do; } } - $this->listCache = count($results) ? new \ArrayList($results) : null; + $this->listCache = new \ArrayList($results); } return $this->listCache;
Don't return null as it makes that api inconsistent
heyday_silverstripe-querybuilder
train
bae74a43406038f97ed3f2e3a243849ccb4f7a07
diff --git a/johnny/cache.py b/johnny/cache.py index <HASH>..<HASH> 100644 --- a/johnny/cache.py +++ b/johnny/cache.py @@ -274,7 +274,13 @@ class QueryCacheBackend11(QueryCacheBackend): val = self.cache_backend.get(key, None) if val is not None: + signals.qc_hit.send(sender=cls, tables=cls.tables, + query=(sql, params, cls.ordering_aliases), + size=len(val), key=key) return val + signals.qc_miss.send(sender=cls, tables=cls.tables, + query=(sql, params, cls.ordering_aliases), + key=key) # we didn't find the value in the cache, so execute the query result = original(cls, result_type) diff --git a/johnny/signals.py b/johnny/signals.py index <HASH>..<HASH> 100644 --- a/johnny/signals.py +++ b/johnny/signals.py @@ -5,5 +5,5 @@ from django.dispatch import Signal -qc_hit = Signal(providing_args=['tables', 'query', 'size']) -qc_miss = Signal(providing_args=['tables', 'query']) +qc_hit = Signal(providing_args=['key', 'tables', 'query', 'size']) +qc_miss = Signal(providing_args=['key', 'tables', 'query']) diff --git a/johnny/tests/cache.py b/johnny/tests/cache.py index <HASH>..<HASH> 100644 --- a/johnny/tests/cache.py +++ b/johnny/tests/cache.py @@ -82,6 +82,24 @@ class SingleModelTest(QueryCacheBase): self.failUnless((first[0], first[1] == second[1], second[0])) self.failUnless(len(connection.queries) == 2) + def test_signals(self): + """Test that the signals we say we're sending are being sent.""" + from testapp.models import Genre + from johnny.signals import qc_hit, qc_miss + connection.queries = [] + misses = [] + hits = [] + def qc_hit_listener(sender, **kwargs): + hits.append(kwargs['key']) + def qc_miss_listener(*args, **kwargs): + misses.append(kwargs['key']) + qc_hit.connect(qc_hit_listener) + qc_miss.connect(qc_miss_listener) + first = list(Genre.objects.filter(title__startswith='A').order_by('slug')) + second = list(Genre.objects.filter(title__startswith='A').order_by('slug')) + self.failUnless(len(misses) == len(hits) == 1) + + class MultiModelTest(QueryCacheBase): fixtures = base.johnny_fixtures
add signal sending to QueryCacheBackend<I> and a test to make sure the signals are working
jmoiron_johnny-cache
train
18664833b2ae9ae6de386ccce0ce306966b68af4
diff --git a/lib/puppet/indirector/file_metadata/http.rb b/lib/puppet/indirector/file_metadata/http.rb index <HASH>..<HASH> 100644 --- a/lib/puppet/indirector/file_metadata/http.rb +++ b/lib/puppet/indirector/file_metadata/http.rb @@ -20,9 +20,12 @@ class Puppet::Indirector::FileMetadata::Http < Puppet::Indirector::GenericHttp when 403 # AMZ presigned URL? if head.each_header.find { |k,_| k =~ /^x-amz-/i } - get = client.get(uri, headers: {'Range' => 'bytes=0-0'}, options: {include_system_store: true}) + get = partial_get(client, uri) return create_httpmetadata(get, checksum_type) if get.success? end + when 405 + get = partial_get(client, uri) + return create_httpmetadata(get, checksum_type) if get.success? end nil @@ -34,6 +37,10 @@ class Puppet::Indirector::FileMetadata::Http < Puppet::Indirector::GenericHttp private + def partial_get(client, uri) + client.get(uri, headers: {'Range' => 'bytes=0-0'}, options: {include_system_store: true}) + end + def create_httpmetadata(http_request, checksum_type) metadata = Puppet::FileServing::HttpMetadata.new(http_request) metadata.checksum_type = checksum_type if checksum_type diff --git a/spec/unit/indirector/file_metadata/http_spec.rb b/spec/unit/indirector/file_metadata/http_spec.rb index <HASH>..<HASH> 100644 --- a/spec/unit/indirector/file_metadata/http_spec.rb +++ b/spec/unit/indirector/file_metadata/http_spec.rb @@ -156,6 +156,15 @@ describe Puppet::Indirector::FileMetadata::Http do model.indirection.find(key) end + it "falls back to partial GET if HEAD is not allowed" do + stub_request(:head, key) + .to_return(status: 405) + stub_request(:get, key) + .to_return(status: 200, headers: {'Range' => 'bytes=0-0'}) + + model.indirection.find(key) + end + context "AWS" do it "falls back to a partial GET" do stub_request(:head, key)
(PUP-<I>) Fallback to partial GET if HEAD method is not allowed If HEAD method is not allowed, then fall back to a partial GET request.
puppetlabs_puppet
train
9aae2a7a210efddeb6cfad06a4ae9fc7560118d0
diff --git a/src/main/java/io/github/classgraph/TypeVariableSignature.java b/src/main/java/io/github/classgraph/TypeVariableSignature.java index <HASH>..<HASH> 100644 --- a/src/main/java/io/github/classgraph/TypeVariableSignature.java +++ b/src/main/java/io/github/classgraph/TypeVariableSignature.java @@ -101,7 +101,12 @@ public final class TypeVariableSignature extends ClassRefOrTypeVariableSignature if (containingClassInfo == null) { throw new IllegalArgumentException("Could not find ClassInfo object for " + definingClassName); } - final ClassTypeSignature containingClassSignature = containingClassInfo.getTypeSignature(); + ClassTypeSignature containingClassSignature = null; + try { + containingClassSignature = containingClassInfo.getTypeSignature(); + } catch (Exception e) { + // Ignore + } if (containingClassSignature != null && containingClassSignature.typeParameters != null && !containingClassSignature.typeParameters.isEmpty()) { for (final TypeParameter typeParameter : containingClassSignature.typeParameters) {
Make type resolution more robust to erroneous type signatures (#<I>)
classgraph_classgraph
train
e422143cddc971c1b9723877674a4f6cbeb6817d
diff --git a/log.go b/log.go index <HASH>..<HASH> 100644 --- a/log.go +++ b/log.go @@ -9,7 +9,7 @@ import ( "sync" "time" - "github.com/Sirupsen/logrus" + "github.com/lytics/logrus" ) const (
Switching to use github.com/lytics/logrus fork This way the code is actually open source and can be pulled from our own forked repo.
araddon_gou
train
3004bb3664d5ac21eb0e037fb4d18db2155ae91b
diff --git a/lib/puppet/transaction/report.rb b/lib/puppet/transaction/report.rb index <HASH>..<HASH> 100644 --- a/lib/puppet/transaction/report.rb +++ b/lib/puppet/transaction/report.rb @@ -390,11 +390,11 @@ class Puppet::Transaction::Report end def self.supported_formats - [:pson, :yaml] + [:json, :pson, :yaml] end def self.default_format - :pson + Puppet[:preferred_serialization_format].to_sym end private diff --git a/spec/unit/transaction/report_spec.rb b/spec/unit/transaction/report_spec.rb index <HASH>..<HASH> 100644 --- a/spec/unit/transaction/report_spec.rb +++ b/spec/unit/transaction/report_spec.rb @@ -480,12 +480,12 @@ describe Puppet::Transaction::Report do end end - it "defaults to serializing to pson" do - expect(Puppet::Transaction::Report.default_format).to eq(:pson) + it "defaults to serializing to json" do + expect(Puppet::Transaction::Report.default_format).to eq(:json) end - it "supports both yaml and pson" do - expect(Puppet::Transaction::Report.supported_formats).to eq([:pson, :yaml]) + it "supports both json, pson and yaml" do + expect(Puppet::Transaction::Report.supported_formats).to eq([:json, :pson, :yaml]) end it "can make a round trip through pson" do @@ -496,6 +496,14 @@ describe Puppet::Transaction::Report do expect_equivalent_reports(tripped, report) end + it "can make a round trip through json" do + report = generate_report + + tripped = Puppet::Transaction::Report.convert_from(:json, report.render) + + expect_equivalent_reports(tripped, report) + end + it "generates pson which validates against the report schema" do report = generate_report expect(report.render).to validate_against('api/schemas/report.json')
(PUP-<I>) Submit reports in JSON by default Previously, reports were only sent in PSON instead of JSON. Add json to the list of supported formats and use Puppet[:preferred_serialization_format] as the default, which is json unless downgraded to pson.
puppetlabs_puppet
train
9fe611498ccc837e18c227a4276c14905f007953
diff --git a/airflow/models.py b/airflow/models.py index <HASH>..<HASH> 100644 --- a/airflow/models.py +++ b/airflow/models.py @@ -816,7 +816,8 @@ class TaskInstance(Base): .first() ) if not pool: - return False + raise ValueError('Task specified a pool ({}) but the pool ' + 'doesn\'t exist!').format(self.task.pool) open_slots = pool.open_slots(session=session) return open_slots <= 0
Raise an error if a pool doesn't exist
apache_airflow
train
c488d81a025ce9699887bd5c96bb7d3cb9023db2
diff --git a/src/app-constants.js b/src/app-constants.js index <HASH>..<HASH> 100644 --- a/src/app-constants.js +++ b/src/app-constants.js @@ -40,7 +40,7 @@ var consts = { size: 200 }, { label: 'large', - size: 1200 + size: 1000 }] };
Decrease default image variation size "large" to <I>px
pagespace_pagespace
train
0c163dae9ff5217054ff473c2b3d18552785453c
diff --git a/src/editor/CodeHintManager.js b/src/editor/CodeHintManager.js index <HASH>..<HASH> 100644 --- a/src/editor/CodeHintManager.js +++ b/src/editor/CodeHintManager.js @@ -389,7 +389,6 @@ define(function (require, exports, module) { // Pass to the hint list, if it's open if (hintList && hintList.isOpen()) { - shouldShowHintsOnChange = false; hintList.handleKeyEvent(editor, event); } }
Don't turn off shouldShowHintsOnChange too early
adobe_brackets
train
2fcb216d96edb0eeb38d743e8b3eac86f4585d01
diff --git a/example/client.js b/example/client.js index <HASH>..<HASH> 100644 --- a/example/client.js +++ b/example/client.js @@ -7,6 +7,6 @@ http.createServer(function (req, res) { var client = xmlrpc.createClient() console.log(client) -client.call('dosomething', ['param1', 2, 2.2, true, false, ['a', 'b', 'c']], function () { }) +client.call('dosomething', ['param1', 2, 2.2, true, false, ['a', 'b', 'c'], { a: 'objparam1', b2: 'objectparam2' }, null], function () { }) console.log('Started the XMLRPC test client') diff --git a/lib/client.js b/lib/client.js index <HASH>..<HASH> 100644 --- a/lib/client.js +++ b/lib/client.js @@ -56,8 +56,13 @@ Client.prototype.call = function(method, params, callback) { case 'object': + // Uses XML-RPC's nil + if (param == null) { + paramXml.ele('nil') + } + // Uses XML-RPC's date - if (param.constructor.name == 'Date') { + else if (param.constructor.name == 'Date') { //value.node('dateTime.iso8601', H.iso8601Encode(param)); } @@ -73,17 +78,16 @@ Client.prototype.call = function(method, params, callback) { // Uses XML-RPC's struct else if (param.constructor.name == 'Object') { - /* - var data = value.node('struct'); - var member; - for(var key in param) { + var arrayXml = paramXml.ele('struct') + + for (var key in param) { if (param.hasOwnProperty(key)) { - member = data.node('member'); - member.node('name', key) - _serialize(param[key], member); + var memberXml = arrayXml.ele('member') + memberXml.ele('name') + .txt(key) + serializeParam(param[key], memberXml) } } - */ } break; }
Adds support for XML-RPC's struct and nil to message calls.
baalexander_node-xmlrpc
train
0a60d623a31e57ba5371687eb4e318db426ef00e
diff --git a/jsonschema/cli.py b/jsonschema/cli.py index <HASH>..<HASH> 100644 --- a/jsonschema/cli.py +++ b/jsonschema/cli.py @@ -55,7 +55,7 @@ class CliOutputWriter(): file_name=file_name, exception=exception, ) - else: + else: # pragma: no cover raise ValueError( "Output mode '{}' is unknown by this function" .format(self.output_format) @@ -73,7 +73,7 @@ class CliOutputWriter(): object_name=object_name, error=error_obj, ) - else: + else: # pragma: no cover raise ValueError( "Output mode '{}' is unknown by this function" .format(self.output_format) @@ -88,7 +88,7 @@ class CliOutputWriter(): elif self.output_format == "plain": # Nothing to print in plain mode, only errors are wanted. msg = "" - else: + else: # pragma: no cover raise ValueError( "Output mode '{}' is unknown by this function" .format(self.output_format)
[CLI] Add pragma no cover for some safenet code
Julian_jsonschema
train
d10865dd54160f5c4d49c4cc13d44aa7732fce65
diff --git a/alot/db/message.py b/alot/db/message.py index <HASH>..<HASH> 100644 --- a/alot/db/message.py +++ b/alot/db/message.py @@ -232,12 +232,12 @@ class Message(object): content = part.get_payload(decode=True) ct = helper.guess_mimetype(content) - if cd.startswith('attachment'): - if ct not in ['application/pgp-encrypted', + if cd.lower().startswith('attachment'): + if ct.lower() not in ['application/pgp-encrypted', 'application/pgp-signature']: self._attachments.append(Attachment(part)) - elif cd.startswith('inline'): - if filename is not None and ct != 'application/pgp': + elif cd.lower().startswith('inline'): + if filename is not None and ct.lower() != 'application/pgp': self._attachments.append(Attachment(part)) return self._attachments
Dirty fix for case-(in)sensitive MIME matching. Simply cast every string to lower() when matching MIME media type and subtype. Matching is always case-insensitive as per RFC<I>, <I>.
pazz_alot
train
5aaa503a4d0ac0b8caad244f0ae4b198ac9a0a80
diff --git a/gui/src/main/java/org/jboss/as/console/client/domain/hosts/ColumnHostView.java b/gui/src/main/java/org/jboss/as/console/client/domain/hosts/ColumnHostView.java index <HASH>..<HASH> 100644 --- a/gui/src/main/java/org/jboss/as/console/client/domain/hosts/ColumnHostView.java +++ b/gui/src/main/java/org/jboss/as/console/client/domain/hosts/ColumnHostView.java @@ -291,9 +291,12 @@ public class ColumnHostView extends SuspendableViewImpl final String selectedHost = hosts.getSelectedItem(); columnManager.updateActiveSelection(hostColWidget); - presenter.getPlaceManager().revealRelativePlace( - new PlaceRequest(NameTokens.DomainRuntimePresenter) - ); + if(!presenter.getPlaceManager().getCurrentPlaceRequest().matchesNameToken(NameTokens.DomainRuntimePresenter)) + { + presenter.getPlaceManager().revealRelativePlace( + new PlaceRequest(NameTokens.DomainRuntimePresenter) + ); + } Scheduler.get().scheduleDeferred( new Scheduler.ScheduledCommand() {
Prevent chaining of revealRelative() calls in domain runtime
hal_core
train
ad557613916a6e5b0a1f8c4cce96dac4bdf9fe7b
diff --git a/cmd/systemdutil/systemdutil.go b/cmd/systemdutil/systemdutil.go index <HASH>..<HASH> 100644 --- a/cmd/systemdutil/systemdutil.go +++ b/cmd/systemdutil/systemdutil.go @@ -21,16 +21,20 @@ const ( ) // ExecuteCommand executes a command in a cgroup and programs Trireme -func ExecuteCommand(arguments map[string]interface{}) { +func ExecuteCommand(arguments map[string]interface{}) error { stderrlogger := log.New(os.Stderr, "", 0) if arguments["<cgroup>"] != nil && len(arguments["<cgroup>"].(string)) > 0 { exitingCgroup := arguments["<cgroup>"].(string) + if err := HandleCgroupStop(exitingCgroup); err != nil { - stderrlogger.Fatalf("Cannot connect to policy process %s. Resources not deleted\n", err) + err = fmt.Errorf("Cannot connect to policy process %s. Resources not deleted\n", err) + stderrlogger.Print(err) + return err } - os.Exit(0) + + return nil } command := "" @@ -42,11 +46,11 @@ func ExecuteCommand(arguments map[string]interface{}) { command = arguments["<command>"].(string) - if args, ok := arguments["--metadata"]; ok && args != nil { + if args, ok := arguments["--label"]; ok && args != nil { metadata = args.([]string) } - if args, ok := arguments["--servicename"]; ok && args != nil { + if args, ok := arguments["--service-name"]; ok && args != nil { servicename = args.(string) } @@ -56,14 +60,20 @@ func ExecuteCommand(arguments map[string]interface{}) { } metadatamap, err := createMetadata(servicename, metadata) + if err != nil { - stderrlogger.Fatalf("Invalid metadata: %s\n ", err) + err = fmt.Errorf("Invalid metadata: %s", err) + stderrlogger.Print(err) + return err } // Make RPC call client, err := net.Dial("unix", rpcmonitor.DefaultRPCAddress) + if err != nil { - stderrlogger.Fatalf("Cannot connect to policy process %s", err) + err = fmt.Errorf("Cannot connect to policy process %s", err) + stderrlogger.Print(err) + return err } //This is added since the release_notification comes in this format @@ -78,22 +88,24 @@ func ExecuteCommand(arguments map[string]interface{}) { } response := &rpcmonitor.RPCResponse{} - rpcClient := jsonrpc.NewClient(client) - err = rpcClient.Call(remoteMethodCall, request, response) if err != nil { - stderrlogger.Fatalf("Policy Server call failed %s", err.Error()) - os.Exit(-1) + err = fmt.Errorf("Policy Server call failed %s", err.Error()) + stderrlogger.Print(err) + return err } if len(response.Error) > 0 { - stderrlogger.Fatalf("Your policy does not allow you to run this command") + err = fmt.Errorf("Your policy does not allow you to run this command") + stderrlogger.Print(err) + return err } syscall.Exec(command, params, os.Environ()) + return nil } // createMetadata extracts the relevant metadata
Fixed: ExecuteCommand now returns an error if it fails to launch the process. The keys metadata and servicename have been replaced by service-name and label (#<I>)
aporeto-inc_trireme-lib
train
de1d75a6f05075952473c4f377fd7a77abb2bb5f
diff --git a/MAVProxy/mavproxy.py b/MAVProxy/mavproxy.py index <HASH>..<HASH> 100755 --- a/MAVProxy/mavproxy.py +++ b/MAVProxy/mavproxy.py @@ -796,7 +796,7 @@ if __name__ == '__main__': parser.add_option("--source-component", dest='SOURCE_COMPONENT', type='int', default=0, help='MAVLink source component for this GCS') parser.add_option("--target-system", dest='TARGET_SYSTEM', type='int', - default=-1, help='MAVLink target master system') + default=0, help='MAVLink target master system') parser.add_option("--target-component", dest='TARGET_COMPONENT', type='int', default=0, help='MAVLink target master component') parser.add_option("--logfile", dest="logfile", help="MAVLink master logfile",
link: default --target-system to 0
ArduPilot_MAVProxy
train
58a3c41b2401ef71de2b58a4406bbe7b1e3dd0f8
diff --git a/src/org/opencms/main/I_CmsEventListener.java b/src/org/opencms/main/I_CmsEventListener.java index <HASH>..<HASH> 100644 --- a/src/org/opencms/main/I_CmsEventListener.java +++ b/src/org/opencms/main/I_CmsEventListener.java @@ -1,7 +1,7 @@ /* * File : $Source: /alkacon/cvs/opencms/src/org/opencms/main/I_CmsEventListener.java,v $ - * Date : $Date: 2009/11/19 07:58:23 $ - * Version: $Revision: 1.4 $ + * Date : $Date: 2009/11/23 10:04:45 $ + * Version: $Revision: 1.5 $ * * This library is part of OpenCms - * the Open Source Content Management System @@ -60,7 +60,7 @@ package org.opencms.main; * @author Alexander Kandzior * @author Ruediger Kurz * - * @version $Revision: 1.4 $ + * @version $Revision: 1.5 $ * * @since 6.0.0 * @@ -369,7 +369,7 @@ public interface I_CmsEventListener { String KEY_GROUP_ID = "groupId"; /** Key name for passing a group name. */ - String KEY_GROUP_NAME = "groupname"; + String KEY_GROUP_NAME = "groupName"; /** Key name for passing a comma separated list of search index names in the data map. */ String KEY_INDEX_NAMES = "indexNames"; @@ -378,7 +378,7 @@ public interface I_CmsEventListener { String KEY_OU_ID = "ouId"; /** Key name for passing a group name. */ - String KEY_OU_NAME = "ouname"; + String KEY_OU_NAME = "ouName"; /** Key name for passing a project id in the data map. */ String KEY_PROJECTID = "projectId"; @@ -399,17 +399,15 @@ public interface I_CmsEventListener { String KEY_RESOURCES = "resources"; /** Key name for passing a user action. */ - String KEY_USER_ACTION = "useraction"; + String KEY_USER_ACTION = "userAction"; /** Key name for passing an user ID. */ String KEY_USER_ID = "userId"; /** Key name for passing a user name. */ - String KEY_USER_NAME = "username"; + String KEY_USER_NAME = "userName"; - /** - * Marker for "all events".<p> - */ + /** Marker for "all events". */ Integer LISTENERS_FOR_ALL_EVENTS = new Integer(-1); /** Value for the "group modified" action. */
- changed KEYS for event handling
alkacon_opencms-core
train
d3a38bc00813e48d3462a27144c5406894dbf96a
diff --git a/platform/android/Rhodes/src/com/rhomobile/rhodes/camera/ImageCapture.java b/platform/android/Rhodes/src/com/rhomobile/rhodes/camera/ImageCapture.java index <HASH>..<HASH> 100644 --- a/platform/android/Rhodes/src/com/rhomobile/rhodes/camera/ImageCapture.java +++ b/platform/android/Rhodes/src/com/rhomobile/rhodes/camera/ImageCapture.java @@ -44,6 +44,8 @@ import android.view.Window; import android.view.WindowManager; import android.view.View.OnClickListener; import android.widget.ImageButton; +import android.hardware.SensorManager; +import android.view.OrientationEventListener; public class ImageCapture extends BaseActivity implements SurfaceHolder.Callback, OnClickListener { @@ -58,7 +60,9 @@ public class ImageCapture extends BaseActivity implements SurfaceHolder.Callback private SurfaceView surfaceView; private SurfaceHolder surfaceHolder; private ImageButton cameraButton; - + private OrientationEventListener myOrientationEventListener; + private int m_rotation = 0; + // private Uri target = Media.EXTERNAL_CONTENT_URI; @Override @@ -80,6 +84,41 @@ public class ImageCapture extends BaseActivity implements SurfaceHolder.Callback cameraButton = (ImageButton)findViewById(AndroidR.id.cameraButton); cameraButton.setOnClickListener(this); + + myOrientationEventListener = new OrientationEventListener(this, SensorManager.SENSOR_DELAY_NORMAL) + { + @Override + public void onOrientationChanged(int orientation) + { + //Logger.D(TAG, "onOrientationChanged: " + orientation); + if (orientation == ORIENTATION_UNKNOWN) + return; + + m_rotation = orientation; + } + }; + + if (myOrientationEventListener.canDetectOrientation()) + { + Logger.I(TAG, "myOrientationEventListener.enable()"); + myOrientationEventListener.enable(); + } + else + { + Logger.I(TAG, "cannot detect!"); + myOrientationEventListener = null; + } + } + + @Override + public void finish() + { + Logger.D(TAG, "finish"); + if ( myOrientationEventListener != null ) + myOrientationEventListener.disable(); + + myOrientationEventListener = null; + super.finish(); } @Override @@ -221,6 +260,17 @@ public class ImageCapture extends BaseActivity implements SurfaceHolder.Callback OutputStream osCommon = getContentResolver().openOutputStream(uri); iccb = new ImageCaptureCallback(this, callbackUrl, osCommon, dir + "/" + filename + ".jpg"); + + Camera.Parameters parameters = camera.getParameters(); + //int nOrient = RhodesService.getInstance().getScreenOrientation(); + int nCamRotate = 90; + if ( (m_rotation > 45 && m_rotation < 135) || (m_rotation > 225 && m_rotation < 315) ) + nCamRotate = 0; + + Logger.D(TAG, "Camera rotation: " + nCamRotate ); + parameters.set("rotation", nCamRotate ); + camera.setParameters(parameters); + } catch (Exception ex) { Logger.E(TAG, ex.getMessage()); }
fixed issue with rotated camera image on android
rhomobile_rhodes
train
cdbc9a1c4df92c44be1e9fae4d5be6f87828ce86
diff --git a/lib/simple_crud/base_controller.rb b/lib/simple_crud/base_controller.rb index <HASH>..<HASH> 100644 --- a/lib/simple_crud/base_controller.rb +++ b/lib/simple_crud/base_controller.rb @@ -26,7 +26,7 @@ module SimpleCrud # GET /resources # GET /resources.json def index - resources_set model_klass.all + resources_set resource_klass.all respond_with resources_get end @@ -39,7 +39,7 @@ module SimpleCrud # GET /resources/new # GET /resources/new.json def new - resource_set model_klass.new + resource_set resource_klass.new respond_with resource_get end @@ -50,15 +50,15 @@ module SimpleCrud # POST /resources # POST /resources.json def create - resource_set model_klass.new(model_params) + resource_set resource_klass.new(resource_params) respond_to do |wants| result = resource_get.save call_hook :after_save, result if result - flash[:notice] = t 'messages.record_created', resource: t("models.#{model_name}") + flash[:notice] = t 'messages.record_created', resource: t("resources.#{resource_name}") wants.html { redirect_to(resource_get) } - wants.json { render :json => resource_get, :status => :created, :location => model } + wants.json { render :json => resource_get, :status => :created, :location => resource } else wants.html { render :action => "new" } wants.json { render :json => resource_get.errors, :status => :unprocessable_entity } @@ -70,10 +70,10 @@ module SimpleCrud # PUT /resources/1.json def update respond_to do |wants| - result = resource_get.update_attributes(model_params) + result = resource_get.update_attributes(resource_params) call_hook :after_update_attributes, result if result - flash[:notice] = t 'messages.record_updated', resource: t("models.#{model_name}") + flash[:notice] = t 'messages.record_updated', resource: t("resources.#{resource_name}") wants.html { redirect_to(resource_get) } wants.json { head :ok } else @@ -88,7 +88,7 @@ module SimpleCrud def destroy result = resource_get.destroy call_hook :after_destroy, result - flash[:notice] = t 'messages.record_destroyed', resource: t("models.#{model_name}") + flash[:notice] = t 'messages.record_destroyed', resource: t("resources.#{resource_name}") respond_to do |wants| wants.html { redirect_to(resources_path) } @@ -99,7 +99,7 @@ module SimpleCrud private def find_resource - resource_set model_klass.find(params[:id]) + resource_set resource_klass.find(params[:id]) end def call_hook(method, *args)
Renamed model to resource in base controller
dsaenztagarro_simplecrud
train
bcb61fc2c4b55e5e221a4baae16b72b0e25d1329
diff --git a/allantools/allantools.py b/allantools/allantools.py index <HASH>..<HASH> 100644 --- a/allantools/allantools.py +++ b/allantools/allantools.py @@ -341,20 +341,6 @@ def adev_phase_calc(data, rate, mj, stride): return dev, deverr, n - -def remove_small_ns(taus, devs, deverrs, ns): - """ if n is small (==1), reject the result """ - - ns_big_enough = ns > 1 - - o_taus = taus[ns_big_enough] - o_dev = devs[ns_big_enough] - o_err = deverrs[ns_big_enough] - o_n = ns[ns_big_enough] - - return o_taus, o_dev, o_err, o_n - - def oadev_phase(data, rate, taus): """ overlapping Allan deviation of phase data @@ -1003,33 +989,6 @@ def tau_m(data, rate, taus, v=False): taus2 = m / float(rate) return data, m, taus2 -def tau_m(data, rate, taus, v=False): - """ pre-processing of the tau-list given by the user """ - data, taus = np.array(data), np.array(taus) - - if rate == 0: - raise RuntimeError("Warning! rate==0") - rate = float(rate) - m = [] - - taus_valid1 = taus < (1 / float(rate)) * float(len(data)) - taus_valid2 = taus > 0 - taus_valid = taus_valid1 & taus_valid2 - m = np.floor(taus[taus_valid] * rate) - m = m[m != 0] # m is tau in units of datapoints - m = np.unique(m) # remove duplicates and sort - - if v: - print("tau_m: ", m) - if len(m) == 0: - - print("Warning: sanity-check on tau failed!") - print(" len(data)=", len(data), " rate=", rate, "taus= ", taus) - - taus2 = m / float(rate) - return data, m, taus2 - - def remove_small_ns(*args): if len(args) == 4: return remove_small_ns_4(*args)
Deleted 2 duplicated functions I noticed that some functions were defined twice in the source: - tau_m only differs by some commentary lines, I kept the one with the most detailed comments - remove_small_ns is different, I kept the one which accepts 2 possible sets of parameters.
aewallin_allantools
train
9a3ee0c56148ccd84ac5d315f017ce4267f0feac
diff --git a/src/client/pkg/grpcutil/stream.go b/src/client/pkg/grpcutil/stream.go index <HASH>..<HASH> 100644 --- a/src/client/pkg/grpcutil/stream.go +++ b/src/client/pkg/grpcutil/stream.go @@ -103,11 +103,24 @@ func (s *streamingBytesWriter) Write(p []byte) (int, error) { return len(p), nil } +// Go's io.CopyBuffer has an annoying optimization wherein if the reader +// has the WriteTo function defined, it doesn't actually use the given +// buffer. As a result, we might write a large chunk to the gRPC streaming +// server even though we intend to use a small buffer. +// Therefore we wrap readers in this wrapper so that only Read is defined. +type readerWrapper struct { + reader io.Reader +} + +func (r readerWrapper) Read(p []byte) (int, error) { + return r.reader.Read(p) +} + // WriteToStreamingBytesServer writes the data from the io.Reader to the StreamingBytesServer. func WriteToStreamingBytesServer(reader io.Reader, streamingBytesServer StreamingBytesServer) error { buf := GetBuffer() defer PutBuffer(buf) - _, err := copyBuffer(NewStreamingBytesWriter(streamingBytesServer), reader, buf) + _, err := io.CopyBuffer(NewStreamingBytesWriter(streamingBytesServer), readerWrapper{reader}, buf) return err } @@ -123,44 +136,3 @@ func WriteFromStreamingBytesClient(streamingBytesClient StreamingBytesClient, wr } return nil } - -// copyBuffer is the same as io.CopyBuffer except that it always uses the -// given buffer. In contract, io.CopyBuffer does not use the given buffer -// if the reader has WriteTo defined. -func copyBuffer(dst io.Writer, src io.Reader, buf []byte) (written int64, err error) { - if buf != nil && len(buf) == 0 { - panic("empty buffer in io.CopyBuffer") - } - - if buf == nil { - buf = make([]byte, 32*1024) - } - - for { - nr, er := src.Read(buf) - if nr > 0 { - nw, ew := dst.Write(buf[0:nr]) - if nw > 0 { - written += int64(nw) - } - - if ew != nil { - err = ew - break - } - - if nr != nw { - err = io.ErrShortWrite - break - } - } - - if er != nil { - if er != io.EOF { - err = er - } - break - } - } - return written, err -}
Rewrite the code that gets us around io.CopyBuffer's undesirable optimization
pachyderm_pachyderm
train
b21636953c602f969adde2c3bd342c4b17e91968
diff --git a/karma.conf.js b/karma.conf.js index <HASH>..<HASH> 100644 --- a/karma.conf.js +++ b/karma.conf.js @@ -14,7 +14,6 @@ module.exports = function(config) { // list of files / patterns to load in the browser files: [ 'ie8-shims.js', - 'src/**/*.js', 'test/**/*.js' ], @@ -26,7 +25,6 @@ module.exports = function(config) { // available preprocessors: https://npmjs.org/browse/keyword/karma-preprocessor preprocessors: { 'ie8-shims.js': ['browserify'], - 'src/**/*.js': ['browserify'], 'test/**/*.js': ['browserify'] },
Don't load unnecessary source code in Karma
BladeRunnerJS_topiarist
train
b0eb39a19e4277672e653587dcde0de1f7928318
diff --git a/addon/components/mapbox-gl-on.js b/addon/components/mapbox-gl-on.js index <HASH>..<HASH> 100644 --- a/addon/components/mapbox-gl-on.js +++ b/addon/components/mapbox-gl-on.js @@ -49,7 +49,7 @@ const MapboxGlOnComponent = Component.extend({ const event = get(this, 'event'); assert(`mapbox-gl-event requires event to be a string, was ${event}`, typeof event === 'string'); - return event.toLowerCase(); + return event; }), _layerId: computed('layerId', '_action', function () { diff --git a/tests/integration/components/mapbox-gl-on-test.js b/tests/integration/components/mapbox-gl-on-test.js index <HASH>..<HASH> 100644 --- a/tests/integration/components/mapbox-gl-on-test.js +++ b/tests/integration/components/mapbox-gl-on-test.js @@ -19,7 +19,7 @@ module('Integration | Component | mapbox gl on', function(hooks) { this.set('eventSource', { on(eventName, cb) { - assert.equal(eventName, 'onzoom', 'subscribes to lowercased event name'); + assert.equal(eventName, 'onzoom', 'subscribes to event name'); run.next(cb, event); }, @@ -32,7 +32,7 @@ module('Integration | Component | mapbox gl on', function(hooks) { done(); }; - await render(hbs`{{mapbox-gl-on eventSource=eventSource event='onZoom' action=(action 'onEvent')}}`); + await render(hbs`{{mapbox-gl-on eventSource=eventSource event='onzoom' action=(action 'onEvent')}}`); }); test('it works with positionalParams', async function(assert) { @@ -43,13 +43,13 @@ module('Integration | Component | mapbox gl on', function(hooks) { this.set('eventSource', { on(eventName, cb) { - assert.equal(eventName, 'onzoom', 'subscribes to lowercased event name'); + assert.equal(eventName, 'onzoom', 'subscribes to event name'); run.next(cb, event); }, off(eventName) { - assert.equal(eventName, 'onzoom', 'unsubscribes to lowercased event name'); + assert.equal(eventName, 'onzoom', 'unsubscribes to event name'); } }); @@ -58,7 +58,7 @@ module('Integration | Component | mapbox gl on', function(hooks) { done(); }; - await render(hbs`{{mapbox-gl-on 'onZoom' (action 'onEvent') eventSource=eventSource}}`); + await render(hbs`{{mapbox-gl-on 'onzoom' (action 'onEvent') eventSource=eventSource}}`); }); test('it takes a layerId to target', async function(assert) { @@ -69,14 +69,14 @@ module('Integration | Component | mapbox gl on', function(hooks) { this.set('eventSource', { on(eventName, source, cb) { - assert.equal(eventName, 'onzoom', 'subscribes to lowercased event name'); + assert.equal(eventName, 'onzoom', 'subscribes to event name'); assert.equal(source, 'layer1', 'passes on layer'); run.next(cb, event); }, off(eventName, source) { - assert.equal(eventName, 'onzoom', 'unsubscribes to lowercased event name'); + assert.equal(eventName, 'onzoom', 'unsubscribes to event name'); assert.equal(source, 'layer1', 'passes on layer'); } }); @@ -86,6 +86,6 @@ module('Integration | Component | mapbox gl on', function(hooks) { done(); }; - await render(hbs`{{mapbox-gl-on 'onZoom' 'layer1' (action 'onEvent') eventSource=eventSource}}`); + await render(hbs`{{mapbox-gl-on 'onzoom' 'layer1' (action 'onEvent') eventSource=eventSource}}`); }); });
Remove toLowerCase for event strings; update tests
kturney_ember-mapbox-gl
train
cd4d5056acf9c1d68bb791ba252653f75517dd83
diff --git a/examples/bench/server/server.go b/examples/bench/server/server.go index <HASH>..<HASH> 100644 --- a/examples/bench/server/server.go +++ b/examples/bench/server/server.go @@ -85,23 +85,23 @@ func setupServer(host string, basePort, instanceNum int) error { } type kvHandler struct { - mut sync.RWMutex + sync.RWMutex vals map[string]string } func (h *kvHandler) WithLock(write bool, f func()) { if write { - h.mut.Lock() + h.Lock() } else { - h.mut.RLock() + h.RLock() } f() if write { - h.mut.Unlock() + h.Unlock() } else { - h.mut.RUnlock() + h.RUnlock() } } diff --git a/examples/keyvalue/server/server.go b/examples/keyvalue/server/server.go index <HASH>..<HASH> 100644 --- a/examples/keyvalue/server/server.go +++ b/examples/keyvalue/server/server.go @@ -79,7 +79,7 @@ func main() { } type kvHandler struct { - mut sync.RWMutex + sync.RWMutex vals map[string]string } @@ -94,8 +94,8 @@ func (h *kvHandler) Get(ctx thrift.Context, key string) (string, error) { return "", err } - h.mut.RLock() - defer h.mut.RUnlock() + h.RLock() + defer h.RUnlock() if val, ok := h.vals[key]; ok { return val, nil @@ -110,8 +110,8 @@ func (h *kvHandler) Set(ctx thrift.Context, key, value string) error { return err } - h.mut.Lock() - defer h.mut.Unlock() + h.Lock() + defer h.Unlock() h.vals[key] = value // Example of how to use response headers. Normally, these values should be passed via result structs. @@ -130,8 +130,8 @@ func (h *kvHandler) ClearAll(ctx thrift.Context) error { return &keyvalue.NotAuthorized{} } - h.mut.Lock() - defer h.mut.Unlock() + h.Lock() + defer h.Unlock() h.vals = make(map[string]string) return nil
Embed mutex in example code
uber_tchannel-go
train
038dcbb5b30e75e312bbd7ec44eba1f5711e60c3
diff --git a/modules/activiti-cycle/src/main/java/org/activiti/cycle/impl/CycleServiceImpl.java b/modules/activiti-cycle/src/main/java/org/activiti/cycle/impl/CycleServiceImpl.java index <HASH>..<HASH> 100644 --- a/modules/activiti-cycle/src/main/java/org/activiti/cycle/impl/CycleServiceImpl.java +++ b/modules/activiti-cycle/src/main/java/org/activiti/cycle/impl/CycleServiceImpl.java @@ -1,6 +1,7 @@ package org.activiti.cycle.impl; import java.io.File; +import java.io.Serializable; import java.util.ArrayList; import java.util.HashMap; import java.util.List; @@ -22,12 +23,12 @@ import org.activiti.cycle.RepositoryNode; import org.activiti.cycle.RepositoryNodeCollection; import org.activiti.cycle.RepositoryNodeNotFoundException; import org.activiti.cycle.RepositoryNodeTag; -import org.activiti.cycle.TransactionalRepositoryConnector; import org.activiti.cycle.impl.conf.ConfigurationContainer; import org.activiti.cycle.impl.conf.PasswordEnabledRepositoryConnectorConfiguration; import org.activiti.cycle.impl.connector.demo.DemoConnectorConfiguration; import org.activiti.cycle.impl.connector.fs.FileSystemConnectorConfiguration; import org.activiti.cycle.impl.connector.signavio.SignavioConnectorConfiguration; +import org.activiti.cycle.impl.connector.util.TransactionalConnectorUtils; import org.activiti.cycle.impl.connector.view.TagConnectorConfiguration; import org.activiti.cycle.impl.db.CycleConfigurationService; import org.activiti.cycle.impl.db.CycleDAO; @@ -47,9 +48,19 @@ import org.activiti.cycle.impl.plugin.PluginFinder; */ public class CycleServiceImpl implements CycleService { + protected static class ConnectorList implements Serializable { + private static final long serialVersionUID = 1L; + // the transient field keeps the servlet container from serializing the connectors in the session + // TODO: needs testing: When do servlet containers serialize/deserialize? Tomcat seems to do it + // between shutdowns / startups. 
At the moment I would qualify this as a 'hack' - Daniel Meyer + private transient List<RepositoryConnector> connectors; + } + private CycleDAO cycleDAO; private List<RepositoryConnector> repositoryConnectors; + + // private static ThreadLocal<CycleService> currentCycleService = new // ThreadLocal<CycleService>(); @@ -127,13 +138,19 @@ public class CycleServiceImpl implements CycleService { */ public static List<RepositoryConnector> getConfiguredRepositoryConnectors(String currentUserId, HttpSession session) { String key = currentUserId + "_cycleConfiguredRepositoryConnectors"; - @SuppressWarnings("unchecked") - List<RepositoryConnector> connectors = ((List<RepositoryConnector>) session.getAttribute(key)); + + ConnectorList connectorList = (ConnectorList) session.getAttribute(key); + List<RepositoryConnector> connectors =null; + if(connectorList != null) { + connectors = connectorList.connectors; + } if (connectors == null) { PluginFinder.registerServletContext(session.getServletContext()); - ConfigurationContainer container = loadUserConfiguration(currentUserId); + ConfigurationContainer container = loadUserConfiguration(currentUserId); connectors = container.getConnectorList(); - session.setAttribute(key, connectors); + connectorList = new ConnectorList(); + connectorList.connectors = connectors; + session.setAttribute(key, connectorList); } return connectors; } @@ -183,10 +200,7 @@ public class CycleServiceImpl implements CycleService { */ public void commitPendingChanges(String comment) { for (RepositoryConnector connector : this.repositoryConnectors) { - if (connector instanceof TransactionalRepositoryConnector) { - TransactionalRepositoryConnector transactionalConnector = (TransactionalRepositoryConnector) connector; - transactionalConnector.commitPendingChanges(comment); - } + TransactionalConnectorUtils.commitPendingChanges(connector, comment); } }
Activiti Cycle: fixed Serialization / Deserialization issues (hopefully)
Activiti_Activiti
train
b80fe52375c327024c95bdfa58c4ca658911998b
diff --git a/tests/pod_users_test.py b/tests/pod_users_test.py index <HASH>..<HASH> 100644 --- a/tests/pod_users_test.py +++ b/tests/pod_users_test.py @@ -26,8 +26,10 @@ class Pod_Users_test(unittest.TestCase): def test_get_user_id_by_email(self): ''' test get_user_id_by_email ''' # register response - httpretty.register_uri(httpretty.GET, "http://fake.pod/", - body="{userId: '123456'}") + httpretty.register_uri(httpretty.GET, "http://fake.pod/pod/v1/user", + body='{"userId": 123456 }', + status=500, + content_type='text/json') # dummy authenticate symphony_pod_uri = 'http://fake.pod/' session_token = 'sessions' @@ -36,7 +38,7 @@ class Pod_Users_test(unittest.TestCase): # run test query status_code, response = pod.get_userid_by_email('test@email.com') # verify return - assert response.text == "{userId: '123456'}" + assert response.text == "123456" if __name__ == '__main__':
fixing test to be more sane
symphonyoss_python-symphony
train
49843fa83e496c5b572b35714bd1ccaea0ea6032
diff --git a/Admin/PostAdmin.php b/Admin/PostAdmin.php index <HASH>..<HASH> 100644 --- a/Admin/PostAdmin.php +++ b/Admin/PostAdmin.php @@ -200,7 +200,7 @@ class PostAdmin extends AbstractAdmin /** * {@inheritdoc} */ - protected function configureSideMenu(MenuItemInterface $menu, $action, AdminInterface $childAdmin = null) + protected function configureTabMenu(MenuItemInterface $menu, $action, AdminInterface $childAdmin = null) { if (!$childAdmin && !in_array($action, array('edit'))) { return;
Avoid deprecated method name configureSideMenu() has been deprecated in favor of configureTabMenu() ages ago.
sonata-project_SonataNewsBundle
train
d98bddac152b8e81adb1967668e96cc565fdb6e4
diff --git a/cmd/minikube/cmd/start_test.go b/cmd/minikube/cmd/start_test.go index <HASH>..<HASH> 100644 --- a/cmd/minikube/cmd/start_test.go +++ b/cmd/minikube/cmd/start_test.go @@ -71,6 +71,9 @@ func TestGetKuberneterVersion(t *testing.T) { } func TestGenerateCfgFromFlagsHTTPProxyHandling(t *testing.T) { + // Set default disk size value in lieu of flag init + viper.SetDefault(humanReadableDiskSize, defaultDiskSize) + originalEnv := os.Getenv("HTTP_PROXY") defer func() { err := os.Setenv("HTTP_PROXY", originalEnv) @@ -104,8 +107,6 @@ func TestGenerateCfgFromFlagsHTTPProxyHandling(t *testing.T) { }, } for _, test := range tests { - // Set default disk size value in lieu of flag init - viper.SetDefault(humanReadableDiskSize, defaultDiskSize) t.Run(test.description, func(t *testing.T) { cmd := &cobra.Command{} if err := os.Setenv("HTTP_PROXY", test.proxy); err != nil {
Set viper default in a better place
kubernetes_minikube
train
f57b7e2d29c6211d16ffa52a0998272f75799030
diff --git a/cookie_go111_test.go b/cookie_go111_test.go index <HASH>..<HASH> 100644 --- a/cookie_go111_test.go +++ b/cookie_go111_test.go @@ -4,7 +4,6 @@ package sessions import ( "net/http" - "strconv" "testing" ) @@ -19,14 +18,12 @@ func TestNewCookieFromOptionsSameSite(t *testing.T) { {http.SameSiteStrictMode}, } for i, v := range tests { - t.Run(strconv.Itoa(i+1), func(t *testing.T) { - options := &Options{ - SameSite: v.sameSite, - } - cookie := newCookieFromOptions("", "", options) - if cookie.SameSite != v.sameSite { - t.Fatalf("bad cookie sameSite: got %v, want %v", cookie.SameSite, v.sameSite) - } - }) + options := &Options{ + SameSite: v.sameSite, + } + cookie := newCookieFromOptions("", "", options) + if cookie.SameSite != v.sameSite { + t.Fatalf("%v: bad cookie sameSite: got %v, want %v", i+1, cookie.SameSite, v.sameSite) + } } } diff --git a/cookie_test.go b/cookie_test.go index <HASH>..<HASH> 100644 --- a/cookie_test.go +++ b/cookie_test.go @@ -1,7 +1,6 @@ package sessions import ( - "strconv" "testing" ) @@ -25,36 +24,34 @@ func TestNewCookieFromOptions(t *testing.T) { {"foo", "bar", "/foo/bar", "foo.example.com", 3600, true, false}, } for i, v := range tests { - t.Run(strconv.Itoa(i+1), func(t *testing.T) { - options := &Options{ - Path: v.path, - Domain: v.domain, - MaxAge: v.maxAge, - Secure: v.secure, - HttpOnly: v.httpOnly, - } - cookie := newCookieFromOptions(v.name, v.value, options) - if cookie.Name != v.name { - t.Fatalf("bad cookie name: got %q, want %q", cookie.Name, v.name) - } - if cookie.Value != v.value { - t.Fatalf("bad cookie value: got %q, want %q", cookie.Value, v.value) - } - if cookie.Path != v.path { - t.Fatalf("bad cookie path: got %q, want %q", cookie.Path, v.path) - } - if cookie.Domain != v.domain { - t.Fatalf("bad cookie domain: got %q, want %q", cookie.Domain, v.domain) - } - if cookie.MaxAge != v.maxAge { - t.Fatalf("bad cookie maxAge: got %q, want %q", cookie.MaxAge, v.maxAge) - } - if cookie.Secure != v.secure { - t.Fatalf("bad cookie secure: got %v, want %v", cookie.Secure, v.secure) - } - if cookie.HttpOnly != v.httpOnly { - t.Fatalf("bad cookie httpOnly: got %v, want %v", cookie.HttpOnly, v.httpOnly) - } - }) + options := &Options{ + Path: v.path, + Domain: v.domain, + MaxAge: v.maxAge, + Secure: v.secure, + HttpOnly: v.httpOnly, + } + cookie := newCookieFromOptions(v.name, v.value, options) + if cookie.Name != v.name { + t.Fatalf("%v: bad cookie name: got %q, want %q", i+1, cookie.Name, v.name) + } + if cookie.Value != v.value { + t.Fatalf("%v: bad cookie value: got %q, want %q", i+1, cookie.Value, v.value) + } + if cookie.Path != v.path { + t.Fatalf("%v: bad cookie path: got %q, want %q", i+1, cookie.Path, v.path) + } + if cookie.Domain != v.domain { + t.Fatalf("%v: bad cookie domain: got %q, want %q", i+1, cookie.Domain, v.domain) + } + if cookie.MaxAge != v.maxAge { + t.Fatalf("%v: bad cookie maxAge: got %q, want %q", i+1, cookie.MaxAge, v.maxAge) + } + if cookie.Secure != v.secure { + t.Fatalf("%v: bad cookie secure: got %v, want %v", i+1, cookie.Secure, v.secure) + } + if cookie.HttpOnly != v.httpOnly { + t.Fatalf("%v: bad cookie httpOnly: got %v, want %v", i+1, cookie.HttpOnly, v.httpOnly) + } } }
Don't use t.Run in tests, not supported in earlier Go versions This package is meant to work on Go versions going back to Go <I>, which means tests can't use testing.T.Run, which doesn't exist in Go <I> and earlier.
gorilla_sessions
train
aef5a0084876b97496167ecc3b61b15472294dbd
diff --git a/controller/src/test/java/io/pravega/controller/store/stream/HostStoreTest.java b/controller/src/test/java/io/pravega/controller/store/stream/HostStoreTest.java index <HASH>..<HASH> 100644 --- a/controller/src/test/java/io/pravega/controller/store/stream/HostStoreTest.java +++ b/controller/src/test/java/io/pravega/controller/store/stream/HostStoreTest.java @@ -89,20 +89,27 @@ public class HostStoreTest { .containerCount(containerCount) .build(); + // Update host store map. + Map<Host, Set<Integer>> hostContainerMap = HostMonitorConfigImpl.getHostContainerMap(host, controllerPort, containerCount); + // Create ZK based host store. HostControllerStore hostStore = HostStoreFactory.createStore(hostMonitorConfig, storeClient); - - CompletableFuture<Void> latch = new CompletableFuture<>(); + CompletableFuture<Void> latch1 = new CompletableFuture<>(); + CompletableFuture<Void> latch2 = new CompletableFuture<>(); ((ZKHostStore) hostStore).addListener(() -> { - latch.complete(null); + // With the addition of updateMap() in tryInit(), this listener is actually called twice, and we need to + // wait for the second operation to complete (related to updateHostContainersMap()). + if (latch1.isDone()) { + latch2.complete(null); + } + latch1.complete(null); }); - // Update host store map. - Map<Host, Set<Integer>> hostContainerMap = HostMonitorConfigImpl.getHostContainerMap(host, controllerPort, containerCount); hostStore.updateHostContainersMap(hostContainerMap); - latch.join(); + latch1.join(); + latch2.join(); validateStore(hostStore); - // verify that a new hostStore is initialized with map set by previous host store. + // verify that a new hostStore is initialized with map set by previous host store. HostControllerStore hostStore2 = HostStoreFactory.createStore(hostMonitorConfig, storeClient); Map<Host, Set<Integer>> map = hostStore2.getHostContainersMap();
Issue <I>: Sporadic failure of HostStoreTest.zkHostStoreTests (#<I>) * Adds a second CompletableFuture for the test to wait for completion to deal with the behavior introduced by PR <I>.
pravega_pravega
train
6a49a0fb8118ae9c1355edecedb952fe40a8a3d2
diff --git a/scheduler/generic_sched.go b/scheduler/generic_sched.go index <HASH>..<HASH> 100644 --- a/scheduler/generic_sched.go +++ b/scheduler/generic_sched.go @@ -74,7 +74,7 @@ const ( ) // minVersionMaxClientDisconnect is the minimum version that supports max_client_disconnect. -var minVersionMaxClientDisconnect = version.Must(version.NewVersion("1.2.6")) +var minVersionMaxClientDisconnect = version.Must(version.NewVersion("1.3.0")) // SetStatusError is used to set the status of the evaluation to the given error type SetStatusError struct {
set minimum version for disconnected client mode to <I> (#<I>)
hashicorp_nomad
train
4a5089ef5252c0c23e2d661d7a3d9e015593f29b
diff --git a/src/geo/map.js b/src/geo/map.js index <HASH>..<HASH> 100644 --- a/src/geo/map.js +++ b/src/geo/map.js @@ -81,18 +81,16 @@ var Map = Model.extend({ }, createPlainLayer: function (attrs, options) { - this._checkProperties(attrs, ['color']); - return this._addNewLayerModel('plain', attrs, options); - }, - - createBackgroundLayer: function (attrs, options) { - this._checkProperties(attrs, ['image']); + this._checkProperties(attrs, ['image|color']); return this._addNewLayerModel('plain', attrs, options); }, _checkProperties: function (obj, requiredProperties) { var missingProperties = _.select(requiredProperties, function (property) { - return obj[property] === undefined; + var properties = property.split('|'); + return _.all(properties, function (property) { + return obj[property] === undefined; + }); }); if (missingProperties.length) { throw new Error('The following attributes are missing: ' + missingProperties.join(',')); diff --git a/test/spec/geo/map.spec.js b/test/spec/geo/map.spec.js index <HASH>..<HASH> 100644 --- a/test/spec/geo/map.spec.js +++ b/test/spec/geo/map.spec.js @@ -313,41 +313,26 @@ describe('core/geo/map', function() { it('should throw an error if no properties are given', function () { expect(function () { this.map.createPlainLayer({}); - }.bind(this)).toThrowError('The following attributes are missing: color'); + }.bind(this)).toThrowError('The following attributes are missing: image|color'); }); - it('should return a layer of the corresponding type', function () { + it('should return a layer of the corresponding type if color attribute is present', function () { var layer = this.map.createPlainLayer({ color: '#FABADA' }); expect(layer instanceof PlainLayer).toBeTruthy(); }); - it('should add the layer model to the collection of layers', function () { + it('should return a layer of the corresponding type if image attribute is present', function () { var layer = this.map.createPlainLayer({ - color: '#FABADA' - }); - expect(this.map.layers.at(0)).toEqual(layer); - }); - }); - - describe('.createBackgroundLayer', function () { - it('should throw an error if no properties are given', function () { - expect(function () { - this.map.createBackgroundLayer({}); - }.bind(this)).toThrowError('The following attributes are missing: image'); - }); - - it('should return a layer of the corresponding type', function () { - var layer = this.map.createBackgroundLayer({ image: 'http://example.com/image.png' }); expect(layer instanceof PlainLayer).toBeTruthy(); }); it('should add the layer model to the collection of layers', function () { - var layer = this.map.createBackgroundLayer({ - image: 'http://example.com/image.png' + var layer = this.map.createPlainLayer({ + color: '#FABADA' }); expect(this.map.layers.at(0)).toEqual(layer); });
Removed map.createBackgroundLayer in favor of map.createPlainLayer + image attribute
CartoDB_carto.js
train
bface4d53caeabb052ac390bf0cae91971bd4f10
diff --git a/gridsome/app/head.js b/gridsome/app/head.js index <HASH>..<HASH> 100644 --- a/gridsome/app/head.js +++ b/gridsome/app/head.js @@ -30,10 +30,10 @@ const head = { link: [] } -icons.favicons.forEach(({ type, width, height, src: href }) => { +icons.favicons.forEach(({ width, height, src: href }) => { head.link.push({ rel: 'icon', - type: `image/${type}`, + type: icons.faviconMimeType, sizes: `${width}x${height}`, href }) @@ -42,6 +42,7 @@ icons.favicons.forEach(({ type, width, height, src: href }) => { icons.touchicons.forEach(({ width, height, src: href }) => { head.link.push({ rel: `apple-touch-icon${icons.precomposed ? '-precomposed' : ''}`, + type: icons.touchiconMimeType, sizes: `${width}x${height}`, href }) diff --git a/gridsome/lib/app/CodeGenerator.js b/gridsome/lib/app/CodeGenerator.js index <HASH>..<HASH> 100644 --- a/gridsome/lib/app/CodeGenerator.js +++ b/gridsome/lib/app/CodeGenerator.js @@ -48,6 +48,8 @@ async function genIcons ({ config, resolve, queue }) { const faviconPath = resolve(favicon.src) const icons = { + touchiconMimeType: null, + faviconMimeType: null, precomposed: false, touchicons: [], favicons: [] @@ -61,6 +63,7 @@ async function genIcons ({ config, resolve, queue }) { icons.precomposed = touchicon.precomposed icons.touchicons = touchicons.sets + icons.touchiconMimeType = touchicons.mimeType } if (await fs.exists(faviconPath)) { @@ -70,6 +73,7 @@ async function genIcons ({ config, resolve, queue }) { }) icons.favicons = favicons.sets + icons.faviconMimeType = favicons.mimeType } return `export default ${JSON.stringify(icons, null, 2)}`
fix(favicon): set mime type for icons in head
gridsome_gridsome
train
42b1d49aff910107b687552cc09aa73f89a11b56
diff --git a/bibliopixel/util/colors/wheel.py b/bibliopixel/util/colors/wheel.py index <HASH>..<HASH> 100644 --- a/bibliopixel/util/colors/wheel.py +++ b/bibliopixel/util/colors/wheel.py @@ -1,6 +1,3 @@ -WHEEL_MAX = 384 - - def _gen_wheel(): result = [] for p in range(385): @@ -28,15 +25,10 @@ def wheel_color(position): """Get color from wheel value (0 - 384). Provided for those used to using it from Adafruit libraries """ - if position < 0: - position = 0 - if position > 384: - position = 384 - - return _WHEEL[position] + return _WHEEL[round(position) % len(_WHEEL)] def wheel_helper(pos, length, cycle_step): """Helper for wheel_color that distributes colors over length and allows shifting position.""" - return wheel_color(((pos * WHEEL_MAX // length) + cycle_step) % WHEEL_MAX) + return wheel_color((pos * len(_WHEEL) / length) + cycle_step)
Fix wheel to cycle and accept floating parameters
ManiacalLabs_BiblioPixel
train
060c2e4186441759d0a5cb4db153c89d68d4d347
diff --git a/Library/Bullhorn/FastRest/UnitTestHelper/Base.php b/Library/Bullhorn/FastRest/UnitTestHelper/Base.php index <HASH>..<HASH> 100644 --- a/Library/Bullhorn/FastRest/UnitTestHelper/Base.php +++ b/Library/Bullhorn/FastRest/UnitTestHelper/Base.php @@ -106,6 +106,22 @@ abstract class Base extends PHPUnit_Framework_TestCase implements InjectionAware } /** + * validatePhql + * @param string $modelName + * @param int|array $parameters + * @return mixed + */ + protected function validateFindFirstPhql($modelName, $parameters) { + $this->validatePhqlGenerateModelFactory(); + + /** @type Model $modelFactory */ + $modelFactory = new $modelName(); + $modelFactory->findFirst($parameters); + + return $parameters; + } + + /** * validatePhqlGenerateModelFactory * @return void */
Added findFirst to the parsable phql tests
bullhorn_fast-rest
train
bbc1cb82702b678b21bef15394f067c146e47625
diff --git a/filesystem/Upload.php b/filesystem/Upload.php index <HASH>..<HASH> 100644 --- a/filesystem/Upload.php +++ b/filesystem/Upload.php @@ -270,9 +270,11 @@ class Upload extends Controller { /** * Clear out all errors (mostly set by {loadUploaded()}) + * including the validator's errors */ public function clearErrors() { $this->errors = array(); + $this->validator->clearErrors(); } /** @@ -343,6 +345,13 @@ class Upload_Validator { } /** + * Clear out all errors + */ + public function clearErrors() { + $this->errors = array(); + } + + /** * Set information about temporary file produced by PHP. * @param array $tmpFile */ diff --git a/forms/UploadField.php b/forms/UploadField.php index <HASH>..<HASH> 100644 --- a/forms/UploadField.php +++ b/forms/UploadField.php @@ -1222,20 +1222,25 @@ class UploadField extends FileField { $name = $this->getName(); $postVars = $request->postVar($name); - // Save the temporary file into a File object + // Extract uploaded files from Form data $uploadedFiles = $this->extractUploadedFileData($postVars); - $firstFile = reset($uploadedFiles); - $file = $this->saveTemporaryFile($firstFile, $error); - if(empty($file)) { - $return = array('error' => $error); - } else { - $return = $this->encodeFileAttributes($file); + $return = array(); + + // Save the temporary files into a File objects + // and save data/error on a per file basis + foreach ($uploadedFiles as $tempFile) { + $file = $this->saveTemporaryFile($tempFile, $error); + if(empty($file)) { + array_push($return, array('error' => $error)); + } else { + array_push($return, $this->encodeFileAttributes($file)); + } + $this->upload->clearErrors(); } - + // Format response with json - $response = new SS_HTTPResponse(Convert::raw2json(array($return))); + $response = new SS_HTTPResponse(Convert::raw2json($return)); $response->addHeader('Content-Type', 'text/plain'); - if (!empty($return['error'])) $response->setStatusCode(403); return $response; } diff --git a/tests/forms/uploadfield/UploadFieldTest.php b/tests/forms/uploadfield/UploadFieldTest.php index <HASH>..<HASH> 100644 --- a/tests/forms/uploadfield/UploadFieldTest.php +++ b/tests/forms/uploadfield/UploadFieldTest.php @@ -167,8 +167,9 @@ class UploadFieldTest extends FunctionalTest { 'UploadFieldTest_Controller/Form/field/AllowedExtensionsField/upload', array('AllowedExtensionsField' => $this->getUploadFile($invalidFile)) ); - $this->assertTrue($response->isError()); - $this->assertContains('Extension is not allowed', $response->getBody()); + $response = json_decode($response->getBody(), true); + $this->assertTrue(array_key_exists('error', $response[0])); + $this->assertContains('Extension is not allowed', $response[0]['error']); $validFile = 'valid.txt'; $_FILES = array('AllowedExtensionsField' => $this->getUploadFile($validFile)); @@ -176,8 +177,8 @@ class UploadFieldTest extends FunctionalTest { 'UploadFieldTest_Controller/Form/field/AllowedExtensionsField/upload', array('AllowedExtensionsField' => $this->getUploadFile($validFile)) ); - $this->assertFalse($response->isError()); - $this->assertNotContains('Extension is not allowed', $response->getBody()); + $response = json_decode($response->getBody(), true); + $this->assertFalse(array_key_exists('error', $response[0])); } /**
FIX #<I> iframe transport multi-file upload FIX #<I>, FIX #<I> UploadField now handles multiple file upload through iframe transport correctly (mainly for IE) as well as upload errors on a per-file basis.
silverstripe_silverstripe-framework
train
28d373e90dcbbba2dcaabdde33f8349b1dfbef38
diff --git a/hazelcast-client/src/test/java/com/hazelcast/client/topic/Issue9766Test.java b/hazelcast-client/src/test/java/com/hazelcast/client/topic/Issue9766Test.java index <HASH>..<HASH> 100644 --- a/hazelcast-client/src/test/java/com/hazelcast/client/topic/Issue9766Test.java +++ b/hazelcast-client/src/test/java/com/hazelcast/client/topic/Issue9766Test.java @@ -30,6 +30,7 @@ package com.hazelcast.client.topic;/* * limitations under the License. */ +import com.hazelcast.client.config.ClientConfig; import com.hazelcast.client.test.TestHazelcastFactory; import com.hazelcast.core.HazelcastInstance; import com.hazelcast.core.ITopic; @@ -67,8 +68,10 @@ public class Issue9766Test { public void serverRestartWhenReliableTopicListenerRegistered() { HazelcastInstance server = hazelcastFactory.newHazelcastInstance(); - HazelcastInstance hazelcastClient = hazelcastFactory.newHazelcastClient(); - HazelcastInstance hazelcastClient2 = hazelcastFactory.newHazelcastClient(); + ClientConfig clientConfig = new ClientConfig(); + clientConfig.getNetworkConfig().setConnectionAttemptLimit(Integer.MAX_VALUE); + HazelcastInstance hazelcastClient = hazelcastFactory.newHazelcastClient(clientConfig); + HazelcastInstance hazelcastClient2 = hazelcastFactory.newHazelcastClient(clientConfig); ITopic<Integer> topic = hazelcastClient.getReliableTopic(topicName); final ITopic<Integer> topic2 = hazelcastClient2.getReliableTopic(topicName);
fixes Issue<I>Test.serverRestartWhenReliableTopicListenerRegistered Test was failing because the client could not connect to the cluster. Test is fixed via setting the connection attempt limit to Integer.MAX fixes #<I>
hazelcast_hazelcast
train
545481b6368af4446c3742b386eb00172c4e44b1
diff --git a/distributed/src/main/java/com/orientechnologies/orient/server/distributed/impl/ONewDistributedResponseManager.java b/distributed/src/main/java/com/orientechnologies/orient/server/distributed/impl/ONewDistributedResponseManager.java index <HASH>..<HASH> 100644 --- a/distributed/src/main/java/com/orientechnologies/orient/server/distributed/impl/ONewDistributedResponseManager.java +++ b/distributed/src/main/java/com/orientechnologies/orient/server/distributed/impl/ONewDistributedResponseManager.java @@ -132,15 +132,15 @@ public class ONewDistributedResponseManager implements ODistributedResponseManag } public synchronized boolean collectResponse(OTransactionPhase1TaskResult response, String senderNodeName) { + if (response.getResultPayload() instanceof OTxStillRunning) { + stillRunning++; + return false; + } debugNodeReplied.add(senderNodeName); return addResult(senderNodeName, response.getResultPayload()); } private boolean addResult(String senderNodeName, OTransactionResultPayload result) { - if (result instanceof OTxStillRunning) { - stillRunning++; - return false; - } List<OTransactionResultPayload> results = new ArrayList<>(); if (nodesConcurToTheQuorum.contains(senderNodeName)) {
made sure not to track 'transaction still running' answers
orientechnologies_orientdb
train
31fea7e5651994b8d06adea0fdc1419fd7e49563
diff --git a/views/js/qtiCreator/widgets/Widget.js b/views/js/qtiCreator/widgets/Widget.js index <HASH>..<HASH> 100755 --- a/views/js/qtiCreator/widgets/Widget.js +++ b/views/js/qtiCreator/widgets/Widget.js @@ -177,7 +177,7 @@ define([ if(currentState){ // hide widget tooltips when interaction leaves response mapping ('map') state: - if (this.serial.match(/^interaction/) && currentState.name === 'map' && state.name !== 'map') { + if (currentState.name === 'map' && state.name !== 'map') { this.$container .find('[data-has-tooltip]') .each(function(j, el) {
Remove unnecessary interaction check in if-statement
oat-sa_extension-tao-itemqti
train
945384cca227924e8f7f1eb9151e124de5871766
diff --git a/js/test/base/functions/test.number.js b/js/test/base/functions/test.number.js index <HASH>..<HASH> 100644 --- a/js/test/base/functions/test.number.js +++ b/js/test/base/functions/test.number.js @@ -17,7 +17,7 @@ assert (numberToString (7.9e27) === '7900000000000000000000000000'); assert (numberToString (-12.345) === '-12.345'); assert (numberToString (12.345) === '12.345'); assert (numberToString (0) === '0'); -// this line breaks the test +// the following line breaks the test // see https://github.com/ccxt/ccxt/issues/5744 // assert (numberToString (0.00000001) === '0.00000001');
test.number.js minor edits
ccxt_ccxt
train
1b29633be90eb148a0c0fe884324f77d9b4fca69
diff --git a/lib/access_control_admin.py b/lib/access_control_admin.py index <HASH>..<HASH> 100644 --- a/lib/access_control_admin.py +++ b/lib/access_control_admin.py @@ -993,7 +993,7 @@ def acc_get_action_id(name_action): try: return run_sql("""SELECT id FROM accACTION WHERE name = %s""", - (name_action, ))[0][0] + (name_action, ), run_on_slave=True)[0][0] except (ProgrammingError, IndexError): return 0 @@ -1099,7 +1099,7 @@ def acc_get_role_id(name_role): """get id of role, name given. """ try: return run_sql("""SELECT id FROM accROLE WHERE name = %s""", - (name_role, ))[0][0] + (name_role, ), run_on_slave=True)[0][0] except IndexError: return 0 @@ -1214,7 +1214,7 @@ def acc_is_user_in_role(user_info, id_role): if run_sql("""SELECT ur.id_accROLE FROM user_accROLE ur WHERE ur.id_user = %s AND ur.expiration >= NOW() AND - ur.id_accROLE = %s LIMIT 1""", (user_info['uid'], id_role), 1): + ur.id_accROLE = %s LIMIT 1""", (user_info['uid'], id_role), 1, run_on_slave=True): return True return acc_firerole_check_user(user_info, load_role_definition(id_role)) @@ -1247,7 +1247,7 @@ def acc_get_user_roles(id_user): explicit_roles = run_sql("""SELECT ur.id_accROLE FROM user_accROLE ur WHERE ur.id_user = %s AND ur.expiration >= NOW() - ORDER BY ur.id_accROLE""", (id_user, )) + ORDER BY ur.id_accROLE""", (id_user, ), run_on_slave=True) return [id_role[0] for id_role in explicit_roles] @@ -1368,10 +1368,10 @@ def acc_find_possible_roles(name_action, always_add_superadmin=True, **arguments given arguments. roles is a list of role_id """ id_action = acc_get_action_id(name_action) - roles = intbitset(run_sql("SELECT id_accROLE FROM accROLE_accACTION_accARGUMENT WHERE id_accACTION=%s AND argumentlistid <= 0", (id_action, ))) + roles = intbitset(run_sql("SELECT id_accROLE FROM accROLE_accACTION_accARGUMENT WHERE id_accACTION=%s AND argumentlistid <= 0", (id_action, ), run_on_slave=True)) if always_add_superadmin: roles.add(CFG_SUPERADMINROLE_ID) - other_roles_to_check = run_sql("SELECT id_accROLE, keyword, value, argumentlistid FROM accROLE_accACTION_accARGUMENT JOIN accARGUMENT ON id_accARGUMENT=id WHERE id_accACTION=%s AND argumentlistid > 0", (id_action, )) + other_roles_to_check = run_sql("SELECT id_accROLE, keyword, value, argumentlistid FROM accROLE_accACTION_accARGUMENT JOIN accARGUMENT ON id_accARGUMENT=id WHERE id_accACTION=%s AND argumentlistid > 0", (id_action, ), run_on_slave=True) other_roles_to_check_dict = {} for id_accROLE, keyword, value, argumentlistid in other_roles_to_check: if id_accROLE not in roles: diff --git a/lib/access_control_firerole.py b/lib/access_control_firerole.py index <HASH>..<HASH> 100644 --- a/lib/access_control_firerole.py +++ b/lib/access_control_firerole.py @@ -142,14 +142,14 @@ def load_role_definition(role_id): @param role_id: @return: a deserialized compiled role definition """ - res = run_sql("SELECT firerole_def_ser FROM accROLE WHERE id=%s", (role_id, ), 1) + res = run_sql("SELECT firerole_def_ser FROM accROLE WHERE id=%s", (role_id, ), 1, run_on_slave=True) if res: try: return deserialize(res[0][0]) except Exception: ## Something bad might have happened? (Update of Python?) repair_role_definitions() - res = run_sql("SELECT firerole_def_ser FROM accROLE WHERE id=%s", (role_id, ), 1) + res = run_sql("SELECT firerole_def_ser FROM accROLE WHERE id=%s", (role_id, ), 1, run_on_slave=True) if res: return deserialize(res[0][0]) return CFG_ACC_EMPTY_ROLE_DEFINITION_OBJ
WebAccess: use DB slave for even more queries * Adds the run_on_slave=True argument to frequently used SQL SELECT queries on tables 'accACTION', 'accARGUMENT', 'accMAILCOOKIE', 'accROLE' and 'accROLE_accACTION_accARGUMENT'. This commit also affects functions executed in webaccessadmin_lib.py.
inveniosoftware_invenio-access
train
a42c4eb2bb37be3e988bb88936e9759223cd3269
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -1,3 +1,4 @@ +import re from setuptools import setup, find_packages import sys @@ -49,6 +50,6 @@ setup(name='baselines', try: from distutils.version import StrictVersion import tensorflow - assert StrictVersion(tensorflow.__version__) >= StrictVersion('1.4.0') + assert StrictVersion(re.sub(r'-rc\d+$', '', tensorflow.__version__)) >= StrictVersion('1.4.0') except ImportError: assert False, "TensorFlow needed, of version above 1.4"
Fix setup.py for tensorflow -rc versions
openai_baselines
train
cdfae2dc1d768e05378cb2792d32bf88e441f822
diff --git a/request-server.go b/request-server.go index <HASH>..<HASH> 100644 --- a/request-server.go +++ b/request-server.go @@ -140,19 +140,18 @@ func (rs *RequestServer) packetWorker() error { if err != nil { return err } continue case hasPath: - handle = rs.nextRequest(newRequest(pkt.getPath(), rs)) + handle = rs.nextRequest(newRequest(pkt.getPath(), *rs.Handlers)) case hasHandle: handle = pkt.getHandle() } request, ok := rs.getRequest(handle) if !ok { return rs.sendError(pkt, syscall.EBADF) } - select { - case request.pktChan <- pkt: - case resp := <-request.rspChan: - if resp.err != nil { rs.sendError(resp.pkt, err) } - rs.sendPacket(resp.pkt) - } + // send packet to request handler and wait for response + request.pktChan <- pkt + resp := <-request.rspChan + if resp.err != nil { rs.sendError(resp.pkt, err) } + rs.sendPacket(resp.pkt) } return nil } diff --git a/request.go b/request.go index <HASH>..<HASH> 100644 --- a/request.go +++ b/request.go @@ -13,9 +13,8 @@ type response struct { err error } -// Valid Method values: -// Get, Put, SetStat, Rename, Rmdir, Mkdir, Symlink, List, Stat, Readlink type Request struct { + // Get, Put, SetStat, Rename, Rmdir, Mkdir, Symlink, List, Stat, Readlink Method string Filepath string Pflags uint32 @@ -25,11 +24,11 @@ type Request struct { length uint32 pktChan chan packet rspChan chan response - svr *RequestServer + handlers Handlers } -func newRequest(path string, svr *RequestServer) *Request { - request := &Request{Filepath: path, svr: svr} +func newRequest(path string, handlers Handlers) *Request { + request := &Request{Filepath: path, handlers: handlers} go request.requestWorker() return request } @@ -42,7 +41,7 @@ func (r *Request) close() { func (r *Request) requestWorker() { for pkt := range r.pktChan { r.populate(pkt) - handlers := r.svr.Handlers + handlers := r.handlers var err error var rpkt resp_packet switch r.Method {
pass/store handlers on request vs server
pkg_sftp
train
17ee5ea811df3a295b280e62a2ce443fb45b38ce
diff --git a/structurizr-core/src/com/structurizr/view/View.java b/structurizr-core/src/com/structurizr/view/View.java index <HASH>..<HASH> 100644 --- a/structurizr-core/src/com/structurizr/view/View.java +++ b/structurizr-core/src/com/structurizr/view/View.java @@ -20,6 +20,9 @@ import java.util.stream.Collectors; */ public abstract class View { + private static final int DEFAULT_RANK_SEPARATION = 300; + private static final int DEFAULT_NODE_SEPARATION = 300; + private SoftwareSystem softwareSystem; private String softwareSystemId; private String description = ""; @@ -167,6 +170,15 @@ public abstract class View { } /** + * Enables automatic layout for this view, with the specified direction, using the Graphviz implementation. + * + * @param rankDirection the rank direction + */ + public void enableAutomaticLayout(AutomaticLayout.RankDirection rankDirection) { + enableAutomaticLayout(rankDirection, DEFAULT_RANK_SEPARATION, DEFAULT_NODE_SEPARATION); + } + + /** * Enables automatic layout for this view, with the specified settings, using the Graphviz implementation. * * @param rankDirection the rank direction
Adds a method to enable Graphviz auto-layout, with some defaults.
structurizr_java
train
82cb3df7e028bd1b861457c359afe7fcb1a2c46d
diff --git a/lib/doctor.js b/lib/doctor.js index <HASH>..<HASH> 100644 --- a/lib/doctor.js +++ b/lib/doctor.js @@ -724,6 +724,14 @@ function callHook(type, filename, info) { } } +/* @private */ +function wrapCall(fn, ctx, args, filename, info) { + callHook('enter', filename, info); + var result = fn.apply(ctx, args); + callHook('exit', filename, info); + return result; +} + /* __EXPERIMENTAL__ Inserts enter/exit hooks into a JavaScript file. Registers callback hooks for that file. @@ -781,4 +789,5 @@ module.exports.hookEnterExit = hookEnterExit; module.exports.callEnterHook = callEnterHook; module.exports.callExitHook = callExitHook; module.exports.insertEnterExitHooksSync = insertEnterExitHooksSync; -module.exports.callHook = callHook; \ No newline at end of file +module.exports.callHook = callHook; +module.exports.wrapCall = wrapCall; \ No newline at end of file diff --git a/package.json b/package.json index <HASH>..<HASH> 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "author": "Justin Deal", "name": "doctor", "description": "Create documentation from a JavaScript AST.", - "version": "0.1.23", + "version": "0.1.24", "homepage": "https://github.com/jdeal/doctor", "repository": { "type": "git", diff --git a/transform/enter-exit.js b/transform/enter-exit.js index <HASH>..<HASH> 100644 --- a/transform/enter-exit.js +++ b/transform/enter-exit.js @@ -1,21 +1,36 @@ +var _ = require('underscore'); + var rules = []; -function makeHook(options, type, node) { +function hookInfoString(options, node) { var info = { - type: node.type === 'file' ? 'module' : 'function', + type: 'function', line: node.line, - column: node.column, - name: 'function' + column: node.column }; if (node.type === 'file') { + info.type = 'module'; info.name = 'module'; } else if (node.nodes[0].type === 'name') { info.name = node.nodes[0].value; } - return node.fromSource('__doctor.callHook(' + + if (node.type === 'call') { + info.type = 'call'; + } + return JSON.stringify(info).replace(/\}$/, ', filename:__filename}'); +} + +function hookFilename(options) { + return options.hookFilename ? options.hookFilename : ''; +} + +function makeHook(options, type, node) { + var hookNode = node.fromSource('__doctor.callHook(' + JSON.stringify(type) + ',' + - JSON.stringify(options.hookFilename ? 
options.hookFilename : '') + ',' + - JSON.stringify(info).replace(/\}$/, ', filename:__filename}') + ')'); + JSON.stringify(hookFilename(options)) + ',' + + hookInfoString(options, node) + ')'); + hookNode.ignore = true; + return hookNode; } rules.push({ @@ -24,7 +39,10 @@ rules.push({ node.item('modulePath', node.path); node.item('functionNode', node); node.prepend(makeHook(transform.options, 'enter', node)); - node.prepend(node.fromSource("var __doctor = require(" + JSON.stringify(transform.options.hookDoctorPath) + ")")); + //node.prepend(node.fromSource("var __ = {}")); + var drRequire = node.fromSource("var __doctor = require(" + JSON.stringify(transform.options.hookDoctorPath) + ")"); + drRequire.nodes[0].nodes[1].ignore = true; + node.prepend(drRequire); } }); @@ -78,6 +96,34 @@ rules.push({ } }); +rules.push({ + type: 'call', + match: function (node) { + return !node.ignore; + }, + transform: function (node, transform) { + var args = node.nodes[1]; + var wrap = node.nodeFromSource('__doctor.wrapCall(' + + JSON.stringify(hookFilename(transform.options)) + ', ' + + hookInfoString(transform.options, node) + ')'); + wrap.line = node.line; + var argsArray = node.nodeFromSource('[]'); + argsArray.line = node.line; + _(args.nodes).each(function (argNode) { + argsArray.append(argNode); + }); + wrap.nodes[1].prepend(argsArray); + if (node.nodes[0].type === 'dot') { + wrap.nodes[1].prepend(node.nodes[0].nodes[0]); + } else { + wrap.nodes[1].prepend({type: 'null'}); + } + wrap.nodes[1].prepend(node.nodes[0]); + node.before(wrap); + node.remove(); + } +}); + // rules.push({ // type: 'return', // match: function (node) {
hooks for enter/exit of calls
jdeal_doctor
train
5c791b60536df392042deb0bb6808a8290d3d39b
diff --git a/dev/com.ibm.websphere.javaee.jsf.2.3/src/javax/faces/event/WebsocketEvent.java b/dev/com.ibm.websphere.javaee.jsf.2.3/src/javax/faces/event/WebsocketEvent.java index <HASH>..<HASH> 100644 --- a/dev/com.ibm.websphere.javaee.jsf.2.3/src/javax/faces/event/WebsocketEvent.java +++ b/dev/com.ibm.websphere.javaee.jsf.2.3/src/javax/faces/event/WebsocketEvent.java @@ -50,9 +50,9 @@ public final class WebsocketEvent implements Serializable return channel; } - public Serializable getUser() + public <S extends java.io.Serializable> S getUser() { - return user; + return (S) user; } public CloseReason.CloseCode getCloseCode()
Issue #<I>: update WebsocketEvent.getUser signature
OpenLiberty_open-liberty
train
2692cb530605bdb3f263b330523e8c7bf0a4531c
diff --git a/lib/mxit_rails/mxit_api/auth_token.rb b/lib/mxit_rails/mxit_api/auth_token.rb index <HASH>..<HASH> 100644 --- a/lib/mxit_rails/mxit_api/auth_token.rb +++ b/lib/mxit_rails/mxit_api/auth_token.rb @@ -1,6 +1,7 @@ module MxitRails::MxitApi class AuthToken - attr_reader :access_token, :type, :expires_in, :refresh_token, :expires_at + attr_reader :access_token, :type, :expires_in, :refresh_token, :expires_at, + :refresh_token_expires_at def initialize(token_response) @access_token = token_response['access_token'] @@ -10,6 +11,8 @@ module MxitRails::MxitApi @scope = token_response['scope'].split @expires_at = Time.now + expires_in.seconds + # If there isn't a refresh token `has_refresh_token_expired?` must always return true. + @refresh_token_expires_at = @refresh_token ? Time.now + 24.hours : Time.now end def scope @@ -21,6 +24,10 @@ module MxitRails::MxitApi @expires_at - Time.now <= 3.0 end + def has_refresh_token_expired? + @refresh_token_expires_at - Time.now <= 3.0 + end + def has_scopes?(scopes) (scopes - @scope).empty? end
Added method to test if the refresh token has expired.
linsen_mxit-rails
train
caef8ed897e726b84642dc8b0609169a27ab4e6e
diff --git a/src/cr/cube/crunch_cube.py b/src/cr/cube/crunch_cube.py index <HASH>..<HASH> 100644 --- a/src/cr/cube/crunch_cube.py +++ b/src/cr/cube/crunch_cube.py @@ -382,6 +382,14 @@ class CrunchCube(object): adjusted=adjusted, include_transforms_for_dims=transform_dims, ) + + if axis > 0 and len(array.shape) == 1: + # If any of the dimensions has only one element, it's flattened + # from the resulting array (as a part of the MR pre-processing). + # This can lead to a potential inconsistency between dimensions + # and axes, and we need to restore one dimension in this case. + array = array[:, np.newaxis] + return np.sum(array, axis) def _mr_proportions(self, axis, weighted):
Implement fix for the axis vs shape mismatch (restore flattened dimension)
Crunch-io_crunch-cube
train
fe3e87d1e4c9ad977b25e0cff723f9ec5da85484
diff --git a/src/Peach/Http/Header/CookieItem.php b/src/Peach/Http/Header/CookieItem.php index <HASH>..<HASH> 100644 --- a/src/Peach/Http/Header/CookieItem.php +++ b/src/Peach/Http/Header/CookieItem.php @@ -49,7 +49,8 @@ class CookieItem private $value; /** - * + * この cookie が持つ属性をあらわします. + * * @var CookieOptions */ private $options;
Modified PHPdoc comments of CookieItem
trashtoy_PEACH2
train
f088ef2719bd222fe8c29e3dcd72e0b8c007ac87
diff --git a/fixed.go b/fixed.go index <HASH>..<HASH> 100644 --- a/fixed.go +++ b/fixed.go @@ -85,18 +85,19 @@ func (f *fixed) readNFrom(expected int64, reader io.Reader) (bytes, int64, error } } -func (f *fixed) WriteTo(w io.Writer) (int64, error) { - var at int64 - l := int64(f.length) - for at < l { - n, err := w.Write(f.bytes[at:l]) - at += int64(n) +func (f *fixed) WriteTo(w io.Writer) (n int64, err error) { + r := f.r + l := f.length + for r < l { + n, err := w.Write(f.bytes[r:l]) if err != nil { - return at, err + break } + r += n } f.reset() - return at, nil + f.r += int(n) + return n, err } func (f *fixed) Read(data []byte) (int, error) {
WriteTo interacts properly with ReadX methods
karlseguin_bytepool
train
e2e96bb793daf3c68272cab7727377681b022306
diff --git a/lib/codemirror.js b/lib/codemirror.js index <HASH>..<HASH> 100644 --- a/lib/codemirror.js +++ b/lib/codemirror.js @@ -533,7 +533,6 @@ window.CodeMirror = (function() { var lineElement = line.hidden ? elt("div") : lineContent(cm, line); var markers = line.gutterMarkers, display = cm.display; - if (line.className) lineElement.className = line.className; if (!cm.options.lineNumbers && !markers && !line.bgClassName && (!line.widgets || !line.widgets.length)) return lineElement; @@ -3197,6 +3196,7 @@ window.CodeMirror = (function() { function buildLineContent(line, tabSize, wrapAt, compensateForWrapping) { var first = true, col = 0, specials = /[\t\u0000-\u0019\u200b\u2028\u2029\uFEFF]/g; var pre = elt("pre"); + if (line.className) pre.className = line.className; function span_(text, style) { if (!text) return; // Work around a bug where, in some compat modes, IE ignores leading spaces
Move line.className check to buildLineContent This way, it is also used when measuring the line.
codemirror_CodeMirror
train
bf6fffc4f7950b3b110fab0aaaeb4ff5b4d84ba5
diff --git a/src/index.js b/src/index.js index <HASH>..<HASH> 100644 --- a/src/index.js +++ b/src/index.js @@ -18,7 +18,7 @@ import stage2 from "./config/stage2" import stage3 from "./config/stage3" const pkg = require(resolve(process.cwd(), "package.json")) -const external = Object.keys(pkg.dependencies).concat(builtinModules) +const external = [].concat(Object.keys(pkg.dependencies || {}), Object.keys(pkg.devDependencies || {}), Object.keys(pkg.peerDependencies || {}), builtinModules) const externalsMap = {} for (var i=0, l=external.length; i<l; i++) { externalsMap[external[i]] = true
Support for non-existing 'dependencies' key in package.json + added filtering for devDeps + peerDeps
sebastian-software_preppy
train
f87d16446ea54e65d5f7dccd0126134fa67dd18f
diff --git a/experiment/manual-trigger/manual-trigger.go b/experiment/manual-trigger/manual-trigger.go index <HASH>..<HASH> 100644 --- a/experiment/manual-trigger/manual-trigger.go +++ b/experiment/manual-trigger/manual-trigger.go @@ -55,8 +55,8 @@ func flagOptions() options { flag.StringVar(&o.jenkinsTokenFile, "jenkins-token-file", "", "Path to the file containing the Jenkins API token.") flag.StringVar(&o.jenkinsUserName, "jenkins-user-name", "", "Jenkins username.") - flag.StringVar(&o.githubEndpoint, "github-endpoint", "https://api.github.com", "GitHub's API endpoint.") - flag.StringVar(&o.graphqlEndpoint, "graphql-endpoint", "https://api.github.com/graphql", "GitHub's GraphQL API endpoint.") + flag.StringVar(&o.githubEndpoint, "github-endpoint", github.DefaultAPIEndpoint, "GitHub's API endpoint.") + flag.StringVar(&o.graphqlEndpoint, "graphql-endpoint", github.DefaultGraphQLEndpoint, "GitHub's GraphQL API endpoint.") flag.StringVar(&o.githubTokenFile, "github-token-file", "", "Path to file containing GitHub OAuth token.") flag.StringVar(&o.jobName, "job-name", "", "Name of Jenkins job")
Added Default API and GraphQL Endpoints - manual trigger
kubernetes_test-infra
train
1d84cf1d0a9de3d6c5a9b418d0731b5214d14667
diff --git a/lib/nesser/packets/constants.rb b/lib/nesser/packets/constants.rb index <HASH>..<HASH> 100644 --- a/lib/nesser/packets/constants.rb +++ b/lib/nesser/packets/constants.rb @@ -19,7 +19,7 @@ module Nesser ('a'..'z').to_a + ('A'..'Z').to_a + ('0'..'9').to_a + - ['-', '.', '@'] + ['-', '.', '@', '_'] ) MAX_SEGMENT_LENGTH = 63 MAX_TOTAL_LENGTH = 253 diff --git a/lib/nesser/transaction.rb b/lib/nesser/transaction.rb index <HASH>..<HASH> 100644 --- a/lib/nesser/transaction.rb +++ b/lib/nesser/transaction.rb @@ -47,6 +47,7 @@ module Nesser private def not_sent!() if @sent + # TODO: This isn't a valid raise raise ArgumentError("Already sent!") end end diff --git a/test/packets/packer_test.rb b/test/packets/packer_test.rb index <HASH>..<HASH> 100644 --- a/test/packets/packer_test.rb +++ b/test/packets/packer_test.rb @@ -97,13 +97,18 @@ module Nesser end end - # Based on an actual bug I found - I'd had 0-9 legal as numbers, but not as - # characters. + # Based on an actual bugs I found - I'd had 0-9 legal as numbers, but not as + # characters (ditto underscore). def test_pack_name_with_numbers() packer = Packer.new() packer.pack_name('test0123456789.com') end + def test_pack_name_with_underscore() + packer = Packer.new() + packer.pack_name("_xXmpP-ClienT._TCp.test.com") + end + def test_pack_name_segment_too_long() packer = Packer.new()
Added underscore as a valid character in a DNS name
iagox86_nesser
train
1fc21c4b8d9fdaa18396473fd8beb031157d8cea
diff --git a/oauth2client/old_run.py b/oauth2client/old_run.py index <HASH>..<HASH> 100644 --- a/oauth2client/old_run.py +++ b/oauth2client/old_run.py @@ -23,6 +23,7 @@ import sys import webbrowser import gflags +from six.moves import input from oauth2client import client from oauth2client import util @@ -147,7 +148,7 @@ def run(flow, storage, http=None): print('Failed to find "code" in the query parameters of the redirect.') sys.exit('Try running with --noauth_local_webserver.') else: - code = raw_input('Enter verification code: ').strip() + code = input('Enter verification code: ').strip() try: credential = flow.step2_exchange(code, http=http) diff --git a/oauth2client/tools.py b/oauth2client/tools.py index <HASH>..<HASH> 100644 --- a/oauth2client/tools.py +++ b/oauth2client/tools.py @@ -30,6 +30,7 @@ import sys from six.moves import BaseHTTPServer from six.moves import urllib +from six.moves import input from oauth2client import client from oauth2client import util @@ -216,7 +217,7 @@ def run_flow(flow, storage, flags, http=None): print('Failed to find "code" in the query parameters of the redirect.') sys.exit('Try running with --noauth_local_webserver.') else: - code = raw_input('Enter verification code: ').strip() + code = input('Enter verification code: ').strip() try: credential = flow.step2_exchange(code, http=http) diff --git a/samples/oauth2_for_devices.py b/samples/oauth2_for_devices.py index <HASH>..<HASH> 100644 --- a/samples/oauth2_for_devices.py +++ b/samples/oauth2_for_devices.py @@ -3,6 +3,7 @@ # See: https://developers.google.com/accounts/docs/OAuth2ForDevices import httplib2 +from six.moves import input from oauth2client.client import OAuth2WebServerFlow from googleapiclient.discovery import build @@ -18,7 +19,7 @@ flow_info = flow.step1_get_device_and_user_codes() print "Enter the following code at %s: %s" % (flow_info.verification_url, flow_info.user_code) print "Then press Enter." -raw_input() +input() # Step 2: get credentials # https://developers.google.com/accounts/docs/OAuth2ForDevices#obtainingatoken
Use six to call raw_input for Python 3 support In Python 3 `raw_input` was renamed to `input`. Start to use the compatibility library to call the function. <URL>
googleapis_oauth2client
train
72f7fae3141d75906955e2c30f34668e2f78b9d9
diff --git a/lib/gush/cli.rb b/lib/gush/cli.rb index <HASH>..<HASH> 100644 --- a/lib/gush/cli.rb +++ b/lib/gush/cli.rb @@ -34,6 +34,9 @@ module Gush end desc "show [workflow_id]", "Shows details about workflow with given ID" + option :skip_overview, type: :boolean + option :skip_jobs, type: :boolean + option :jobs, default: :all def show(workflow_id) require gushfile workflow = Gush.find_workflow(workflow_id, redis) @@ -43,73 +46,9 @@ module Gush return end - rows = [] - progress = "" - if workflow.failed? - status = "failed".red - status += "\n" - status += "#{workflow.nodes.find(&:failed).name} failed".red - elsif workflow.running? - status = "running".yellow - finished = workflow.nodes.count {|job| job.finished } - total = workflow.nodes.count - progress = "#{finished}/#{total} [#{(finished*100)/total}%]" - elsif workflow.finished? - status = "done".green - else - status = "pending".light_white - end + display_overview_for(workflow) unless options[:skip_overview] - rows << [{alignment: :center, value: "id"}, workflow.name] - rows << :separator - rows << [{alignment: :center, value: "name"}, workflow.class.to_s] - rows << :separator - rows << [{alignment: :center, value: "jobs"}, workflow.nodes.count] - rows << :separator - rows << [{alignment: :center, value: "failed jobs"}, workflow.nodes.count(&:failed?).to_s.red] - rows << :separator - rows << [{alignment: :center, value: "succeeded jobs"}, - workflow.nodes.count { |j| j.finished && !j.failed }.to_s.green] - rows << :separator - rows << [{alignment: :center, value: "enqueued jobs"}, workflow.nodes.count(&:running?).to_s.yellow] - rows << :separator - rows << [{alignment: :center, value: "remaining jobs"}, - workflow.nodes.count{|j| [j.finished, j.failed, j.enqueued].all? {|b| !b} }] - rows << :separator - rows << [{alignment: :center, value: "status"}, status] - if !progress.empty? - rows << :separator - rows << [{alignment: :center, value: "progress"}, progress] - end - table = Terminal::Table.new(rows: rows) - puts table - - puts "\nJobs list:\n" - - workflow.nodes.sort_by do |job| - case - when job.failed? - 0 - when job.finished? - 1 - when job.running? - 2 - else - 3 - end - end.each do |job| - name = job.name - puts case - when job.failed? - "[✗] #{name.red}" - when job.finished? - "[✓] #{name.green}" - when job.running? - "[•] #{name.yellow}" - else - "[ ] #{name}" - end - end + display_jobs_list_for(workflow, options[:jobs]) unless options[:skip_jobs] end @@ -210,5 +149,83 @@ module Gush def redis @redis ||= Redis.new end + + def display_overview_for(workflow) + rows = [] + progress = "" + if workflow.failed? + status = "failed".red + status += "\n" + status += "#{workflow.nodes.find(&:failed).name} failed".red + elsif workflow.running? + status = "running".yellow + finished = workflow.nodes.count {|job| job.finished } + total = workflow.nodes.count + progress = "#{finished}/#{total} [#{(finished*100)/total}%]" + elsif workflow.finished? 
+ status = "done".green + else + status = "pending".light_white + end + + rows << [{alignment: :center, value: "id"}, workflow.name] + rows << :separator + rows << [{alignment: :center, value: "name"}, workflow.class.to_s] + rows << :separator + rows << [{alignment: :center, value: "jobs"}, workflow.nodes.count] + rows << :separator + rows << [{alignment: :center, value: "failed jobs"}, workflow.nodes.count(&:failed?).to_s.red] + rows << :separator + rows << [{alignment: :center, value: "succeeded jobs"}, + workflow.nodes.count { |j| j.finished && !j.failed }.to_s.green] + rows << :separator + rows << [{alignment: :center, value: "enqueued jobs"}, workflow.nodes.count(&:running?).to_s.yellow] + rows << :separator + rows << [{alignment: :center, value: "remaining jobs"}, + workflow.nodes.count{|j| [j.finished, j.failed, j.enqueued].all? {|b| !b} }] + rows << :separator + rows << [{alignment: :center, value: "status"}, status] + if !progress.empty? + rows << :separator + rows << [{alignment: :center, value: "progress"}, progress] + end + puts Terminal::Table.new(rows: rows) + end + + def display_jobs_list_for(workflow, jobs) + puts "\nJobs list:\n" + + jobs_by_type(workflow, jobs).each do |job| + name = job.name + puts case + when job.failed? + "[✗] #{name.red}" + when job.finished? + "[✓] #{name.green}" + when job.running? + "[•] #{name.yellow}" + else + "[ ] #{name}" + end + end + end + + def jobs_by_type(workflow, type) + jobs = workflow.nodes.sort_by do |job| + case + when job.failed? + 0 + when job.finished? + 1 + when job.running? + 2 + else + 3 + end + end + + jobs.select!{|j| j.public_send("#{type}?") } unless type == :all + jobs + end end end
Changes in gush show command - Added ability to skip overview by --skip-overview option - Added ability to skip jobs list by --skip-jobs option - Added ability to filter jobs type in jobs list by --jobs (all|running|done|failed) option
chaps-io_gush
train
0961b0807c106018f1316ae014f674bb7b3eca83
diff --git a/src/Links.php b/src/Links.php index <HASH>..<HASH> 100644 --- a/src/Links.php +++ b/src/Links.php @@ -29,7 +29,7 @@ class Links private function parseLinks(\stdClass $links) { foreach (get_object_vars($links) as $property => $value) { - if ($property=="checkout") { // nested links? eg: checkout=>payOnlineBankDebitItau,payCreditCard + if ($property == 'checkout') { // nested links? eg: checkout=>payOnlineBankDebitItau,payCreditCard... $this->parseLinks($value); } else { $this->links[$property] = new Link($property, $value); diff --git a/tests/MoipTestCase.php b/tests/MoipTestCase.php index <HASH>..<HASH> 100644 --- a/tests/MoipTestCase.php +++ b/tests/MoipTestCase.php @@ -57,6 +57,11 @@ abstract class MoipTestCase extends TestCase protected $body_cc_pay_pci = '{"id":"PAY-L6J2NKS9OGYU","status":"IN_ANALYSIS","delayCapture":false,"amount":{"total":102470,"fees":5695,"refunds":0,"liquid":96775,"currency":"BRL"},"installmentCount":1,"fundingInstrument":{"creditCard":{"id":"CRC-2TJ13YB4Y1WU","brand":"MASTERCARD","first6":"555566","last4":"8884","holder":{"birthdate":"1989-06-01","birthDate":"1989-06-01","taxDocument":{"type":"CPF","number":"22222222222"},"fullname":"Jose Silva"}},"method":"CREDIT_CARD"},"fees":[{"type":"TRANSACTION","amount":5695}],"events":[{"type":"PAYMENT.IN_ANALYSIS","createdAt":"2016-02-19T18:18:54.535-02"},{"type":"PAYMENT.CREATED","createdAt":"2016-02-19T18:18:51.946-02"}],"_links":{"order":{"href":"https://sandbox.moip.com.br/v2/orders/ORD-8UDL4K9VRJTB","title":"ORD-8UDL4K9VRJTB"},"self":{"href":"https://sandbox.moip.com.br/v2/payments/PAY-L6J2NKS9OGYU"}},"createdAt":"2016-02-19T18:18:51.944-02","updatedAt":"2016-02-19T18:18:54.535-02"}'; /** + * @var string response from moip API. + */ + protected $body_billet_pay = '{"id":"PAY-XNVIBO5MIQ9S","status":"WAITING","delayCapture":false,"amount":{"total":102470,"fees":3645,"refunds":0,"liquid":98825,"currency":"BRL"},"installmentCount":1,"fundingInstrument":{"boleto":{"expirationDate":"2016-05-21","lineCode":"23793.39126 60000.062608 32001.747909 7 68010000102470"},"method":"BOLETO"},"fees":[{"type":"TRANSACTION","amount":3645}],"events":[{"type":"PAYMENT.CREATED","createdAt":"2016-05-20T15:19:47.000-03"},{"type":"PAYMENT.WAITING","createdAt":"2016-05-20T15:19:47.000-03"}],"_links":{"order":{"href":"https://sandbox.moip.com.br/v2/orders/ORD-3KSQDBJSTIF6","title":"ORD-3KSQDBJSTIF6"},"payBoleto":{"redirectHref":"https://checkout-sandbox.moip.com.br/boleto/PAY-XNVIBO5MIQ9S"},"self":{"href":"https://sandbox.moip.com.br/v2/payments/PAY-XNVIBO5MIQ9S"}},"updatedAt":"2016-05-20T15:19:47.000-03","createdAt":"2016-05-20T15:19:47.000-03"}'; + + /** * @var string holds the last generated customer ownId. In mock mode it'll be always the default, but it changes on sandbox mode. */ protected $last_cus_id = 'meu_id_customer'; diff --git a/tests/Resource/PaymentTest.php b/tests/Resource/PaymentTest.php index <HASH>..<HASH> 100644 --- a/tests/Resource/PaymentTest.php +++ b/tests/Resource/PaymentTest.php @@ -6,7 +6,7 @@ use Moip\Tests\MoipTestCase; class PaymentTest extends MoipTestCase { - //todo: test boleto and credit card hash + //todo: credit card hash /** * Test creating a credit card payment, passing all credit card data. @@ -24,4 +24,20 @@ class PaymentTest extends MoipTestCase $this->assertEquals($first6, substr($cc, 0, 6)); $this->assertEquals($last4, substr($cc, -4)); } + + /** + * Test creating a billet payment. 
+ */ + public function testBillet() + { + $this->mockHttpSession($this->body_order); + $order = $this->createOrder()->create(); + $this->mockHttpSession($this->body_billet_pay); + $payment = $order->payments()->setBoleto(new \DateTime('today +1day'), + 'http://dev.moip.com.br/images/logo-header-moip.png')->execute(); + $this->assertNotEmpty($payment->getFundingInstrument()->boleto); + $pay_id = $payment->getId(); + $this->assertEquals($payment->getLinks()->getLink('payBoleto')->getHref(), + "https://checkout-sandbox.moip.com.br/boleto/$pay_id"); + } }
- add unit test to billet payment - fix checkout links
wirecardBrasil_moip-sdk-php
train
dac1ce4172b006540b91e2fe12d916a52de33885
diff --git a/tests/Doctrine/Tests/ORM/Functional/Ticket/GH6217Test.php b/tests/Doctrine/Tests/ORM/Functional/Ticket/GH6217Test.php index <HASH>..<HASH> 100644 --- a/tests/Doctrine/Tests/ORM/Functional/Ticket/GH6217Test.php +++ b/tests/Doctrine/Tests/ORM/Functional/Ticket/GH6217Test.php @@ -14,7 +14,6 @@ final class GH6217Test extends OrmFunctionalTestCase $this->_schemaTool->createSchema( [ $this->_em->getClassMetadata(GH6217User::class), - $this->_em->getClassMetadata(GH6217Profile::class), $this->_em->getClassMetadata(GH6217Category::class), $this->_em->getClassMetadata(GH6217UserProfile::class), ] @@ -27,13 +26,11 @@ final class GH6217Test extends OrmFunctionalTestCase public function testRetrievingCacheShouldNotThrowUndefinedIndexException() { $user = new GH6217User(); - $profile = new GH6217Profile(); $category = new GH6217Category(); - $userProfile = new GH6217UserProfile($user, $profile, $category); + $userProfile = new GH6217UserProfile($user, $category); $this->_em->persist($category); $this->_em->persist($user); - $this->_em->persist($profile); $this->_em->persist($userProfile); $this->_em->flush(); $this->_em->clear(); @@ -64,18 +61,6 @@ class GH6217User } /** @Entity @Cache(usage="NONSTRICT_READ_WRITE") */ -class GH6217Profile -{ - /** @Id @Column(type="string") @GeneratedValue(strategy="NONE") */ - public $id; - - public function __construct() - { - $this->id = uniqid(self::class, true); - } -} - -/** @Entity @Cache(usage="NONSTRICT_READ_WRITE") */ class GH6217Category { /** @Id @Column(type="string") @GeneratedValue(strategy="NONE") */ @@ -93,16 +78,12 @@ class GH6217UserProfile /** @Id @Cache("NONSTRICT_READ_WRITE") @ManyToOne(targetEntity=GH6217User::class) */ public $user; - /** @Id @Cache("NONSTRICT_READ_WRITE") @ManyToOne(targetEntity=GH6217Profile::class, fetch="EAGER") */ - public $profile; - /** @Id @Cache("NONSTRICT_READ_WRITE") @ManyToOne(targetEntity=GH6217Category::class, fetch="EAGER") */ public $category; - public function __construct(GH6217User $user, GH6217Profile $profile, GH6217Category $category) + public function __construct(GH6217User $user, GH6217Category $category) { $this->user = $user; - $this->profile = $profile; $this->category = $category; } }
#<I> removing profile entity, since it is not needed to reproduce the issue
doctrine_orm
train
fceca0cbbcce3e3eb9117dc0a41e9087a2a13843
diff --git a/lib/coverband/utils/railtie.rb b/lib/coverband/utils/railtie.rb index <HASH>..<HASH> 100644 --- a/lib/coverband/utils/railtie.rb +++ b/lib/coverband/utils/railtie.rb @@ -12,6 +12,10 @@ module Coverband Coverband.runtime_coverage! end + config.before_initialize do + Coverband.eager_loading_coverage! + end + rake_tasks do load 'coverband/utils/tasks.rb' end
Call eager_loading in rails before_initialize Since configure resets, we need to call configure afterward again in before_initialize. I believe this problem would only occur within tests when calling configure multiple times.
danmayer_coverband
train
5e56f0731c4f027064e7862f2139968d9c70e611
diff --git a/tofu/utils.py b/tofu/utils.py index <HASH>..<HASH> 100644 --- a/tofu/utils.py +++ b/tofu/utils.py @@ -2444,7 +2444,7 @@ class KeyHandler_mpl(object): #--------------- # Preliminary checks - ls = ['nMax','key','defid','defax'] + ls = ['nMax','key','defax'] for k,v in dgroup.items(): c0 = type(k) is str c1 = type(v) is dict @@ -2453,7 +2453,6 @@ class KeyHandler_mpl(object): raise Exception(cls._msgdobj) assert type(v['nMax']) in [int,np.int64] assert type(v['key']) is str - assert v['defid'] in dref.keys() assert v['defax'] in dax.keys() lg = sorted(list(dgroup.keys())) assert len(set(lg))==len(lg) @@ -2530,8 +2529,12 @@ class KeyHandler_mpl(object): if dobj[o]['ax'] not in lla: lla.append(dobj[o]['ax']) dgroup[g]['lax'] = lla - # Check unicity of def ref - assert dref[dgroup[g]['defid']]['group'] == g + + # Set defid + ldefid = dax[dgroup[g]['defax']]['ref'].keys() + ldefid = [defid for defid in ldefid if dref[defid]['group'] == g] + assert len(ldefid) == 1 + dgroup[g]['defid'] = ldefid[0] # Get list of obj with their indices, for fast updates lobj = [obj for obj in dobj.keys() @@ -2573,14 +2576,6 @@ class KeyHandler_mpl(object): assert dax[ax]['defrefid'] in lrefid - ########## DB - # Detect inconsistencies in defax vs defid for the ax in question - # (ex.: tf vs t for dax['t'][0], favorite of group time !!! - # Force clear definition to avoid further mistakes !!!! - ################# - - - # dref for rid in lrid: dref[rid]['ind'] = np.zeros((dgroup[dref[rid]['group']]['nMax'],), diff --git a/tofu/version.py b/tofu/version.py index <HASH>..<HASH> 100644 --- a/tofu/version.py +++ b/tofu/version.py @@ -1,2 +1,2 @@ # Do not edit this file, pipeline versioning is governed by git tags ! -__version__='1.3.24-391-gd75cb29' \ No newline at end of file +__version__='1.3.24-392-g3647ef8' \ No newline at end of file
Debugged tf.utils.KeyHandler_mpl() regarding conflicting dgroup[g]['drefid'] and dgroup[g]['defax'], now defid is not user-specified but derived from dgroup[g]['defax'] and dax
ToFuProject_tofu
train
c0bc28954da80a94f213be932885dd4b1bbb003e
diff --git a/lib/BoundTCPSocket.php b/lib/BoundTCPSocket.php index <HASH>..<HASH> 100755 --- a/lib/BoundTCPSocket.php +++ b/lib/BoundTCPSocket.php @@ -12,7 +12,7 @@ class BoundTCPSocket extends BoundSocket { public $reuse = false; public $host; public $port; - public $listenerMode = true; + public $listenerMode = false; public function setDefaultPort($n) { $this->defaultPort = (int) $n; diff --git a/lib/IOStream.php b/lib/IOStream.php index <HASH>..<HASH> 100755 --- a/lib/IOStream.php +++ b/lib/IOStream.php @@ -399,9 +399,7 @@ abstract class IOStream { Daemon::uncaughtExceptionHandler($e); } } else { - while (!$this->onWriteOnce->isEmpty()) { - $this->onWriteOnce->executeOne($this); - } + $this->onWriteOnce->executeAll($this); } try { $this->onWrite();
Turned off default BoundTCPSocket->listenerMode to be compatible with eio.
kakserpom_phpdaemon
train
f54b3b2e9b09caefd494126dad1a6434125697b9
diff --git a/spec/branch_cover_spec.rb b/spec/branch_cover_spec.rb index <HASH>..<HASH> 100644 --- a/spec/branch_cover_spec.rb +++ b/spec/branch_cover_spec.rb @@ -53,8 +53,8 @@ RSpec::Matchers.define :have_correct_branch_coverage do |filename, lineno| @errors.empty? end failure_message do |fn| - puts DeepCover::Tools.format_branch_cover(@file_coverage, show_line_nbs: true) - "Branch cover does not match on lines #{@errors.join(', ')}" + formatted_branch_cover = DeepCover::Tools.format_branch_cover(@file_coverage, show_line_nbs: true) + "Branch cover does not match on lines #{@errors.join(', ')}\n#{formatted_branch_cover.join("\n")}" end end
branch spec failure message now includes the spec code ...Instead of printing it when it happens
deep-cover_deep-cover
train
2e807de0092622c62c382adec9929d4df53aec5b
diff --git a/python/lib/uploader.py b/python/lib/uploader.py index <HASH>..<HASH> 100644 --- a/python/lib/uploader.py +++ b/python/lib/uploader.py @@ -167,7 +167,7 @@ def get_authentication_info(): def upload_done_file(params): - print("Upload a DONE file to tell the backend that the sequence is all uploaded and ready to submit.") + print("Upload a DONE file {} to indicate the sequence is all uploaded and ready to submit.".format(params['key'])) if not os.path.exists("DONE"): open("DONE", 'a').close() #upload @@ -184,7 +184,10 @@ def upload_file(filepath, url, permission, signature, key=None, move_files=True) Move to subfolders 'success'/'failed' on completion if move_files is True. ''' filename = os.path.basename(filepath) - s3_filename = EXIF(filepath).exif_name() + try: + s3_filename = EXIF(filepath).exif_name() + except: + s3_filename = filename print("Uploading: {0}".format(filename)) # add S3 'path' if given @@ -273,10 +276,8 @@ def create_dirs(root_path=''): def verify_exif(filename): ''' Check that image file has the required EXIF fields. - Incompatible files will be ignored server side. ''' - # required tags in IFD name convention required_exif = exif_gps_fields() + exif_datetime_fields() + [["Image Orientation"]] exif = EXIF(filename)
Handle naming with no exif during upload
mapillary_mapillary_tools
train
ace9a96fc2cdd5962deab0f48f9064cfa8212788
diff --git a/src/blocks/scratch3_motion.js b/src/blocks/scratch3_motion.js index <HASH>..<HASH> 100644 --- a/src/blocks/scratch3_motion.js +++ b/src/blocks/scratch3_motion.js @@ -37,7 +37,7 @@ Scratch3MotionBlocks.prototype.getPrimitives = function() { Scratch3MotionBlocks.prototype.moveSteps = function (args, util) { var steps = Cast.toNumber(args.STEPS); - var radians = MathUtil.degToRad(util.target.direction); + var radians = MathUtil.degToRad(90 - util.target.direction); var dx = steps * Math.cos(radians); var dy = steps * Math.sin(radians); util.target.setXY(util.target.x + dx, util.target.y + dy); diff --git a/src/util/math-util.js b/src/util/math-util.js index <HASH>..<HASH> 100644 --- a/src/util/math-util.js +++ b/src/util/math-util.js @@ -6,7 +6,7 @@ function MathUtil () {} * @return {!number} Equivalent value in radians. */ MathUtil.degToRad = function (deg) { - return (Math.PI * (90 - deg)) / 180; + return deg * Math.PI / 180; }; /** diff --git a/test/unit/util_math.js b/test/unit/util_math.js index <HASH>..<HASH> 100644 --- a/test/unit/util_math.js +++ b/test/unit/util_math.js @@ -2,12 +2,11 @@ var test = require('tap').test; var math = require('../../src/util/math-util'); test('degToRad', function (t) { - // @todo This is incorrect - t.strictEqual(math.degToRad(0), 1.5707963267948966); - t.strictEqual(math.degToRad(1), 1.5533430342749535); - t.strictEqual(math.degToRad(180), -1.5707963267948966); - t.strictEqual(math.degToRad(360), -4.71238898038469); - t.strictEqual(math.degToRad(720), -10.995574287564276); + t.strictEqual(math.degToRad(0), 0); + t.strictEqual(math.degToRad(1), 0.017453292519943295); + t.strictEqual(math.degToRad(180), Math.PI); + t.strictEqual(math.degToRad(360), 2 * Math.PI); + t.strictEqual(math.degToRad(720), 4 * Math.PI); t.end(); });
Fix degToRad function definition. Resolves GH-<I>
LLK_scratch-vm
train
7735164d24245555ee377f792a8bd9dbea31b586
diff --git a/src/stream/line.js b/src/stream/line.js index <HASH>..<HASH> 100644 --- a/src/stream/line.js +++ b/src/stream/line.js @@ -4,39 +4,18 @@ */ /*#ifndef(UMD)*/ "use strict"; +/*global _GpfStreamBufferedRead*/ // gpf.stream.BufferedRead +/*global _gpfArrayForEach*/ // Almost like [].forEach (undefined are also enumerated) /*global _gpfDefine*/ // Shortcut for gpf.define -/*global _gpfStreamSecureInstallProgressFlag*/ // Install the progress flag used by _gpfStreamSecureRead and Write -/*global _gpfStreamSecureRead*/ // Generate a wrapper to secure multiple calls to stream#read /*global _gpfStreamSecureWrite*/ // Generates a wrapper to secure multiple calls to stream#write /*exported _GpfStreamLineAdatper*/ // gpf.stream.LineAdapter /*#endif*/ -function _gpfStreamLineLastDoesntEndsWithLF (buffer) { - var lastItem = buffer[buffer.length - 1]; - return lastItem.charAt(lastItem.length - 1) !== "\n"; -} - -function _gpfStreamLineTrimCR (line) { - var lengthMinus1 = line.length - 1; - if (line.lastIndexOf("\r") === lengthMinus1) { - return line.substr(0, lengthMinus1); - } - return line; -} - -function _gpfStreamLineWrite (output, lines) { - if (!lines.length) { - return Promise.resolve(); - } - return output.write(_gpfStreamLineTrimCR(lines.shift())) - .then(function () { - return _gpfStreamLineWrite(output, lines); - }); -} - var + _reDOSCR = /\r\n/g, _GpfStreamLineAdatper = _gpfDefine(/** @lends gpf.stream.LineAdapter.prototype */ { $class: "gpf.stream.LineAdapter", + $extend: _GpfStreamBufferedRead, /** * Stream line adapter @@ -48,38 +27,20 @@ var * @since 0.2.1 */ constructor: function () { + this.$super(); this._buffer = []; }, //region gpf.interfaces.IReadableStream /** - * @gpf:sameas gpf.interfaces.IReadableStream#read - * @since 0.2.1 - */ - read: _gpfStreamSecureRead(function (output) { - var me = this; //eslint-disable-line no-invalid-this - me._output = output; - if (me._buffer.length) { - return me._process(); - } - return Promise.resolve(); - }), - - //endregion - - //region gpf.interfaces.IReadableStream - - /** * @gpf:sameas gpf.interfaces.IWritableStream#write * @since 0.2.1 */ write: _gpfStreamSecureWrite(function (buffer) { var me = this; //eslint-disable-line no-invalid-this me._buffer.push(buffer.toString()); - if (me._output) { - return me._process(); - } + me._process(); return Promise.resolve(); }), @@ -92,36 +53,33 @@ var * @since 0.2.1 */ flush: function () { - if (_gpfStreamLineLastDoesntEndsWithLF(this._buffer)) { - return this.write("\n"); + if (this._buffer.length) { + this._buffer.push("\n"); + this._process(); } + this._completeReadBuffer(); return Promise.resolve(); }, //endregion /** - * Output stream - * - * @type {gpf.interfaces.IWritableStream} - * @since 0.2.1 - */ - _output: null, - - /** * Buffer * @since 0.2.1 */ _buffer: [], /** - * Extract lines from buffer + * Consolidate lines from buffer * * @return {String[]} Array of lines * @since 0.2.1 */ - _extractLines: function () { - return this._buffer.join("").split("\n"); + _consolidateLines: function () { + return this._buffer + .join("") + .replace(_reDOSCR, "\n") + .split("\n"); }, /** @@ -141,16 +99,16 @@ var /** * Check if the buffer contains any carriage return and write to output * - * @return {Promise} Resolve when all lines were written * @since 0.2.1 */ _process: function () { - var lines = this._extractLines(); - this._buffer.length = 0; - this._pushBackLastLineIfNotEmpty(lines); - return _gpfStreamLineWrite(this._output, lines); + var me = this, + lines = me._consolidateLines(); + 
me._buffer.length = 0; + me._pushBackLastLineIfNotEmpty(lines); + _gpfArrayForEach(lines, function (line) { + me._appendToReadBuffer(line); + }); } }); - -_gpfStreamSecureInstallProgressFlag(_GpfStreamLineAdatper);
Simplify code (#<I>)
ArnaudBuchholz_gpf-js
train
3bd14d0e2d8ecb575f6213789bffbf565b2ad2b9
diff --git a/providers/line/line.go b/providers/line/line.go index <HASH>..<HASH> 100644 --- a/providers/line/line.go +++ b/providers/line/line.go @@ -7,7 +7,6 @@ import ( "encoding/json" "io/ioutil" "net/http" - "strings" "fmt" @@ -123,10 +122,8 @@ func (p *Provider) FetchUser(session goth.Session) (goth.User, error) { return user, err } - name := strings.Split(u.DisplayName, " ") - user.FirstName = name[1] - user.LastName = name[0] - user.NickName = strings.ToLower(u.DisplayName) + user.NickName = u.DisplayName + user.AvatarURL = u.PictureURL user.UserID = u.UserID return user, err }
Removed splitting DisplayName and added the PictureURL as part of the data
markbates_goth
train
9210bac0e4c849270dc5b5d15237a1491c22ef34
diff --git a/src/Services/FieldSetFieldFinder.php b/src/Services/FieldSetFieldFinder.php index <HASH>..<HASH> 100644 --- a/src/Services/FieldSetFieldFinder.php +++ b/src/Services/FieldSetFieldFinder.php @@ -237,8 +237,12 @@ class FieldSetFieldFinder } elseif( $field instanceof HasOne ) { - /** @var HasOne $field */ - return $field->getRelationFieldSet( $field->getValue() ?: $field->getRelatedModel() ); + if( $field->getValue() ) + { + return $field->getRelationFieldSet( $field->getValue() ); + } + + return new FieldSet( $field->getRelatedModel(), $field->getNameSpacedName() ); } elseif( $field instanceof Translatable ) {
Fix issue with arbory image field validation in new nodes
arbory_arbory
train
0830eda63d04537d592b482c6ed7abbac1be0b4b
diff --git a/library/CM/SmartyPlugins/function.select.php b/library/CM/SmartyPlugins/function.select.php index <HASH>..<HASH> 100644 --- a/library/CM/SmartyPlugins/function.select.php +++ b/library/CM/SmartyPlugins/function.select.php @@ -55,6 +55,9 @@ function smarty_function_select(array $params, Smarty_Internal_Template $templat $optionListValues = array_keys($optionList); $selectedValue = reset($optionListValues); } + if (null !== $selectedValue) { + $selectedValue = (string) $selectedValue; + } foreach ($optionList as $itemValue => $itemLabel) { if (null !== $itemValue) { $itemValue = (string) $itemValue;
also cast selectedValue to string
cargomedia_cm
train
be86d0cd46ab3929b959e1e76298725e56967473
diff --git a/src/TextureAppChrome.js b/src/TextureAppChrome.js index <HASH>..<HASH> 100644 --- a/src/TextureAppChrome.js +++ b/src/TextureAppChrome.js @@ -1,6 +1,12 @@ -import { Component, DefaultDOMElement, platform } from 'substance' +import { Component, DefaultDOMElement, platform, Router } from 'substance' export default class TextureAppChrome extends Component { + constructor (...args) { + super(...args) + + this._router = new Router() + } + didMount () { // if debug is turned on do not 'forward' to an error display and instead // leave the app in its failed state @@ -20,6 +26,7 @@ export default class TextureAppChrome extends Component { DefaultDOMElement.getBrowserWindow().on('drop', this._supressDnD, this) DefaultDOMElement.getBrowserWindow().on('dragover', this._supressDnD, this) } + this._router.start() this.handleActions({ 'save': this._handleSave }) @@ -74,7 +81,7 @@ export default class TextureAppChrome extends Component { } _setupChildContext (cb) { - cb(null, {}) + cb(null, { router: this._router }) } _initContext (context, cb) { diff --git a/src/TextureWebAppChrome.js b/src/TextureWebAppChrome.js index <HASH>..<HASH> 100644 --- a/src/TextureWebAppChrome.js +++ b/src/TextureWebAppChrome.js @@ -1,27 +1,9 @@ /* global vfs */ -import { parseKeyEvent, Router } from 'substance' +import { parseKeyEvent } from 'substance' import TextureAppChrome from './TextureAppChrome' import { VfsStorageClient, HttpStorageClient, InMemoryDarBuffer } from './dar' export default class TextureWebAppChrome extends TextureAppChrome { - constructor (...args) { - super(...args) - - this._router = new Router() - } - - _setupChildContext (cb) { - super._setupChildContext((err, context) => { - if (err) cb(err) - else cb(null, Object.assign(context, { router: this._router })) - }) - } - - didMount () { - super.didMount() - this._router.start() - } - _loadArchive (archiveId, context, cb) { let storage = this._getStorage(this.props.storageType) let buffer = new InMemoryDarBuffer()
Move router initialization to TextureAppChrome.
substance_texture
train
4302bcfa35d23b099e073c9f7831c68962bf2b6a
diff --git a/resources/lang/fa-IR/notifications.php b/resources/lang/fa-IR/notifications.php index <HASH>..<HASH> 100644 --- a/resources/lang/fa-IR/notifications.php +++ b/resources/lang/fa-IR/notifications.php @@ -85,6 +85,14 @@ return [ 'action' => 'Verify', ], ], + 'manage' => [ + 'mail' => [ + 'subject' => 'Manage Your Subscription', + 'content' => 'Click to manage your subscription to :app_name status page.', + 'title' => 'Click to manage your subscription to :app_name status page.', + 'action' => 'Manage subscription', + ], + ], ], 'system' => [ 'test' => [
New translations notifications.php (Persian)
CachetHQ_Cachet
train
c70f242676abf2e0c8b3a00745932d2327614c83
diff --git a/impact_functions/tables.py b/impact_functions/tables.py index <HASH>..<HASH> 100644 --- a/impact_functions/tables.py +++ b/impact_functions/tables.py @@ -119,7 +119,7 @@ class TableCell (object): """ def __init__(self, text="", bgcolor=None, header=False, width=None, - align=None, char=None, charoff=None, valign=None, style=None, + align=None, char=None, charoff=None, valign=None, style='', attribs=None, cell_class=None, row_span=None, col_span=None): """TableCell constructor""" self.text = text @@ -139,20 +139,26 @@ class TableCell (object): self.attribs = {} def __str__(self): - """return the HTML code for the table cell as a string""" - attribs_str = "" + """return the HTML code for the table cell as a string + .. note:: Since we are using the bootstrap framework we set + alignment using inlined css as bootstrap will override the + alignment given by align and valign html attributes.""" + """ + attribs_str = '' if self.bgcolor: self.attribs['bgcolor'] = self.bgcolor if self.width: self.attribs['width'] = self.width if self.align: self.attribs['align'] = self.align + self.style += 'text-align: ' + self.align + ';' if self.char: self.attribs['char'] = self.char if self.charoff: self.attribs['charoff'] = self.charoff if self.valign: self.attribs['valign'] = self.valign + self.style += 'text-align: ' + self.valign + ';' if self.style: self.attribs['style'] = self.style if self.cell_class: @@ -289,6 +295,9 @@ class Table(object): col_char=None, col_charoff=None, col_styles=None, caption=None, caption_at_bottom=False): """TableCell constructor""" + # Ensure Rows is an array of rows + if isinstance(rows, TableRow): + rows = [rows] self.border = border self.style = style # style for thin borders by default @@ -315,19 +324,6 @@ class Table(object): self.col_valign = col_valign self.col_styles = col_styles - def max_column_count(self): - """Get the maximum number of cells in any one row""" - if isinstance(self.rows, basestring): - return 1 - else: - max_column_count = 0 - for row in self.rows: - if not isinstance(row, TableRow): - row = TableRow(row) - if row.column_count() > max_column_count: - max_column_count = row.column_count() - return max_column_count - def mozilla_row_fix(self, row): # apply column alignments and styles to each row if specified: # (Mozilla bug workaround) @@ -388,7 +384,6 @@ class Table(object): result += ' </thead>\n' # then all data rows: result += ' <tbody>\n' - max_column_count = self.max_column_count() if isinstance(self.rows, basestring): #user instantiated the table with only a string for content row = TableRow(self.rows) diff --git a/impact_functions/test_tables.py b/impact_functions/test_tables.py index <HASH>..<HASH> 100644 --- a/impact_functions/test_tables.py +++ b/impact_functions/test_tables.py @@ -376,9 +376,10 @@ class TablesTest(unittest.TestCase): actual_result = Table(['12', '3000', '5'], col_align=['right', 'right', 'right']) - expected_strings = ['<td colspan="100%" align="right">12</td>', - '<td colspan="100%" align="right">3000</td>', - '<td colspan="100%" align="right">5</td>'] + expected_strings = [ + '<td colspan="100%" align="right" style="text-align: right;">12</td>', + '<td colspan="100%" align="right" style="text-align: right;">3000</td>', + '<td colspan="100%" align="right" style="text-align: right;">5</td>'] for s in expected_strings: message = ('Did not find expected string "%s" in result: %s' % (s, actual_result)) @@ -418,7 +419,10 @@ class TablesTest(unittest.TestCase): assert 'align="right"' in str(cell) table = Table(row) 
- #print str(table) # Broken + self.html += str(table) + self.writeHtml('table_column_alignment') + + if __name__ == '__main__': suite = unittest.makeSuite(TablesTest, 'test')
Fixed issues with table cell alignment and non-iterable rows. Closes #<I>.
inasafe_inasafe
train
82ae2544845e4fdaee804523025e535ccbc4f1d3
diff --git a/src/cmd/handshake/client-handshake-response.js b/src/cmd/handshake/client-handshake-response.js index <HASH>..<HASH> 100644 --- a/src/cmd/handshake/client-handshake-response.js +++ b/src/cmd/handshake/client-handshake-response.js @@ -16,7 +16,7 @@ const NativePasswordAuth = require("./auth/native_password_auth"); */ module.exports.send = function send(cmd, out, opts, pluginName, info) { out.startPacket(cmd); - + info.defaultPluginName = pluginName; let authToken; switch (pluginName) { case "mysql_native_password": diff --git a/src/io/packet-output-stream.js b/src/io/packet-output-stream.js index <HASH>..<HASH> 100644 --- a/src/io/packet-output-stream.js +++ b/src/io/packet-output-stream.js @@ -476,7 +476,9 @@ class PacketOutputStream { } else { this.stream.flush(true, this.cmd); //if not flushed, ensure not reusing a buffer than is not send - if (!flushed) this.buf = Buffer.allocUnsafe(SMALL_BUFFER_SIZE); + //forcing + // if (!flushed) + this.buf = Buffer.allocUnsafe(SMALL_BUFFER_SIZE); } } else { this.buf = this.allocateBuffer(remainingLen + 4); diff --git a/test/integration/test-connection.js b/test/integration/test-connection.js index <HASH>..<HASH> 100644 --- a/test/integration/test-connection.js +++ b/test/integration/test-connection.js @@ -165,7 +165,7 @@ describe("connection", () => { it("connection row event", function(done) { this.timeout(10000); //can take some time - const conn = base.createConnection({debug:true, debugLen:40}); + const conn = base.createConnection(); conn.connect(() => { conn.query("CREATE TEMPORARY TABLE row_event (val varchar(1024))"); const array1 = []; @@ -190,8 +190,9 @@ describe("connection", () => { fieldEvent = true; }) .on("result", function (row) { - //fields defined - assert.equal(row.val, padStartZero(numberFetched, 3) + str); + const expected = padStartZero(numberFetched, 3) + str; + if (row.val !== expected) conn.end(); + assert.equal(row.val, expected); numberFetched++; }) .on("end", function () {
trying to detect race condition issue on appveyor - part 2
MariaDB_mariadb-connector-nodejs
train
8c6d01f55db21fcc17e728a7acae55bd503f132e
diff --git a/test/genfun.js b/test/genfun.js index <HASH>..<HASH> 100644 --- a/test/genfun.js +++ b/test/genfun.js @@ -140,6 +140,65 @@ describe("Genfun", function() { describe("variable arity dispatch", function() { it("treats 'unfilled' spaces like Object.prototype when comparing " + "methods with dispatch arrays of different lengths"); + it("handles complex interactions between all the dispatch features", function() { + var frobnicate = new Genfun(), + addMethod = Genfun.addMethod; + + addMethod( + frobnicate, + [Number.prototype], + function(num) { + return "one number: "+num; + }); + + addMethod( + frobnicate, + [String.prototype], + function(str) { + return "one string: "+str; + }); + + addMethod( + frobnicate, + [String.prototype, Number.prototype], + function(string, number) { + return "string + number: " + string + ", " + number; + }); + + addMethod( + frobnicate, + [Number.prototype, String.prototype], + function(number, string) { + return "number + string: " + number + ", " + string; + }); + + addMethod( + frobnicate, + [], + function() { + return "Dispatch fell through"; + }); + + addMethod( + frobnicate, + [Number.prototype, , String.prototype], + function(number, anything, string) { + return "number + anything + string: " + + number + ", " + anything + ", " + string; + }); + + function test(expected, args) { + assert.equal(expected, frobnicate.apply(null, args)); + } + test("string + number: foo, 1", [new String("foo"), new Number(1)]); + test("number + string: 1, foo", [1, "foo"]); + test("one number: 1", [1, 2]); + test("one number: 1", [1]); + test("one string: str", ["str"]); + test("Dispatch fell through", [true]); + test("Dispatch fell through", []); + test("number + anything + string: 1, true, foo", [1, true, "foo"]); + }); }); it("treats empty array items (`[x, ,z]`) like Object.prototype", function() { var frob = new Genfun(), @@ -154,65 +213,6 @@ describe("Genfun", function() { assert.equal("3-arg method", frob(x, {}, x)); assert.equal("0-arg method", frob(x, Object.create(null), x)); }); - it("properly dispatches methods", function() { - var frobnicate = new Genfun(), - addMethod = Genfun.addMethod; - - addMethod( - frobnicate, - [Number.prototype], - function(num) { - return "one number: "+num; - }); - - addMethod( - frobnicate, - [String.prototype], - function(str) { - return "one string: "+str; - }); - - addMethod( - frobnicate, - [String.prototype, Number.prototype], - function(string, number) { - return "string + number: " + string + ", " + number; - }); - - addMethod( - frobnicate, - [Number.prototype, String.prototype], - function(number, string) { - return "number + string: " + number + ", " + string; - }); - - addMethod( - frobnicate, - [], - function() { - return "Dispatch fell through"; - }); - - addMethod( - frobnicate, - [Number.prototype, , String.prototype], - function(number, anything, string) { - return "number + anything + string: " - + number + ", " + anything + ", " + string; - }); - - function test(expected, args) { - assert.equal(expected, frobnicate.apply(null, args)); - } - test("string + number: foo, 1", [new String("foo"), new Number(1)]); - test("number + string: 1, foo", [1, "foo"]); - test("one number: 1", [1, 2]); - test("one number: 1", [1]); - test("one string: str", ["str"]); - test("Dispatch fell through", [true]); - test("Dispatch fell through", []); - test("number + anything + string: 1, true, foo", [1, true, "foo"]); - }); }); });
Moved big blobby test to a better place
zkat_genfun
train
fc41e9a35956cc3e785830208dbddd69d20cba3e
diff --git a/Resources/public/js/views/fields/ez-richtext-editview.js b/Resources/public/js/views/fields/ez-richtext-editview.js index <HASH>..<HASH> 100644 --- a/Resources/public/js/views/fields/ez-richtext-editview.js +++ b/Resources/public/js/views/fields/ez-richtext-editview.js @@ -12,6 +12,7 @@ YUI.add('ez-richtext-editview', function (Y) { Y.namespace('eZ'); var FIELDTYPE_IDENTIFIER = 'ezrichtext', + L = Y.Lang, AlloyEditor = Y.eZ.AlloyEditor; /** @@ -64,18 +65,27 @@ YUI.add('ez-richtext-editview', function (Y) { }, /** - * Checks whether the field is empty. The field is considered empty if - * the editor handles a `section` element and this element has some - * child nodes. + * Checks whether the field is empty. The field is considered empty if: + * * there's no section element + * * or the section element has no child + * * or the section element has only child without content * * @method _isEmpty * @protected * @return {Boolean} */ _isEmpty: function () { - var section = Y.Node.create(this.get('editor').get('nativeEditor').getData()); - - return (!section || !section.hasChildNodes()); + var section = Y.Node.create(this.get('editor').get('nativeEditor').getData()), + hasChildNodes = function (element) { + return !!element.get('children').size(); + }, + hasChildWithContent = function (element) { + return element.get('children').some(function (node) { + return L.trim(node.get('text')) !== ''; + }); + }; + + return !section || !hasChildNodes(section) || !hasChildWithContent(section); }, /** @@ -128,6 +138,12 @@ YUI.add('ez-richtext-editview', function (Y) { if ( !doc ) { return ""; } + if ( !doc.documentElement.hasChildNodes() ) { + // making sure to have at least a paragraph element + // otherwise CKEditor adds a br to make sure the editor can put + // the caret inside the element. 
+ doc.documentElement.appendChild(doc.createElement('p')); + } return (new XMLSerializer()).serializeToString(doc.documentElement); }, diff --git a/Tests/js/views/fields/assets/ez-richtext-editview-tests.js b/Tests/js/views/fields/assets/ez-richtext-editview-tests.js index <HASH>..<HASH> 100644 --- a/Tests/js/views/fields/assets/ez-richtext-editview-tests.js +++ b/Tests/js/views/fields/assets/ez-richtext-editview-tests.js @@ -5,7 +5,7 @@ YUI.add('ez-richtext-editview-tests', function (Y) { var renderTest, registerTest, validateTest, getFieldTest, editorTest, - VALID_XHTML, INVALID_XHTML, RESULT_XHTML, + VALID_XHTML, INVALID_XHTML, RESULT_XHTML, EMPTY_XHTML, Assert = Y.Assert, Mock = Y.Mock; INVALID_XHTML = "I'm invalid"; @@ -14,6 +14,9 @@ YUI.add('ez-richtext-editview-tests', function (Y) { VALID_XHTML += '<section xmlns="http://ez.no/namespaces/ezpublish5/xhtml5/edit">'; VALID_XHTML += '<p>I\'m not empty</p></section>'; + EMPTY_XHTML = '<?xml version="1.0" encoding="UTF-8"?>'; + EMPTY_XHTML += '<section xmlns="http://ez.no/namespaces/ezpublish5/xhtml5/edit"/>'; + RESULT_XHTML = '<section xmlns="http://ez.no/namespaces/ezpublish5/xhtml5/edit"><p>I\'m not empty</p></section>'; renderTest = new Y.Test.Case({ @@ -156,9 +159,24 @@ YUI.add('ez-richtext-editview-tests', function (Y) { ); }, + "Should not validate a buggy and required field": function () { + var fieldDefinition = this._getFieldDefinition(true); + + this.view.set('fieldDefinition', fieldDefinition); + this.view.render(); + this.view.set('active', true); + this.view.validate(); + + Assert.isFalse( + this.view.isValid(), + "A required and empty field should not be valid" + ); + }, + "Should not validate an empty and required field": function () { var fieldDefinition = this._getFieldDefinition(true); + this.field.fieldValue.xhtml5edit = EMPTY_XHTML; this.view.set('fieldDefinition', fieldDefinition); this.view.render(); this.view.set('active', true);
EZP-<I>: Improved the handling of empty RichText field
ezsystems_PlatformUIBundle
train
736158dc09e10f1911ca3a1e1b01f11b566ce5db
diff --git a/doc.go b/doc.go index <HASH>..<HASH> 100644 --- a/doc.go +++ b/doc.go @@ -84,16 +84,16 @@ You may use one of several pre-defined schedules in place of a cron expression. Intervals -You may also schedule a job to execute at fixed intervals. This is supported by -formatting the cron spec like this: +You may also schedule a job to execute at fixed intervals, starting at the time it's added +or cron is run. This is supported by formatting the cron spec like this: @every <duration> where "duration" is a string accepted by time.ParseDuration (http://golang.org/pkg/time/#ParseDuration). -For example, "@every 1h30m10s" would indicate a schedule that activates every -1 hour, 30 minutes, 10 seconds. +For example, "@every 1h30m10s" would indicate a schedule that activates immediately, +and then every 1 hour, 30 minutes, 10 seconds. Note: The interval does not take the job runtime into account. For example, if a job takes 3 minutes to run, and it is scheduled to run every 5 minutes,
Clarify meaning of @every. Fixes issue #<I>
robfig_cron
train
85840d3bf42d41ca7e4711218d01afeb6fcdc8b8
diff --git a/lib/surveyor/models/survey_methods.rb b/lib/surveyor/models/survey_methods.rb index <HASH>..<HASH> 100644 --- a/lib/surveyor/models/survey_methods.rb +++ b/lib/surveyor/models/survey_methods.rb @@ -40,6 +40,7 @@ module Surveyor end def title=(value) + return if value == self.title adjusted_value = value while Survey.find_by_access_code(Survey.to_normalized_string(adjusted_value)) i ||= 0 @@ -47,7 +48,7 @@ module Surveyor adjusted_value = "#{value} #{i.to_s}" end self.access_code = Survey.to_normalized_string(adjusted_value) - super(adjusted_value) + super(adjusted_value) # self.access_code = Survey.to_normalized_string(value) # super end @@ -74,4 +75,4 @@ module Surveyor end end end -end \ No newline at end of file +end diff --git a/spec/models/survey_spec.rb b/spec/models/survey_spec.rb index <HASH>..<HASH> 100644 --- a/spec/models/survey_spec.rb +++ b/spec/models/survey_spec.rb @@ -5,12 +5,12 @@ describe Survey, "when saving a new one" do before(:each) do @survey = Factory(:survey, :title => "Foo") end - + it "should be invalid without a title" do @survey.title = nil @survey.should have(1).error_on(:title) end - + it "should adjust the title to save unique titles" do original = Survey.new(:title => "Foo") original.save.should be_true @@ -21,7 +21,14 @@ describe Survey, "when saving a new one" do bandwagoneer.save.should be_true bandwagoneer.title.should == "Foo 2" end - + + it "should not adjust the title when updating itself" do + original = Survey.new(:title => "Foo") + original.save.should be_true + original.title = "Foo" + original.title.should == "Foo" + end + it "should have an api_id" do @survey.api_id.length.should == 36 end @@ -44,7 +51,7 @@ describe Survey, "that has sections" do @survey.sections.should have(3).sections @survey.sections.should == [@s3, @s1, @s2] end - + it "should return survey_sections_with_questions in display order" do @survey.sections_with_questions.map(&:questions).flatten.should have(4).questions @survey.sections_with_questions.map(&:questions).flatten.should == [@q4,@q1,@q3,@q2] @@ -72,19 +79,19 @@ describe Survey do @survey.active?.should be_true @survey.inactive_at.should be_nil end - + it "should be able to deactivate as of a certain date/time" do @survey.active_at = 2.days.ago @survey.inactive_at = 3.days.ago @survey.active?.should be_false @survey.active_at.should be_nil end - + it "should activate and deactivate" do @survey.activate! @survey.active?.should be_true @survey.deactivate! @survey.active?.should be_false end - -end \ No newline at end of file + +end
stop survey titles from appending '1' to themselves when calling update attributes
NUBIC_surveyor
train
817258b2720df633e36ea65ba8850a33d5894f7f
diff --git a/lib/piccolo.js b/lib/piccolo.js index <HASH>..<HASH> 100644 --- a/lib/piccolo.js +++ b/lib/piccolo.js @@ -94,6 +94,21 @@ Piccolo.prototype.build = function () { var self = this; async.parallel({ + routerSource: function (callback) { + fs.readFile(self.get('router'), 'utf8', function (error, content) { + if (error) return callback(error, null); + + // Remove byte order marker. This catches EF BB BF (the UTF-8 BOM) + // because the buffer-to-string conversion in `fs.readFile()` + // translates it to FEFF, the UTF-16 BOM. + if (content.charCodeAt(0) === 0xFEFF) { + content = content.slice(1); + } + + callback(null, content); + }); + }, + // build directory maps staticFile: directory.bind(null, this.get('static')), presenter: directory.bind(null, this.get('presenter')), @@ -111,33 +126,21 @@ Piccolo.prototype.build = function () { // store build object and emit ready self.build = build; - // add router source code to moduleSource object - fs.readFile(self.get('router'), 'utf8', function (error, content) { - if (error) return self.emit('error', error); - - // Remove byte order marker. This catches EF BB BF (the UTF-8 BOM) - // because the buffer-to-string conversion in `fs.readFile()` - // translates it to FEFF, the UTF-16 BOM. - if (content.charCodeAt(0) === 0xFEFF) { - content = content.slice(1); - } - - // store router source code - build.moduleSource.router = content; + // store router source code + build.moduleSource.router = build.routerSource; - // create Module constructor - self.build.Module = compileModule(this); + // create Module constructor + self.build.Module = compileModule(this); - // create Main module - var main = self.build.main = this.build.Module('module.js'); + // create Main module + var main = self.build.main = this.build.Module('module.js'); - // compile router module - var router = main.require('router'); - self.build.router = new router(); + // compile router module + var router = main.require('router'); + self.build.router = new router(); - // emit ready event - self.emit('ready'); - }); + // emit ready event + self.emit('ready'); }); };
[refactor] read router source code in parallel
AndreasMadsen_piccolo
train
55bb6cf8f094e740b432dd31f1075d4175c7dfde
diff --git a/lib/pod/command/try.rb b/lib/pod/command/try.rb index <HASH>..<HASH> 100644 --- a/lib/pod/command/try.rb +++ b/lib/pod/command/try.rb @@ -7,6 +7,8 @@ module Pod # The pod try command. # class Try < Command + include RepoUpdate + self.summary = 'Try a Pod!' self.description = <<-DESC @@ -17,14 +19,7 @@ module Pod self.arguments = [CLAide::Argument.new(%w(NAME URL), true)] - def self.options - [ - ['--no-repo-update', 'Skip running `pod repo update` before install'], - ].concat(super) - end - def initialize(argv) - config.skip_repo_update = !argv.flag?('repo-update', !config.skip_repo_update) @name = argv.shift_argument super end @@ -201,7 +196,7 @@ module Pod # @return [void] Updates the specs repo unless disabled by the config. # def update_specs_repos - return if config.skip_repo_update? + return unless repo_update?(default: true) UI.section 'Updating spec repositories' do SourcesManager.update end diff --git a/spec/command/try_spec.rb b/spec/command/try_spec.rb index <HASH>..<HASH> 100644 --- a/spec/command/try_spec.rb +++ b/spec/command/try_spec.rb @@ -33,14 +33,12 @@ module Pod end it 'allows the user to try the Pod with the given name' do - Config.instance.with_changes(:skip_repo_update => false) do - command = Pod::Command.parse(%w(try ARAnalytics)) - Installer::PodSourceInstaller.any_instance.expects(:install!) - command.expects(:update_specs_repos) - command.expects(:pick_demo_project).returns(XCODE_PROJECT) - command.expects(:open_project).with(XCODE_PROJECT) - command.run - end + command = Pod::Command.parse(%w(try ARAnalytics)) + Installer::PodSourceInstaller.any_instance.expects(:install!) + command.expects(:update_specs_repos) + command.expects(:pick_demo_project).returns(XCODE_PROJECT) + command.expects(:open_project).with(XCODE_PROJECT) + command.run end it 'allows the user to try the Pod with the given Git URL' do @@ -51,7 +49,6 @@ module Pod stub_spec = stub(:name => 'ARAnalytics') Pod::Specification.stubs(:from_file).with(spec_file).returns(stub_spec) - Config.instance.skip_repo_update = false command = Pod::Command.parse(['try', 'https://github.com/orta/ARAnalytics.git']) Installer::PodSourceInstaller.any_instance.expects(:install!) command.expects(:update_specs_repos).never @@ -59,6 +56,26 @@ module Pod command.expects(:open_project).with(XCODE_PROJECT) command.run end + + describe 'updates of the spec repos' do + it 'updates the spec repos by default' do + command = Pod::Command.parse(%w(try ARAnalytics)) + Installer::PodSourceInstaller.any_instance.expects(:install!) + SourcesManager.expects(:update) + command.expects(:pick_demo_project).returns(XCODE_PROJECT) + command.expects(:open_project).with(XCODE_PROJECT) + command.run + end + + it "doesn't update the spec repos if that option was given" do + command = Pod::Command.parse(%w(try ARAnalytics --no-repo-update)) + Installer::PodSourceInstaller.any_instance.expects(:install!) + SourcesManager.expects(:update).never + command.expects(:pick_demo_project).returns(XCODE_PROJECT) + command.expects(:open_project).with(XCODE_PROJECT) + command.run + end + end end describe 'Helpers' do
Adopt changes necessary for CocoaPods/CocoaPods#<I>
CocoaPods_cocoapods-try
train
b80f8ba5ab74d46bdebc68589b97d281197e1144
diff --git a/src/server/create-bundle-renderer.js b/src/server/create-bundle-renderer.js index <HASH>..<HASH> 100644 --- a/src/server/create-bundle-renderer.js +++ b/src/server/create-bundle-renderer.js @@ -50,7 +50,9 @@ export function createBundleRendererCreator (createRenderer: () => Renderer) { runInVm(entry, files, context).then(app => { renderer.renderToString(app, cb) }).catch(err => { - rewriteErrorTrace(err, maps) + if (err instanceof Error) { + rewriteErrorTrace(err, maps) + } cb(err) }) }, @@ -65,7 +67,9 @@ export function createBundleRendererCreator (createRenderer: () => Renderer) { renderStream.pipe(res) }).catch(err => { process.nextTick(() => { - rewriteErrorTrace(err, maps) + if (err instanceof Error) { + rewriteErrorTrace(err, maps) + } res.emit('error', err) }) }) diff --git a/src/server/source-map-support.js b/src/server/source-map-support.js index <HASH>..<HASH> 100644 --- a/src/server/source-map-support.js +++ b/src/server/source-map-support.js @@ -32,7 +32,7 @@ function rewriteTraceLine (trace: string, mapConsumers: { }) if (originalPosition.source != null) { const { source, line, column } = originalPosition - const mappedPosition = `(${source.replace(/^webpack:\/\/\//, '')}:${line}:${column})` + const mappedPosition = `(${source.replace(/^webpack:\/\/\//, '')}:${String(line)}:${String(column)})` return trace.replace(filenameRE, mappedPosition) } else { return trace
only map ssr stack trace if error is indeed an Error
IOriens_wxml-transpiler
train
fbdaf175c17cc6ea884a1506a79a211718fadb3d
diff --git a/src/test/java/com/authlete/common/dto/AuthzDetailsElementTest.java b/src/test/java/com/authlete/common/dto/AuthzDetailsElementTest.java index <HASH>..<HASH> 100644 --- a/src/test/java/com/authlete/common/dto/AuthzDetailsElementTest.java +++ b/src/test/java/com/authlete/common/dto/AuthzDetailsElementTest.java @@ -18,6 +18,7 @@ package com.authlete.common.dto; import org.junit.Test; import com.google.gson.Gson; +import com.google.gson.JsonParseException; import com.neovisionaries.security.Digest; import static org.junit.Assert.*; import java.io.IOException; @@ -311,4 +312,11 @@ public class AuthzDetailsElementTest prop1.add("c"); assertEquals(prop1, map.get("prop1")); } + + + @Test(expected = JsonParseException.class) + public void test06() + { + deserialize("{\"locations\":false}"); + } } diff --git a/src/test/java/com/authlete/common/dto/AuthzDetailsTest.java b/src/test/java/com/authlete/common/dto/AuthzDetailsTest.java index <HASH>..<HASH> 100644 --- a/src/test/java/com/authlete/common/dto/AuthzDetailsTest.java +++ b/src/test/java/com/authlete/common/dto/AuthzDetailsTest.java @@ -19,6 +19,7 @@ package com.authlete.common.dto; import org.junit.Test; import com.google.gson.Gson; import com.google.gson.GsonBuilder; +import com.google.gson.JsonParseException; import static org.junit.Assert.*; import java.util.Map; @@ -164,4 +165,11 @@ public class AuthzDetailsTest assertNull(elements[2]); } + + + @Test(expected = JsonParseException.class) + public void test05() + { + deserialize("{}"); + } }
Added some tests for JsonParseException.
authlete_authlete-java-common
train
003bbe6a92c2538c51dbea56f69fb525eec22db6
diff --git a/lib/appium_lib/common/patch.rb b/lib/appium_lib/common/patch.rb index <HASH>..<HASH> 100644 --- a/lib/appium_lib/common/patch.rb +++ b/lib/appium_lib/common/patch.rb @@ -1,26 +1,5 @@ # encoding: utf-8 -# Add status to WebDriver -# https://code.google.com/p/selenium/issues/detail?id=5669 -class Selenium::WebDriver::Driver - # @private - def status - bridge.status - end -end - -class Selenium::WebDriver::Remote::Bridge - # @private - def status - raw_execute :status - end -end - -class Selenium::WebDriver::Remote::Bridge - command :status, :get, 'status' -end -# end Add status to WebDriver - module Appium::Common # Implement useful features for element. class Selenium::WebDriver::Element @@ -174,4 +153,4 @@ class Selenium::WebDriver::Remote::Response msg end -end \ No newline at end of file +end
Status is in the official bindings <URL>
appium_ruby_lib
train
b078bbd56ff5ead3dc3103c70b6d2728dd1c7671
diff --git a/src/index.js b/src/index.js index <HASH>..<HASH> 100644 --- a/src/index.js +++ b/src/index.js @@ -74,7 +74,10 @@ function makeVisitor ({types: t}) { if (!this.__dv__) { return } - this.__dv__.exit(path) + const result = this.__dv__.exit(path) + if (this.opts.onCover) { + this.opts.onCover(getRealpath(this.file.opts.filename), result.fileCoverage) + } } } } diff --git a/test/babel-plugin-istanbul.js b/test/babel-plugin-istanbul.js index <HASH>..<HASH> 100644 --- a/test/babel-plugin-istanbul.js +++ b/test/babel-plugin-istanbul.js @@ -2,6 +2,7 @@ const babel = require('babel-core') import makeVisitor from '../src' +import path from 'path' require('chai').should() @@ -28,6 +29,22 @@ describe('babel-plugin-istanbul', function () { }) result.code.should.not.match(/statementMap/) }) + + it('should call onCover callback', function () { + var args + babel.transformFileSync('./fixtures/plugin-should-cover.js', { + plugins: [ + [makeVisitor({types: babel.types}), { + onCover: function () { + args = [].slice.call(arguments) + }, + include: ['fixtures/plugin-should-cover.js'] + }] + ] + }) + args[0].should.equal(path.resolve('./fixtures/plugin-should-cover.js')) + args[1].statementMap.should.exist + }) }) context('source maps', function () {
feat: add includeUntested option to expose coverage of all instrumented files (#<I>)
istanbuljs_babel-plugin-istanbul
train
3968d9cb7c956746992e34725fdb981306b7ba7d
diff --git a/plugin/s3/plugin.go b/plugin/s3/plugin.go index <HASH>..<HASH> 100644 --- a/plugin/s3/plugin.go +++ b/plugin/s3/plugin.go @@ -301,6 +301,9 @@ func (p S3Plugin) Validate(endpoint plugin.ShieldEndpoint) error { if err != nil { fmt.Printf("@R{\u2717 bucket %s}\n", err) fail = true + } else if s == "" { + fmt.Printf("@R{\u2717 bucket no s3 bucket specified}\n") + fail = true } else if !validBucketName(s) { fmt.Printf("@R{\u2717 bucket '%s' is an invalid bucket name (must be all lowercase)}\n", s) fail = true
Explicitly disallow empty bucket names in s3 plugin
starkandwayne_shield
train
b3fa8bf7cbca6192bb1ea43eb47a7acbecb60ff0
diff --git a/src/Symfony/Bundle/SwiftmailerBundle/DependencyInjection/Configuration.php b/src/Symfony/Bundle/SwiftmailerBundle/DependencyInjection/Configuration.php index <HASH>..<HASH> 100644 --- a/src/Symfony/Bundle/SwiftmailerBundle/DependencyInjection/Configuration.php +++ b/src/Symfony/Bundle/SwiftmailerBundle/DependencyInjection/Configuration.php @@ -49,13 +49,7 @@ class Configuration implements ConfigurationInterface $rootNode ->children() - ->scalarNode('transport') - ->defaultValue('smtp') - ->validate() - ->ifNotInArray(array('smtp', 'mail', 'sendmail', 'gmail', null)) - ->thenInvalid('The %s transport is not supported') - ->end() - ->end() + ->scalarNode('transport')->defaultValue('smtp')->end() ->scalarNode('username')->defaultNull()->end() ->scalarNode('password')->defaultNull()->end() ->scalarNode('host')->defaultValue('localhost')->end() diff --git a/src/Symfony/Bundle/SwiftmailerBundle/DependencyInjection/SwiftmailerExtension.php b/src/Symfony/Bundle/SwiftmailerBundle/DependencyInjection/SwiftmailerExtension.php index <HASH>..<HASH> 100644 --- a/src/Symfony/Bundle/SwiftmailerBundle/DependencyInjection/SwiftmailerExtension.php +++ b/src/Symfony/Bundle/SwiftmailerBundle/DependencyInjection/SwiftmailerExtension.php @@ -68,16 +68,19 @@ class SwiftmailerExtension extends Extension $loader->load('smtp.xml'); } - $container->setParameter('swiftmailer.transport.name', $transport); + if (in_array($transport, array('smtp', 'mail', 'sendmail', 'null'))) { + // built-in transport + $transport = 'swiftmailer.transport.'.$transport; + } - $container->setAlias('swiftmailer.transport', 'swiftmailer.transport.'.$transport); + $container->setAlias('swiftmailer.transport', $transport); if (false === $config['port']) { $config['port'] = 'ssl' === $config['encryption'] ? 465 : 25; } foreach (array('encryption', 'port', 'host', 'username', 'password', 'auth_mode') as $key) { - $container->setParameter('swiftmailer.transport.'.$transport.'.'.$key, $config[$key]); + $container->setParameter('swiftmailer.transport.smtp.'.$key, $config[$key]); } // spool? 
@@ -85,7 +88,7 @@ class SwiftmailerExtension extends Extension $type = $config['spool']['type']; $loader->load('spool.xml'); - $container->setAlias('swiftmailer.transport.real', 'swiftmailer.transport.'.$transport); + $container->setAlias('swiftmailer.transport.real', $transport); $container->setAlias('swiftmailer.transport', 'swiftmailer.transport.spool'); $container->setAlias('swiftmailer.spool', 'swiftmailer.spool.'.$type); diff --git a/src/Symfony/Bundle/SwiftmailerBundle/Tests/DependencyInjection/SwiftmailerExtensionTest.php b/src/Symfony/Bundle/SwiftmailerBundle/Tests/DependencyInjection/SwiftmailerExtensionTest.php index <HASH>..<HASH> 100644 --- a/src/Symfony/Bundle/SwiftmailerBundle/Tests/DependencyInjection/SwiftmailerExtensionTest.php +++ b/src/Symfony/Bundle/SwiftmailerBundle/Tests/DependencyInjection/SwiftmailerExtensionTest.php @@ -27,11 +27,9 @@ class SwiftmailerExtensionTest extends TestCase $this->assertEquals('Swift_Mailer', $container->getParameter('swiftmailer.class'), '->load() loads the swiftmailer.xml file if not already loaded'); $loader->load(array(array('transport' => 'sendmail')), $container); - $this->assertEquals('sendmail', $container->getParameter('swiftmailer.transport.name'), '->load() overrides existing configuration options'); $this->assertEquals('swiftmailer.transport.sendmail', (string) $container->getAlias('swiftmailer.transport')); $loader->load(array(array()), $container); - $this->assertEquals('smtp', $container->getParameter('swiftmailer.transport.name'), '->load() provides default values for configuration options'); $this->assertEquals('swiftmailer.transport.smtp', (string) $container->getAlias('swiftmailer.transport')); } @@ -42,7 +40,6 @@ class SwiftmailerExtensionTest extends TestCase $loader = new SwiftmailerExtension(); $loader->load(array(array('transport' => null)), $container); - $this->assertEquals('null', $container->getParameter('swiftmailer.transport.name'), '->load() uses the "null" string transport when transport is null'); $this->assertEquals('swiftmailer.transport.null', (string) $container->getAlias('swiftmailer.transport')); }
[Swiftmailer] allowed any service to be used as a transport (closes #<I>)
symfony_symfony
train
d07e9012147b7a6447c45ac6eb0eb2d4d01bfea8
diff --git a/source/Application/Controller/Admin/LoginController.php b/source/Application/Controller/Admin/LoginController.php index <HASH>..<HASH> 100644 --- a/source/Application/Controller/Admin/LoginController.php +++ b/source/Application/Controller/Admin/LoginController.php @@ -101,6 +101,11 @@ class LoginController extends \OxidEsales\Eshop\Application\Controller\Admin\Adm $sProfile = \OxidEsales\Eshop\Core\Registry::getConfig()->getRequestParameter('profile'); try { // trying to login + $session = \OxidEsales\Eshop\Core\Registry::getSession(); + $adminProfiles = $session->getVariable("aAdminProfiles"); + $session->initNewSession(); + $session->setVariable("aAdminProfiles", $adminProfiles); + /** @var \OxidEsales\Eshop\Application\Model\User $oUser */ $oUser = oxNew(\OxidEsales\Eshop\Application\Model\User::class); $oUser->login($sUser, $sPass); diff --git a/tests/Acceptance/Admin/FunctionalityInAdminTest.php b/tests/Acceptance/Admin/FunctionalityInAdminTest.php index <HASH>..<HASH> 100644 --- a/tests/Acceptance/Admin/FunctionalityInAdminTest.php +++ b/tests/Acceptance/Admin/FunctionalityInAdminTest.php @@ -839,6 +839,8 @@ class FunctionalityInAdminTest extends AdminTestCase */ public function testLoginToAdminInOtherLang() { + $shopUrl = $this->getTestConfig()->getShopUrl(); + $this->loginAdmin(); $this->waitForText("Welcome to the OXID eShop Admin"); $this->frame("navigation"); @@ -866,6 +868,26 @@ class FunctionalityInAdminTest extends AdminTestCase $this->checkForErrors(); $this->assertElementPresent("link=Stammdaten"); $this->assertElementPresent("link=Shopeinstellungen"); + + $this->getMinkSession()->restart(); + $this->openNewWindow($shopUrl . "admin"); + $this->clickAndWait("//input[@type='submit']"); + $url = $this->getLocation(); + + $this->getMinkSession()->restart(); + $this->openNewWindow($shopUrl . "admin"); + $this->open($url); + $this->type("usr", "admin@myoxideshop.com"); + $this->type("pwd", "admin0303"); + $this->select("lng", "English"); + $this->select("prf", "Standard"); + $this->clickAndWait("//input[@type='submit']"); + $this->assertElementPresent("//frame[@id='navigation']"); + + $this->getMinkSession()->restart(); + $this->openNewWindow($shopUrl . "admin"); + $this->open($url); + $this->assertElementNotPresent("//frame[@id='navigation']"); } /**
OXDEV-<I> Clean up before login
OXID-eSales_oxideshop_ce
train
9a8f26d30d4c9dc96878f4be83bc6b20a08a93b0
diff --git a/datasource-decorator-spring-boot-autoconfigure/src/main/java/com/github/gavlyukovskiy/cloud/sleuth/TracingJdbcEventListener.java b/datasource-decorator-spring-boot-autoconfigure/src/main/java/com/github/gavlyukovskiy/cloud/sleuth/TracingJdbcEventListener.java index <HASH>..<HASH> 100644 --- a/datasource-decorator-spring-boot-autoconfigure/src/main/java/com/github/gavlyukovskiy/cloud/sleuth/TracingJdbcEventListener.java +++ b/datasource-decorator-spring-boot-autoconfigure/src/main/java/com/github/gavlyukovskiy/cloud/sleuth/TracingJdbcEventListener.java @@ -23,6 +23,7 @@ import com.p6spy.engine.common.PreparedStatementInformation; import com.p6spy.engine.common.ResultSetInformation; import com.p6spy.engine.common.StatementInformation; import com.p6spy.engine.event.SimpleJdbcEventListener; +import org.springframework.core.Ordered; import org.springframework.util.StringUtils; import java.sql.SQLException; @@ -33,7 +34,7 @@ import java.sql.SQLException; * @author Arthur Gavlyukovskiy * @since 1.2 */ -public class TracingJdbcEventListener extends SimpleJdbcEventListener { +public class TracingJdbcEventListener extends SimpleJdbcEventListener implements Ordered { private final DataSourceNameResolver dataSourceNameResolver; @@ -114,4 +115,9 @@ public class TracingJdbcEventListener extends SimpleJdbcEventListener { ? statementInformation.getSqlWithValues() : statementInformation.getSql(); } + + @Override + public int getOrder() { + return Ordered.HIGHEST_PRECEDENCE + 10; + } } diff --git a/datasource-decorator-spring-boot-autoconfigure/src/main/java/com/github/gavlyukovskiy/cloud/sleuth/TracingQueryExecutionListener.java b/datasource-decorator-spring-boot-autoconfigure/src/main/java/com/github/gavlyukovskiy/cloud/sleuth/TracingQueryExecutionListener.java index <HASH>..<HASH> 100644 --- a/datasource-decorator-spring-boot-autoconfigure/src/main/java/com/github/gavlyukovskiy/cloud/sleuth/TracingQueryExecutionListener.java +++ b/datasource-decorator-spring-boot-autoconfigure/src/main/java/com/github/gavlyukovskiy/cloud/sleuth/TracingQueryExecutionListener.java @@ -22,6 +22,7 @@ import net.ttddyy.dsproxy.QueryInfo; import net.ttddyy.dsproxy.listener.MethodExecutionContext; import net.ttddyy.dsproxy.listener.MethodExecutionListener; import net.ttddyy.dsproxy.listener.QueryExecutionListener; +import org.springframework.core.Ordered; import javax.sql.DataSource; @@ -38,7 +39,7 @@ import java.util.stream.Collectors; * @author Arthur Gavlyukovskiy * @since 1.2 */ -public class TracingQueryExecutionListener implements QueryExecutionListener, MethodExecutionListener { +public class TracingQueryExecutionListener implements QueryExecutionListener, MethodExecutionListener, Ordered { private final TracingListenerStrategy<String, Statement, ResultSet> strategy; @@ -117,4 +118,9 @@ public class TracingQueryExecutionListener implements QueryExecutionListener, Me } } } + + @Override + public int getOrder() { + return Ordered.HIGHEST_PRECEDENCE + 10; + } }
Added order to brave listeners. Fixes #<I>
gavlyukovskiy_spring-boot-data-source-decorator
train
87bf3549f260da72e7eb06c7091dbf639dbcd329
diff --git a/cookiecutter/generate.py b/cookiecutter/generate.py index <HASH>..<HASH> 100755 --- a/cookiecutter/generate.py +++ b/cookiecutter/generate.py @@ -37,7 +37,8 @@ def generate_context(context_file='cookiecutter.json', default_context=None): Generates the context for a Cookiecutter project template. Loads the JSON file as a Python object, with key being the JSON filename. - :param context_file: JSON file containing project config values. + :param context_file: JSON file containing key/value pairs for populating + the cookiecutter's variables. :param config_dict: Dict containing any config to take into account. """ @@ -62,11 +63,26 @@ def generate_context(context_file='cookiecutter.json', default_context=None): def generate_file(project_dir, infile, context, env): """ - 1. Render the contents of infile. - 2. Render the filename of infile as the name of outfile. - 3. Write the rendered infile to outfile. - :param infile: Input file to generate the file from. + 1. Render the filename of infile as the name of outfile. + 2. Deal with infile appropriately: + + a. If infile is a binary file, copy it over without rendering. + b. If infile is a text file, render its contents and write the + rendered infile to outfile. + + .. precondition:: + + When calling `generate_file()`, the root template dir must be the + current working directory. Using `utils.work_in()` is the recommended + way to perform this directory change. + + :param project_dir: Absolute path to the resulting generated project. + :param infile: Input file to generate the file from. Relative to the root + template dir. + :param context: Dict for populating the cookiecutter's variables. + :param env: Jinja2 template execution environment. """ + logging.debug("Generating file {0}".format(infile)) # Render the path to the output file (not including the root project dir) @@ -136,8 +152,9 @@ def ensure_dir_is_templated(dirname): def generate_files(repo_dir, context=None, output_dir="."): """ Renders the templates and saves them to files. - :param input_dir: Project template input directory. - :paramtype input_dir: directory + + :param repo_dir: Project template input directory. + :param context: Dict for populating the template's variables. :param output_dir: Where to output the generated project dir into. """
Add docstrings to document generate_file's precondition and params. Fix other docstrings in generate too.
audreyr_cookiecutter
train
283f6314c3fee863db91c3aaf80bb1a25e0bd662
diff --git a/tests/integration/cloud/helpers/cloud_test_base.py b/tests/integration/cloud/helpers/cloud_test_base.py index <HASH>..<HASH> 100644 --- a/tests/integration/cloud/helpers/cloud_test_base.py +++ b/tests/integration/cloud/helpers/cloud_test_base.py @@ -101,20 +101,20 @@ class CloudTest(ShellCase): shutdown_delay = 30 log.debug('Deleting instance "{}"'.format(self.instance_name)) delete_str = self.run_cloud('-d {0} --assume-yes --out=yaml'.format(self.instance_name), timeout=TIMEOUT) - delete = safe_load('\n'.join(delete_str)) - # example response: ['gce-config:', '----------', ' gce:', '----------', 'cloud-test-dq4e6c:', 'True', ''] - self.assertIn(self.profile_str, delete) - self.assertIn(self.PROVIDER, delete[self.profile_str]) - self.assertIn(self.instance_name, delete[self.profile_str][self.PROVIDER]) - if delete_str: + delete = safe_load('\n'.join(delete_str)) + # example response: ['gce-config:', '----------', ' gce:', '----------', 'cloud-test-dq4e6c:', 'True', ''] + self.assertIn(self.profile_str, delete) + self.assertIn(self.PROVIDER, delete[self.profile_str]) + self.assertIn(self.instance_name, delete[self.profile_str][self.PROVIDER]) + delete_status = delete[self.profile_str][self.PROVIDER][self.instance_name] if isinstance(delete_status, str): self.assertEqual(delete_status, 'True') elif isinstance(delete_status, dict): if delete_status.get('currentState'): self.assertEqual(delete_status.get('currentState').get('name'), 'shutting-down') - self.assertTrue(delete_status.get('ACTION', '').endswith('.delete')) + self.assertIn('.delete', delete_status.get('ACTION', '')) else: # It's not clear from the delete string that deletion was successful, ask salt-cloud after a delay sleep(shutdown_delay)
run cloud --query if delete string is not available
saltstack_salt
train
7a3a7a3171904ba30385d1e9c4a66d9f92f18429
diff --git a/py/h2o.py b/py/h2o.py index <HASH>..<HASH> 100644 --- a/py/h2o.py +++ b/py/h2o.py @@ -348,7 +348,7 @@ def decide_if_localhost(): # node_count is per host if hosts is specified. def build_cloud(node_count=2, base_port=54321, hosts=None, - timeoutSecs=30, retryDelaySecs=0.5, cleanup=True, rand_shuffle=True, **kwargs): + timeoutSecs=30, retryDelaySecs=1, cleanup=True, rand_shuffle=True, **kwargs): # moved to here from unit_main. so will run with nosetests too! clean_sandbox() # keep this param in kwargs, because we pass to the H2O node build, so state
change default retry of cloud status to 1 sec in build_cloud
h2oai_h2o-2
train
cb4fab83a1fb6a95edef0e5c2eec5ad842f2cce4
diff --git a/framework/validators/FileValidator.php b/framework/validators/FileValidator.php index <HASH>..<HASH> 100644 --- a/framework/validators/FileValidator.php +++ b/framework/validators/FileValidator.php @@ -229,7 +229,9 @@ class FileValidator extends Validator */ public function isEmpty($value, $trim = false) { - return !$value instanceof UploadedFile || $value->error == UPLOAD_ERR_NO_FILE; + $value = is_array($value) && !empty($value) ? $value[0] : $value; + + return !$value instanceof UploadedFile || $value->error == UPLOAD_ERR_NO_FILE; } /** diff --git a/tests/unit/data/validators/models/FakedValidationModel.php b/tests/unit/data/validators/models/FakedValidationModel.php index <HASH>..<HASH> 100644 --- a/tests/unit/data/validators/models/FakedValidationModel.php +++ b/tests/unit/data/validators/models/FakedValidationModel.php @@ -30,6 +30,8 @@ class FakedValidationModel extends Model return [ [['val_attr_a', 'val_attr_b'], 'required', 'on' => 'reqTest'], ['val_attr_c', 'integer'], + ['attr_images', 'file', 'maxFiles' => 3, 'types' => ['png'], 'on' => 'validateMultipleFiles'], + ['attr_image', 'file', 'types' => ['png'], 'on' => 'validateFile'] ]; } diff --git a/tests/unit/framework/validators/FileValidatorTest.php b/tests/unit/framework/validators/FileValidatorTest.php index <HASH>..<HASH> 100644 --- a/tests/unit/framework/validators/FileValidatorTest.php +++ b/tests/unit/framework/validators/FileValidatorTest.php @@ -107,6 +107,68 @@ class FileValidatorTest extends TestCase $val->validateAttribute($m, 'attr_files'); $this->assertTrue($m->hasErrors()); $this->assertTrue(stripos(current($m->getErrors('attr_files')), 'you can upload at most') !== false); + + $m = FakedValidationModel::createWithAttributes( + [ + 'attr_images' => $this->createTestFiles( + [ + [ + 'name' => 'image.png', + 'size' => 1024, + 'type' => 'image/png' + ], + [ + 'name' => 'image.png', + 'size' => 1024, + 'type' => 'image/png' + ], + [ + 'name' => 'text.txt', + 'size' => 1024 + ], + ] + ) + ] + ); + $m->setScenario('validateMultipleFiles'); + $this->assertFalse($m->validate()); + $this->assertTrue(stripos(current($m->getErrors('attr_images')), 'Only files with these extensions are allowed') !== false); + + $m = FakedValidationModel::createWithAttributes( + [ + 'attr_images' => $this->createTestFiles( + [ + [ + 'name' => 'image.png', + 'size' => 1024, + 'type' => 'image/png' + ], + [ + 'name' => 'image.png', + 'size' => 1024, + 'type' => 'image/png' + ], + ] + ) + ] + ); + $m->setScenario('validateMultipleFiles'); + $this->assertTrue($m->validate()); + + $m = FakedValidationModel::createWithAttributes( + [ + 'attr_image' => $this->createTestFiles( + [ + [ + 'name' => 'text.txt', + 'size' => 1024, + ], + ] + ) + ] + ); + $m->setScenario('validateFile'); + $this->assertFalse($m->validate()); } /**
fix isEmpty method for file validator.
yiisoft_yii-core
train
4239787dfca9c3a88d68919d49dba40cfae40140
diff --git a/src/org/jgroups/Event.java b/src/org/jgroups/Event.java index <HASH>..<HASH> 100644 --- a/src/org/jgroups/Event.java +++ b/src/org/jgroups/Event.java @@ -1,4 +1,4 @@ -// $Id: Event.java,v 1.15 2006/03/17 09:28:08 belaban Exp $ +// $Id: Event.java,v 1.16 2006/04/28 15:15:20 belaban Exp $ package org.jgroups; @@ -62,8 +62,6 @@ public class Event { public static final int UNSUSPECT = 51; // arg = Address (of unsuspected member) public static final int SET_PID = 52; // arg = Integer (process id) public static final int MERGE_DIGEST = 53; // arg = Digest - public static final int BLOCK_SEND = 54; // arg = null - public static final int UNBLOCK_SEND = 55; // arg = null public static final int CONFIG = 56; // arg = HashMap (config properties) public static final int GET_DIGEST_STABLE = 57; public static final int GET_DIGEST_STABLE_OK = 58; // response to GET_DIGEST_STABLE @@ -168,8 +166,6 @@ public class Event { case UNSUSPECT: return "UNSUSPECT"; case SET_PID: return "SET_PID"; case MERGE_DIGEST: return "MERGE_DIGEST"; - case BLOCK_SEND: return "BLOCK_SEND"; - case UNBLOCK_SEND: return "UNBLOCK_SEND"; case CONFIG: return "CONFIG"; case GET_DIGEST_STABLE: return "GET_DIGEST_STABLE"; case GET_DIGEST_STABLE_OK: return "GET_DIGEST_STABLE_OK";
removed BLOCK_SEND and UNBLOCK_SEND
belaban_JGroups
train
19bbf829477b199e0b7c9f75f9f39b578c4e1ebf
diff --git a/src/createToken.js b/src/createToken.js index <HASH>..<HASH> 100644 --- a/src/createToken.js +++ b/src/createToken.js @@ -19,13 +19,13 @@ module.exports = function (sharedKey = '') { .map(key => meteorSessions[key]) .reduce((meteorAccount, session) => meteorAccount || session.type === 'meteor-account' && session, undefined); - const { username, userId, token } = meteorAccount || {}; + const { username, userId, token, session } = meteorAccount || {}; - if (!username || !userId || !token) { + if (!username || !userId || !token || !session) { throw new Error('You need to login as a meteor developer in order for me to extract a token data'); } - const tokenData = JSON.stringify([ username, userId, token ]); + const tokenData = JSON.stringify([ username, userId, token, session ]); const cipher = crypto.createCipher('aes256', sharedKey); let cipherText = cipher.update(tokenData, 'utf8', 'base64'); diff --git a/src/loginViaToken.js b/src/loginViaToken.js index <HASH>..<HASH> 100644 --- a/src/loginViaToken.js +++ b/src/loginViaToken.js @@ -27,10 +27,10 @@ module.exports = function (cryptToken, sharedKey = '') { throw new Error('Invalid token data'); } - const [ username, userId, token ] = tokenData; + const [ username, userId, token, session ] = tokenData; const type = 'meteor-account'; - if (!username || !userId || !token) { + if (!username || !userId || !token || !session) { throw new Error('Invalid token data format'); } @@ -40,6 +40,7 @@ module.exports = function (cryptToken, sharedKey = '') { username, userId, token, + session, type } }
feat(login): store session
raix_meteor-ci-package-release
train
67b2068dc081b65a21caff4bc360adf16fdfe070
diff --git a/blueocean-plugin/src/test/java/io/jenkins/blueocean/service/embedded/BaseTest.java b/blueocean-plugin/src/test/java/io/jenkins/blueocean/service/embedded/BaseTest.java index <HASH>..<HASH> 100644 --- a/blueocean-plugin/src/test/java/io/jenkins/blueocean/service/embedded/BaseTest.java +++ b/blueocean-plugin/src/test/java/io/jenkins/blueocean/service/embedded/BaseTest.java @@ -96,7 +96,9 @@ public abstract class BaseTest { assert path.startsWith("/"); try { if(HttpResponse.class.isAssignableFrom(type)){ - HttpResponse<String> response = Unirest.get(getBaseUrl(path)).header("Accept", accept).asString(); + HttpResponse<String> response = Unirest.get(getBaseUrl(path)).header("Accept", accept) + .header("Accept-Encoding","") + .asString(); Assert.assertEquals(expectedStatus, response.getStatus()); return (T) response; } @@ -144,6 +146,7 @@ public abstract class BaseTest { try { HttpResponse<String> response = Unirest.post(getBaseUrl(path)) .header("Content-Type",contentType) + .header("Accept-Encoding","") .body(body).asObject(String.class); Assert.assertEquals(expectedStatus, response.getStatus()); return response.getBody(); @@ -163,6 +166,12 @@ public abstract class BaseTest { try { HttpResponse<Map> response = Unirest.put(getBaseUrl(path)) .header("Content-Type","application/json") + .header("Accept","application/json") + //Unirest by default sets accept-encoding to gzip but stapler is sending malformed gzip value if + // the response length is small (in this case its 20 chars). + // Needs investigation in stapler to see whats going on there. + // For time being gzip compression is disabled + .header("Accept-Encoding","") .body(body).asObject(Map.class); Assert.assertEquals(expectedStatus, response.getStatus()); return response.getBody();
gzip encoding disabled in test. Stapler has a bug where it doesn't write the response if gzip compression is enabled for a small JSON response. It works fine for larger JSON responses. Needs to be investigated in Stapler.
jenkinsci_blueocean-plugin
train
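The workaround recorded above is to blank out the Accept-Encoding header so Stapler never gzips the small JSON body. Below is a minimal sketch of the same idea in Python, assuming the third-party requests library as a stand-in for Unirest; the helper name, the placeholder URL, and the use of the standard "identity" token instead of an empty header value are illustrative assumptions, not part of the Blue Ocean test code.

import requests

def get_json_without_gzip(url, expected_status=200):
    # Force an uncompressed (identity) response body. This sidesteps
    # servers that emit malformed gzip for very small JSON payloads.
    response = requests.get(
        url,
        headers={
            "Accept": "application/json",
            "Accept-Encoding": "identity",
        },
    )
    assert response.status_code == expected_status
    return response.json()

# Hypothetical usage against a locally started test server:
# data = get_json_without_gzip("http://localhost:8080/blue/rest/organizations/")

The same header override applies to POST and PUT requests as well; the point is simply that the client advertises no compression support, so the server has no reason to gzip the response.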
8d5a8e843fb2ee336da9c2b975e7715ea637b58b
diff --git a/src/Models/UpdateExecutor.php b/src/Models/UpdateExecutor.php index <HASH>..<HASH> 100644 --- a/src/Models/UpdateExecutor.php +++ b/src/Models/UpdateExecutor.php @@ -92,14 +92,14 @@ final class UpdateExecutor $directories = (new Finder())->in($folder)->exclude(config('self-update.exclude_folders'))->directories(); collect($directories->sort(function (SplFileInfo $a, SplFileInfo $b) { - return strlen($b->getRealpath()) - strlen($a->getRealpath()); - }))->each(function (SplFileInfo $directory) { - if (! dirsIntersect(File::directories($directory->getRealPath()), config('self-update.exclude_folders'))) { - File::copyDirectory($directory->getRealPath(), $this->targetFolder($directory)); - } - - File::deleteDirectory($directory->getRealPath()); - }); + return strlen($b->getRealpath()) - strlen($a->getRealpath()); + }))->each(function (SplFileInfo $directory) { + if (! dirsIntersect(File::directories($directory->getRealPath()), config('self-update.exclude_folders'))) { + File::copyDirectory($directory->getRealPath(), $this->targetFolder($directory)); + } + + File::deleteDirectory($directory->getRealPath()); + }); } /** @@ -116,7 +116,7 @@ final class UpdateExecutor return base_path($file->getFilename()); } - return $this->basePath . $file->getFilename(); + return $this->basePath.$file->getFilename(); } /** @@ -130,11 +130,11 @@ final class UpdateExecutor private function targetFolder(SplFileInfo $directory): string { if (empty($this->basePath)) { - return Str::finish(base_path($directory->getRealPath()), DIRECTORY_SEPARATOR) . $directory->getBasename(); + return Str::finish(base_path($directory->getRealPath()), DIRECTORY_SEPARATOR).$directory->getBasename(); } return $this->basePath - . Str::finish($directory->getRealPath(), DIRECTORY_SEPARATOR) - . $directory->getBasename(); + .Str::finish($directory->getRealPath(), DIRECTORY_SEPARATOR) + .$directory->getBasename(); } }
Apply fixes from StyleCI (#<I>)
codedge_laravel-selfupdater
train
3fbce106c09b073eb7aa950e23019a75f5d3cfaf
diff --git a/slave/buildslave/commands/mtn.py b/slave/buildslave/commands/mtn.py index <HASH>..<HASH> 100644 --- a/slave/buildslave/commands/mtn.py +++ b/slave/buildslave/commands/mtn.py @@ -182,4 +182,4 @@ class Monotone(SourceBaseCommand): d = c.start() d.addCallback(self._abandonOnFailure) d.addCallback(_parse) - + return d
Return the deferred from Mtn.parseGotRevision. Without this, any error in parseGotRevision is silently dropped.
buildbot_buildbot
train
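The buildbot fix above is a single return statement, but the reasoning in the message generalizes to any Twisted code: if a method builds a Deferred chain and does not return it, the caller cannot attach an errback, so failures in the chain are silently dropped. The sketch below illustrates that pattern with a fake command runner; the function and parser names are invented for illustration and are not buildbot APIs (Twisted itself is assumed to be installed).

from twisted.internet import defer

def run_command():
    # Stand-in for an asynchronous command; in real code this would return
    # a Deferred that eventually fires with the command's stdout.
    return defer.succeed("mtn automate get_base_revision_id\nabc123\n")

def parse_output(stdout):
    # Hypothetical parser: take the last whitespace-separated token.
    return stdout.strip().split()[-1]

def parse_got_revision_broken():
    d = run_command()
    d.addCallback(parse_output)
    # Missing return: the caller receives None, cannot add an errback,
    # and any failure inside run_command or parse_output is swallowed.

def parse_got_revision_fixed():
    d = run_command()
    d.addCallback(parse_output)
    return d  # the caller can now chain callbacks or add an errback

# Usage sketch: only the fixed version lets results and errors propagate.
d = parse_got_revision_fixed()
d.addCallback(lambda rev: print("got revision:", rev))
d.addErrback(lambda failure: print("failed:", failure))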
42e98f15aeb61171ad9e54b722818fc4c693b761
diff --git a/command/build.go b/command/build.go index <HASH>..<HASH> 100644 --- a/command/build.go +++ b/command/build.go @@ -170,11 +170,12 @@ func (c *BuildCommand) RunContext(buildCtx context.Context, cla *BuildArgs) int for i := range builds { ui := c.Ui if cla.Color { - ui = &packer.ColoredUi{ - Color: colors[i%len(colors)], - Ui: ui, - } + // Only set up UI colors if -machine-readable isn't set. if _, ok := c.Ui.(*packer.MachineReadableUi); !ok { + ui = &packer.ColoredUi{ + Color: colors[i%len(colors)], + Ui: ui, + } ui.Say(fmt.Sprintf("%s: output will be in this color.", builds[i].Name())) if i+1 == len(builds) { // Add a newline between the color output and the actual output diff --git a/packer/ui.go b/packer/ui.go index <HASH>..<HASH> 100644 --- a/packer/ui.go +++ b/packer/ui.go @@ -131,7 +131,6 @@ func (u *ColoredUi) supportsColors() bool { type TargetedUI struct { Target string Ui Ui - *uiProgressBar } var _ Ui = new(TargetedUI) @@ -172,6 +171,10 @@ func (u *TargetedUI) prefixLines(arrow bool, message string) string { return strings.TrimRightFunc(result.String(), unicode.IsSpace) } +func (u *TargetedUI) TrackProgress(src string, currentSize, totalSize int64, stream io.ReadCloser) io.ReadCloser { + return u.Ui.TrackProgress(src, currentSize, totalSize, stream) +} + // The BasicUI is a UI that reads and writes from a standard Go reader // and writer. It is safe to be called from multiple goroutines. Machine // readable output is simply logged for this UI.
make sure the no-op progress bar gets used for machine-readable UIs
hashicorp_packer
train
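The packer change above makes two related points: color decoration should only wrap a UI that is not machine readable, and a wrapping UI must delegate progress tracking to whatever it wraps so the no-op progress bar of a machine-readable UI is actually used. Below is a minimal sketch of that decorator-and-delegation pattern in Python; the class names and the machine-readable output format are invented for illustration and do not mirror packer's real types.

class BasicUI:
    def say(self, message):
        print(message)

    def track_progress(self, name, current, total):
        print("%s: %d/%d" % (name, current, total))


class MachineReadableUI:
    def say(self, message):
        print("ui,say,%s" % message)

    def track_progress(self, name, current, total):
        return None  # no-op progress bar: nothing is drawn


class ColoredUI:
    """Decorator that adds ANSI color around another UI's text output."""

    def __init__(self, inner):
        self.inner = inner

    def say(self, message):
        self.inner.say("\033[32m%s\033[0m" % message)

    def track_progress(self, name, current, total):
        # Always delegate: the wrapped UI decides whether a real progress
        # bar or a no-op implementation is used.
        return self.inner.track_progress(name, current, total)


def build_ui(base_ui, use_color):
    # Only wrap with color when the base UI is not machine readable;
    # otherwise ANSI codes would corrupt the machine-readable stream.
    if use_color and not isinstance(base_ui, MachineReadableUI):
        return ColoredUI(base_ui)
    return base_ui


# Usage sketch:
# build_ui(BasicUI(), use_color=True)            -> ColoredUI(BasicUI())
# build_ui(MachineReadableUI(), use_color=True)  -> MachineReadableUI()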