hash
stringlengths
40
40
diff
stringlengths
131
114k
message
stringlengths
7
980
project
stringlengths
5
67
split
stringclasses
1 value
16674daefee4f50d4b9dce01aa11eed7194a87bc
diff --git a/lib/adhearsion/generators/app/app_generator.rb b/lib/adhearsion/generators/app/app_generator.rb index <HASH>..<HASH> 100644 --- a/lib/adhearsion/generators/app/app_generator.rb +++ b/lib/adhearsion/generators/app/app_generator.rb @@ -4,11 +4,6 @@ module Adhearsion BASEDIRS = %w( config lib script ) - def self.source_root(path = nil) - path = File.join(base_root, 'app', 'templates') - path if File.exists?(path) - end - def setup_project self.destination_root = @generator_name BASEDIRS.each { |dir| directory dir } diff --git a/lib/adhearsion/generators/controller/controller_generator.rb b/lib/adhearsion/generators/controller/controller_generator.rb index <HASH>..<HASH> 100644 --- a/lib/adhearsion/generators/controller/controller_generator.rb +++ b/lib/adhearsion/generators/controller/controller_generator.rb @@ -4,11 +4,6 @@ module Adhearsion argument :controller_name, :type => :string - def self.source_root(path = nil) - path = File.join(base_root, 'controller', 'templates') - path if File.exists?(path) - end - def create_controller raise Exception, "Generator commands need to be run in an Adhearsion app directory" unless ScriptAhnLoader.in_ahn_application?('.') self.destination_root = '.' diff --git a/lib/adhearsion/generators/generator.rb b/lib/adhearsion/generators/generator.rb index <HASH>..<HASH> 100644 --- a/lib/adhearsion/generators/generator.rb +++ b/lib/adhearsion/generators/generator.rb @@ -16,8 +16,47 @@ module Adhearsion argument :generate_command, :type => :string argument :generator_name, :type => :string + # Returns the source root for this generator using default_source_root as default. + def self.source_root(path = nil) + @_source_root = path if path + @_source_root ||= default_source_root + end + + # Convenience method to get the namespace from the class name. It's the + # same as Thor default except that the Generator at the end of the class + # is removed. 
+ def self.namespace(name = nil) + return super if name + @namespace ||= super.sub(/_generator$/, '').sub(/:generators:/, ':') + end + + # Returns the default source root for a given generator. This is used internally + # by adhearsion to set its generators source root. If you want to customize your source + # root, you should use source_root. + def self.default_source_root + return unless generator_name + path = File.expand_path File.join(generator_name, 'templates'), base_root + path if File.exists?(path) + end + + # Returns the base root for a common set of generators. This is used to dynamically + # guess the default source root. def self.base_root - File.dirname(__FILE__) + File.dirname __FILE__ + end + + protected + + # Removes the namespaces and get the generator name. For example, + # Adhearsion::Generators::ModelGenerator will return "model" as generator name. + # + def self.generator_name + @generator_name ||= begin + if generator = name.to_s.split('::').last + generator.sub! /Generator$/, '' + generator.underscore + end + end end end
[CS] Move generator source_root into base class
adhearsion_adhearsion
train
14870fa08077149fecda8b1b3db83e2cc6b34d8f
diff --git a/org/postgresql/jdbc2/AbstractJdbc2Statement.java b/org/postgresql/jdbc2/AbstractJdbc2Statement.java index <HASH>..<HASH> 100644 --- a/org/postgresql/jdbc2/AbstractJdbc2Statement.java +++ b/org/postgresql/jdbc2/AbstractJdbc2Statement.java @@ -3,7 +3,7 @@ * Copyright (c) 2004-2008, PostgreSQL Global Development Group * * IDENTIFICATION -* $PostgreSQL: pgjdbc/org/postgresql/jdbc2/AbstractJdbc2Statement.java,v 1.106 2008/01/08 06:56:28 jurka Exp $ +* $PostgreSQL: pgjdbc/org/postgresql/jdbc2/AbstractJdbc2Statement.java,v 1.107 2008/01/15 03:29:15 jurka Exp $ * *------------------------------------------------------------------------- */ @@ -367,7 +367,7 @@ public abstract class AbstractJdbc2Statement implements BaseStatement int outParameterCount = preparedParameters.getOutParameterCount() ; if ( cols != outParameterCount ) - throw new PSQLException(GT.tr("A CallableStatement was excecuted with an invalid number of parameters"),PSQLState.SYNTAX_ERROR); + throw new PSQLException(GT.tr("A CallableStatement was executed with an invalid number of parameters"),PSQLState.SYNTAX_ERROR); // reset last result fetched (for wasNull) lastIndex = 0;
Typo excecuted -> executed. Devrim Gunduz
pgjdbc_pgjdbc
train
e2c15919de46efd6151ec5ea4ebab9070eb78fb8
diff --git a/lib/common.js b/lib/common.js index <HASH>..<HASH> 100644 --- a/lib/common.js +++ b/lib/common.js @@ -11,6 +11,7 @@ const clone = require('reftools/lib/clone.js').clone; const circularClone = require('reftools/lib/clone.js').circularClone; const walkSchema = require('oas-schema-walker').walkSchema; const wsGetState = require('oas-schema-walker').getDefaultState; +const pinyin = require("pinyin"); const httpsnippetGenerator = require('./httpsnippetGenerator'); const hljs = require('highlightjs/highlight.pack.js'); @@ -509,6 +510,7 @@ function slugify(text) { .replace(/\(/, '').replace(/\)/, '') //remove parentheses .replace(/[:,']/,'') //remove colons, commas, apostrophes .replace(/<code>/, '').replace(/<\/code>/, '') //remove code tags + .replace(/[\u4e00-\u9fa5]+/g, (s) => pinyin(s, { style: pinyin.STYLE_TONE2 }).join()) // replace chinese with pinyin .replace(/[\s\W-]+/g, '-') // Replace spaces, non-word characters and dashes with a single dash (-) } diff --git a/package.json b/package.json index <HASH>..<HASH> 100644 --- a/package.json +++ b/package.json @@ -48,6 +48,7 @@ "oas-resolver": "^2.4.4", "oas-schema-walker": "^1.1.5", "openapi-sampler": "^1.0.0-beta.17", + "pinyin": "^2.9.1", "reftools": "^1.1.6", "swagger2openapi": "^7.0.0", "urijs": "^1.19.2", diff --git a/test/common.test.js b/test/common.test.js index <HASH>..<HASH> 100644 --- a/test/common.test.js +++ b/test/common.test.js @@ -72,6 +72,9 @@ describe('common tests', () => { it('should replace remaining non-word characters and dashes with a single dash (-)', () => { assert.equal(common.slugify('id: one, two & three'), 'id-one-two-and-three'); }); + it('should replace chinese characters with pinyin', () => { + assert.equal(common.slugify('id: one, 二 & 三'), 'id-one-er4-and-san1'); + }); }); describe('schemaToArray tests', () => { it('should return a blank container if all inputs are blank', () => {
feat: slugify repalce chinese with pinyin (#<I>)
Mermade_widdershins
train
7765bf89e505759239d7b67521389606c5860977
diff --git a/test/functionals/api/v2/containers_controller_test.rb b/test/functionals/api/v2/containers_controller_test.rb index <HASH>..<HASH> 100644 --- a/test/functionals/api/v2/containers_controller_test.rb +++ b/test/functionals/api/v2/containers_controller_test.rb @@ -60,51 +60,53 @@ module Api end end - test 'power call turns on/off container in Docker host' do - Fog.mock! - Fog::Compute::Fogdocker::Server.any_instance.expects(:start) - put :power, :id => @container.id, :power_action => 'start' - assert_response :success - end + context 'power calls' do + setup { Fog.mock! } + teardown { Fog.unmock! } - test 'power call checks status of container in Docker host' do - Fog.mock! - Fog::Compute::Fogdocker::Server.any_instance.expects(:ready?).returns(false) - put :power, :id => @container.id, :power_action => 'status' - assert_response :success - assert_equal ActiveSupport::JSON.decode(response.body)['running'], false - end + test 'power call turns on/off container in Docker host' do + Fog::Compute::Fogdocker::Server.any_instance.expects(:start) + put :power, :id => @container.id, :power_action => 'start' + assert_response :success + end - test 'power call host' do - Fog.mock! 
- Fog::Compute::Fogdocker::Server.any_instance.expects(:ready?).returns(false) - put :power, :id => @container.id, :power_action => 'status' - assert_response :success - assert_equal ActiveSupport::JSON.decode(response.body)['running'], false - end + test 'power call checks status of container in Docker host' do + Fog::Compute::Fogdocker::Server.any_instance.expects(:ready?).returns(false) + put :power, :id => @container.id, :power_action => 'status' + assert_response :success + assert_equal ActiveSupport::JSON.decode(response.body)['running'], false + end - test 'creates a container with correct params' do - repository_name = "centos" - tag = "7" - name = "foo" - registry_uri = URI.parse(@registry.url) - Service::Containers.any_instance.expects(:pull_image).returns(true) - Service::Containers.any_instance - .expects(:start_container).returns(true).with do |container| - container.must_be_kind_of(Container) - container.repository_name.must_equal(repository_name) - container.tag.must_equal(tag) - container.compute_resource_id.must_equal(@compute_resource.id) - container.name.must_equal(name) - container.repository_pull_url.must_include(registry_uri.host) - container.repository_pull_url.must_include("#{repository_name}:#{tag}") + test 'power call host' do + Fog::Compute::Fogdocker::Server.any_instance.expects(:ready?).returns(false) + put :power, :id => @container.id, :power_action => 'status' + assert_response :success + assert_equal ActiveSupport::JSON.decode(response.body)['running'], false + end + + test 'creates a container with correct params' do + repository_name = "centos" + tag = "7" + name = "foo" + registry_uri = URI.parse(@registry.url) + Service::Containers.any_instance.expects(:pull_image).returns(true) + Service::Containers.any_instance + .expects(:start_container).returns(true).with do |container| + container.must_be_kind_of(Container) + container.repository_name.must_equal(repository_name) + container.tag.must_equal(tag) + 
container.compute_resource_id.must_equal(@compute_resource.id) + container.name.must_equal(name) + container.repository_pull_url.must_include(registry_uri.host) + container.repository_pull_url.must_include("#{repository_name}:#{tag}") + end + post :create, :container => { :compute_resource_id => @compute_resource.id, + :name => name, + :registry_id => @registry.id, + :repository_name => repository_name, + :tag => tag } + assert_response :created end - post :create, :container => { :compute_resource_id => @compute_resource.id, - :name => name, - :registry_id => @registry.id, - :repository_name => repository_name, - :tag => tag } - assert_response :created end test 'creates a katello container with correct params' do
Ensure Fog is mocked before running API/v2/create test
theforeman_foreman_docker
train
a17f8790c9f01e12e3e810b3326bce37cf8c4daf
diff --git a/contrib/externs/jquery-1.7.js b/contrib/externs/jquery-1.7.js index <HASH>..<HASH> 100644 --- a/contrib/externs/jquery-1.7.js +++ b/contrib/externs/jquery-1.7.js @@ -1581,7 +1581,7 @@ jQuery.now = function() {}; $.now = function() {}; /** - * @param {(string|Object.<string,*>)} arg1 + * @param {(string|Object.<string,*>)=} arg1 * @param {(string|function(!jQuery.event=))=} selector * @param {function(!jQuery.event=)=} handler * @return {!jQuery} diff --git a/contrib/externs/jquery-1.8.js b/contrib/externs/jquery-1.8.js index <HASH>..<HASH> 100644 --- a/contrib/externs/jquery-1.8.js +++ b/contrib/externs/jquery-1.8.js @@ -1559,7 +1559,7 @@ jQuery.now = function() {}; $.now = function() {}; /** - * @param {(string|Object.<string,*>)} arg1 + * @param {(string|Object.<string,*>)=} arg1 * @param {(string|function(!jQuery.event=))=} selector * @param {function(!jQuery.event=)=} handler * @return {!jQuery} diff --git a/contrib/externs/jquery-1.9.js b/contrib/externs/jquery-1.9.js index <HASH>..<HASH> 100644 --- a/contrib/externs/jquery-1.9.js +++ b/contrib/externs/jquery-1.9.js @@ -1469,7 +1469,7 @@ jQuery.now = function() {}; $.now = function() {}; /** - * @param {(string|Object.<string,*>)} arg1 + * @param {(string|Object.<string,*>)=} arg1 * @param {(string|function(!jQuery.event=))=} selector * @param {function(!jQuery.event=)=} handler * @return {!jQuery}
jQuery extern changes to allow jQuery.prototype.off to be called without arguments.
google_closure-compiler
train
e5bb75c702ca0015fa35005e44a80211b2485f8a
diff --git a/c3d.py b/c3d.py index <HASH>..<HASH> 100644 --- a/c3d.py +++ b/c3d.py @@ -863,8 +863,10 @@ class Reader(Manager): self._handle.seek((self.header.data_block - 1) * 512) for frame_no in range(self.first_frame(), self.last_frame() + 1): - raw = np.fromfile(self._handle, dtype=point_dtype, - count=4 * self.header.point_count).reshape((ppf, 4)) + n = 4 * self.header.point_count + raw = np.fromstring(self._handle.read(n * point_bytes), + dtype=point_dtype, + count=n).reshape((self.point_used, 4)) points[:, :3] = raw[:, :3] * point_scale
Use fromstring to read point data.
EmbodiedCognition_py-c3d
train
e62ff1147bb984c3545dfbff1e0a09b60d9a359f
diff --git a/generators/navigation_config/templates/config/navigation.rb b/generators/navigation_config/templates/config/navigation.rb index <HASH>..<HASH> 100644 --- a/generators/navigation_config/templates/config/navigation.rb +++ b/generators/navigation_config/templates/config/navigation.rb @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # Configures your navigation SimpleNavigation::Configuration.run do |navigation| # Specify a custom renderer if needed. @@ -8,6 +9,10 @@ SimpleNavigation::Configuration.run do |navigation| # Specify the class that will be applied to active navigation items. Defaults to 'selected' # navigation.selected_class = 'your_selected_class' + # Specify the class that will be applied to the current leaf of + # active navigation items. Defaults to 'simple-navigation-active-leaf' + # navigation.active_leaf_class = 'your_active_leaf_class' + # Item keys are normally added to list items as id. # This setting turns that off # navigation.autogenerate_item_ids = false @@ -68,4 +73,4 @@ SimpleNavigation::Configuration.run do |navigation| end -end \ No newline at end of file +end
added active_leaf_class configuration item to generator
codeplant_simple-navigation
train
9cf340d4a5b873894529d6b91d8ad676660664d6
diff --git a/index.js b/index.js index <HASH>..<HASH> 100644 --- a/index.js +++ b/index.js @@ -1,11 +1,10 @@ -const whitelist = ['concat', 'every', 'filter', 'find', 'includes', 'keys', 'map', 'reduce', 'reduceRight', 'slice', 'some'] +const whitelist = ['concat', 'every', 'filter', 'find', 'includes', 'map', 'reduce', 'reduceRight', 'slice', 'some'] const Box = {} Object .getOwnPropertyNames(Array.prototype) .filter(s => whitelist.includes(s)) .forEach(method => { - console.log(method) - Box[method] = fn => a => a[method](fn) + Box[method] = fn => (a, ...args) => a[method](fn, ...args) }) Box.cons = (val, a) => [].concat([a].unshift(val)) diff --git a/test.js b/test.js index <HASH>..<HASH> 100644 --- a/test.js +++ b/test.js @@ -7,14 +7,6 @@ const l = a => { console.log('# -- log', i, '-->', a); i++; } -tape('api: map', (t) => { - t.test('applys function to items in array', (t) => { - t.plan(1) - const addTwo = pl.map(x => x * 2) - const doubled = addTwo([1, 2, 3]) - t.deepEqual(doubled, [2, 4, 6]) - }) -}) tape('api: concat', (t) => { t.test('should add array of items to end of array', (t) => { @@ -80,43 +72,49 @@ tape('api: find', (t) => { }) }) -// tape('Describe block', (t) => { -// t.test('it block', (t) => { -// t.plan(3) -// t.doesNotThrow(barracks, 'no args does not throw') -// t.doesNotThrow(barracks.bind(null, {}), 'object does not throw') -// t.throws(barracks.bind(null, 123), 'non-object throws') -// }) - -// t.test('should validate hook types', (t) => { -// t.plan(3) -// t.throws(barracks.bind(null, { onError: 123 }), /function/, 'onError throws') -// t.throws(barracks.bind(null, { onAction: 123 }), /function/, 'onAction throws') -// t.throws(barracks.bind(null, { onStateChange: 123 }), /function/, 'onStateChange throws') -// }) -// }) - -//helpers -// const l = console.log -// l(Object.keys(pl)); - -// const length = a => a.length -// const upperCase = a => a.toUpperCase() - - -// //functions -// var myset = [1, 2, 2, 2, 3, 3, 4, 55] -// var mybuds = 
["Ronn", "Matt", "Brendan", "Pete", "Yan"] +tape('api: includes', (t) => { + t.test('should return true when an item is found in array', (t) => { + t.plan(1) + const arr = [1, 2, 3, 4, 5]; + const isThree = pl.includes(3) + const result = (isThree(arr)) + t.deepEqual(result, true) + }) -// // l('head ', head) -// // l('tail ', tail) -// // l('uniq', pl.uniq(myset)) -// const addTwo = pl.map(x => x * 2) + t.test('should return false when an item is not found in array', (t) => { + t.plan(1) + const arr = [1, 2, 3, 4, 5]; + const isThree = pl.includes(8) + const result = (isThree(arr)) + t.deepEqual(result, false) + }) +}) -// const comline = pl.pipe( -// x => x * 2, -// addTwo -// ) +tape('api: map', (t) => { + t.test('applys function to items in array', (t) => { + t.plan(1) + const addTwo = pl.map(x => x * 2) + const doubled = addTwo([1, 2, 3]) + t.deepEqual(doubled, [2, 4, 6]) + }) +}) +tape('api: reduce', (t) => { + t.test('applys function to each item and accums results from left to right', (t) => { + t.plan(1) + const sum = pl.reduce((acc, val) => acc + val); + const total = sum([2, 3, 4], 99) + t.deepEqual(total, 108) + }) +}) -// l(comline(5)) \ No newline at end of file +tape('api: reduce right', (t) => { + t.test('applys function to each item and accums results from right to left', (t) => { + t.plan(1) + const sum = pl.reduceRight(((acc, val) => acc - val), 10); + const total = sum([2, 3, 4], 99) + l(total) + t.deepEqual(total, 90) + }) + +})
Added test for reduce and reduceRight
trainyard_pico-lambda
train
3518601daf5e50d77803440fa0aa14c32019335a
diff --git a/bundles/as3/lib/sprout/as3/version.rb b/bundles/as3/lib/sprout/as3/version.rb index <HASH>..<HASH> 100644 --- a/bundles/as3/lib/sprout/as3/version.rb +++ b/bundles/as3/lib/sprout/as3/version.rb @@ -3,7 +3,7 @@ module Sprout # :nodoc: module VERSION #:nodoc: MAJOR = 1 MINOR = 0 - TINY = 28 + TINY = 29 STRING = [MAJOR, MINOR, TINY].join('.') MAJOR_MINOR = [MAJOR, MINOR].join('.') diff --git a/bundles/as3/lib/sprout/tasks/mxmlc_task.rb b/bundles/as3/lib/sprout/tasks/mxmlc_task.rb index <HASH>..<HASH> 100644 --- a/bundles/as3/lib/sprout/tasks/mxmlc_task.rb +++ b/bundles/as3/lib/sprout/tasks/mxmlc_task.rb @@ -196,8 +196,11 @@ This is an advanced option. EOF end - add_param(:define, :string) do |p| + add_param(:define_conditional, :strings) do |p| + p.shell_name = "-define" p.description =<<EOF +This parameter is normally called 'define' but thanks to scoping issues with Sprouts and Rake, we needed to rename it and chose: 'define_conditional'. + Define a global AS3 conditional compilation definition, e.g. -define=CONFIG::debugging,true or -define+=CONFIG::debugging,true (to append to existing definitions in flex-config.xml) (advanced, repeatable) EOF end diff --git a/bundles/as3/test/mxmlc_test.rb b/bundles/as3/test/mxmlc_test.rb index <HASH>..<HASH> 100644 --- a/bundles/as3/test/mxmlc_test.rb +++ b/bundles/as3/test/mxmlc_test.rb @@ -76,6 +76,13 @@ class MXMLCTest < Test::Unit::TestCase assert_equal('-compiler.fonts.languages.language-range=hello', some_task.to_shell) end + def test_defines_param + some_task = Sprout::MXMLCTask.new(:some_task, Rake::application) + some_task.define_conditional << 'CONFIG::debug,true' + some_task.define_conditional << 'CONFIG::release,false' + assert_equal('-define+=CONFIG::debug,true -define+=CONFIG::release,false', some_task.to_shell) + end + def test_use_network_false some_task = Sprout::MXMLCTask.new(:some_task, Rake::application) some_task.use_network = false
Fixed support for MXMLC.define attribute. Implemented as 'define_conditional' collection of strings
lukebayes_project-sprouts
train
d02c133a2c5c794a6b14671c12b43b216e312682
diff --git a/packages/site/src/ui/link.js b/packages/site/src/ui/link.js index <HASH>..<HASH> 100644 --- a/packages/site/src/ui/link.js +++ b/packages/site/src/ui/link.js @@ -2,7 +2,23 @@ import Link from 'next/link' import React from 'react' // TODO: make handle activeClassName-type thing -export default props => - /^http/.test(props.href) - ? <a {...props} href={props.href}>{props.children}</a> - : <Link href={props.href}><a {...props}>{props.children}</a></Link> +export default class extends React.Component { + constructor(props) { + super(props) + this.handleClick = this.handleClick.bind(this) + } + handleClick(evt) { + if (document) + document.body.scrollTop = document.documentElement.scrollTop = 0 + if (this.props.onClick) this.props.onClick(evt) + } + render() { + return /^http/.test(this.props.href) + ? <a {...this.props} href={this.props.href}>{this.props.children}</a> + : <Link href={this.props.href}> + <a {...this.props} onClick={this.handleClick}> + {this.props.children} + </a> + </Link> + } +}
refactor(site): put scrolltop feature back in client links
pluralsight_design-system
train
442be25dd63c02b7b9a6d0c9e193470382e49638
diff --git a/stockcrawler/loaders.py b/stockcrawler/loaders.py index <HASH>..<HASH> 100644 --- a/stockcrawler/loaders.py +++ b/stockcrawler/loaders.py @@ -369,11 +369,11 @@ class ReportItemLoader(XmlXPathItemLoader): except (IndexError, ValueError): return url_date.strftime(DATE_FORMAT) - # count occurences of the date on url appearing in doc - url_date_count = len(self.selector.select('//*[local-name()="context"]//*[local-name()="endDate" and text()="%s"]' % url_date_str)) + context_date_strs = set(self.selector.select('//*[local-name()="context"]//*[local-name()="endDate"]/text()').extract()) + context_dates = [datetime.strptime(s, DATE_FORMAT) for s in context_date_strs] date = url_date - if url_date_count == 0: + if doc_date in context_dates: date = doc_date return date.strftime(DATE_FORMAT) diff --git a/stockcrawler/tests/test_loaders.py b/stockcrawler/tests/test_loaders.py index <HASH>..<HASH> 100644 --- a/stockcrawler/tests/test_loaders.py +++ b/stockcrawler/tests/test_loaders.py @@ -1407,7 +1407,7 @@ class ReportItemLoaderTest(TestCaseBase): 'eps_diluted': 1.3, 'dividend': 0.7, 'assets': 8742000000, - 'equity': 2911000000.0, + 'equity': 2925000000.0, 'cash': 1383000000.0 })
Improve the way to find doc end date.
eliangcs_pystock-crawler
train
ff709f473cbd49f88dbc2b0290e319908e45ba1f
diff --git a/mstranslator.py b/mstranslator.py index <HASH>..<HASH> 100644 --- a/mstranslator.py +++ b/mstranslator.py @@ -104,6 +104,23 @@ class Translator(object): params['from'] = lang_from return self.make_request('Translate', params) + def translate_array(self, texts=[], lang_from=None, lang_to=None, + contenttype='text/plain', category='general'): + if not lang_to: + raise ValueError('lang_to parameter is required') + if contenttype not in ('text/plain', 'text/html'): + raise ValueError('Invalid contenttype value') + params = { + 'texts': json.dumps(texts), + 'to': lang_to, + 'contentType': contenttype, + 'category': category, + } + if lang_from: + params['from'] = lang_from + + return self.make_request('TranslateArray', params) + def get_translations(self, text, lang_from, lang_to, max_n=10, contenttype='text/plain', category='general', url=None, user=None, state=None): options = { diff --git a/tests.py b/tests.py index <HASH>..<HASH> 100644 --- a/tests.py +++ b/tests.py @@ -45,6 +45,11 @@ class TranslatorTestCase(unittest.TestCase): t = self.translator.translate('world', 'en', 'ru') self.assertEqual('мир', t) + def test_translate_array(self): + ts = self.translator.translate_array(['hello', 'world'], 'en', 'ru') + translations = [t['TranslatedText'] for t in ts] + self.assertEqual(['Привет', 'мир'], translations) + def test_get_translations(self): t = self.translator.get_translations('world', 'en', 'ru') self.assertIsInstance(t, dict)
Support TranslateArray See issue #6.
wronglink_mstranslator
train
9a42e092b266d9611fa3ad16327eb80c7753f1ae
diff --git a/ib_insync/wrapper.py b/ib_insync/wrapper.py index <HASH>..<HASH> 100644 --- a/ib_insync/wrapper.py +++ b/ib_insync/wrapper.py @@ -770,7 +770,7 @@ class Wrapper(EWrapper): @iswrapper def error(self, reqId, errorCode, errorString): # https://interactivebrokers.github.io/tws-api/message_codes.html - warningCodes = {165, 202, 399, 10167} + warningCodes = {165, 202, 399, 434, 10167} isWarning = errorCode in warningCodes or 2100 <= errorCode < 2200 msg = (f'{"Warning" if isWarning else "Error"} ' f'{errorCode}, reqId {reqId}: {errorString}')
Treat error <I> ('The order size cannot be zero') as warning
erdewit_ib_insync
train
75c2b202b8de32810fba77e1e5a7c71d274ae5e9
diff --git a/google-http-client/src/main/java/com/google/api/client/http/javanet/NetHttpRequest.java b/google-http-client/src/main/java/com/google/api/client/http/javanet/NetHttpRequest.java index <HASH>..<HASH> 100644 --- a/google-http-client/src/main/java/com/google/api/client/http/javanet/NetHttpRequest.java +++ b/google-http-client/src/main/java/com/google/api/client/http/javanet/NetHttpRequest.java @@ -75,10 +75,21 @@ final class NetHttpRequest extends LowLevelHttpRequest { connection.setChunkedStreamingMode(0); } OutputStream out = connection.getOutputStream(); + boolean threw = true; try { getStreamingContent().writeTo(out); + threw = false; } finally { - out.close(); + try { + out.close(); + } catch (IOException exception) { + // When writeTo() throws an exception, chances are that the close call will also fail. + // In such case, swallow exception from close call so that the underlying cause can + // propagate. + if (!threw) { + throw exception; + } + } } } else { // cannot call setDoOutput(true) because it would change a GET method to POST
Swallow exception from close call when there is an underlying IOException
googleapis_google-http-java-client
train
172d095648d26f9bdd8e846398bf01cd2d9dbb89
diff --git a/controller.go b/controller.go index <HASH>..<HASH> 100644 --- a/controller.go +++ b/controller.go @@ -507,13 +507,14 @@ func (c *controller) NewSandbox(containerID string, options ...SandboxOption) (S return nil, err } + c.Lock() if sb.osSbox == nil && !sb.config.useExternalKey { if sb.osSbox, err = osl.NewSandbox(sb.Key(), !sb.config.useDefaultSandBox); err != nil { + c.Unlock() return nil, fmt.Errorf("failed to create new osl sandbox: %v", err) } } - c.Lock() c.sandboxes[sb.id] = sb c.Unlock() defer func() {
Fix race in host sandbox creation Since we share the host sandbox with many containers we need to serialize creation of the sandbox. Otherwise container starts may see the namespace path in inconsistent state.
docker_libnetwork
train
423f181500f7b25a9dfdf6bee458eadb8a6844d5
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100755 --- a/setup.py +++ b/setup.py @@ -1,3 +1,4 @@ +#!/usr/bin/env python # -*- coding: utf-8 -*- try:
Add shebung to setup.py
Vagrants_blackbird
train
6c3c074151a073939db496b10ff5b66e04f419e1
diff --git a/va/validation-authority.go b/va/validation-authority.go index <HASH>..<HASH> 100644 --- a/va/validation-authority.go +++ b/va/validation-authority.go @@ -9,6 +9,7 @@ import ( "crypto/sha256" "crypto/subtle" "crypto/tls" + "encoding/base64" "encoding/hex" "fmt" "io/ioutil" @@ -425,7 +426,7 @@ func (va *ValidationAuthorityImpl) validateDNS01(identifier core.AcmeIdentifier, // Compute the digest of the key authorization file h := sha256.New() h.Write([]byte(challenge.KeyAuthorization.String())) - authorizedKeysDigest := hex.EncodeToString(h.Sum(nil)) + authorizedKeysDigest := base64.RawURLEncoding.EncodeToString(h.Sum(nil)) // Look for the required record in the DNS challengeSubdomain := fmt.Sprintf("%s.%s", core.DNSPrefix, identifier.Value)
Fix dns value encoding according to acme <I> spec "The record provisioned to the DNS is the base<I>url encoding of this digest"
letsencrypt_boulder
train
7e146192786ed1673f970f1c3c92ddd3ffcdbc1e
diff --git a/lib/smoke.rb b/lib/smoke.rb index <HASH>..<HASH> 100644 --- a/lib/smoke.rb +++ b/lib/smoke.rb @@ -65,7 +65,7 @@ module Smoke # Rename a source def rename(candidates) candidates.each do |o, n| - active_sources[o].rename(o => n) + active_sources.rename(o => n) return active_sources[n] end end diff --git a/lib/smoke/request.rb b/lib/smoke/request.rb index <HASH>..<HASH> 100644 --- a/lib/smoke/request.rb +++ b/lib/smoke/request.rb @@ -9,7 +9,7 @@ module Smoke end end - SUPPORTED_TYPES = %w(json xml) + SUPPORTED_TYPES = %w(json xml javascript) attr_reader :uri, :content_type, :body, :type def initialize(uri, *options) @@ -45,7 +45,7 @@ module Smoke def parse! case @type - when :json + when :json, :javascript @body = ::Crack::JSON.parse(@body).symbolize_keys! when :xml @body = ::Crack::XML.parse(@body).symbolize_keys! diff --git a/spec/smoke_spec.rb b/spec/smoke_spec.rb index <HASH>..<HASH> 100644 --- a/spec/smoke_spec.rb +++ b/spec/smoke_spec.rb @@ -53,8 +53,8 @@ describe Smoke do end it "should be able to be renamed" do - @rename = Smoke.rename(:a => :b) - @rename.name.should == :b + Smoke.rename(:a => :b) + Smoke[:a].should be_nil Smoke[:b].should be_an_instance_of(Smoke::Origin) end end
Added support for x-javascript headers. Corrected renaming implementation
benschwarz_smoke
train
dd5349316dcc39305201d9dbbb4e5489ae6a1773
diff --git a/scss.inc.php b/scss.inc.php index <HASH>..<HASH> 100644 --- a/scss.inc.php +++ b/scss.inc.php @@ -687,13 +687,20 @@ class scssc { $this->throwError("Unexpected @content inside of mixin"); } - $this->storeEnv = $content->scope; + if(is_object($content)) { + $this->storeEnv = $content->scope; + if(count($content->children) > 0) { + foreach ($content->children as $child) { + if($child[0] == "include" || $child[0] == "block") {//} && !is_null($content->parent)) { + $this->storeEnv = @$content->parent; + } + $this->compileChild($child, $out); + $this->storeEnv = $content->scope; + } + } - foreach ($content->children as $child) { - $this->compileChild($child, $out); + unset($this->storeEnv); } - - unset($this->storeEnv); break; case "debug": list(,$value, $pos) = $child;
fixes issue #<I> at leafo/scssphp
leafo_scssphp
train
464d31165bdd1963993f05e5c30bea10d8d2cfc3
diff --git a/src/Knp/JsonSchemaBundle/Schema/SchemaGenerator.php b/src/Knp/JsonSchemaBundle/Schema/SchemaGenerator.php index <HASH>..<HASH> 100644 --- a/src/Knp/JsonSchemaBundle/Schema/SchemaGenerator.php +++ b/src/Knp/JsonSchemaBundle/Schema/SchemaGenerator.php @@ -36,10 +36,13 @@ class SchemaGenerator $this->schemaFactory = $schemaFactory; $this->propertyFactory = $propertyFactory; $this->propertyHandlers = new \SplPriorityQueue; + $this->aliasList = new \SplDoublyLinkedList; } public function generate($alias) { + $this->aliases[] = $alias; + $className = $this->schemaRegistry->getNamespace($alias); $refl = $this->reflectionFactory->create($className); $schema = $this->schemaFactory->createSchema(ucfirst($alias)); @@ -55,10 +58,17 @@ class SchemaGenerator if (!$property->isIgnored()) { $schema->addProperty($property); - if ($property->hasType(Property::TYPE_OBJECT) && $property->getObject()) { + if ($property->hasType(Property::TYPE_OBJECT) && + $property->getObject() && + // Make sure that we're not creating a reference to the parent schema of the property + $property->getObject() != prev($this->aliases)) { + $property->setSchema( - $x = $this->generate($property->getObject()) + $this->generate($property->getObject()) ); + + // Fast forward from our prev() call earlier so the pointer is in the right place + next($this->aliases); } } }
Prevent recursive schema objects (common in to many doctrine entities)
KnpLabs_KnpJsonSchemaBundle
train
1e949ff914a08151f99b82351e59823bf8cb1df0
diff --git a/Doctrine/ORM/EncryptionEntityRepositoryDecorator.php b/Doctrine/ORM/EncryptionEntityRepositoryDecorator.php index <HASH>..<HASH> 100644 --- a/Doctrine/ORM/EncryptionEntityRepositoryDecorator.php +++ b/Doctrine/ORM/EncryptionEntityRepositoryDecorator.php @@ -91,7 +91,8 @@ class EncryptionEntityRepositoryDecorator implements ObjectRepository, Selectabl */ public function findOneBy(array $criteria) { - return $this->wrapped->findOneBy($criteria); + $elements = $this->findBy($criteria); + return !empty($elements) ? reset($elements) : null; } /**
Fixed a problem when searching for an encrypted entity using findOneBy
jagilpe_encryption-bundle
train
fdcfd969be69665652006dd319e68011516432ac
diff --git a/src/photini/__init__.py b/src/photini/__init__.py index <HASH>..<HASH> 100644 --- a/src/photini/__init__.py +++ b/src/photini/__init__.py @@ -1,4 +1,4 @@ """Full documentation is at https://photini.readthedocs.io/""" __version__ = '2022.5.1' -build = '2163 (b87d986)' +build = '2164 (fd79389)' diff --git a/src/photini/types.py b/src/photini/types.py index <HASH>..<HASH> 100644 --- a/src/photini/types.py +++ b/src/photini/types.py @@ -1140,6 +1140,8 @@ class MD_Orientation(MD_Int): class MD_Timezone(MD_Int): + _quiet = True + @classmethod def from_exiv2(cls, file_value, tag): if file_value in (None, ''):
Reduce messages from camera timezone merging
jim-easterbrook_Photini
train
6b2167ca245558317be1f8881f162f4d5312eb95
diff --git a/allauth/account/views.py b/allauth/account/views.py index <HASH>..<HASH> 100644 --- a/allauth/account/views.py +++ b/allauth/account/views.py @@ -765,15 +765,18 @@ class LogoutView(TemplateResponseMixin, View): if app_settings.LOGOUT_ON_GET: return self.post(*args, **kwargs) if not self.request.user.is_authenticated: - return redirect(self.get_redirect_url()) + response = redirect(self.get_redirect_url()) + return _ajax_response(self.request, response) ctx = self.get_context_data() - return self.render_to_response(ctx) + response = self.render_to_response(ctx) + return _ajax_response(self.request, response) def post(self, *args, **kwargs): url = self.get_redirect_url() if self.request.user.is_authenticated: self.logout() - return redirect(url) + response = redirect(url) + return _ajax_response(self.request, response) def logout(self): adapter = get_adapter(self.request)
feat: add ajax functionality to logout (#<I>) This PR is necessary for single page applications to handle redirects on logout.
pennersr_django-allauth
train
34ffdcd468f7eeb7f697e9204aee96719bce4d6a
diff --git a/src/edit/selection.js b/src/edit/selection.js index <HASH>..<HASH> 100644 --- a/src/edit/selection.js +++ b/src/edit/selection.js @@ -479,14 +479,21 @@ export function scrollIntoView(pm, pos) { for (let parent = pm.content;; parent = parent.parentNode) { let atBody = parent == document.body let rect = atBody ? windowRect() : parent.getBoundingClientRect() + let moveX = 0, moveY = 0 if (coords.top < rect.top) - parent.scrollTop -= rect.top - coords.top + scrollMargin + moveY = -(rect.top - coords.top + scrollMargin) else if (coords.bottom > rect.bottom) - parent.scrollTop += coords.bottom - rect.bottom + scrollMargin + moveY = coords.bottom - rect.bottom + scrollMargin if (coords.left < rect.left) - parent.scrollLeft -= rect.left - coords.left + scrollMargin + moveX = -(rect.left - coords.left + scrollMargin) else if (coords.right > rect.right) - parent.scrollLeft += coords.right - rect.right + scrollMargin + moveX = coords.right - rect.right + scrollMargin + if (moveX || moveY) { + if (atBody) window.scrollBy(moveX, moveY) + } else { + if (moveY) parent.scrollTop += moveY + if (moveX) parent.scrollLeft += moveX + } if (atBody) break } }
Fix scrolling cursor into view on non-Chrome platforms You can't manipulate the body's scrollTop/scrollLeft there, have to use window.scrollBy Closes #<I>
ProseMirror_prosemirror-markdown
train
853190748a3250bdfaf6fb09846b0d8a3682087e
diff --git a/src/main/java/org/web3j/abi/FunctionReturnDecoder.java b/src/main/java/org/web3j/abi/FunctionReturnDecoder.java index <HASH>..<HASH> 100644 --- a/src/main/java/org/web3j/abi/FunctionReturnDecoder.java +++ b/src/main/java/org/web3j/abi/FunctionReturnDecoder.java @@ -5,6 +5,7 @@ import java.util.Collections; import java.util.List; import org.web3j.abi.datatypes.Array; +import org.web3j.abi.datatypes.Bytes; import org.web3j.abi.datatypes.BytesType; import org.web3j.abi.datatypes.DynamicArray; import org.web3j.abi.datatypes.DynamicBytes; @@ -72,7 +73,9 @@ public class FunctionReturnDecoder { try { Class<T> type = typeReference.getClassType(); - if (Array.class.isAssignableFrom(type) + if (Bytes.class.isAssignableFrom(type)) { + return TypeDecoder.decodeBytes(input, (Class<Bytes>) Class.forName(type.getName())); + } else if (Array.class.isAssignableFrom(type) || BytesType.class.isAssignableFrom(type) || Utf8String.class.isAssignableFrom(type)) { return TypeDecoder.decodeBytes(input, Bytes32.class); diff --git a/src/test/java/org/web3j/abi/FunctionReturnDecoderTest.java b/src/test/java/org/web3j/abi/FunctionReturnDecoderTest.java index <HASH>..<HASH> 100644 --- a/src/test/java/org/web3j/abi/FunctionReturnDecoderTest.java +++ b/src/test/java/org/web3j/abi/FunctionReturnDecoderTest.java @@ -15,6 +15,7 @@ import org.web3j.abi.datatypes.StaticArray; import org.web3j.abi.datatypes.Type; import org.web3j.abi.datatypes.Uint; import org.web3j.abi.datatypes.Utf8String; +import org.web3j.abi.datatypes.generated.Bytes16; import org.web3j.abi.datatypes.generated.Bytes32; import org.web3j.abi.datatypes.generated.Uint256; import org.web3j.crypto.Hash; @@ -182,6 +183,28 @@ public class FunctionReturnDecoderTest { } @Test + public void testDecodeIndexedBytes32Value() { + String rawInput = "0x1234567890123456789012345678901234567890123456789012345678901234"; + byte[] rawInputBytes = Numeric.hexStringToByteArray(rawInput); + + 
assertThat(FunctionReturnDecoder.decodeIndexedValue( + rawInput, + new TypeReference<Bytes32>(){}), + equalTo(new Bytes32(rawInputBytes))); + } + + @Test + public void testDecodeIndexedBytes16Value() { + String rawInput = "0x1234567890123456789012345678901200000000000000000000000000000000"; + byte[] rawInputBytes = Numeric.hexStringToByteArray(rawInput.substring(0, 34)); + + assertThat(FunctionReturnDecoder.decodeIndexedValue( + rawInput, + new TypeReference<Bytes16>(){}), + equalTo(new Bytes16(rawInputBytes))); + } + + @Test public void testDecodeIndexedDynamicBytesValue() { DynamicBytes bytes = new DynamicBytes(new byte[]{ 1, 2, 3, 4, 5}); String encoded = TypeEncoder.encodeDynamicBytes(bytes);
Added a new condition when decoding indexed values to check for static sized Bytes types. The specific types are required to prevent cast errors when used in events (in my case Bytes<I> was trying to be cast to Bytes<I>). I'm not convinced by the branch for Utf8String and Array types - these take a String and array of Types respectively, not bytes. The fix for that is outside the scope of this change.
web3j_web3j
train
3e3bfc47a9cb3bf13e3f3f13124761f203f819ad
diff --git a/slither/printers/summary/slithir.py b/slither/printers/summary/slithir.py index <HASH>..<HASH> 100644 --- a/slither/printers/summary/slithir.py +++ b/slither/printers/summary/slithir.py @@ -39,7 +39,7 @@ class PrinterSlithIR(AbstractPrinter): continue txt += "Contract {}\n".format(contract.name) for function in contract.functions: - txt = f'\tFunction {function.canonical_name} {"" if function.is_shadowed else "(*)"}\n' + txt += f'\tFunction {function.canonical_name} {"" if function.is_shadowed else "(*)"}\n' txt += _print_function(function) for modifier in contract.modifiers: txt += "\tModifier {}\n".format(modifier.canonical_name) diff --git a/slither/slithir/convert.py b/slither/slithir/convert.py index <HASH>..<HASH> 100644 --- a/slither/slithir/convert.py +++ b/slither/slithir/convert.py @@ -1475,15 +1475,15 @@ def convert_constant_types(irs): if isinstance(ir.rvalue, TupleVariable): # TODO: fix missing Unpack conversion continue - if ir.rvalue.type.type != "int256": - ir.rvalue.set_type(ElementaryType("int256")) + if ir.rvalue.type.type not in ElementaryTypeInt: + ir.rvalue.set_type(ElementaryType(ir.lvalue.type.type)) was_changed = True if isinstance(ir, Binary): if isinstance(ir.lvalue.type, ElementaryType): if ir.lvalue.type.type in ElementaryTypeInt: for r in ir.read: - if r.type.type != "int256": - r.set_type(ElementaryType("int256")) + if r.type.type not in ElementaryTypeInt: + r.set_type(ElementaryType(ir.lvalue.type.type)) was_changed = True if isinstance(ir, (HighLevelCall, InternalCall)): func = ir.function @@ -1498,8 +1498,8 @@ def convert_constant_types(irs): t = types[idx] if isinstance(t, ElementaryType): if t.type in ElementaryTypeInt: - if arg.type.type != "int256": - arg.set_type(ElementaryType("int256")) + if arg.type.type not in ElementaryTypeInt: + arg.set_type(ElementaryType(t.type)) was_changed = True if isinstance(ir, NewStructure): st = ir.structure @@ -1507,16 +1507,16 @@ def convert_constant_types(irs): e = 
st.elems_ordered[idx] if isinstance(e.type, ElementaryType): if e.type.type in ElementaryTypeInt: - if arg.type.type != "int256": - arg.set_type(ElementaryType("int256")) + if arg.type.type not in ElementaryTypeInt: + arg.set_type(ElementaryType(e.type.type)) was_changed = True if isinstance(ir, InitArray): if isinstance(ir.lvalue.type, ArrayType): if isinstance(ir.lvalue.type.type, ElementaryType): if ir.lvalue.type.type.type in ElementaryTypeInt: for r in ir.read: - if r.type.type != "int256": - r.set_type(ElementaryType("int256")) + if r.type.type not in ElementaryTypeInt: + r.set_type(ElementaryType(ir.lvalue.type.type.type)) was_changed = True
Fix signed integer type propagation (fix #<I>)
crytic_slither
train
13d489e088e7c6237824f9d4a6854a500f16d791
diff --git a/lang/en/hub.php b/lang/en/hub.php index <HASH>..<HASH> 100644 --- a/lang/en/hub.php +++ b/lang/en/hub.php @@ -103,8 +103,8 @@ $string['licence_help'] = 'Select the licence you want to distribute your course $string['logourl'] = 'Logo URL'; $string['modulenumberaverage'] = 'Average number of course modules ({$a})'; $string['moodleorg'] = 'Moodle.org'; -$string['moodleorgregistrationdetail'] = 'The main community hub is Moodle.org. By registering your site here your information will contribute to the statistics of the worldwide Moodle community. You also have the option of joining a low-volume mailing list to receive important early notifications of security fixes and new releases of Moodle.'; -$string['moodleorgregistrationdetail2'] = 'This option allows you to register your Moodle site with Moodle.org. Registration is free. +$string['moodleorgregistrationdetail'] = 'The main community hub is called MOOCH, at hub.moodle.org. By registering your site with MOOCH your information will contribute to the statistics of the worldwide Moodle community. You also have the option of joining a low-volume mailing list to receive important early notifications of security fixes and new releases of Moodle.'; +$string['moodleorgregistrationdetail2'] = 'This option allows you to register your Moodle site with MOOCH, at hub.moodle.org. Registration is free. The main benefit of registering is that you will be added to a low-volume mailing list for important notifications such as security alerts and new releases of Moodle. By default, your information will be kept private, and will never be sold or passed on to anyone else. The only reason for collecting this information is for support purposes, and to help build up a statistical picture of the Moodle community as a whole. If you choose, you can allow your site name, country and URL to be added to the public list of Moodle Sites. 
@@ -137,8 +137,8 @@ $string['publisheremail_help'] = 'The publisher email address is used by hub adm $string['publishername'] = 'Publisher'; $string['publishername_help'] = 'The publisher is the unique person/organisation publishing the course. Most of the time it should be you, except if you do it on the behalf of someone else.'; $string['publishon'] = 'Publish on'; -$string['publishonmoodleorg'] = 'Publish on Moodle.org'; -$string['publishonspecifichub'] = 'Publish on a Hub'; +$string['publishonmoodleorg'] = 'Publish on MOOCH'; +$string['publishonspecifichub'] = 'Publish on another Hub'; $string['questionsnumber'] = 'Number of questions ({$a})'; $string['registeredcourses'] = 'Registered courses'; $string['registeredsites'] = 'Registered sites'; @@ -167,7 +167,7 @@ $string['share'] = 'Share this course for people to download'; $string['shared'] = 'Shared'; $string['shareon'] = 'Upload this course to {$a}'; $string['shareonhub'] = 'Upload this course to a hub'; -$string['shareonmoodleorg'] = 'Upload this course to Moodle.org'; +$string['shareonmoodleorg'] = 'Upload this course to MOOCH'; $string['sharepublication_help'] = 'Uploading this course to a community hub server will enable people to download it and install it on their own Moodle sites.'; $string['siteadmin'] = 'Administrator'; $string['sitecreated'] = 'Site created';
Updated some strings to mention MOOCH
moodle_moodle
train
4ef74536940ea4c8c7f8c2cb0252bfe5f0db6fdf
diff --git a/actionpack/lib/action_dispatch/middleware/request_id.rb b/actionpack/lib/action_dispatch/middleware/request_id.rb index <HASH>..<HASH> 100644 --- a/actionpack/lib/action_dispatch/middleware/request_id.rb +++ b/actionpack/lib/action_dispatch/middleware/request_id.rb @@ -1,5 +1,6 @@ require 'securerandom' require 'active_support/core_ext/string/access' +require 'active_support/core_ext/object/blank' module ActionDispatch # Makes a unique request id available to the action_dispatch.request_id env variable (which is then accessible through @@ -26,8 +27,8 @@ module ActionDispatch private def external_request_id(env) - if env["HTTP_X_REQUEST_ID"].present? - env["HTTP_X_REQUEST_ID"].gsub(/[^\w\d\-]/, "").first(255) + if request_id = env["HTTP_X_REQUEST_ID"].presence + request_id.gsub(/[^\w\d\-]/, "").first(255) end end
Load object/blank and make use of presence.
rails_rails
train
2ee7720b9e854d220d88cb2e292fd6df4a4d9971
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -251,7 +251,9 @@ def acquire_setuptools_setup(): # displaying its help information or running a non-install related # setup command) ez_setup.use_setuptools() - except (Exception, SystemExit): + except (KeyboardInterrupt, SystemExit): + raise + except Exception: return return import_setuptools_setup() @@ -453,6 +455,8 @@ def test_requirements(): # runtime when running our test suite. Versions 2.4.2 and later # can not be installed at all. have_pytest = "2.4.0" <= pytest.__version__ < "2.4.2" + except (KeyboardInterrupt, SystemExit): + raise except Exception: have_pytest = False if not have_pytest: @@ -468,6 +472,8 @@ def test_requirements(): # Version 1.4.16 may be installed but will cause pytest to fail # when running our test suite. have_py = py.__version__ < "1.4.16" + except (KeyboardInterrupt, SystemExit): + raise except Exception: have_py = False if not have_py:
fix KeyboardInterrupt & SetupExit handling in setup.py There were several occurrences in code where such exceptions could have been gobbled up when running under Python <I>.x.
suds-community_suds
train
3b1dcac8b66e51091ec588089dabbe3724434131
diff --git a/driver/src/main/java/org/kaazing/k3po/driver/internal/Robot.java b/driver/src/main/java/org/kaazing/k3po/driver/internal/Robot.java index <HASH>..<HASH> 100644 --- a/driver/src/main/java/org/kaazing/k3po/driver/internal/Robot.java +++ b/driver/src/main/java/org/kaazing/k3po/driver/internal/Robot.java @@ -358,8 +358,12 @@ public class Robot { server.setPipelineFactory(pipelineFactory(pipeline(closeOnExceptionHandler))); } for (ClientBootstrapResolver clientResolver : configuration.getClientResolvers()) { - ClientBootstrap client = clientResolver.resolve(); - client.setPipelineFactory(pipelineFactory(pipeline(closeOnExceptionHandler))); + try { + ClientBootstrap client = clientResolver.resolve(); + client.setPipelineFactory(pipelineFactory(pipeline(closeOnExceptionHandler))); + } catch (RuntimeException e) { + LOGGER.warn("Exception caught while trying to stop client pipelies: " + e); + } } // remove each handler from the configuration pipelines diff --git a/driver/src/main/java/org/kaazing/k3po/driver/internal/resolver/LocationResolver.java b/driver/src/main/java/org/kaazing/k3po/driver/internal/resolver/LocationResolver.java index <HASH>..<HASH> 100644 --- a/driver/src/main/java/org/kaazing/k3po/driver/internal/resolver/LocationResolver.java +++ b/driver/src/main/java/org/kaazing/k3po/driver/internal/resolver/LocationResolver.java @@ -20,6 +20,9 @@ import java.net.URI; import javax.el.ELContext; import javax.el.ValueExpression; +import org.jboss.netty.logging.InternalLogger; +import org.jboss.netty.logging.InternalLoggerFactory; +import org.kaazing.k3po.driver.internal.Robot; import org.kaazing.k3po.driver.internal.behavior.visitor.GenerateConfigurationVisitor; import org.kaazing.k3po.lang.internal.ast.value.AstLocation; import org.kaazing.k3po.lang.internal.ast.value.AstLocationExpression; @@ -36,6 +39,7 @@ import org.kaazing.k3po.lang.internal.ast.value.AstLocationLiteral; public class LocationResolver { private static final LocationVisitorImpl 
VISITOR = new LocationVisitorImpl(); + private static final InternalLogger LOGGER = InternalLoggerFactory.getInstance(LocationResolver.class); private final AstLocation location; private final ELContext environment; @@ -66,9 +70,14 @@ public class LocationResolver { Object location; // TODO: Remove when JUEL sync bug is fixed https://github.com/k3po/k3po/issues/147 - synchronized (environment) { - ValueExpression expression = value.getValue(); - location = expression.getValue(environment); + try { + synchronized (environment) { + ValueExpression expression = value.getValue(); + location = expression.getValue(environment); + } + } catch (javax.el.PropertyNotFoundException e) { + LOGGER.warn(e.getMessage()); + location = null; } if (location == null) { diff --git a/junit/src/main/java/org/kaazing/k3po/junit/rules/ScriptRunner.java b/junit/src/main/java/org/kaazing/k3po/junit/rules/ScriptRunner.java index <HASH>..<HASH> 100644 --- a/junit/src/main/java/org/kaazing/k3po/junit/rules/ScriptRunner.java +++ b/junit/src/main/java/org/kaazing/k3po/junit/rules/ScriptRunner.java @@ -31,6 +31,7 @@ import java.util.List; import java.util.Map; import java.util.concurrent.Callable; import java.util.concurrent.CountDownLatch; +import java.util.logging.Logger; import org.kaazing.k3po.control.internal.Control; import org.kaazing.k3po.control.internal.command.AbortCommand; @@ -260,7 +261,6 @@ final class ScriptRunner implements Callable<ScriptPair> { @Override public void initial() { - System.out.println("Hello"); synchronized (this) { this.state = NOTIFYING; for (BarrierStateListener listener : stateListeners) { @@ -323,7 +323,15 @@ final class ScriptRunner implements Callable<ScriptPair> { default: throw new IllegalArgumentException("Unrecognized event kind: " + event.getKind()); } - } finally { + } catch (Exception e) { + // TODO log this when we get a logger added to Junit, or remove need for this which always clean + // shutdown of k3po channels + e.printStackTrace(); + // NOOP 
swallow exception as this is a clean up task that may fail in case setup didn't complete, + // expressions didn't get resolved. Etc. This happens frequently when Junit Assume is used, as K3po + // will have inited the accept channels outside of the test method. + } + finally { controller.disconnect(); } }
Caught exceptions that cause catostrophic failures in K3po and thus cause junit to error out. When in fact the Junit Assume or ignore can cause these in the first place. It would be better to revisit all these places and ensure they can always clean up properly
k3po_k3po
train
68c7378b2df5d23d30e81e25d2c478ceea640fba
diff --git a/lib/model/active_record/database/index.rb b/lib/model/active_record/database/index.rb index <HASH>..<HASH> 100644 --- a/lib/model/active_record/database/index.rb +++ b/lib/model/active_record/database/index.rb @@ -3,7 +3,7 @@ module Regressor module Database module Index def indices - ::ActiveRecord::Base.connection.indexes(@model.tableize.gsub("/", "_")).map do |indexes| + ::ActiveRecord::Base.connection.indexes(@model.constantize.table_name).map do |indexes| "it { is_expected.to have_db_index #{indexes.columns} }" end.flatten.join("\n ") end
Use Model.table_name to take into account the table_name_prefix
ndea_regressor
train
6e3c7aaae627eef81c6771c874df74c7b025520d
diff --git a/test/e2e/container_probe.go b/test/e2e/container_probe.go index <HASH>..<HASH> 100644 --- a/test/e2e/container_probe.go +++ b/test/e2e/container_probe.go @@ -48,7 +48,7 @@ var _ = Describe("Probing container", func() { p, err := podClient.Create(makePodSpec(probe.withInitialDelay().build(), nil)) expectNoError(err) - Expect(wait.Poll(poll, 120*time.Second, func() (bool, error) { + Expect(wait.Poll(poll, 240*time.Second, func() (bool, error) { p, err := podClient.Get(p.Name) if err != nil { return false, err @@ -88,7 +88,7 @@ var _ = Describe("Probing container", func() { p, err := podClient.Create(makePodSpec(probe.withFailing().build(), nil)) expectNoError(err) - err = wait.Poll(poll, 120*time.Second, func() (bool, error) { + err = wait.Poll(poll, 180*time.Second, func() (bool, error) { p, err := podClient.Get(p.Name) if err != nil { return false, err
Bump the timeout for container readiness probe e2e tests To reduce the flakiness.
kubernetes_kubernetes
train
d81ee5120d245ef6ca0f92eb5a6f0a1033cd0bc9
diff --git a/Lib/fontbakery/specifications/general.py b/Lib/fontbakery/specifications/general.py index <HASH>..<HASH> 100644 --- a/Lib/fontbakery/specifications/general.py +++ b/Lib/fontbakery/specifications/general.py @@ -155,8 +155,12 @@ def com_google_fonts_check_037(font): try: import subprocess + if os.name == 'posix': + fval_exec = 'FontValidator' + elif os.name == 'nt': + fval_exec = 'FontValidator.exe' fval_cmd = [ - "FontValidator", "-file", font, "-all-tables", + fval_exec, "-file", font, "-all-tables", "-report-in-font-dir", "-no-raster-tests" ] subprocess.check_output(fval_cmd, stderr=subprocess.STDOUT)
Run FontValidator.exe if user's OS is Win
googlefonts_fontbakery
train
f004a84e7cfcb23fa267f406c482602c891d3fda
diff --git a/tests/func/test_add.py b/tests/func/test_add.py index <HASH>..<HASH> 100644 --- a/tests/func/test_add.py +++ b/tests/func/test_add.py @@ -96,7 +96,7 @@ class TestAddCmdDirectoryRecursive(TestDvc): warning = ( "You are adding a large directory 'large-dir' recursively," " consider tracking it as a whole instead.\n" - "{purple}HINT:{nc} Remove the generated DVC-files and then" + "{purple}HINT:{nc} Remove the generated DVC-file and then" " run {cyan}dvc add large-dir{nc}".format( purple=colorama.Fore.MAGENTA, cyan=colorama.Fore.CYAN,
tests: fix failing test_add (due to extra 's' in word) To close #<I>
iterative_dvc
train
4fb6269ccd2d9e0d3b59322396e9b91d40174988
diff --git a/instrument/extended.go b/instrument/extended.go index <HASH>..<HASH> 100644 --- a/instrument/extended.go +++ b/instrument/extended.go @@ -133,6 +133,7 @@ type runtimeMetrics struct { MemoryHeapIdle tally.Gauge MemoryHeapInuse tally.Gauge MemoryStack tally.Gauge + GCCPUFraction tally.Gauge NumGC tally.Counter GcPauseMs tally.Timer lastNumGC uint32 @@ -156,6 +157,7 @@ func (r *runtimeMetrics) report(metricsType ExtendedMetricsType) { r.MemoryHeapIdle.Update(float64(memStats.HeapIdle)) r.MemoryHeapInuse.Update(float64(memStats.HeapInuse)) r.MemoryStack.Update(float64(memStats.StackInuse)) + r.GCCPUFraction.Update(memStats.GCCPUFraction) // memStats.NumGC is a perpetually incrementing counter (unless it wraps at 2^32). num := memStats.NumGC @@ -219,6 +221,7 @@ func NewExtendedMetricsReporter( r.runtime.MemoryHeapIdle = memoryScope.Gauge("heapidle") r.runtime.MemoryHeapInuse = memoryScope.Gauge("heapinuse") r.runtime.MemoryStack = memoryScope.Gauge("stack") + r.runtime.GCCPUFraction = memoryScope.Gauge("gc-cpu-fraction") r.runtime.NumGC = memoryScope.Counter("num-gc") r.runtime.GcPauseMs = memoryScope.Timer("gc-pause-ms") r.runtime.lastNumGC = memstats.NumGC
[instrument] Add metric for GC CPU Fraction (#<I>)
m3db_m3x
train
721bcd5318daef941ec08ed8021642d3ff5d2db7
diff --git a/rootpy/logger/magic.py b/rootpy/logger/magic.py index <HASH>..<HASH> 100644 --- a/rootpy/logger/magic.py +++ b/rootpy/logger/magic.py @@ -61,12 +61,23 @@ def get_seh(): ErrorHandlerFunc_t = ctypes.CFUNCTYPE(None, ctypes.c_int, ctypes.c_bool, ctypes.c_char_p, ctypes.c_char_p) - dll = ctypes.cdll.LoadLibrary(libcore()) - assert dll, "Can't find `libCore` shared library. Possible bug?" + dll = None + try: + dll = ctypes.cdll.LoadLibrary(libcore()) + except OSError: + log.warning("Unable to find libCore (tried %s)", libcore()) - SetErrorHandler = dll._Z15SetErrorHandlerPFvibPKcS0_E - assert SetErrorHandler, ("Couldn't find SetErrorHandler, please submit a " - "bug report to rootpy.") + SetErrorHandler = None + try: + if dll: + SetErrorHandler = dll._Z15SetErrorHandlerPFvibPKcS0_E + except AttributeError: + pass + + if not SetErrorHandler: + log.warning("Couldn't find SetErrorHandler, please submit a bug report " + "to rootpy.") + return lambda x: None SetErrorHandler.restype = ErrorHandlerFunc_t SetErrorHandler.argtypes = ErrorHandlerFunc_t, diff --git a/rootpy/logger/tests/test_threading.py b/rootpy/logger/tests/test_threading.py index <HASH>..<HASH> 100644 --- a/rootpy/logger/tests/test_threading.py +++ b/rootpy/logger/tests/test_threading.py @@ -2,6 +2,7 @@ from __future__ import division import itertools import os +import platform import resource import thread import threading @@ -51,6 +52,10 @@ def spareprocs(): """ Compute the maximum number of threads we can start up according to ulimit """ + if platform.system().lower() == "darwin": + # Return a decent small value, we just want it to run, more grindy tests + # can take place on other machines. + return 10 nmax, _ = resource.getrlimit(resource.RLIMIT_NPROC) me = os.geteuid() return nmax - sum(1 for p in os.listdir("/proc")
Don't use a log handler if we can't obtain SetErrorHandler
rootpy_rootpy
train
f6e92c3c142c11c4005629053127a9f2f8a8d51a
diff --git a/bitex/utils.py b/bitex/utils.py index <HASH>..<HASH> 100644 --- a/bitex/utils.py +++ b/bitex/utils.py @@ -30,7 +30,7 @@ def return_api_response(formatter=None): @wraps(func) def wrapper(*args, **kwargs): try: - r = func(*args, **kwargs) + r = APIResponse(func(*args, **kwargs)) except Exception: log.exception("return_api_response(): Error during call to %s(%s, %s)", func.__name__, args, kwargs)
APIResponse is now correctly Applied and returned - Hotfix
Crypto-toolbox_bitex
train
2139f350c3bce594332bc1785fb9f35e66cf045b
diff --git a/src/FilterSetting/RangeDateFilterSettingTypeFactory.php b/src/FilterSetting/RangeDateFilterSettingTypeFactory.php index <HASH>..<HASH> 100644 --- a/src/FilterSetting/RangeDateFilterSettingTypeFactory.php +++ b/src/FilterSetting/RangeDateFilterSettingTypeFactory.php @@ -58,7 +58,7 @@ class RangeDateFilterSettingTypeFactory extends AbstractFilterSettingTypeFactory ->setTypeName('rangedate') ->setTypeIcon('bundles/metamodelsfilterrange/filter_range.png') ->setTypeClass(RangeDate::class) - ->allowAttributeTypes('numeric', 'decimal', 'timestamp'); + ->allowAttributeTypes('timestamp'); $this->dispatcher = $dispatcher; $this->filterUrlBuilder = $filterUrlBuilder;
Allow only timestamp in date filter
MetaModels_filter_range
train
ecd96587d70e1c4d6a27599de44028dbb4248bd3
diff --git a/lib/Service/DoozR/I18n/Service/Interface/Gettext.php b/lib/Service/DoozR/I18n/Service/Interface/Gettext.php index <HASH>..<HASH> 100644 --- a/lib/Service/DoozR/I18n/Service/Interface/Gettext.php +++ b/lib/Service/DoozR/I18n/Service/Interface/Gettext.php @@ -207,8 +207,8 @@ class DoozR_I18n_Service_Interface_Gettext extends DoozR_I18n_Service_Interface_ $result = setlocale(LC_ALL, $fullQualifiedLocales); $result2 = setlocale(LC_MESSAGES, $fullQualifiedLocales); - var_dump($result); - var_dump($result2); + echo setlocale(LC_MESSAGES, NULL) . PHP_EOL; + echo PHP_EOL; if ($result === null || $result === false) { throw new DoozR_I18n_Service_Exception( diff --git a/lib/Service/DoozR/I18n/tests/Service/TranslatorTest.php b/lib/Service/DoozR/I18n/tests/Service/TranslatorTest.php index <HASH>..<HASH> 100644 --- a/lib/Service/DoozR/I18n/tests/Service/TranslatorTest.php +++ b/lib/Service/DoozR/I18n/tests/Service/TranslatorTest.php @@ -217,7 +217,7 @@ class TranslatorTest extends DoozR_Base_Service_Test_Abstract public function testTranslateWithArguments() { $locale = self::$fixtures['locale']['valid']; - self::$service->setActiveLocale('en'); + self::$service->setActiveLocale('en_US'); $translator = self::$service->getTranslator(); $translator->setNamespace('default');
Fix for travis cmd failed
Doozer-Framework_Doozr
train
defe505ec831b009b55d81e379164912eb2698d8
diff --git a/taxtastic/subcommands/taxtable.py b/taxtastic/subcommands/taxtable.py index <HASH>..<HASH> 100644 --- a/taxtastic/subcommands/taxtable.py +++ b/taxtastic/subcommands/taxtable.py @@ -176,8 +176,9 @@ def action(args): schema=args.schema, columns=['tax_id', 'tax_name', 'is_primary']) names = names[names['is_primary']].set_index('tax_id') + len_nodes = len(nodes) nodes = nodes.join(names['tax_name']) - # TODO: assert len(nodes) same before and after names join + assert len_nodes == len(nodes) taxtable = build_taxtable(nodes, ranks) # subset taxtable clade lineages
added one primary name per node assertion
fhcrc_taxtastic
train
453d04e4ba05bec467a2dcea139e75edb1abcab5
diff --git a/dev/SapphireTest.php b/dev/SapphireTest.php index <HASH>..<HASH> 100644 --- a/dev/SapphireTest.php +++ b/dev/SapphireTest.php @@ -801,6 +801,8 @@ class SapphireTest extends PHPUnit_Framework_TestCase { */ public function resetDBSchema($includeExtraDataObjects = false) { if(self::using_temp_db()) { + DataObject::reset(); + // clear singletons, they're caching old extension info which is used in DatabaseAdmin->doBuild() Injector::inst()->unregisterAllObjects();
BUG Reset DataObject caches in SapphireTest->resetDBSchema() This became a problem with fdcd7a2e where $custom_database_fields were cached, but never reset. It lead to extensions not applying correctly in SapphireTest->setUpOnce().
silverstripe_silverstripe-framework
train
2b91662fb9c07010b973e06beed05f3041b0608e
diff --git a/src/config/cloudinit.go b/src/config/cloudinit.go index <HASH>..<HASH> 100644 --- a/src/config/cloudinit.go +++ b/src/config/cloudinit.go @@ -18,15 +18,14 @@ package config import ( "strings" + "unicode" ) func isCloudConfig(userdata []byte) bool { header := strings.SplitN(string(userdata), "\n", 2)[0] - // Explicitly trim the header so we can handle user-data from - // non-unix operating systems. The rest of the file is parsed - // by yaml, which correctly handles CRLF. - header = strings.TrimSuffix(header, "\r") + // Trim trailing whitespaces + header = strings.TrimRightFunc(header, unicode.IsSpace) return (header == "#cloud-config") } diff --git a/src/config/config_test.go b/src/config/config_test.go index <HASH>..<HASH> 100644 --- a/src/config/config_test.go +++ b/src/config/config_test.go @@ -45,7 +45,7 @@ func TestParse(t *testing.T) { out: out{err: ErrEmpty}, }, { - in: in{config: []byte(`#cloud-config`)}, + in: in{config: []byte(`#cloud-config `)}, out: out{err: ErrCloudConfig}, }, {
config: update check for cloud-config coreos-cloudinit became more permissive when checking for a valid header.
coreos_ignition
train
689ca79e8a20b6d1267ad643fc4cf742c7a3b5da
diff --git a/lib/screwcap/runner.rb b/lib/screwcap/runner.rb index <HASH>..<HASH> 100644 --- a/lib/screwcap/runner.rb +++ b/lib/screwcap/runner.rb @@ -80,8 +80,8 @@ class Runner if command[:type] == :remote log " I: (#{address}): #{command[:command]}\n", :color => :green stdout, stderr, exit_code, exit_signal = ssh_exec! ssh, command[:command] - log(" O: (#{address}): #{stdout}", :color => :green) unless stdout.nil? - errorlog(" O: (#{address}): #{stderr}", :color => :red) unless stderr.nil? + log(" O: (#{address}): #{stdout}", :color => :green) unless stdout.nil? or stdout == "" + errorlog(" O: (#{address}): #{stderr}", :color => :red) unless stderr.nil? or stderr == "" errorlog(" E: (#{address}): #{command[:command]} return exit code: #{exit_code}\n", :color => :red) if exit_code != 0 return exit_code elsif command[:type] == :local diff --git a/spec/task_spec.rb b/spec/task_spec.rb index <HASH>..<HASH> 100644 --- a/spec/task_spec.rb +++ b/spec/task_spec.rb @@ -28,7 +28,7 @@ describe "Tasks" do it "should be able to execute statements on a remote server" do task = @deployer.__tasks.find {|t| t.name == :task1 } Runner.execute! task, @deployer.__options - @stderr.size.should == 12 + @stderr.size.should == 0 @stdout.size.should == 26 end
fix bug, outputting blank string to stdout for every task
gammons_screwcap
train
5fbfb440a957e188bb0b57512a272fad9ef368f8
diff --git a/spec/support/rails_template_with_data.rb b/spec/support/rails_template_with_data.rb index <HASH>..<HASH> 100644 --- a/spec/support/rails_template_with_data.rb +++ b/spec/support/rails_template_with_data.rb @@ -318,7 +318,7 @@ append_file "db/seeds.rb", "\n\n" + <<-RUBY.strip_heredoc ActiveAdmin::Comment.create!( namespace: :admin, author: AdminUser.first, - body: "Test comment #{i}", + body: "Test comment \#{i}", resource: categories.sample, ) end
Fix undefined variable error in app generation
activeadmin_activeadmin
train
633a51a2caef35ad7c5bac7f7363d12f31624038
diff --git a/spec/public/shared/collection_shared_spec.rb b/spec/public/shared/collection_shared_spec.rb index <HASH>..<HASH> 100644 --- a/spec/public/shared/collection_shared_spec.rb +++ b/spec/public/shared/collection_shared_spec.rb @@ -50,7 +50,7 @@ share_examples_for 'A public Collection' do it { @articles.should respond_to(:blank?) } - describe '#blank?' do + describe '#blank?' do describe 'when the collection is empty' do it 'should be true' do @articles.clear.blank?.should be_true
use hash conditions on Text properties so that correct WHERE conditions are generated by adapter (necessary for Oracle Text/CLOB properties comparison)
datamapper_dm-core
train
ec7d3d7659bfb02e23aa3e0adf638690c37f0324
diff --git a/src/Composer/Satis/Command/BuildCommand.php b/src/Composer/Satis/Command/BuildCommand.php index <HASH>..<HASH> 100644 --- a/src/Composer/Satis/Command/BuildCommand.php +++ b/src/Composer/Satis/Command/BuildCommand.php @@ -99,6 +99,7 @@ EOT // fetch options $requireAll = isset($config['require-all']) && true === $config['require-all']; + $requireDependencies = isset($config['require-dependencies']) && true === $config['require-dependencies']; if (!$requireAll && !isset($config['require'])) { $output->writeln('No explicit requires defined, enabling require-all'); $requireAll = true; @@ -113,7 +114,7 @@ EOT } $composer = $this->getApplication()->getComposer(true, $config); - $packages = $this->selectPackages($composer, $output, $verbose, $requireAll); + $packages = $this->selectPackages($composer, $output, $verbose, $requireAll, $requireDependencies); if ($htmlView = !$input->getOption('no-html-output')) { $htmlView = !isset($config['output-html']) || $config['output-html']; @@ -133,7 +134,7 @@ EOT } } - private function selectPackages(Composer $composer, OutputInterface $output, $verbose, $requireAll) + private function selectPackages(Composer $composer, OutputInterface $output, $verbose, $requireAll, $requireDependencies) { $selected = array(); @@ -178,7 +179,7 @@ EOT } // process links if any - foreach ($links as $link) { + for (; $link = current($links); next($links)) { $name = $link->getTarget(); $matches = $pool->whatProvides($name, $link->getConstraint()); @@ -200,6 +201,17 @@ EOT $output->writeln('Selected '.$package->getPrettyName().' ('.$package->getPrettyVersion().')'); } $selected[$package->getUniqueName()] = $package; + + // append dependencies + if (!$requireAll && $requireDependencies) { + foreach ($package->getRequires() as $dependencyLink) { + // avoid php, ext-* and lib-* + $target = $dependencyLink->getTarget(); + if (strpos($target, '/')) { + $links[] = $dependencyLink; + } + } + } } }
adding ability to dump dependencies Change-Id: I6cc<I>d0b<I>c<I>fbbb<I>ad<I>ed0d2c<I>
composer_satis
train
7d3b7fa58c00a6e4fba63a7a18ebd6c5915893d7
diff --git a/webpack.config.js b/webpack.config.js index <HASH>..<HASH> 100644 --- a/webpack.config.js +++ b/webpack.config.js @@ -22,4 +22,9 @@ module.exports = { }, ], }, + + externals: { + react: "React", + "react-dom": "reactDOM", + }, };
FIX: UMD package without React (#<I>)
kiwicom_orbit-components
train
3063fb45dd2c3bb4f97a28fb43d6345bbe6fca25
diff --git a/pkg/minikube/cluster/mount.go b/pkg/minikube/cluster/mount.go index <HASH>..<HASH> 100644 --- a/pkg/minikube/cluster/mount.go +++ b/pkg/minikube/cluster/mount.go @@ -20,6 +20,7 @@ import ( "fmt" "os" "os/exec" + "runtime" "sort" "strconv" "strings" @@ -92,8 +93,12 @@ func Mount(r mountRunner, source string, target string, c *MountConfig) error { } return &MountError{ErrorType: MountErrorUnknown, UnderlyingError: errors.Wrapf(err, "mount with cmd %s ", rr.Command())} } - if _, err := r.RunCmd(exec.Command("/bin/bash", "-c", fmt.Sprintf("sudo chmod %o %s", c.Mode, target))); err != nil { - return &MountError{ErrorType: MountErrorChmod, UnderlyingError: errors.Wrap(err, "chmod folder")} + + // skipping macOS due to https://github.com/kubernetes/minikube/issues/13070 + if runtime.GOOS != "darwin" { + if _, err := r.RunCmd(exec.Command("/bin/bash", "-c", fmt.Sprintf("sudo chmod %o %s", c.Mode, target))); err != nil { + return &MountError{ErrorType: MountErrorChmod, UnderlyingError: errors.Wrap(err, "chmod folder")} + } } klog.Infof("mount successful: %q", rr.Output())
skip chmod-ing mounted folder on macOS
kubernetes_minikube
train
b9c6e5a91b6f05a5da4546383abea92a004a22e0
diff --git a/kernel/classes/ezcontentobject.php b/kernel/classes/ezcontentobject.php index <HASH>..<HASH> 100644 --- a/kernel/classes/ezcontentobject.php +++ b/kernel/classes/ezcontentobject.php @@ -2901,10 +2901,18 @@ class eZContentObject extends eZPersistentObject { $showInvisibleNodesCond = self::createFilterByVisibilitySQLString( $params['IgnoreVisibility'] ); } + + // related class identifier filter + $relatedClassIdentifiersSQL = ''; if ( isset( $params['RelatedClassIdentifiers'] ) && is_array( $params['RelatedClassIdentifiers'] ) ) { - $relatedClassIdentifiersString = implode( "', '", $params['RelatedClassIdentifiers'] ); - $relatedClassIdentifiersSQL = "ezcontentclass.identifier IN ('$relatedClassIdentifiersString') AND "; + $relatedClassIdentifiers = array(); + foreach( $params['RelatedClassIdentifiers'] as $classIdentifier ) + { + $relatedClassIdentifiers[] = "'" . $db->escapeString( $classIdentifier ) . "'"; + } + $relatedClassIdentifiersSQL = $db->generateSQLINStatement( $relatedClassIdentifiers, 'ezcontentclass.identifier', false, true, 'string' ). " AND"; + unset( $classIdentifier, $relatedClassIdentifiers ); } }
Improved related class filtering pull request code
ezsystems_ezpublish-legacy
train
f2c82fa21216d660ebfa2542ddf3e2f4efa86867
diff --git a/docs/running_psalm/issues.md b/docs/running_psalm/issues.md index <HASH>..<HASH> 100644 --- a/docs/running_psalm/issues.md +++ b/docs/running_psalm/issues.md @@ -1106,12 +1106,22 @@ extending all its template params. ```php /** - * @template-implements IteratorAggregate<int> + * @template-implements ArrayAccess<int> */ -class SomeIterator implements IteratorAggregate +class SomeIterator implements ArrayAccess { - public function getIterator() { - yield 5; + public function offsetSet($offset, $value) { + } + + public function offsetExists($offset) { + return false; + } + + public function offsetUnset($offset) { + } + + public function offsetGet($offset) { + return null; } } ``` diff --git a/src/Psalm/Type.php b/src/Psalm/Type.php index <HASH>..<HASH> 100644 --- a/src/Psalm/Type.php +++ b/src/Psalm/Type.php @@ -251,7 +251,7 @@ abstract class Type && count($generic_params) === 1 ) { array_unshift($generic_params, new Union([new TArrayKey])); - } elseif (($generic_type_value === 'iterable' || $generic_type_value === 'Traversable') + } elseif (in_array($generic_type_value, ['iterable', 'Traversable', 'Iterator', 'IteratorAggregate'], true) && count($generic_params) === 1 ) { array_unshift($generic_params, new Union([new TMixed])); diff --git a/tests/Loop/ForeachTest.php b/tests/Loop/ForeachTest.php index <HASH>..<HASH> 100644 --- a/tests/Loop/ForeachTest.php +++ b/tests/Loop/ForeachTest.php @@ -973,8 +973,6 @@ class ForeachTest extends \Psalm\Tests\TestCase '<?php /** * @param Iterator<string> $arr - * @psalm-suppress MissingTemplateParam - * @psalm-suppress MixedAssignment */ function foo(Iterator $arr) : void { foreach ($arr as $a) {} diff --git a/tests/TypeParseTest.php b/tests/TypeParseTest.php index <HASH>..<HASH> 100644 --- a/tests/TypeParseTest.php +++ b/tests/TypeParseTest.php @@ -186,7 +186,7 @@ class TypeParseTest extends TestCase */ public function testInteratorAndTraversable() { - $this->assertSame('Iterator<int>&Traversable', (string) 
Type::parseString('Iterator<int>&Traversable')); + $this->assertSame('Iterator<mixed, int>&Traversable', (string) Type::parseString('Iterator<int>&Traversable')); } /** @@ -195,7 +195,7 @@ class TypeParseTest extends TestCase public function testTraversableAndIteratorOrNull() { $this->assertSame( - 'Traversable&Iterator<int>|null', + 'Traversable&Iterator<mixed, int>|null', (string) Type::parseString('Traversable&Iterator<int>|null') ); }
Add support for Iterator<X> type (#<I>) * Add support for Iterator<X> type * Fix tests
vimeo_psalm
train
17fa800636fa448f302a2feba1487bdb9baaae5d
diff --git a/lib/public_activity/version.rb b/lib/public_activity/version.rb index <HASH>..<HASH> 100644 --- a/lib/public_activity/version.rb +++ b/lib/public_activity/version.rb @@ -1,4 +1,4 @@ module PublicActivity # A constant with gem's version - VERSION = '0.4.0' + VERSION = '0.4.0.rc1' end
<I>.rc1 release
chaps-io_public_activity
train
cb8c1b71d55117c8c8ef6b4361677d207bf1cd2c
diff --git a/tests/common/test_runtime_value.py b/tests/common/test_runtime_value.py index <HASH>..<HASH> 100644 --- a/tests/common/test_runtime_value.py +++ b/tests/common/test_runtime_value.py @@ -18,6 +18,7 @@ logger = log.get_logger(__name__) def setup_module(module=None): # set the test_libraries path temporarily to the correct value + testing_utils.test_multithreading_lock.acquire() testing_utils.rewind_and_set_libraries({"unit_test_state_machines": testing_utils.get_test_sm_path("unit_test_state_machines")}) logger.debug(rafcon.core.config.global_config.get_config_value("LIBRARY_PATHS")["unit_test_state_machines"]) @@ -25,19 +26,17 @@ def setup_module(module=None): def test_runtime_values(caplog): state_machine_manager.delete_all_state_machines() - testing_utils.test_multithreading_lock.acquire() - try: - sm = state_machine_execution_engine.execute_state_machine_from_path( - path=testing_utils.get_test_sm_path("unit_test_state_machines/library_runtime_value_test")) - state_machine_manager.remove_state_machine(sm.state_machine_id) - assert sm.root_state.output_data["data_output_port1"] == 114 - finally: - testing_utils.shutdown_environment(caplog=caplog, expected_warnings=0, expected_errors=0) + sm = state_machine_execution_engine.execute_state_machine_from_path( + path=testing_utils.get_test_sm_path("unit_test_state_machines/library_runtime_value_test")) + state_machine_manager.remove_state_machine(sm.state_machine_id) + assert sm.root_state.output_data["data_output_port1"] == 114 + testing_utils.assert_logger_warnings_and_errors(caplog) def teardown_module(module=None): testing_utils.reload_config(gui_config=False) + testing_utils.test_multithreading_lock.release() if __name__ == '__main__':
tests: example of acquire and release thread lock for hole module
DLR-RM_RAFCON
train
ee7dd7c91121970559874ada8296827bf93f2875
diff --git a/drools-compiler/src/main/java/org/drools/compiler/kie/builder/impl/KieContainerImpl.java b/drools-compiler/src/main/java/org/drools/compiler/kie/builder/impl/KieContainerImpl.java index <HASH>..<HASH> 100644 --- a/drools-compiler/src/main/java/org/drools/compiler/kie/builder/impl/KieContainerImpl.java +++ b/drools-compiler/src/main/java/org/drools/compiler/kie/builder/impl/KieContainerImpl.java @@ -115,10 +115,21 @@ public class KieContainerImpl public Results updateDependencyToVersion(ReleaseId currentReleaseId, ReleaseId newReleaseId) { checkNotClasspathKieProject(); - // if the new and the current release are equal (a snapshot) check if there is an older version with the same releaseId - InternalKieModule currentKM = currentReleaseId.equals( newReleaseId ) ? - (InternalKieModule) ((KieRepositoryImpl)kr).getOldKieModule( currentReleaseId ) : - (InternalKieModule) kr.getKieModule( currentReleaseId ); + + ReleaseId installedReleaseId = getReleaseId(); + InternalKieModule currentKM; + if (currentReleaseId.getGroupId().equals(installedReleaseId.getGroupId()) && + currentReleaseId.getArtifactId().equals(installedReleaseId.getArtifactId())) { + // upgrading the kProject itself: taking the kmodule from there + currentKM = ((KieModuleKieProject)kProject).getInternalKieModule(); + } else { + // upgrading a transitive dependency: taking the kmodule from the krepo + // if the new and the current release are equal (a snapshot) check if there is an older version with the same releaseId + currentKM = currentReleaseId.equals(newReleaseId) ? + (InternalKieModule) ((KieRepositoryImpl) kr).getOldKieModule(currentReleaseId) : + (InternalKieModule) kr.getKieModule(currentReleaseId); + } + return update(currentKM, newReleaseId); }
[BZ-<I>] during an incremental compilation take the kmodule to be updated directly from the kproject stored inside the kcontainer
kiegroup_drools
train
85ca32c566e2a676249a8145bbffb64a8adcbe11
diff --git a/config.js b/config.js index <HASH>..<HASH> 100644 --- a/config.js +++ b/config.js @@ -45,7 +45,7 @@ export default { */ aws: { batch: { - vcpusMax: 4, + vcpusMax: 12, memoryMax: 15360 } },
Raise vCPUs limit on app definitions to <I>.
OpenNeuroOrg_openneuro
train
5149c9be86838bf25c5b9bc23cbf8ed0a91434dd
diff --git a/tests/app/models.py b/tests/app/models.py index <HASH>..<HASH> 100644 --- a/tests/app/models.py +++ b/tests/app/models.py @@ -5,6 +5,7 @@ from modelcluster.fields import ParentalKey from taggit.models import TaggedItemBase from wagtail.wagtailadmin.edit_handlers import FieldPanel, PageChooserPanel from wagtail.wagtailcore.models import Page +from wagtail.wagtailsearch import index from wagtail.wagtailsnippets.models import register_snippet from wagtailnews.decorators import newsindex @@ -46,6 +47,10 @@ class NewsItem(AbstractNewsItem): FieldPanel('date'), ] + search_fields = AbstractNewsItem.search_fields + [ + index.SearchField('title'), + ] + def __str__(self): return self.title diff --git a/wagtailnews/models.py b/wagtailnews/models.py index <HASH>..<HASH> 100644 --- a/wagtailnews/models.py +++ b/wagtailnews/models.py @@ -133,11 +133,11 @@ class AbstractNewsItem(index.Indexed, ClusterableModel): FieldPanel('date'), ] - search_fields = ( + search_fields = [ index.FilterField('date'), index.FilterField('newsindex_id'), index.FilterField('live'), - ) + ] class Meta: ordering = ('-date',)
Change AbstractNewsitem.search_fields to a list Wagtail made this change in <I>, so we should probably do the same thing.
neon-jungle_wagtailnews
train
5a5b7e30669699195653823c557a4a3fea3bc599
diff --git a/aioxmpp/disco/service.py b/aioxmpp/disco/service.py index <HASH>..<HASH> 100644 --- a/aioxmpp/disco/service.py +++ b/aioxmpp/disco/service.py @@ -61,6 +61,8 @@ class Node(object): .. automethod:: iter_identities + .. automethod:: as_info_xso + To access items, use: .. automethod:: iter_items @@ -253,6 +255,42 @@ class Node(object): del self._identities[key] self.on_info_changed() + def as_info_xso(self, stanza=None): + """ + Construct a :class:`~.disco.xso.InfoQuery` response object for this + node. + + :param stanza: The IQ request stanza + :type stanza: :class:`~aioxmpp.IQ` + :rtype: iterable of :class:`~.disco.xso.InfoQuery` + :return: The disco#info response for this node. + + The resulting :class:`~.disco.xso.InfoQuery` carries the features and + identities as returned by :meth:`iter_features` and + :meth:`iter_identities`. The :attr:`~.disco.xso.InfoQuery.node` + attribute is at its default value and may need to be set by the caller + accordingly. + + `stanza` is passed to :meth:`iter_features` and + :meth:`iter_identities`. See those methods for information on the + effects. + + .. versionadded:: 0.9 + """ + + result = disco_xso.InfoQuery() + result.features.update(self.iter_features(stanza)) + result.identities[:] = ( + disco_xso.Identity( + category=category, + type_=type_, + lang=lang, + name=name, + ) + for category, type_, lang, name in self.iter_identities(stanza) + ) + return result + class StaticNode(Node): """ diff --git a/docs/api/changelog.rst b/docs/api/changelog.rst index <HASH>..<HASH> 100644 --- a/docs/api/changelog.rst +++ b/docs/api/changelog.rst @@ -131,6 +131,8 @@ Version 0.9 * :meth:`aioxmpp.disco.StaticNode.clone` +* :meth:`aioxmpp.disco.Node.as_info_xso` + .. 
_api-changelog-0.8: Version 0.8 diff --git a/tests/disco/test_service.py b/tests/disco/test_service.py index <HASH>..<HASH> 100644 --- a/tests/disco/test_service.py +++ b/tests/disco/test_service.py @@ -383,6 +383,120 @@ class TestNode(unittest.TestCase): [] ) + def test_as_info_xso(self): + n = disco_service.Node() + + features = [ + "http://jabber.org/protocol/disco#info", + unittest.mock.sentinel.f1, + unittest.mock.sentinel.f2, + unittest.mock.sentinel.f3, + ] + + identities = [ + ("cat1", "t1", + structs.LanguageTag.fromstr("lang-a"), "name11"), + ("cat1", "t1", + structs.LanguageTag.fromstr("lang-b"), "name12"), + ("cat2", "t2", None, "name2"), + ("cat3", "t3", None, None), + ] + + with contextlib.ExitStack() as stack: + iter_features = stack.enter_context( + unittest.mock.patch.object(n, "iter_features") + ) + iter_features.return_value = iter(features) + + iter_identities = stack.enter_context( + unittest.mock.patch.object(n, "iter_identities") + ) + iter_identities.return_value = iter(identities) + + iter_items = stack.enter_context( + unittest.mock.patch.object(n, "iter_items") + ) + + result = n.as_info_xso() + + self.assertIsInstance( + result, + disco_xso.InfoQuery, + ) + + iter_items.assert_not_called() + + iter_features.assert_called_once_with(None) + iter_identities.assert_called_once_with(None) + + self.assertSetEqual( + result.features, + set(features), + ) + + self.assertCountEqual( + [ + (i.category, i.type_, i.lang, i.name) + for i in result.identities + ], + identities, + ) + + def test_as_info_xso_with_stanza(self): + n = disco_service.Node() + + features = [ + "http://jabber.org/protocol/disco#info", + unittest.mock.sentinel.f1, + ] + + identities = [ + ("cat1", "t1", + structs.LanguageTag.fromstr("lang-a"), "name11"), + ("cat1", "t1", + structs.LanguageTag.fromstr("lang-b"), "name12"), + ] + + with contextlib.ExitStack() as stack: + iter_features = stack.enter_context( + unittest.mock.patch.object(n, "iter_features") + ) + 
iter_features.return_value = iter(features) + + iter_identities = stack.enter_context( + unittest.mock.patch.object(n, "iter_identities") + ) + iter_identities.return_value = iter(identities) + + iter_items = stack.enter_context( + unittest.mock.patch.object(n, "iter_items") + ) + + result = n.as_info_xso(unittest.mock.sentinel.stanza) + + self.assertIsInstance( + result, + disco_xso.InfoQuery, + ) + + iter_items.assert_not_called() + + iter_features.assert_called_once_with(unittest.mock.sentinel.stanza) + iter_identities.assert_called_once_with(unittest.mock.sentinel.stanza) + + self.assertSetEqual( + result.features, + set(features), + ) + + self.assertCountEqual( + [ + (i.category, i.type_, i.lang, i.name) + for i in result.identities + ], + identities, + ) + class TestStaticNode(unittest.TestCase): def setUp(self):
disco: add method to generate InfoQuery from Node
horazont_aioxmpp
train
4d335b418ad9052c707531b36b91482a73f6b655
diff --git a/tests/test_code_quality.py b/tests/test_code_quality.py index <HASH>..<HASH> 100644 --- a/tests/test_code_quality.py +++ b/tests/test_code_quality.py @@ -35,6 +35,7 @@ def collect_errors(): line = None for idx, line in enumerate(test_file): + line = line.decode('utf-8') line_no = idx + 1 if idx == 0 and len(line.strip()) == 0: @@ -55,7 +56,7 @@ def collect_errors(): errors.append((message_eof, fname, line_no)) def test(fname): - with open(fname, "rt") as test_file: + with open(fname, "rb") as test_file: test_this_file(fname, test_file) def canonicalize(path):
test_code_quality.py use explicit utf-8 encoding to read source files (#<I>) * test_code_quality.py use explicit utf-8 encoding to read source files * fix decoding in test_code_quality
bokeh_bokeh
train
a82f4f29113420cd69fa73ab32149e167280ad8c
diff --git a/OpenPNM/Physics/models/multiphase.py b/OpenPNM/Physics/models/multiphase.py index <HASH>..<HASH> 100644 --- a/OpenPNM/Physics/models/multiphase.py +++ b/OpenPNM/Physics/models/multiphase.py @@ -66,6 +66,7 @@ def conduit_conductance(physics, phase, network, throat_conductance, open_conduits = -closed_conduits throat_value = phase[throat_conductance] value = throat_value*open_conduits + throat_value*closed_conduits*factor + value = value[phase.throats(physics.name)] return value @@ -104,4 +105,5 @@ def late_pore_filling(physics, phase, network, Pc, Swp_star=0.2, eta=3, values = Swp*phase[pore_occupancy]*(Pc_star < Pc) else: values = (1-Swp)*(1-phase[pore_occupancy])*(Pc_star < Pc) + value = value[phase.throats(physics.name)] return values
Fixed bug in multiphase It was returning the values based on old fashioned OpenPNM style. Now, it can work with the cloned physics.
PMEAL_OpenPNM
train
c349b093625879551a022feb029325651bca2597
diff --git a/api/python/setup.py b/api/python/setup.py index <HASH>..<HASH> 100644 --- a/api/python/setup.py +++ b/api/python/setup.py @@ -52,7 +52,7 @@ setup( ], author='quiltdata', author_email='contact@quiltdata.io', - license='LICENSE', + license='Apache-2.0', url='https://github.com/quiltdata/quilt', keywords='', install_requires=[
Fix license on PyPI (#<I>)
quiltdata_quilt
train
a18f305de1bdc579a137cce158722731cc7d8b51
diff --git a/lib/server.js b/lib/server.js index <HASH>..<HASH> 100644 --- a/lib/server.js +++ b/lib/server.js @@ -295,55 +295,59 @@ function Client(cfg, stream, socket) { switch (info.type) { case 'session': - accept = function() { - if (replied) - return; + if (listenerCount(self, 'session')) { + accept = function() { + if (replied) + return; - replied = true; + replied = true; - stream.channelOpenConfirm(info.sender, localChan, Channel.MAX_WINDOW, - Channel.PACKET_SIZE); + stream.channelOpenConfirm(info.sender, + localChan, + Channel.MAX_WINDOW, + Channel.PACKET_SIZE); - return new Session(self, info, localChan); - }; + return new Session(self, info, localChan); + }; - if (listenerCount(self, 'session')) self.emit('session', accept, reject); - else + } else reject(); break; case 'direct-tcpip': - accept = function() { - if (replied) - return; + if (listenerCount(self, 'tcpip')) { + accept = function() { + if (replied) + return; - replied = true; + replied = true; - stream.channelOpenConfirm(info.sender, localChan, Channel.MAX_WINDOW, - Channel.PACKET_SIZE); - - var chaninfo = { - type: undefined, - incoming: { - id: localChan, - window: Channel.MAX_WINDOW, - packetSize: Channel.PACKET_SIZE, - state: 'open' - }, - outgoing: { - id: info.sender, - window: info.window, - packetSize: info.packetSize, - state: 'open' - } + stream.channelOpenConfirm(info.sender, + localChan, + Channel.MAX_WINDOW, + Channel.PACKET_SIZE); + + var chaninfo = { + type: undefined, + incoming: { + id: localChan, + window: Channel.MAX_WINDOW, + packetSize: Channel.PACKET_SIZE, + state: 'open' + }, + outgoing: { + id: info.sender, + window: info.window, + packetSize: info.packetSize, + state: 'open' + } + }; + + return new Channel(chaninfo, self); }; - return new Channel(chaninfo, self); - }; - - if (listenerCount(self, 'tcpip')) - self.emit('tcpip', accept, reject); - else + self.emit('tcpip', accept, reject, info.data); + } else reject(); break; default:
Server: don't bother creating accept on auto-reject
mscdex_ssh2
train
0e6a82c78e86bd311fb346c55788a3ff0d53b1ff
diff --git a/README.md b/README.md index <HASH>..<HASH> 100644 --- a/README.md +++ b/README.md @@ -59,6 +59,9 @@ with colorful.with_palette(my_company_palette) as c: # use f-string (only Python >= 3.6) print(f'{colorful.bold}Hello World') + +# support for chinese +print(colorful.red('你好')) ``` ## Key Features diff --git a/colorful/core.py b/colorful/core.py index <HASH>..<HASH> 100644 --- a/colorful/core.py +++ b/colorful/core.py @@ -20,6 +20,7 @@ from . import ansi from . import rgb from . import styles from . import terminal +from .utils import PY2, DEFAULT_ENCODE, UNICODE #: Holds the name of the env variable which is # used as path to the default rgb.txt file @@ -30,9 +31,6 @@ DEFAULT_RGB_TXT_PATH = os.environ.get( #: Holds the color names mapped to RGB channels COLOR_PALETTE = rgb.parse_rgb_txt_file(path=DEFAULT_RGB_TXT_PATH) -#: Holds a flag if the Python version is 2.X -PY2 = sys.version_info.major == 2 - class ColorfulError(Exception): """ @@ -209,17 +207,16 @@ def style_string(string, ansi_style, colormode, nested=False): ansi_start_code, ansi_end_code = ansi_style # replace nest placeholders with the current begin style - if PY2 and isinstance(string, unicode): # noqa - str_type = unicode # noqa - else: - str_type = str - string = str_type(string).replace(ansi.NEST_PLACEHOLDER, ansi_start_code) + if PY2: + if isinstance(string, str): + string = string.decode(DEFAULT_ENCODE) + string = UNICODE(string).replace(ansi.NEST_PLACEHOLDER, ansi_start_code) - return '{start_code}{string}{end_code}{nest_ph}'.format( - start_code=ansi_start_code, - string=string, - end_code=ansi_end_code, - nest_ph=ansi.NEST_PLACEHOLDER if nested else '') + return ('{start_code}{string}{end_code}{nest_ph}' + .format(start_code=ansi_start_code, + string=string, + end_code=ansi_end_code, + nest_ph=ansi.NEST_PLACEHOLDER if nested else '')) class ColorfulString(object): @@ -230,17 +227,15 @@ class ColorfulString(object): self.orig_string = orig_string self.styled_string = styled_string - 
if not PY2: - def __str__(self): - return self.styled_string - else: + if PY2: def __unicode__(self): - string = self.styled_string - if isinstance(string, bytes): - string = string.decode('utf-8') - return string + return self.styled_string - __str__ = __unicode__ + def __str__(self): + return self.styled_string.encode(DEFAULT_ENCODE) + else: + def __str__(self): + return self.styled_string def __len__(self): return len(self.orig_string) diff --git a/colorful/utils.py b/colorful/utils.py index <HASH>..<HASH> 100644 --- a/colorful/utils.py +++ b/colorful/utils.py @@ -10,6 +10,16 @@ :license: MIT, see LICENSE for more details. """ import re +import sys + +PY2 = sys.version_info.major == 2 + +if PY2: + UNICODE = unicode # noqa +else: + UNICODE = str + +DEFAULT_ENCODE = sys.stdout.encoding def hex_to_rgb(value): diff --git a/tests/test_core.py b/tests/test_core.py index <HASH>..<HASH> 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -22,6 +22,7 @@ os.environ['COLORFUL_NO_MODULE_OVERWRITE'] = '1' import colorful.core as core # noqa import colorful.terminal as terminal # noqa +from colorful.utils import UNICODE # noqa @pytest.mark.parametrize('style_string,expected', [ @@ -770,13 +771,8 @@ def test_unicode_support(): s = u'🐧🎉🐧' styled_s = colorful.black(s) - if core.PY2: - unicode_type = unicode # noqa - else: - unicode_type = str - # test basic unicode support - assert unicode_type(styled_s) == u'\033[30m🐧🎉🐧\033[39m' + assert UNICODE(styled_s) == u'\033[30m🐧🎉🐧\033[39m' def test_combining_styles():
support for chinese in py2 (#9) * support for chinese in py2 and py3 * modify style_string return value * judgment python2 version * move some var to utils module * modify test_core * fix bug * imporve chinese
timofurrer_colorful
train
1b699dc7666d14f12a1bb8f3021b02d90121219f
diff --git a/findbugs/src/java/edu/umd/cs/findbugs/detect/SerializableIdiom.java b/findbugs/src/java/edu/umd/cs/findbugs/detect/SerializableIdiom.java index <HASH>..<HASH> 100644 --- a/findbugs/src/java/edu/umd/cs/findbugs/detect/SerializableIdiom.java +++ b/findbugs/src/java/edu/umd/cs/findbugs/detect/SerializableIdiom.java @@ -230,7 +230,7 @@ public class SerializableIdiom extends BytecodeScanningDetector ( sawSerialVersionUID ? NORMAL_PRIORITY : LOW_PRIORITY)) .addClass(getThisClass().getClassName())); // Downgrade class-level warnings if it's a GUI class. - int priority = isGUIClass ? LOW_PRIORITY : NORMAL_PRIORITY; + int priority = false && isGUIClass ? LOW_PRIORITY : NORMAL_PRIORITY; if (obj.getClassName().endsWith("_Stub")) priority++; if (isExternalizable && !hasPublicVoidConstructor && !isAbstract) @@ -325,25 +325,37 @@ public class SerializableIdiom extends BytecodeScanningDetector fieldsThatMightBeAProblem.containsKey(nameOfField)) { try { OpcodeStack.Item first = stack.getStackItem(0); - JavaClass classStored = first.getJavaClass(); - double isSerializable = Analyze.isDeepSerializable(classStored); - if (isSerializable <= 0.2) { - XField f = fieldsThatMightBeAProblem.get(nameOfField); - - int priority = LOW_PRIORITY; - if (implementsSerializableDirectly || seenTransientField) priority--; - if (isSerializable <= 0.1) priority--; - - fieldWarningList.add(new BugInstance(this, "SE_BAD_FIELD_STORE", priority) - .addClass(getThisClass().getClassName()) - .addField(f) - .addClass(classStored) - .addSourceLine(this)); + JavaClass classStored = first.getJavaClass(); + double isSerializable = Analyze + .isDeepSerializable(classStored); + if (isSerializable <= 0.2) { + XField f = fieldsThatMightBeAProblem.get(nameOfField); + + String sig = f.getSignature(); + // System.out.println("Field signature: " + sig); + // System.out.println("Class stored: " + + // classStored.getClassName()); + String genSig = "L" + + classStored.getClassName().replace('.', '/') + + ";"; 
+ if (!sig.equals(genSig)) { + int priority = LOW_PRIORITY; + if (implementsSerializableDirectly + || seenTransientField) + priority--; + if (isSerializable <= 0.1) + priority--; + + fieldWarningList.add(new BugInstance(this, + "SE_BAD_FIELD_STORE", priority).addClass( + getThisClass().getClassName()).addField(f) + .addClass(classStored).addSourceLine(this)); + } + } + } catch (Exception e) { + // ignore it + } } - - } catch (Exception e) { - // ignore it - }} } stack.sawOpcode(this,seen); @@ -375,7 +387,7 @@ public class SerializableIdiom extends BytecodeScanningDetector priority = NORMAL_PRIORITY; if (implementsSerializableDirectly || sawSerialVersionUID) priority--; - else if (isGUIClass) { + else if (false && isGUIClass) { priority++; if (priority < LOW_PRIORITY) priority = LOW_PRIORITY;
Only report a bad_se_field_store if the type of the value stored is different than the type of the field git-svn-id: <URL>
spotbugs_spotbugs
train
825710a1e8cfbf7d501f2825fda1c73fcb8b7073
diff --git a/pydle/client.py b/pydle/client.py index <HASH>..<HASH> 100644 --- a/pydle/client.py +++ b/pydle/client.py @@ -415,7 +415,7 @@ class BasicClient: self._handler_top_level = False await handler(message) - except: + except Exception: # fixme: This should be a more specific exception but I don't know what could be raised here self.logger.exception('Failed to execute %s handler.', method) async def on_unknown(self, message): diff --git a/pydle/features/ircv3/ircv3_2.py b/pydle/features/ircv3/ircv3_2.py index <HASH>..<HASH> 100644 --- a/pydle/features/ircv3/ircv3_2.py +++ b/pydle/features/ircv3/ircv3_2.py @@ -20,7 +20,7 @@ class IRCv3_2Support(metadata.MetadataSupport, monitor.MonitoringSupport, tags.T async def on_capability_cap_notify_available(self, value): """ Take note of new or removed capabilities. """ return True - + async def on_capability_chghost_available(self, value): """ Server reply to indicate a user we are in a common channel with changed user and/or host. """ return True diff --git a/pydle/features/ircv3/tags.py b/pydle/features/ircv3/tags.py index <HASH>..<HASH> 100644 --- a/pydle/features/ircv3/tags.py +++ b/pydle/features/ircv3/tags.py @@ -98,7 +98,7 @@ class TaggedMessage(rfc1459.RFC1459Message): if self.tags: raw_tags = [] for tag, value in self.tags.items(): - if value == True: + if value is True: raw_tags.append(tag) else: raw_tags.append(tag + TAG_VALUE_SEPARATOR + value) diff --git a/pydle/features/isupport.py b/pydle/features/isupport.py index <HASH>..<HASH> 100644 --- a/pydle/features/isupport.py +++ b/pydle/features/isupport.py @@ -55,9 +55,9 @@ class ISUPPORTSupport(rfc1459.RFC1459Support): # And have callbacks update other internals. for entry, value in isupport.items(): - if value != False: + if value is not False: # A value of True technically means there was no value supplied; correct this for callbacks. 
- if value == True: + if value is True: value = None method = 'on_isupport_' + pydle.protocol.identifierify(entry) diff --git a/pydle/features/rfc1459/client.py b/pydle/features/rfc1459/client.py index <HASH>..<HASH> 100644 --- a/pydle/features/rfc1459/client.py +++ b/pydle/features/rfc1459/client.py @@ -375,7 +375,7 @@ class RFC1459Support(BasicClient): """ if not self.is_channel(channel): raise ValueError('Not a channel: {}'.format(channel)) - elif not self.in_channel(channel): + if not self.in_channel(channel): raise NotInChannel(channel) await self.rawmsg('TOPIC', channel, topic) diff --git a/pydle/features/whox.py b/pydle/features/whox.py index <HASH>..<HASH> 100644 --- a/pydle/features/whox.py +++ b/pydle/features/whox.py @@ -24,7 +24,7 @@ class WHOXSupport(isupport.ISUPPORTSupport, account.AccountSupport): else: # Find account name of person. pass - + async def _create_user(self, nickname): super()._create_user(nickname) if self.registered and 'WHOX' not in self._isupport: diff --git a/pydle/utils/irccat.py b/pydle/utils/irccat.py index <HASH>..<HASH> 100644 --- a/pydle/utils/irccat.py +++ b/pydle/utils/irccat.py @@ -17,7 +17,7 @@ class IRCCat(Client): self.async_stdin = None async def _send(self, data): - await super(IRCCat, self)._send(data) + await super()._send(data) async def process_stdin(self): """ Yes. """
Additional Fixes Remove args from super(), extra whitespace, raise-else, assertation on Trues Will need to look at that broad except, but it's not bare anymore...
Shizmob_pydle
train
413f83e1460abaae7031d250184496a73feb20b1
diff --git a/vstutils/static/js/guiElements.js b/vstutils/static/js/guiElements.js index <HASH>..<HASH> 100644 --- a/vstutils/static/js/guiElements.js +++ b/vstutils/static/js/guiElements.js @@ -331,7 +331,18 @@ guiElements.autocomplete = function() this._onRender = function(options) { this._onBaseRender(options) - // необходимо ли теперь оставлять ветку if(options.searchObj) для хардкода на js? + + /* + * options.searchObj - object for JS hardcode, which aim is to redefine way of getting data for autocomplete. + * + * Example of hardcode: + * tabSignal.connect("openapi.factory.ansiblemodule", function(data) + * { + * let inventory = apiansiblemodule.one.view.definition.properties.inventory; + * inventory.type = "autocomplete" + * inventory.searchObj = new apiinventory.list(); + * }); + */ if(options.searchObj) { return new autoComplete({ @@ -362,7 +373,6 @@ guiElements.autocomplete = function() return; } - // На основе текста из search_str сложить возможные вариант подсказок в массив matches $.when(options.searchObj.search(search_str)).done((rawdata) => { if(!rawdata || !rawdata.data || !rawdata.data.results) @@ -384,6 +394,10 @@ guiElements.autocomplete = function() } }); } + /* + * options.enum - array, which comes from api. + * This array has data for autocomplete. + */ else if(options.enum) { return new autoComplete({ @@ -428,6 +442,10 @@ guiElements.autocomplete = function() } }); } + /* + * options.additionalProperties - object, which comes from api. + * This object has info about model and fields, where data for autocomplete is stored. + */ else if(options.additionalProperties) { let props = getInfoFromAdditionalProperties(options); @@ -506,10 +524,29 @@ guiElements.select2 = function(filed, filed_value, parent_object) this._onRender = function(options) { this._onBaseRender(options) - // необходимо ли теперь оставлять ветку if(options.search) для хардкода на js? 
+ + /* + * options.search - function for JS hardcode, which aim is to redefine way of getting data for select2. + * @param {object} params - argument from select2 transport function + * @param {object} filed - filed to which we want add select2 + * @param {integer/string} filed_value - value of field + * @param {object} parent_object - object (one) - model of single object page + * @returns Deferred object + * + * Example of hardcode: + * tabSignal.connect("openapi.factory.ansiblemodule", function(data) + * { + * let filed = apiansiblemodule.one.view.definition.properties.inventory; + * filed.format = "select2" + * filed.search = function(params, filed, filed_value, parent_object) + * { + * //some code here + * } + * }); + */ if(options.search) { - $('#'+this.element_id).select2({ + return $('#'+this.element_id).select2({ width: '100%', ajax: { transport: function (params, success, failure) @@ -541,6 +578,32 @@ guiElements.select2 = function(filed, filed_value, parent_object) } }); } + /* + * options.enum - array, which comes from api. + * This array has data for select2. + */ + else if(options.enum) + { + let data = []; + for(let i in options.enum) + { + data.push( + { + id: options.enum[i], + text: options.enum[i], + } + ) + } + + $('#'+this.element_id).select2({ + width: '100%', + data: data + }); + } + /* + * options.additionalProperties - object, which comes from api. + * This object has info about model and fields, where data for select2 is stored. + */ else if(options.additionalProperties) { let props = getInfoFromAdditionalProperties(options);
Commented hardcode for autocomplete and select2 was removed. polemarch/ce#<I> polemarch/ce#<I> [ci skip]
vstconsulting_vstutils
train
c5de8301981532b978af6b1ee160057337a91101
diff --git a/Cache/FileSystem.php b/Cache/FileSystem.php index <HASH>..<HASH> 100644 --- a/Cache/FileSystem.php +++ b/Cache/FileSystem.php @@ -70,14 +70,14 @@ class sb_Cache_FileSystem implements sb_Cache_Base{ try{ mkdir($dir, 0777, true); } catch (Exception $e){ - throw new Exception('Could create cache directory: '.$key." - ".$e->getMessage()); + throw new Exception('Could create cache directory: '.$file_path." - ".$e->getMessage()); } } try{ $fh = fopen($file_path, 'a+'); } catch (Exception $e){ - throw new Exception('Could not write to cache: '.$key." - ".$e->getMessage()); + throw new Exception('Could not write to cache: '.$file_path." - ".$e->getMessage()); } //exclusive lock @@ -94,7 +94,7 @@ class sb_Cache_FileSystem implements sb_Cache_Base{ $data = serialize(array($lifetime, $data)); if (fwrite($fh, $data)===false){ - throw new Exception('Could not write to cache: '.$key); + throw new Exception('Could not write to cache: '.$file_path); } fclose($fh);
added full file path to errors when permission denied writing to cache
surebert_surebert-framework
train
570f3984c6273e53837fe1fbbb245b703fcf52ae
diff --git a/plugins/metadata_extractor/server/metadata_extractor.py b/plugins/metadata_extractor/server/metadata_extractor.py index <HASH>..<HASH> 100644 --- a/plugins/metadata_extractor/server/metadata_extractor.py +++ b/plugins/metadata_extractor/server/metadata_extractor.py @@ -17,7 +17,8 @@ # limitations under the License. ############################################################################### -import os.path +import os +import six from hachoir_core.error import HachoirError from hachoir_metadata import extractMetadata @@ -58,7 +59,8 @@ class MetadataExtractor(object): Extract metadata from file on client or server using hachoir-metadata. """ try: - parser = createParser(unicode(self.path), str(self.path)) + parser = createParser(six.text_type(self.path), + six.binary_type(self.path)) if parser is None: raise HachoirError
Failing style test for metadata extractor in py3
girder_girder
train
0cda98ec780474c1eda2ac653b19122f6ecfc0a9
diff --git a/src/article/settings/DefaultSettings.js b/src/article/settings/DefaultSettings.js index <HASH>..<HASH> 100644 --- a/src/article/settings/DefaultSettings.js +++ b/src/article/settings/DefaultSettings.js @@ -27,6 +27,7 @@ export default { 'newspaper-article-ref.authors': { required: true }, 'newspaper-article-ref.containerTitle': { required: true }, 'newspaper-article-ref.title': { required: true }, + 'organisation.institution': { required: true }, 'patent-ref.containerTitle': { required: true }, 'patent-ref.inventors': { required: true }, 'patent-ref.title': { required: true },
Let institution be mandatory for Organisations.
substance_texture
train
8f7a071e6438b0817720cfb20f1e9bf5653efe16
diff --git a/wandb/integration/keras/keras.py b/wandb/integration/keras/keras.py index <HASH>..<HASH> 100644 --- a/wandb/integration/keras/keras.py +++ b/wandb/integration/keras/keras.py @@ -174,8 +174,8 @@ patch_tf_keras() class _CustomOptimizer(tf.keras.optimizers.Optimizer): def __init__(self): super(_CustomOptimizer, self).__init__(name="CustomOptimizer") + self._resource_apply_dense = tf.function(self._resource_apply_dense) - @tf.function def _resource_apply_dense(self, grad, var): var.assign(grad)
lazy tf.function (#<I>)
wandb_client
train
b0dc4ebc4d0abcc4f9462b40067035eef61411d7
diff --git a/src/jquery.contextMenu.js b/src/jquery.contextMenu.js index <HASH>..<HASH> 100755 --- a/src/jquery.contextMenu.js +++ b/src/jquery.contextMenu.js @@ -156,7 +156,7 @@ var // currently active contextMenu trigger my: "left top", at: "right top", of: this, - collision: "fit" + collision: "flipfit fit" }).css('display', ''); } else { // determine contextMenu position
Issue #<I> - properly position submenu when there's not enough space
swisnl_jQuery-contextMenu
train
452bcb30009ca6d01bdb88951d3fa57176a80f0d
diff --git a/bin/codemods/src/single-tree-rendering.js b/bin/codemods/src/single-tree-rendering.js index <HASH>..<HASH> 100644 --- a/bin/codemods/src/single-tree-rendering.js +++ b/bin/codemods/src/single-tree-rendering.js @@ -7,13 +7,19 @@ * * Transforms `renderWithReduxStore()` to `context.primary/secondary`. * - * Adds `context` to params in middlewares when using `ReactDom.render()`. + * Adds `context` to params in middlewares when needed * * Adds `next` to params and `next()` to body in middlewares when using * `ReactDom.render()` or `renderWithReduxStore()`. * * Adds `makeLayout` and `clientRender` to `page()` route definitions and * accompanying import statement. + * + * Removes: + * `ReactDom.unmountComponentAtNode( document.getElementById( 'secondary' ) );` + * + * Removes: + * Un-used ReactDom imports. */ /** @@ -78,6 +84,39 @@ export default function transformer( file, api ) { } /** + * Removes imports maintaining any comments above them + * + * @param {object} collection Collection containing at least one node. Comments are preserved only from first node. + */ + function removeImport( collection ) { + const node = collection.nodes()[ 0 ]; + + // Find out if import had comments above it + const comments = _.get( node, 'comments', [] ); + + // Remove import (and any comments with it) + collection.remove(); + + // Put back that removed comment (if any) + if ( comments.length ) { + const isRemovedExternal = isExternal( node ); + + // Find remaining external or internal dependencies and place comments above first one + root + .find( j.ImportDeclaration ) + .filter( p => { + // Look for only imports that are same type as the removed import was + return isExternal( p.value ) === isRemovedExternal; + } ) + .at( 0 ) + .replaceWith( p => { + p.value.comments = p.value.comments ? 
p.value.comments.concat( comments ) : comments; + return p.value; + } ); + } + } + + /** * Ensure `context` is among params * * @param {object} path Path object that wraps a single node @@ -286,23 +325,55 @@ export default function transformer( file, api ) { .filter( p => ! p.value.specifiers.length ); if ( orphanImportHelpers.size() ) { - // Find out if import had comment above it - const comment = _.get( orphanImportHelpers.nodes(), '[0].comments[0]', false ); + removeImport( orphanImportHelpers ); + } - // Remove empty `import 'lib/react-helpers'` (and any comments with it) - orphanImportHelpers.remove(); + /** + * Removes: + * ``` + * ReactDom.unmountComponentAtNode( document.getElementById( 'secondary' ) ); + * ``` + */ + root + .find( j.CallExpression, { + callee: { + type: 'MemberExpression', + object: { + name: 'ReactDom', + }, + property: { + name: 'unmountComponentAtNode', + }, + }, + } ) + // Ensures we remove only nodes containing `document.getElementById( 'secondary' )` + .filter( p => _.get( p, 'value.arguments[0].arguments[0].value' ) === 'secondary' ) + .remove(); - // Put back that removed comment (if any) - if ( comment ) { - // Find internal dependencies and place comment above first one - root - .find( j.ImportDeclaration ) - .filter( p => ! isExternal( p.value ) ) - .at( 0 ) - .replaceWith( p => { - p.value.comments = [ comment ]; - return p.value; - } ); + // Find if `ReactDom` is used + const reactDomDefs = root.find( j.MemberExpression, { + object: { + name: 'ReactDom', + }, + } ); + + // Remove stranded `react-dom` imports + if ( ! reactDomDefs.size() ) { + const importReactDom = root.find( j.ImportDeclaration, { + specifiers: [ + { + local: { + name: 'ReactDom', + }, + }, + ], + source: { + value: 'react-dom', + }, + } ); + + if ( importReactDom.size() ) { + removeImport( importReactDom ); } }
Single tree rendering codemod: transform unmountComponentAtNode (#<I>) Update single tree rendering codemod to remove instances of: ``` ReactDom.unmountComponentAtNode( document.getElementById( 'secondary' ) ); ``` As well to clean out stranded `react-dom` imports when not used anymore in files modified by the codemod.
Automattic_wp-calypso
train
566e6e24de8579408debc9ffc1886b99f1b0257d
diff --git a/docs/api/Props.md b/docs/api/Props.md index <HASH>..<HASH> 100644 --- a/docs/api/Props.md +++ b/docs/api/Props.md @@ -92,6 +92,16 @@ This is a bound action creator, so it returns nothing. > * `true` - Asynchronous validation is currently running in preparation to submit a form > * a `string` - The name of the field that just blurred to trigger asynchronous validation +### `blur(field:String, value:any) : Function` + +> Marks a field as blurred in the Redux store. +This is a bound action creator, so it returns nothing. + +### `change(field:String, value:any) : Function` + +> Changes the value of a field in the Redux store. +This is a bound action creator, so it returns nothing. + ### `destroy() : Function` > Destroys the form state in the Redux store. By default, this will be called for you in diff --git a/src/__tests__/reduxForm.spec.js b/src/__tests__/reduxForm.spec.js index <HASH>..<HASH> 100644 --- a/src/__tests__/reduxForm.spec.js +++ b/src/__tests__/reduxForm.spec.js @@ -78,6 +78,8 @@ const describeReduxForm = (name, structure, combineReducers, expect) => { 'array', 'asyncValidate', 'asyncValidating', + 'blur', + 'change', 'destroy', 'dirty', 'dispatch', diff --git a/src/propTypes.js b/src/propTypes.js index <HASH>..<HASH> 100644 --- a/src/propTypes.js +++ b/src/propTypes.js @@ -27,6 +27,8 @@ const propTypes = { unshift: func.isRequired // function to unshift a value into an array field }), asyncValidate: func.isRequired, // function to trigger async validation + blur: func.isRequired, // action to mark a field as blurred + change: func.isRequired, // action to change the value of a field destroy: func.isRequired, // action to destroy the form's data in Redux dispatch: func.isRequired, // the Redux dispatch action handleSubmit: func.isRequired, // function to submit the form diff --git a/src/reduxForm.js b/src/reduxForm.js index <HASH>..<HASH> 100755 --- a/src/reduxForm.js +++ b/src/reduxForm.js @@ -348,6 +348,8 @@ const createReduxForm = 
anyTouched, asyncValidate: this.asyncValidate, asyncValidating, + blur, + change, destroy, dirty, dispatch,
Replaced blur and change bound actions as form props (#<I>)
erikras_redux-form
train
18a300aa3ef2b717441ad9381d0ce4da8a1f089a
diff --git a/python-package/lightgbm/engine.py b/python-package/lightgbm/engine.py index <HASH>..<HASH> 100644 --- a/python-package/lightgbm/engine.py +++ b/python-package/lightgbm/engine.py @@ -188,12 +188,11 @@ def train( if num_boost_round <= 0: raise ValueError("num_boost_round should be greater than zero.") + predictor: Optional[_InnerPredictor] = None if isinstance(init_model, (str, Path)): predictor = _InnerPredictor(model_file=init_model, pred_parameter=params) elif isinstance(init_model, Booster): predictor = init_model._to_predictor(dict(init_model.params, **params)) - else: - predictor = None init_iteration = predictor.num_total_iteration if predictor is not None else 0 # check dataset if not isinstance(train_set, Dataset):
fix mypy error in engine.py (#<I>)
Microsoft_LightGBM
train
8eba891cce8726ea8227d3d2b631e68835cdd83f
diff --git a/intercept.go b/intercept.go index <HASH>..<HASH> 100644 --- a/intercept.go +++ b/intercept.go @@ -5,19 +5,16 @@ import ( "reflect" ) -// An "interceptor" is a function that is invoked by the framework at a -// designated time (BEFORE or AFTER) an action invcation. -// -// Since an interceptor may be used across many user Controllers, it is a -// function that takes the base Controller, rather than a method on a user -// controller. +// An "interceptor" is functionality invoked by the framework BEFORE or AFTER +// an action. // // An interceptor may optionally return a Result (instead of nil). Depending on // when the interceptor was invoked, the response is different: // 1. BEFORE: No further interceptors are invoked, and neither is the action. // 2. AFTER: Further interceptors are still run. // In all cases, any returned Result will take the place of any existing Result. -// But in the BEFORE case, that returned Result is guaranteed to be final, while +// +// In the BEFORE case, that returned Result is guaranteed to be final, while // in the AFTER case it is possible that a further interceptor could emit its // own Result. //
Small update to interceptor docs.
revel_revel
train
ab7f49c89189d7213cab670cc649d8a334b46461
diff --git a/uw_trumba/models.py b/uw_trumba/models.py index <HASH>..<HASH> 100644 --- a/uw_trumba/models.py +++ b/uw_trumba/models.py @@ -157,6 +157,16 @@ class Permission(models.Model): # Return True if self.level is edit or a higher permission return self.in_editor_group() or self.in_showon_group() + def is_higher_permission(self, level): + # Return True if self.level is higher than the given level + return (self.is_publish() and + level != Permission.PUBLISH or + self.is_edit() and + level != Permission.PUBLISH and + level != Permission.EDIT or + self.is_showon() and + level == Permission.VIEW) + def set_edit(self): self.level = Permission.EDIT @@ -183,7 +193,9 @@ class Permission(models.Model): self.level == other.level) def __lt__(self, other): - return self.level < other.level and self.uwnetid < other.uwnetid + return (self.is_higher_permission(other.level) or + self.level == other.level and + self.uwnetid < other.uwnetid) def __str__(self): return json.dumps(self.to_json()) diff --git a/uw_trumba/tests/test_calendars.py b/uw_trumba/tests/test_calendars.py index <HASH>..<HASH> 100644 --- a/uw_trumba/tests/test_calendars.py +++ b/uw_trumba/tests/test_calendars.py @@ -33,8 +33,8 @@ class TestCalendars(TestCase): self.assertEqual(trumba_cal.name, 'Seattle calendar') perms = sorted(trumba_cal.permissions.values()) self.assertEqual(len(perms), 3) - self.assertTrue(perms[0].is_edit()) - self.assertTrue(perms[1].is_publish()) + self.assertTrue(perms[0].is_publish()) + self.assertTrue(perms[1].is_edit()) self.assertTrue(perms[2].is_showon()) trumba_cal1 = cals.get_calendar('sea', 11321)
Roll back the last change (c<I>) to permission sort
uw-it-aca_uw-restclients-trumba
train
3d46080efad74dd0fb3edabe8ebab0f01e5570dd
diff --git a/src/main/java/io/openliberty/tools/common/plugins/util/DevUtil.java b/src/main/java/io/openliberty/tools/common/plugins/util/DevUtil.java index <HASH>..<HASH> 100644 --- a/src/main/java/io/openliberty/tools/common/plugins/util/DevUtil.java +++ b/src/main/java/io/openliberty/tools/common/plugins/util/DevUtil.java @@ -1018,6 +1018,14 @@ public abstract class DevUtil { throw new RuntimeException(e); } catch (InterruptedException e) { error("Thread was interrupted while starting the container: " + e.getMessage()); + } catch (RuntimeException r) { + // remove container in case of an error trying to run the container because the docker run --rm will not rm the container + String containerId = getContainerId(); + if (containerId != null && !containerId.isEmpty()) { + String dockerRmCmd = "docker container rm " + containerId; + execDockerCmd(dockerRmCmd, 10); + } + throw r; } } @@ -1117,13 +1125,12 @@ public abstract class DevUtil { try { if (dockerRunProcess != null) { info("Stopping container..."); - String dockerPsCmd = "docker ps -qf name=" + DEVMODE_CONTAINER_NAME; - debug("docker ps command: " + dockerPsCmd); - String containerId = execDockerCmd(dockerPsCmd, 10); - - String dockerStopCmd = "docker stop " + containerId; - debug("docker stop command: " + dockerStopCmd); - execDockerCmd(dockerStopCmd, 30); + String containerId = getContainerId(); + debug("Stopping container id=" + containerId); + if (containerId != null && !containerId.isEmpty()) { + String dockerStopCmd = "docker stop " + containerId; + execDockerCmd(dockerStopCmd, 30); + } } } catch (RuntimeException r) { error("Error stopping container: " + r.getMessage()); @@ -1133,6 +1140,12 @@ public abstract class DevUtil { } } + private String getContainerId() { + // -q = quiet, only id number, -a = include stopped containers, -f = filter by key=value. 
-f must be last + String dockerPsCmd = "docker ps -aqf name=" + DEVMODE_CONTAINER_NAME; + return execDockerCmd(dockerPsCmd, 10); + } + /** * @param timeout unit is seconds * @return the stdout of the command or null for no output on stdout
When startContainer() fails check for and remove the container left over.
WASdev_ci.common
train
70cbbd3f288e87914c93ab21259139a0375e2655
diff --git a/pyqg/tests/test_fftw.py b/pyqg/tests/test_fftw.py index <HASH>..<HASH> 100644 --- a/pyqg/tests/test_fftw.py +++ b/pyqg/tests/test_fftw.py @@ -22,11 +22,11 @@ def test_fftw_rfft2(Nx = 64, Ny = None, n = 7200): A = np.random.randn(Nx,Ny) Ai = A.copy() - tstart = time.clock() + tstart = time.time() for i in range(n): Ah = pyfftw.interfaces.numpy_fft.rfft2(Ai, threads=1) Ai = pyfftw.interfaces.numpy_fft.irfft2(Ah, threads=1) - tend = time.clock() + tend = time.time() # error after nmax fft cycles abs_err = np.abs((A-Ai)).max()
change timing routine to handle python <I> deprecation
pyqg_pyqg
train
8157ab9358905e9e855911b87b5ac87fc3ad37c8
diff --git a/packages/bonde-admin-canary/src/scenes/Auth/scenes/ResetPassword/tokenVerify.graphql.js b/packages/bonde-admin-canary/src/scenes/Auth/scenes/ResetPassword/tokenVerify.graphql.js index <HASH>..<HASH> 100644 --- a/packages/bonde-admin-canary/src/scenes/Auth/scenes/ResetPassword/tokenVerify.graphql.js +++ b/packages/bonde-admin-canary/src/scenes/Auth/scenes/ResetPassword/tokenVerify.graphql.js @@ -2,6 +2,8 @@ import gql from 'graphql-tag' export default gql` query TokenVerify ($token: String!) { - reset_password_verify(token: $token) + reset_password_verify(token: $token) { + id + } } `
chore(canary): update verify token on reset password
nossas_bonde-client
train
908a12bf1829085c82f5053b1266044276fc6840
diff --git a/spec/unit/helpers/collection_spec.rb b/spec/unit/helpers/collection_spec.rb index <HASH>..<HASH> 100644 --- a/spec/unit/helpers/collection_spec.rb +++ b/spec/unit/helpers/collection_spec.rb @@ -29,7 +29,7 @@ describe ActiveAdmin::Helpers::Collection do end it "should return the collection size for a collection with group by, select and custom order" do - expect(collection_size(Post.select("title, count(*) as nb_posts").group(:title).order("nb_posts"))).to eq 2 + expect(collection_size(Post.select(:title).group(:title).order('length(title)'))).to eq 2 end it "should take the defined collection by default" do
fix SQL syntax error ```sql SELECT COUNT(title, count(*) as nb_posts) AS count_title_count_all_as_nb_posts, title AS title FROM "posts" GROUP BY title ```
activeadmin_activeadmin
train
f4f1946f6158c10919f42aed09d5044b805d408a
diff --git a/lib/mutations.rb b/lib/mutations.rb index <HASH>..<HASH> 100644 --- a/lib/mutations.rb +++ b/lib/mutations.rb @@ -1,4 +1,3 @@ -require 'active_support' require 'active_support/core_ext/hash/indifferent_access' require 'active_support/core_ext/string/inflections' require 'date' @@ -30,11 +29,11 @@ module Mutations def error_message_creator=(creator) @error_message_creator = creator end - + def cache_constants=(val) @cache_constants = val end - + def cache_constants? @cache_constants end
Removing unneeded requirement for the active_support module.
cypriss_mutations
train
79c8d99062b8848723bc2a9848dda8d33ccbc6fd
diff --git a/test/matrix/test_set_get.py b/test/matrix/test_set_get.py index <HASH>..<HASH> 100644 --- a/test/matrix/test_set_get.py +++ b/test/matrix/test_set_get.py @@ -16,6 +16,19 @@ def test_set_pair(): assert m.get_pair('b', 'a') == 1 +def test_update_key_set(): + + """ + Keys should be added to a set of stored keys. + """ + + m = TextMatrix() + m.set_pair('a', 'b', 1) + m.set_pair('a', 'c', 2) + + assert m.keys == set(['a', 'b', 'c']) + + def test_missing_key(): """ diff --git a/textplot/matrix.py b/textplot/matrix.py index <HASH>..<HASH> 100644 --- a/textplot/matrix.py +++ b/textplot/matrix.py @@ -18,6 +18,16 @@ class Matrix: Initialize the underlying dictionary. """ + self.clear() + + + def clear(self): + + """ + Reset the pair mappings and key set. + """ + + self.keys = set() self.pairs = {} @@ -49,6 +59,7 @@ class Matrix: """ key = self.key(term1, term2) + self.keys.update([term1, term2]) self.pairs[key] = value @@ -85,10 +96,10 @@ class TextMatrix(Matrix): self.pairs = {} # By default, use all terms. - self.terms = terms or text.terms.keys() + terms = terms or text.terms.keys() - pairs = combinations(self.terms, 2) - count = comb(len(self.terms), 2) + pairs = combinations(terms, 2) + count = comb(len(terms), 2) for t1, t2 in bar(pairs, expected_size=count, every=1000): @@ -111,7 +122,7 @@ class TextMatrix(Matrix): pairs = OrderedDict() - for term in self.terms: + for term in self.keys: score = self.get_pair(anchor, term) if score: pairs[term] = score
In Matrix, maintain a keys as pairs are indexed, instead of setting a terms attribute directly.
davidmcclure_textplot
train
64a5b1517f6c212275e8512ead87421393975a60
diff --git a/autocrud.js b/autocrud.js index <HASH>..<HASH> 100644 --- a/autocrud.js +++ b/autocrud.js @@ -1,7 +1,8 @@ var ObjectID = require('mongodb').ObjectID, jsonSchema = require('json-schema'); -module.exports = function (options) { +module.exports = function Autocrud(options) { + if (!(this instanceof Autocrud)) return new Autocrud(options); // Establish required options var app = options.app, @@ -48,7 +49,7 @@ module.exports = function (options) { // GET - var getRouteFn = function (req, res) { + this.getRouteFn = function (req, res) { var cursor = collection.find(createQuery(req)), sort = req.param('sort'), limit = req.param('limit'), @@ -72,11 +73,11 @@ module.exports = function (options) { }); }; if (getCreate) { - if (getAuthentication) app.get(rootObjectPath, getAuthentication, getRouteFn); - else app.get(rootObjectPath, getRouteFn); + if (getAuthentication) app.get(rootObjectPath, getAuthentication, this.getRouteFn); + else app.get(rootObjectPath, this.getRouteFn); } - var getIdRouteFn = function (req, res) { + this.getIdRouteFn = function (req, res) { collection.findOne(createQuery(req, {_id: ObjectID(req.params.id)}), function (err, document) { if (err) return res.json(500, err); if (!document) return res.send(404); @@ -84,13 +85,13 @@ module.exports = function (options) { }); }; if (getCreate) { - if (getAuthentication) app.get(rootObjectPath + '/:id', getAuthentication, getIdRouteFn); - else app.get(rootObjectPath + '/:id', getIdRouteFn); + if (getAuthentication) app.get(rootObjectPath + '/:id', getAuthentication, this.getIdRouteFn); + else app.get(rootObjectPath + '/:id', this.getIdRouteFn); } // POST - var postRouteFn = function (req, res) { + this.postRouteFn = function (req, res) { var report = jsonSchema.validate(req.body, schema); if (!report.valid) return res.json(400, report.errors); if (postTransform) postTransform(req.body); @@ -101,13 +102,13 @@ module.exports = function (options) { }); }; if (postCreate) { - if (postAuthentication) 
app.post(rootObjectPath, postAuthentication, postRouteFn); - else app.post(rootObjectPath, postRouteFn); + if (postAuthentication) app.post(rootObjectPath, postAuthentication, this.postRouteFn); + else app.post(rootObjectPath, this.postRouteFn); } // PUT - var putIdRouteFn = function (req, res) { + this.putIdRouteFn = function (req, res) { var report = jsonSchema.validate(req.body, schema); if (!report.valid) return res.json(400, report.errors); if (putTransform) putTransform(req.body); @@ -118,13 +119,13 @@ module.exports = function (options) { }); }; if (putCreate) { - if (putAuthentication) app.put(rootObjectPath + '/:id', putAuthentication, putIdRouteFn); - else app.put(rootObjectPath + '/:id', putIdRouteFn); + if (putAuthentication) app.put(rootObjectPath + '/:id', putAuthentication, this.putIdRouteFn); + else app.put(rootObjectPath + '/:id', this.putIdRouteFn); } // DELETE - var deleteIdRouteFn = function (req, res) { + this.deleteIdRouteFn = function (req, res) { collection.remove(createQuery(req, {_id: ObjectID(req.params.id)}), function (err, modCount) { if (err) return res.json(500, err); if (modCount === 0) return res.send(404); @@ -132,7 +133,7 @@ module.exports = function (options) { }); }; if (deleteCreate) { - if (deleteAuthentication) app.delete(rootObjectPath + '/:id', deleteAuthentication, deleteIdRouteFn); - else app.delete(rootObjectPath + '/:id', deleteIdRouteFn); + if (deleteAuthentication) app.delete(rootObjectPath + '/:id', deleteAuthentication, this.deleteIdRouteFn); + else app.delete(rootObjectPath + '/:id', this.deleteIdRouteFn); } }; \ No newline at end of file
-Converted autocrud function into a named object constructor.
AndrewRademacher_auto-crud
train
6499bbce02d8a8db1fb158fd5a04d91c51dd9d5e
diff --git a/class-themeisle-sdk-product.php b/class-themeisle-sdk-product.php index <HASH>..<HASH> 100644 --- a/class-themeisle-sdk-product.php +++ b/class-themeisle-sdk-product.php @@ -60,7 +60,7 @@ if ( ! class_exists( 'ThemeIsle_SDK_Product' ) ) : /** * @var array $allowed_authors The allowed authors. */ - private $allowed_authors = array( 'proteusthemes.com', 'anarieldesign.com', 'prothemedesign.com' ); + private $allowed_authors = array( 'proteusthemes.com', 'anarieldesign.com', 'prothemedesign.com', 'cssigniter.com' ); /** * @var bool $requires_license Either user needs to activate it with license. */
Adds css igniter as author.
Codeinwp_themeisle-sdk
train
8f67d1d7e6809d528fe957cb4eb78308d87da324
diff --git a/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/ddl/DropCatalogFunctionOperation.java b/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/ddl/DropCatalogFunctionOperation.java index <HASH>..<HASH> 100644 --- a/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/ddl/DropCatalogFunctionOperation.java +++ b/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/ddl/DropCatalogFunctionOperation.java @@ -33,17 +33,14 @@ public class DropCatalogFunctionOperation implements DropOperation { private final ObjectIdentifier functionIdentifier; private final boolean ifExists; private final boolean isTemporary; - private final boolean isSystemFunction; public DropCatalogFunctionOperation( ObjectIdentifier functionIdentifier, - boolean isTemporary, - boolean isSystemFunction, - boolean ifExists) { + boolean ifExists, + boolean isTemporary) { this.functionIdentifier = functionIdentifier; this.ifExists = ifExists; this.isTemporary = isTemporary; - this.isSystemFunction = isSystemFunction; } public ObjectIdentifier getFunctionIdentifier() { @@ -59,7 +56,6 @@ public class DropCatalogFunctionOperation implements DropOperation { Map<String, Object> params = new LinkedHashMap<>(); params.put("identifier", functionIdentifier); params.put("ifExists", ifExists); - params.put("isSystemFunction", isSystemFunction); params.put("isTemporary", isTemporary); return OperationUtils.formatWithChildren( @@ -73,10 +69,6 @@ public class DropCatalogFunctionOperation implements DropOperation { return isTemporary; } - public boolean isSystemFunction() { - return isSystemFunction; - } - public String getFunctionName() { return this.functionIdentifier.getObjectName(); } diff --git a/flink-table/flink-table-planner-blink/src/main/java/org/apache/flink/table/planner/operations/SqlToOperationConverter.java 
b/flink-table/flink-table-planner-blink/src/main/java/org/apache/flink/table/planner/operations/SqlToOperationConverter.java index <HASH>..<HASH> 100644 --- a/flink-table/flink-table-planner-blink/src/main/java/org/apache/flink/table/planner/operations/SqlToOperationConverter.java +++ b/flink-table/flink-table-planner-blink/src/main/java/org/apache/flink/table/planner/operations/SqlToOperationConverter.java @@ -298,9 +298,8 @@ public class SqlToOperationConverter { return new DropCatalogFunctionOperation( identifier, - sqlDropFunction.isTemporary(), - sqlDropFunction.isSystemFunction(), - sqlDropFunction.getIfExists() + sqlDropFunction.getIfExists(), + sqlDropFunction.isTemporary() ); } }
[hotfix] remove isSystemFunction from DropCatalogFunctionOperation as it's never used
apache_flink
train
d5e6f31fe516c90385d2497e4705d7953c96c685
diff --git a/ide/plugins/guibuilder/src/main/java/org/overture/guibuilder/launching/IGuiBuilderConstants.java b/ide/plugins/guibuilder/src/main/java/org/overture/guibuilder/launching/IGuiBuilderConstants.java index <HASH>..<HASH> 100644 --- a/ide/plugins/guibuilder/src/main/java/org/overture/guibuilder/launching/IGuiBuilderConstants.java +++ b/ide/plugins/guibuilder/src/main/java/org/overture/guibuilder/launching/IGuiBuilderConstants.java @@ -5,7 +5,7 @@ package org.overture.guibuilder.launching; * @author ldc * */ -public class IGuiBuilderConstants { +public interface IGuiBuilderConstants { public static String REMOTE_CONTROL_CLASS = "org.overture.guibuilder.GuiBuilderRemote"; public static String GUI_BUILDER_BUNDLE_ID = "org.overture.ide.plugins.guibuilder";
Change constants class to an interface. [Issue: #<I>]
overturetool_overture
train
b765b9caa0217e0b982fc449e1f189a2398a5d53
diff --git a/autotest/sqlserveradapter.rb b/autotest/sqlserveradapter.rb index <HASH>..<HASH> 100644 --- a/autotest/sqlserveradapter.rb +++ b/autotest/sqlserveradapter.rb @@ -1,4 +1,5 @@ require 'autotest' +require 'activesupport' class Autotest::Sqlserveradapter < Autotest @@ -31,7 +32,7 @@ class Autotest::Sqlserveradapter < Autotest # Have to use a custom reorder method since the normal :alpha for Autotest would put the # files with ../ in the path before others. def reorder(files_to_test) - ar_tests, sqlsvr_tests = files_to_test.partition { |k,v| k.start_with?('../../../') } + ar_tests, sqlsvr_tests = files_to_test.partition { |k,v| k.starts_with?('../../../') } ar_tests.sort! { |a,b| a[0] <=> b[0] } sqlsvr_tests.sort! { |a,b| a[0] <=> b[0] } sqlsvr_tests + ar_tests
Require activesupport for Autotest::Sqlserveradapter.
rails-sqlserver_activerecord-sqlserver-adapter
train
2d2e06f4382512cd21590d17e4f98e9390473b22
diff --git a/min/server-info.php b/min/server-info.php index <HASH>..<HASH> 100644 --- a/min/server-info.php +++ b/min/server-info.php @@ -16,10 +16,13 @@ if (!$enabled) { header('Content-Type: text/plain'); $file = __FILE__; +$tmp = sys_get_temp_dir(); + echo <<<EOD __FILE__ : $file SCRIPT_FILENAME : {$_SERVER['SCRIPT_FILENAME']} DOCUMENT_ROOT : {$_SERVER['DOCUMENT_ROOT']} SCRIPT_NAME : {$_SERVER['SCRIPT_NAME']} REQUEST_URI : {$_SERVER['REQUEST_URI']} +Cache directory : $tmp EOD;
Add temp location to server-info.php
mrclay_minify
train
d50ffc63f505cef766dff86bbcab969e4105c864
diff --git a/classes/fields/pick.php b/classes/fields/pick.php index <HASH>..<HASH> 100644 --- a/classes/fields/pick.php +++ b/classes/fields/pick.php @@ -1115,7 +1115,7 @@ class PodsField_Pick extends PodsField { * * @return array An array of available items from a relationship field */ - protected function get_field_data( $field, $options = array(), $object_params = array() ) { + public function get_field_data( $field, $options = array(), $object_params = array() ) { // Handle field array overrides if ( is_array( $field ) ) { @@ -1165,7 +1165,7 @@ class PodsField_Pick extends PodsField { * * @return array|bool Object data */ - protected function get_object_data ( $object_params = null ) { + public function get_object_data ( $object_params = null ) { global $wpdb, $polylang, $sitepress, $icl_adjust_id_url_filter_off; $current_language = false;
Fix for PHP protected methods in PodsField_Pick
pods-framework_pods
train
d5d39d541998ab5cb0733ebdf5e08b3a72a28233
diff --git a/Capsule/Manager.php b/Capsule/Manager.php index <HASH>..<HASH> 100644 --- a/Capsule/Manager.php +++ b/Capsule/Manager.php @@ -3,6 +3,7 @@ use PDO; use Illuminate\Support\Fluent; use Illuminate\Events\Dispatcher; +use Illuminate\Cache\CacheManager; use Illuminate\Container\Container; use Illuminate\Database\DatabaseManager; use Illuminate\Database\Eloquent\Model as Eloquent; @@ -11,6 +12,13 @@ use Illuminate\Database\Connectors\ConnectionFactory; class Manager { /** + * The current globally used instance. + * + * @var \Illuminate\Database\Capsule\Manager + */ + protected static $instance; + + /** * Create a new database capsule manager. * * @param \Illuminate\Container\Container $container @@ -66,6 +74,51 @@ class Manager { } /** + * Get a connection instance from the global manager. + * + * @param string $connection + * @return \Illuminate\Database\Connection + */ + public static function connection($connection = null) + { + return static::$instance->getConnection($connection); + } + + /** + * Get a fluent query builder instance. + * + * @param string $table + * @param string $connection + * @return \Illuminate\Database\Query\Builder + */ + public static function table($table, $connection = null) + { + return static::$instance->connection($connection)->table($table); + } + + /** + * Get a schema builder instance. + * + * @param string $connection + * @return \Illuminate\Database\Schema\Builder + */ + public static function schema($connection = null) + { + return static::$instance->connection($connection)->getSchemaBuilder(); + } + + /** + * Get a registered connection instance. + * + * @param string $name + * @return \Illuminate\Database\Connection + */ + public function getConnection($name = null) + { + return $this->manager->connection($name); + } + + /** * Register a connection with the manager. 
* * @param array $config @@ -74,7 +127,11 @@ class Manager { */ public function addConnection(array $config, $name = 'default') { - $this->container['config']['database.connections'][$name] = $config; + $connections = $this->container['config']['database.connections']; + + $connections[$name] = $config; + + $this->container['config']['database.connections'] = $connections; } /** @@ -96,6 +153,16 @@ class Manager { } /** + * Make this capsule instance available globally. + * + * @return void + */ + public function setAsGlobal() + { + static::$instance = $this; + } + + /** * Get the current event dispatcher instance. * * @return \Illuminate\Events\Dispatcher @@ -120,6 +187,30 @@ class Manager { } /** + * Get the current cache manager instance. + * + * @return \Illuminate\Cache\Manager + */ + public function getCacheManager() + { + if ($this->container->bound('cache')) + { + return $this->container['cache']; + } + } + + /** + * Set the cache manager to bse used by connections. + * + * @param \Illuminate\Cache\CacheManager $cache + * @return void + */ + public function setCacheManager(CacheManager $cache) + { + $this->container->instance('cache', $cache); + } + + /** * Get the IoC container instance. * * @return \Illuminate\Container\Container diff --git a/DatabaseManager.php b/DatabaseManager.php index <HASH>..<HASH> 100755 --- a/DatabaseManager.php +++ b/DatabaseManager.php @@ -110,12 +110,18 @@ class DatabaseManager implements ConnectionResolverInterface { { $connection->setFetchMode($this->app['config']['database.fetch']); - $connection->setEventDispatcher($this->app['events']); + if ($this->app->bound('events')) + { + $connection->setEventDispatcher($this->app['events']); + } // The database connection can also utilize a cache manager instance when cache // functionality is used on queries, which provides an expressive interface // to caching both fluent queries and Eloquent queries that are executed. 
- $connection->setCacheManager($this->app['cache']); + if ($this->app->bound('cache')) + { + $connection->setCacheManager($this->app['cache']); + } $app = $this->app;
Totally re-doing Capsule implementation for ease of use.
illuminate_database
train
9640881f38d2a5a70652f02de080e272464af965
diff --git a/spring-boot-project/spring-boot-tools/spring-boot-gradle-plugin/src/main/java/org/springframework/boot/gradle/tasks/buildinfo/BuildInfoProperties.java b/spring-boot-project/spring-boot-tools/spring-boot-gradle-plugin/src/main/java/org/springframework/boot/gradle/tasks/buildinfo/BuildInfoProperties.java index <HASH>..<HASH> 100644 --- a/spring-boot-project/spring-boot-tools/spring-boot-gradle-plugin/src/main/java/org/springframework/boot/gradle/tasks/buildinfo/BuildInfoProperties.java +++ b/spring-boot-project/spring-boot-tools/spring-boot-gradle-plugin/src/main/java/org/springframework/boot/gradle/tasks/buildinfo/BuildInfoProperties.java @@ -57,7 +57,10 @@ public class BuildInfoProperties implements Serializable { * @return the group */ public String getGroup() { - return this.group != null ? this.group : this.project.getGroup().toString(); + if (this.group == null) { + this.group = this.project.getGroup().toString(); + } + return this.group; } /** @@ -94,7 +97,10 @@ public class BuildInfoProperties implements Serializable { * @return the version */ public String getVersion() { - return this.version != null ? this.version : this.project.getVersion().toString(); + if (this.version == null) { + this.version = this.project.getVersion().toString(); + } + return this.version; } /** @@ -113,7 +119,10 @@ public class BuildInfoProperties implements Serializable { * @return the name */ public String getName() { - return this.name != null ? 
this.name : this.project.getName(); + if (this.name == null) { + this.name = this.project.getName(); + } + return this.name; } /** diff --git a/spring-boot-project/spring-boot-tools/spring-boot-gradle-plugin/src/test/java/org/springframework/boot/gradle/tasks/buildinfo/BuildInfoIntegrationTests.java b/spring-boot-project/spring-boot-tools/spring-boot-gradle-plugin/src/test/java/org/springframework/boot/gradle/tasks/buildinfo/BuildInfoIntegrationTests.java index <HASH>..<HASH> 100644 --- a/spring-boot-project/spring-boot-tools/spring-boot-gradle-plugin/src/test/java/org/springframework/boot/gradle/tasks/buildinfo/BuildInfoIntegrationTests.java +++ b/spring-boot-project/spring-boot-tools/spring-boot-gradle-plugin/src/test/java/org/springframework/boot/gradle/tasks/buildinfo/BuildInfoIntegrationTests.java @@ -21,6 +21,7 @@ import java.io.FileReader; import java.io.IOException; import java.util.Properties; +import org.gradle.testkit.runner.BuildResult; import org.gradle.testkit.runner.TaskOutcome; import org.junit.Rule; import org.junit.Test; @@ -84,6 +85,16 @@ public class BuildInfoIntegrationTests { .getOutcome()).isEqualTo(TaskOutcome.UP_TO_DATE); } + @Test + public void notUpToDateWhenExecutedTwiceWithFixedTimeAndChangedProjectVersion() { + assertThat(this.gradleBuild.build("buildInfo", "-PnullTime").task(":buildInfo") + .getOutcome()).isEqualTo(TaskOutcome.SUCCESS); + BuildResult result = this.gradleBuild.build("buildInfo", "-PnullTime", + "-PprojectVersion=0.2.0"); + System.out.println(result.getOutput()); + assertThat(result.task(":buildInfo").getOutcome()).isEqualTo(TaskOutcome.SUCCESS); + } + private Properties buildInfoProperties() { File file = new File(this.gradleBuild.getProjectDir(), "build/build-info.properties"); diff --git a/spring-boot-project/spring-boot-tools/spring-boot-gradle-plugin/src/test/resources/org/springframework/boot/gradle/tasks/buildinfo/BuildInfoIntegrationTests.gradle 
b/spring-boot-project/spring-boot-tools/spring-boot-gradle-plugin/src/test/resources/org/springframework/boot/gradle/tasks/buildinfo/BuildInfoIntegrationTests.gradle index <HASH>..<HASH> 100644 --- a/spring-boot-project/spring-boot-tools/spring-boot-gradle-plugin/src/test/resources/org/springframework/boot/gradle/tasks/buildinfo/BuildInfoIntegrationTests.gradle +++ b/spring-boot-project/spring-boot-tools/spring-boot-gradle-plugin/src/test/resources/org/springframework/boot/gradle/tasks/buildinfo/BuildInfoIntegrationTests.gradle @@ -8,13 +8,17 @@ def property(String name, Object defaultValue) { project.hasProperty(name) ? project.getProperty(name) : defaultValue } +version = property('projectVersion', '0.1.0') + task buildInfo(type: org.springframework.boot.gradle.tasks.buildinfo.BuildInfo) { destinationDir file(property('buildInfoDestinationDir', project.buildDir)) properties { artifact = property('buildInfoArtifact', 'foo') - version = property('buildInfoVersion', '1.0') group = property('buildInfoGroup', 'foo') name = property('buildInfoName', 'foo') + if (!project.hasProperty('projectVersion')) { + version = property('buildInfoVersion', '1.0') + } additional = ['additional': property('buildInfoAdditional', 'foo')] if (project.hasProperty('nullTime')) { time = null
Fix BuildInfo up-to-date check when group, name, or version changes Previously, if the project's group, name, or version changed the BuildInfo task would still be considered up-to-date as the values of the project's properties were not reflected in the fields of the BuildInfo instance. This commit updates BuildInfo to copy the value of the project's property to the corresponding BuildInfo field when the property is read using its getter method on BuildInfo. Closes gh-<I>
spring-projects_spring-boot
train
07e0a594297f1b07027569a0eedfe99136c07294
diff --git a/router/vulcand/router.go b/router/vulcand/router.go index <HASH>..<HASH> 100644 --- a/router/vulcand/router.go +++ b/router/vulcand/router.go @@ -1,4 +1,4 @@ -// Copyright 2015 tsuru authors. All rights reserved. +// Copyright 2016 tsuru authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. diff --git a/router/vulcand/router_test.go b/router/vulcand/router_test.go index <HASH>..<HASH> 100644 --- a/router/vulcand/router_test.go +++ b/router/vulcand/router_test.go @@ -1,4 +1,4 @@ -// Copyright 2015 tsuru authors. All rights reserved. +// Copyright 2016 tsuru authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file.
router/vulcand: year in copyright header
tsuru_tsuru
train
42e6e433260d729910ea236ce4c8b124ba40b98a
diff --git a/src/Tippy.js b/src/Tippy.js index <HASH>..<HASH> 100644 --- a/src/Tippy.js +++ b/src/Tippy.js @@ -221,7 +221,11 @@ export default function TippyGenerator(tippy) { {mounted && createPortal( render - ? render(toDataAttributes(attrs), singletonContent) + ? render( + toDataAttributes(attrs), + singletonContent, + mutableBox.instance, + ) : content, mutableBox.container, )} diff --git a/test/Tippy.test.js b/test/Tippy.test.js index <HASH>..<HASH> 100644 --- a/test/Tippy.test.js +++ b/test/Tippy.test.js @@ -459,6 +459,25 @@ describe('<Tippy />', () => { expect(instance.popper.firstElementChild).toMatchSnapshot(); }); + test('render prop instance', () => { + let _instance; + render( + <Tippy + render={(attrs, content, instance) => { + _instance = instance; + return <div {...attrs}>Hello</div>; + }} + showOnCreate={true} + > + <button /> + </Tippy>, + ); + + jest.runAllTimers(); + + expect(_instance).toBe(instance); + }); + test('render prop preserve popperOptions', () => { const element = ( <Tippy
feat: expose instance argument from render prop (#<I>)
atomiks_tippy.js-react
train
9f3e16961352624c58dc29df4f567ecf9fe30d60
diff --git a/lib/formtastic/inputs/base/collections.rb b/lib/formtastic/inputs/base/collections.rb index <HASH>..<HASH> 100644 --- a/lib/formtastic/inputs/base/collections.rb +++ b/lib/formtastic/inputs/base/collections.rb @@ -113,12 +113,12 @@ module Formtastic # active: Custom Active Label Here # archived: Custom Archived Label Here def collection_from_enum - pluralized_method = method.to_s.pluralize.to_sym # :status => :statuses - if collection_from_enum? - enum_options_hash = object.class.send(pluralized_method) # Post.statuses + method_name = method.to_s + + enum_options_hash = object.defined_enums[method_name] enum_options_hash.map do |name, value| - key = "activerecord.attributes.#{object_name}.#{pluralized_method}.#{name}" + key = "activerecord.attributes.#{object_name}.#{method_name.pluralize}.#{name}" label = ::I18n.translate(key, :default => name.humanize) [label, name] end
Use `defined_enums` to fetch collection from enum By using `defined_enums` instead of `.send(pluralized_method)`, only one method is needed to implement enum support with form objects.
justinfrench_formtastic
train
3220bf91ef342f7d9003494da6289be1029e7dc3
diff --git a/openxc/src/com/openxc/interfaces/usb/UsbVehicleInterface.java b/openxc/src/com/openxc/interfaces/usb/UsbVehicleInterface.java index <HASH>..<HASH> 100644 --- a/openxc/src/com/openxc/interfaces/usb/UsbVehicleInterface.java +++ b/openxc/src/com/openxc/interfaces/usb/UsbVehicleInterface.java @@ -163,10 +163,13 @@ public class UsbVehicleInterface extends BytestreamDataSource } public boolean receive(RawMeasurement command) { - String message = command.serialize() + "\u0000"; - Log.d(TAG, "Writing string to USB: " + message); - byte[] bytes = message.getBytes(); - return write(bytes); + if(isConnected()) { + String message = command.serialize() + "\u0000"; + Log.d(TAG, "Writing string to USB: " + message); + byte[] bytes = message.getBytes(); + return write(bytes); + } + return false; } public boolean setResource(String otherUri) throws DataSourceException {
Don't try and write to USB unless connected.
openxc_openxc-android
train
8566ac61a250a0a5b7851a220a49f55e925fc3b3
diff --git a/luma/oled/device/__init__.py b/luma/oled/device/__init__.py index <HASH>..<HASH> 100644 --- a/luma/oled/device/__init__.py +++ b/luma/oled/device/__init__.py @@ -780,6 +780,16 @@ class ws0010(parallel_device, character): ``diff_to_previous`` or ``full_frame`` are only supported. :type framebuffer: str + To place text on the display, simply assign the text to the 'text' + instance variable:: + + p = parallel(RS=7, E=8, PINS=[25,24,23,18]) + my_display = ws0010(p, selected_font='FT01') + my_display.text = 'WS0010 Display\\nFont FT01 5x8' + + For more details on how to use the 'text' interface see + :class:`luma.core.virtual.character` + .. note: The ws0010 is a fully graphical device that also supports character-based operations similar to LCD displays such as the hd44780. This driver @@ -810,17 +820,7 @@ class ws0010(parallel_device, character): | 6 | FT10_10 | English Russian | 5x10 | +--------+---------+----------------------+------+ | 7 | FT11_10 | Western European II | 5x10 | - +--------+---------+----------------------+------+ - - To place text on the display, simply assign the text to the 'text' - instance variable:: - - p = parallel(RS=7, E=8, PINS=[25,24,23,18]) - my_display = ws0010(p, selected_font='FT01') - my_display.text = 'WS0010 Display\\nFont FT01 5x8' - - For more details on how to use the 'text' interface see - :class:`luma.core.virtual.character` + +--------+---------+----------------------+------+ .. versionadded:: 3.6.0 """
Minor change to ws<I> docstring
rm-hull_luma.oled
train
42a77b94e545de14d9f2d5fa2fcd1c88977d154a
diff --git a/lib/arjdbc/jdbc/adapter.rb b/lib/arjdbc/jdbc/adapter.rb index <HASH>..<HASH> 100644 --- a/lib/arjdbc/jdbc/adapter.rb +++ b/lib/arjdbc/jdbc/adapter.rb @@ -99,7 +99,7 @@ module ActiveRecord visitor = nil arel2_visitors.each do |k,v| visitor = v - visitors[k] = v unless visitors.has_key?(k) + visitors[k] = v end if visitor && config[:adapter] =~ /^(jdbc|jndi)$/ visitors[config[:adapter]] = visitor
Allow configure_arel2_visitors to override previous visitor for a given key
jruby_activerecord-jdbc-adapter
train
41d1b652c6c8c28d05541661ee484a93eb785505
diff --git a/pkg/deploy/controller/deployment/controller.go b/pkg/deploy/controller/deployment/controller.go index <HASH>..<HASH> 100644 --- a/pkg/deploy/controller/deployment/controller.go +++ b/pkg/deploy/controller/deployment/controller.go @@ -369,8 +369,9 @@ func (c *DeploymentController) cleanupDeployerPods(deployment *kapi.ReplicationC } cleanedAll := true + gracePeriod := int64(10) for _, deployerPod := range deployerList.Items { - if err := c.pn.Pods(deployerPod.Namespace).Delete(deployerPod.Name, &kapi.DeleteOptions{}); err != nil && !kerrors.IsNotFound(err) { + if err := c.pn.Pods(deployerPod.Namespace).Delete(deployerPod.Name, &kapi.DeleteOptions{GracePeriodSeconds: &gracePeriod}); err != nil && !kerrors.IsNotFound(err) { // if the pod deletion failed, then log the error and continue // we will try to delete any remaining deployer pods and return an error later utilruntime.HandleError(fmt.Errorf("couldn't delete completed deployer pod %q for deployment %q: %v", deployerPod.Name, deployutil.LabelForDeployment(deployment), err))
deploy: set gracePeriodSeconds on deployer deletion
openshift_origin
train
095c2676192ea32886e8072d98fba4dcb4279596
diff --git a/src/Providers/BookingsServiceProvider.php b/src/Providers/BookingsServiceProvider.php index <HASH>..<HASH> 100644 --- a/src/Providers/BookingsServiceProvider.php +++ b/src/Providers/BookingsServiceProvider.php @@ -120,12 +120,13 @@ class BookingsServiceProvider extends ServiceProvider ]); // Load resources - require __DIR__.'/../../routes/breadcrumbs/adminarea.php'; $this->loadRoutesFrom(__DIR__.'/../../routes/web/adminarea.php'); $this->loadViewsFrom(__DIR__.'/../../resources/views', 'cortex/bookings'); $this->loadTranslationsFrom(__DIR__.'/../../resources/lang', 'cortex/bookings'); $this->app->runningInConsole() || $this->app->afterResolving('blade.compiler', function () { - require __DIR__.'/../../routes/menus/adminarea.php'; + $accessarea = $this->app['request']->route('accessarea'); + ! file_exists($menus = __DIR__."/../../routes/menus/{$accessarea}.php") || require $menus; + ! file_exists($breadcrumbs = __DIR__."/../../routes/breadcrumbs/{$accessarea}.php") || require $breadcrumbs; }); // Publish resources
Tweak menus & breadcrumbs performance
rinvex_cortex-bookings
train
be2f594e0e498a3dbeae3309c0734e86c1eceb87
diff --git a/rinoh/flowable.py b/rinoh/flowable.py index <HASH>..<HASH> 100644 --- a/rinoh/flowable.py +++ b/rinoh/flowable.py @@ -159,7 +159,7 @@ class GroupedFlowables(Flowable): def render(self, container, descender, state=None): flowables = self.flowables(container.document) item_spacing = self.get_style('flowable_spacing', container.document) - state = state or GroupedFlowablesState(flowables, None) + state = state or GroupedFlowablesState(flowables) flowable = state.next_flowable() try: while True: diff --git a/rinoh/structure.py b/rinoh/structure.py index <HASH>..<HASH> 100644 --- a/rinoh/structure.py +++ b/rinoh/structure.py @@ -6,12 +6,11 @@ # Public License v3. See the LICENSE file or http://www.gnu.org/licenses/. -from copy import copy -from itertools import chain, tee, count, repeat +from itertools import count, repeat from .layout import EndOfContainer, MaybeContainer -from .flowable import Flowable, FlowableState, FlowableStyle -from .flowable import GroupedFlowables, GroupedFlowablesStyle +from .flowable import Flowable, GroupedFlowables, GroupedFlowablesStyle +from .flowable import GroupedFlowablesState from .number import format_number, NUMBER from .paragraph import ParagraphStyle, ParagraphBase, Paragraph, ParagraphState from .paragraph import TabStop, RIGHT @@ -157,23 +156,20 @@ class ListItem(Flowable): except EndOfContainer: raise EndOfContainer(state) try: - flowables_iterator = iter(self.flowables) - first_flowable = next(flowables_iterator) + state = GroupedFlowablesState(iter(self.flowables)) + first_flowable = state.next_flowable() height, last_descender = first_flowable.flow(maybe_container, last_descender) - state = ListItemState(flowables_iterator) maybe_container.do_place() except EndOfContainer as e: if e.flowable_state: maybe_container.do_place() - state = ListItemState(flowables_iterator, e.flowable_state) - state.prepend(first_flowable) + state.prepend(first_flowable, e.flowable_state) raise EndOfContainer(state) - for flowable in 
state.flowable_iterator: + for flowable in state.flowables: try: - height, last_descender = flowable.flow(container, - last_descender, - state.flowable_state) + _, last_descender = flowable.flow(container, last_descender, + state.first_flowable_state) state.flowable_state = None except EndOfContainer as e: state.prepend(flowable, e.flowable_state) @@ -181,20 +177,6 @@ class ListItem(Flowable): return last_descender -class ListItemState(FlowableState): - def __init__(self, flowable_iterator, flowable_state=None): - self.flowable_iterator = flowable_iterator - self.flowable_state = flowable_state - - def __copy__(self): - self.flowable_iterator, copy_iterator = tee(self.flowable_iterator) - return self.__class__(copy_iterator, copy(self.flowable_state)) - - def prepend(self, flowable, flowable_state=None): - self.flowable_iterator = chain((flowable, ), self.flowable_iterator) - self.flowable_state = flowable_state - - class DefinitionListStyle(ParagraphStyle): attributes = {'term_style': PARENT_STYLE,
ListItemState basically duplicated GroupedFlowablesState, except that it didn't save the first flowable's state on copy; fixed.
brechtm_rinohtype
train
c4699d2289c440b4cb9941446cc28c6ea93be0c9
diff --git a/wikitextparser/wikitextparser.py b/wikitextparser/wikitextparser.py index <HASH>..<HASH> 100644 --- a/wikitextparser/wikitextparser.py +++ b/wikitextparser/wikitextparser.py @@ -680,7 +680,7 @@ class WikiLink(_Indexed_Object): self._common_init(string, spans, index) def __repr__(self): - """Return the string representation of the Argument.""" + """Return the string representation of the WikiLink.""" return 'WikiLink("' + self.string + '")' def _get_span(self): @@ -716,14 +716,14 @@ class WikiLink(_Indexed_Object): class Comment(_Indexed_Object): - """Use to represent External Links.""" + """Use to represent comments.""" def __init__(self, string, spans=None, index=None): - """Detect named or keyword argument.""" + """Run self._common_init.""" self._common_init(string, spans, index) def __repr__(self): - """Return the string representation of the Argument.""" + """Return the string representation of the Comment.""" return 'Comment("' + self.string + '")' def _get_span(self): @@ -741,13 +741,13 @@ class ExternalLink(_Indexed_Object): """Use to represent External Links.""" def __init__(self, string, spans=None, index=None): - """Detect named or keyword argument.""" + """Run self._common_init. Set self._spans['el'] if spans is None.""" self._common_init(string, spans, index) if spans is None: self._spans['el'] = [(0, len(string))] def __repr__(self): - """Return the string representation of the Argument.""" + """Return the string representation of the ExternalLink.""" return 'ExternalLink("' + self.string + '")' def _get_span(self): @@ -805,7 +805,8 @@ class Argument(_Indexed_Object): """Use to represent Template or ParserFunction arguments. Note that in mediawiki documentation `arguments` are (also) called - parameters. Here we {{{p}}} a parameter and {{t|p}} an argument. + parameters. In this module the convention is like this: + {{{parameter}}}, {{t|argument}}. See https://www.mediawiki.org/wiki/Help:Templates for more information. 
""" @@ -825,11 +826,11 @@ class Argument(_Indexed_Object): @property def name(self): - """Return argument's name-part. Return '' for anonymous parameters.""" + """Return argument's name-part. Return '' for positional arguments.""" pipename, equal, value = self.string.partition('=') if equal: return pipename[1:] - # anonymous parameters + # positional argument return '' @name.setter
Rewriting some of the docstrings.
5j9_wikitextparser
train
8cde3ffadfe363bfa5c87f16dd020ff9b883149b
diff --git a/src/main/java/org/jboss/pressgang/ccms/docbook/processing/JIRABugLinkStrategy.java b/src/main/java/org/jboss/pressgang/ccms/docbook/processing/JIRABugLinkStrategy.java index <HASH>..<HASH> 100644 --- a/src/main/java/org/jboss/pressgang/ccms/docbook/processing/JIRABugLinkStrategy.java +++ b/src/main/java/org/jboss/pressgang/ccms/docbook/processing/JIRABugLinkStrategy.java @@ -128,8 +128,7 @@ public class JIRABugLinkStrategy implements BugLinkStrategy<JIRABugLinkOptions> public boolean hasValuesChanged(ContentSpecWrapper contentSpecEntity, JIRABugLinkOptions bugOptions) { boolean changed = false; // Server - if (EntityUtilities.hasContentSpecMetaDataChanged(CommonConstants.CS_JIRA_SERVER_TITLE, jiraUrl, - contentSpecEntity)) { + if (EntityUtilities.hasContentSpecMetaDataChanged(CommonConstants.CS_JIRA_SERVER_TITLE, jiraUrl, contentSpecEntity)) { changed = true; } @@ -173,14 +172,20 @@ public class JIRABugLinkStrategy implements BugLinkStrategy<JIRABugLinkOptions> protected JIRAProject getJIRAProject(final JIRARESTInterface client, final String project) { try { - if (projects == null) { - // Check our key map first - if (projectKeyMap.containsKey(project)) { - return projectKeyMap.get(project); - } + // Check our key map first + if (projectKeyMap.containsKey(project)) { + return projectKeyMap.get(project); + } + if (projects == null) { // Try and get the project first if the project entered is the project key - JIRAProject projectEntity = client.getProject(project); + JIRAProject projectEntity = null; + try { + projectEntity = client.getProject(project); + } catch (Exception e) { + e.printStackTrace(); + // do nothing as we will pick up a missing project later + } // If the project isn't null then we found a matching one, otherwise load all the projects if (projectEntity != null) { @@ -195,10 +200,12 @@ public class JIRABugLinkStrategy implements BugLinkStrategy<JIRABugLinkOptions> for (final JIRAProject projectEntity : projects) { if 
(projectEntity.getKey() != null && projectEntity.getKey().equals(project)) { return projectEntity; - } else if (projectEntity.getName() != null && projectEntity.getName().equals(project)) { - return projectEntity; - } else if (NUMBER_PATTERN.matcher(project).matches() && projectEntity.getId().equals(Long.parseLong(project))) { - return projectEntity; + } else if ((projectEntity.getName() != null && projectEntity.getName().equals(project)) || (NUMBER_PATTERN.matcher( + project).matches() && projectEntity.getId().equals(Long.parseLong(project)))) { + // Load the project from the server as we'll need it to get the components/versions + final JIRAProject foundProject = client.getProject(projectEntity.getKey()); + projectKeyMap.put(project, foundProject); + return foundProject; } } } catch (ProviderException e) {
Fixed a bug where the project would fail to be fetched if you didn't use a valid project key.
pressgang-ccms_PressGangCCMSContentSpec
train