diff
stringlengths
65
26.7k
message
stringlengths
7
9.92k
diff --git a/packages/chrysalis-focus/src/lib/chrysalis-focus.js b/packages/chrysalis-focus/src/lib/chrysalis-focus.js index <HASH>..<HASH> 100644 --- a/packages/chrysalis-focus/src/lib/chrysalis-focus.js +++ b/packages/chrysalis-focus/src/lib/chrysalis-focus.js @@ -41,9 +41,9 @@ class Focus { for (let device of devices) { if (parseInt("0x" + port.productId) == device.usb.productId && parseInt("0x" + port.vendorId) == device.usb.vendorId) { - port.device = device - if (!found_devices.includes(port)) - found_devices.push(port) + let newPort = Object.assign({}, port) + newPort.device = device + found_devices.push(newPort) } } }
focus: Support devices sharing the same VID+PID pair
diff --git a/napalm_base/base.py b/napalm_base/base.py index <HASH>..<HASH> 100644 --- a/napalm_base/base.py +++ b/napalm_base/base.py @@ -1445,8 +1445,8 @@ class NetworkDriver(object): Returns a dictionary where the keys are as listed below: * intf_name (unicode) - * physical_channels (int) - * channels (int) + * physical_channels + * channels (list of dicts) * index (int) * state * input_power
Modified keys in the returned data structure for get_optics
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -54,6 +54,7 @@ setup_params = dict( ], setup_requires=[ 'hgtools', + 'pytest-runner', ], use_2to3=True, use_2to3_exclude_fixers=['lib2to3.fixes.fix_import'],
Use pytest-runner to allow tests to be invoked with 'setup.py ptr'
diff --git a/azurerm/helpers/azure/app_service.go b/azurerm/helpers/azure/app_service.go index <HASH>..<HASH> 100644 --- a/azurerm/helpers/azure/app_service.go +++ b/azurerm/helpers/azure/app_service.go @@ -12,7 +12,6 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -// Once Microsoft adds support for `supports_credentials` in their SDK we should add that to this schema. func SchemaAppServiceCorsSettings() *schema.Schema { return &schema.Schema{ Type: schema.TypeList,
Remove comment related to azure SDK update
diff --git a/script/wee.view.js b/script/wee.view.js index <HASH>..<HASH> 100644 --- a/script/wee.view.js +++ b/script/wee.view.js @@ -449,7 +449,29 @@ fn(options.model); - W[name].$observe('*', fn); + W.$extend(W[name], { + /** + * Pause view updating + */ + $pause: function() { + W[name].$unobserve('*', fn); + }, + + /** + * Resume view updating and optionally update + * + * @param {boolean} update + */ + $resume: function(update) { + W[name].$observe('*', fn); + + if (update) { + fn(options.model); + } + } + }); + + W[name].$resume(); } };
Add ability to pause and resume app rendering with cooresponding methods
diff --git a/src/Palladium/Entity/OneTimeIdentity.php b/src/Palladium/Entity/OneTimeIdentity.php index <HASH>..<HASH> 100644 --- a/src/Palladium/Entity/OneTimeIdentity.php +++ b/src/Palladium/Entity/OneTimeIdentity.php @@ -46,9 +46,8 @@ class OneTimeIdentity extends Identity */ public function generateNewKey() { - $key = bin2hex(random_bytes(self::KEY_SIZE)); - $this->key = $key; - $this->hash = $this->makeHash($key); + $this->key = bin2hex(random_bytes(self::KEY_SIZE)); + $this->hash = $this->makeHash($this->key); }
Minor: cleaned up key-generation method
diff --git a/invoice/src/main/java/com/ning/billing/invoice/generator/DefaultInvoiceGenerator.java b/invoice/src/main/java/com/ning/billing/invoice/generator/DefaultInvoiceGenerator.java index <HASH>..<HASH> 100644 --- a/invoice/src/main/java/com/ning/billing/invoice/generator/DefaultInvoiceGenerator.java +++ b/invoice/src/main/java/com/ning/billing/invoice/generator/DefaultInvoiceGenerator.java @@ -306,7 +306,6 @@ public class DefaultInvoiceGenerator implements InvoiceGenerator { final BillingPeriod billingPeriod = thisEvent.getBillingPeriod(); if (billingPeriod != BillingPeriod.NO_BILLING_PERIOD) { - final BillingMode billingMode = instantiateBillingMode(thisEvent.getBillingMode()); final DateTime startDate = thisEvent.getEffectiveDate(); final DateTime tzAdjustedStartDate = startDate.toDateTime(thisEvent.getTimeZone());
invoice: reformat DefaultNotificationQueueService No functional change.
diff --git a/genmodel/manager.py b/genmodel/manager.py index <HASH>..<HASH> 100644 --- a/genmodel/manager.py +++ b/genmodel/manager.py @@ -420,7 +420,7 @@ def cast_spell(job_id): # install dependencies subprocess.call(shlex.split('pip install -r requirements.txt')) # cast spell (non blocking, faster return) - subprocess.call(shlex.split('export JOB_ID={}'.format(job_id))) + os.environ['JOB_ID'] = str(job_id) subprocess.Popen(shlex.split('./venv/bin/python castspell.py')) # unlock os.remove('{}/locked'.format(working_dir))
set envar step shouldn't be done w subprocess foo
diff --git a/cumulusci/core/github.py b/cumulusci/core/github.py index <HASH>..<HASH> 100644 --- a/cumulusci/core/github.py +++ b/cumulusci/core/github.py @@ -80,21 +80,12 @@ def get_pull_requests_with_base_branch(repo, base_branch_name, head=None): return list(repo.pull_requests(base=base_branch_name, head=head)) -def get_pull_request_by_branch_name(repo, branch_name): - """Returns a single pull request if found, or None if nothing is returned. - Will throw an error if more than one pull request is returned""" +def get_pull_request_by_head(repo, branch_name): + """Returns all pull requests with head equal to the given branch name.""" if branch_name == repo.default_branch: return None - pull_requests = list(repo.pull_requests(head=repo.owner.login + ":" + branch_name)) - if len(pull_requests) == 0: - return None - elif len(pull_requests) == 1: - return pull_requests[0] - else: - raise GithubException( - "Expected one pull request but received {}".format(len(pull_requests)) - ) + return list(repo.pull_requests(head=repo.owner.login + ":" + branch_name)) def create_pull_request(repo, branch_name, base=None, title=None):
don't throw exception, update func name
diff --git a/lib/appsignal/transaction.rb b/lib/appsignal/transaction.rb index <HASH>..<HASH> 100644 --- a/lib/appsignal/transaction.rb +++ b/lib/appsignal/transaction.rb @@ -137,7 +137,7 @@ module Appsignal Appsignal.logger.debug("Setting http queue start: #{env_var}") value = env_var.tr('^0-9', '') unless value.empty? - @queue_start = value.to_f / 1000 + @queue_start = value.to_f / 1000.0 end end end
Make sure we don't use resolution in http queue time calculation
diff --git a/fake_switches/brocade/command_processor/enabled.py b/fake_switches/brocade/command_processor/enabled.py index <HASH>..<HASH> 100644 --- a/fake_switches/brocade/command_processor/enabled.py +++ b/fake_switches/brocade/command_processor/enabled.py @@ -324,7 +324,7 @@ def get_port_attributes(port): for ip_address in port.ip_helpers: attributes.append("ip helper-address %s" % ip_address) if port.ip_redirect is False: - attributes.append(" no ip redirect") + attributes.append("no ip redirect") return attributes diff --git a/tests/brocade/test_brocade_switch_protocol.py b/tests/brocade/test_brocade_switch_protocol.py index <HASH>..<HASH> 100644 --- a/tests/brocade/test_brocade_switch_protocol.py +++ b/tests/brocade/test_brocade_switch_protocol.py @@ -1310,7 +1310,7 @@ class TestBrocadeSwitchProtocol(unittest.TestCase): assert_interface_configuration(t, "ve 1201", [ "interface ve 1201", - " no ip redirect", + " no ip redirect", "!" ])
One space too many in the output of the brocade 'no ip redirect'
diff --git a/astrobase/hplc.py b/astrobase/hplc.py index <HASH>..<HASH> 100644 --- a/astrobase/hplc.py +++ b/astrobase/hplc.py @@ -290,7 +290,7 @@ def concatenate_textlcs(lclist, # track which LC goes where # initial LC lccounter = 0 - lcdict['concatenated'] = {lccounter: lclist[0]} + lcdict['concatenated'] = {lccounter: os.path.abspath(lclist[0])} lcdict['lcn'] = np.full_like(lcdict['rjd'], lccounter) # normalize if needed @@ -331,7 +331,7 @@ def concatenate_textlcs(lclist, # update LC tracking lccounter = lccounter + 1 - lcdict['concatenated'][lccounter] = lcf + lcdict['concatenated'][lccounter] = os.path.abspath(lcf) lcdict['lcn'] = np.concatenate(( lcdict['lcn'], np.full_like(thislcd['rjd'],lccounter)
hplc: fixing normalization
diff --git a/src/Modules/Assets.php b/src/Modules/Assets.php index <HASH>..<HASH> 100644 --- a/src/Modules/Assets.php +++ b/src/Modules/Assets.php @@ -64,7 +64,7 @@ class Assets extends Hookable // there was no manifest or no file present if ($this->manifest === null || ! isset($this->manifest[ $file ])) { - return $file; + return get_stylesheet_directory_uri() . $file; } return get_stylesheet_directory_uri() . $this->manifest[ $file ];
return correct asset path if manifest not present
diff --git a/lib/classy/aliasable.rb b/lib/classy/aliasable.rb index <HASH>..<HASH> 100644 --- a/lib/classy/aliasable.rb +++ b/lib/classy/aliasable.rb @@ -8,7 +8,7 @@ # @@classy_aliases, on the extending class. This could concievably lead to # namespace conflicts and strange bugs in the unlikely event that this variable # is used for anything else. Later versions may implement a hash of identity -# maps as a class variable on the Aliasble module itself, but for reasons of +# maps as a class variable on the Aliasable module itself, but for reasons of # complexity and performance, that has not been done at this time. # # Example:
fixed a typo in the rdoc
diff --git a/lib/nucleon/command/bash.rb b/lib/nucleon/command/bash.rb index <HASH>..<HASH> 100644 --- a/lib/nucleon/command/bash.rb +++ b/lib/nucleon/command/bash.rb @@ -129,7 +129,16 @@ class Bash < Plugin::Command def exec(options = {}, overrides = nil, &code) config = Config.ensure(options) - Nucleon.cli_run(build(export, overrides), config.import({ :ui => @ui }), &code) + result = Nucleon.cli_run(build(export, overrides), config.import({ :ui => @ui }), &code) + + if result + logger.debug("Command status: #{result.status}") + logger.debug("Command output:\n#{result.output}") + logger.debug("Command errors:\n#{result.errors}") + else + logger.debug("Command returned no result") + end + result end #-----------------------------------------------------------------------------
Adding more logging to the bash command provider execution process.
diff --git a/src/windows/sslCertCheckPluginProxy.js b/src/windows/sslCertCheckPluginProxy.js index <HASH>..<HASH> 100644 --- a/src/windows/sslCertCheckPluginProxy.js +++ b/src/windows/sslCertCheckPluginProxy.js @@ -55,6 +55,7 @@ cordova.commandProxy.add("SSLCertificateChecker", { }, function (reason) { if (stateHolder.clientSocket.information.serverCertificateErrorSeverity === Windows.Networking.Sockets.SocketSslErrorSeverity.ignorable) { + /* return shouldIgnoreCertificateErrorsAsync( stateHolder.clientSocket.information.serverCertificateErrors) .then(function (userAcceptedRetry) { @@ -64,6 +65,9 @@ cordova.commandProxy.add("SSLCertificateChecker", { errorCallback("CONNECTION_NOT_SECURE"); return }); + */ + // if the severity is ignorable, move on to .done + return; } errorCallback("CONNECTION_FAILED. Details: " + reason); })
fixed crash in Windows with self signed certificate
diff --git a/packages/babel-preset-react-app/create.js b/packages/babel-preset-react-app/create.js index <HASH>..<HASH> 100644 --- a/packages/babel-preset-react-app/create.js +++ b/packages/babel-preset-react-app/create.js @@ -29,6 +29,11 @@ module.exports = function(api, opts, env) { var isEnvProduction = env === 'production'; var isEnvTest = env === 'test'; + var useESModules = validateBoolOption( + 'useESModules', + opts.useESModules, + isEnvDevelopment || isEnvProduction + ); var isFlowEnabled = validateBoolOption('flow', opts.flow, true); var isTypeScriptEnabled = validateBoolOption( 'typescript', @@ -151,7 +156,7 @@ module.exports = function(api, opts, env) { // https://babeljs.io/docs/en/babel-plugin-transform-runtime#useesmodules // We should turn this on once the lowest version of Node LTS // supports ES Modules. - useESModules: isEnvDevelopment || isEnvProduction, + useESModules, // Undocumented option that lets us encapsulate our runtime, ensuring // the correct version is used // https://github.com/babel/babel/blob/090c364a90fe73d36a30707fc612ce037bdbbb24/packages/babel-plugin-transform-runtime/src/index.js#L35-L42
Add allowESModules option to babel-preset-react-app (#<I>) * Add allowESModules option to babel-preset-react-app * changes after feedback * Apply suggestions from code review
diff --git a/hypervisor/context.go b/hypervisor/context.go index <HASH>..<HASH> 100644 --- a/hypervisor/context.go +++ b/hypervisor/context.go @@ -293,8 +293,13 @@ func (ctx *VmContext) InitDeviceContext(spec *pod.UserPod, wg *sync.WaitGroup, } } + hostname := spec.Name + if len(hostname) > 64 { + hostname = spec.Name[:64] + } + ctx.vmSpec = &VmPod{ - Hostname: spec.Name, + Hostname: hostname, Containers: containers, Dns: spec.Dns, Interfaces: nil,
Hostname length should be no more than <I>
diff --git a/src/Datastore/EntityMapper.php b/src/Datastore/EntityMapper.php index <HASH>..<HASH> 100644 --- a/src/Datastore/EntityMapper.php +++ b/src/Datastore/EntityMapper.php @@ -80,7 +80,7 @@ class EntityMapper { $excludes = []; - foreach ($entityData as $property) { + foreach ($entityData as $key => $property) { $type = key($property); if (isset($property['excludeFromIndexes']) && $property['excludeFromIndexes']) {
Bugfix for $key is undefined (#<I>) * Bugfix for $key is undefined * Make it non-associative array
diff --git a/modules/@apostrophecms/page-type/index.js b/modules/@apostrophecms/page-type/index.js index <HASH>..<HASH> 100644 --- a/modules/@apostrophecms/page-type/index.js +++ b/modules/@apostrophecms/page-type/index.js @@ -392,6 +392,9 @@ module.exports = { }, extendMethods(self) { return { + enableAction() { + self.action = self.apos.modules['@apostrophecms/page'].action; + }, copyForPublication(_super, req, from, to) { _super(req, from, to); const newMode = to.aposLocale.endsWith(':published') ? ':published' : ':draft';
sets custom page types to default their action to @apostrophecms/page s
diff --git a/src/Exscriptd/Order.py b/src/Exscriptd/Order.py index <HASH>..<HASH> 100644 --- a/src/Exscriptd/Order.py +++ b/src/Exscriptd/Order.py @@ -58,7 +58,7 @@ class Order(Base): def _read_hosts_from_xml(self, element): for host_elem in element.iterfind('host'): - address = host.get('address').strip() + address = host_elem.get('address').strip() args = self._read_arguments_from_xml(host_elem) host = _Host(address) host.add_host_variables(args) @@ -66,7 +66,7 @@ class Order(Base): def _read_arguments_from_xml(self, host_elem): arg_elem = host_elem.find('argument-list') - if not arg_elem: + if arg_elem is None: return {} args = {} for child in arg_elem.iterfind('variable'):
Exscriptd: fix: last commit broke XML order support.
diff --git a/src/Models/Conversation.php b/src/Models/Conversation.php index <HASH>..<HASH> 100644 --- a/src/Models/Conversation.php +++ b/src/Models/Conversation.php @@ -4,6 +4,7 @@ namespace Musonza\Chat\Models; use Musonza\Chat\BaseModel; use Musonza\Chat\Chat; +use Illuminate\Database\Eloquent\Relations\BelongsToMany; class Conversation extends BaseModel { @@ -16,7 +17,7 @@ class Conversation extends BaseModel /** * Conversation participants. * - * @return User + * @return BelongsToMany */ public function users() {
Change return type of users() (#<I>) Currently, the type is `User`, which is not the return type of `Eloquent`s `belongsToMany`. Since PHP doesn't have generics, there is no way of setting the return type to something more meaningful.
diff --git a/src/Thybag/SharePointAPI.php b/src/Thybag/SharePointAPI.php index <HASH>..<HASH> 100644 --- a/src/Thybag/SharePointAPI.php +++ b/src/Thybag/SharePointAPI.php @@ -835,12 +835,12 @@ class SharePointAPI { $value = strtolower($value); // Default is descending - $sort = 'false'; + $sort = 'FALSE'; // Is value set to allow ascending sorting? if ($value == 'asc' || $value == 'true' || $value == 'ascending') { // Sort ascending - $sort = 'true'; + $sort = 'TRUE'; } // Return it
Use caps 'TRUE' and 'FALSE' for sorting. SP (somethimes?) doesnt like them in lowercase.
diff --git a/git/odb/object_writer.go b/git/odb/object_writer.go index <HASH>..<HASH> 100644 --- a/git/odb/object_writer.go +++ b/git/odb/object_writer.go @@ -13,15 +13,18 @@ import ( // writes data given to it, and keeps track of the SHA1 hash of the data as it // is written. type ObjectWriter struct { - // w is the underling writer that this ObjectWriter is writing to. - w io.Writer - // sum is the in-progress hash calculation. - sum hash.Hash + // members managed via sync/atomic must be aligned at the top of this + // structure (see: https://github.com/git-lfs/git-lfs/pull/2880). // wroteHeader is a uint32 managed by the sync/atomic package. It is 1 // if the header was written, and 0 otherwise. wroteHeader uint32 + // w is the underling writer that this ObjectWriter is writing to. + w io.Writer + // sum is the in-progress hash calculation. + sum hash.Hash + // closeFn supplies an optional function that, when called, frees an // resources (open files, memory, etc) held by this instance of the // *ObjectWriter.
git/odb: note alignment issue in *ObjectWriter
diff --git a/Neos.Flow/Classes/Mvc/FlashMessage/Storage/FlashMessageSessionStorage.php b/Neos.Flow/Classes/Mvc/FlashMessage/Storage/FlashMessageSessionStorage.php index <HASH>..<HASH> 100644 --- a/Neos.Flow/Classes/Mvc/FlashMessage/Storage/FlashMessageSessionStorage.php +++ b/Neos.Flow/Classes/Mvc/FlashMessage/Storage/FlashMessageSessionStorage.php @@ -83,7 +83,10 @@ class FlashMessageSessionStorage implements FlashMessageStorageInterface } /** @var FlashMessageContainer $flashMessageContainer */ $flashMessageContainer = $this->session->getData($this->sessionKey); - return $flashMessageContainer; + if ($flashMessageContainer instanceof FlashMessageContainer) { + return $flashMessageContainer; + } + return null; } /**
BUGFIX: Avoid bool return value in restoreFlashMessageContainerFromSession() It can happen, that `getData(…)` returns a boolean, leading to an error due to the return type declaration.
diff --git a/src/TestSuite/Fixture/FixtureManager.php b/src/TestSuite/Fixture/FixtureManager.php index <HASH>..<HASH> 100644 --- a/src/TestSuite/Fixture/FixtureManager.php +++ b/src/TestSuite/Fixture/FixtureManager.php @@ -75,7 +75,7 @@ class FixtureManager * Modify the debug mode. * * @param bool $debug Whether or not fixture debug mode is enabled. - * @retun void + * @return void */ public function setDebug($debug) {
Fix a typo in docblock
diff --git a/groupy/objects.py b/groupy/objects.py index <HASH>..<HASH> 100644 --- a/groupy/objects.py +++ b/groupy/objects.py @@ -97,11 +97,25 @@ class Group: @classmethod def list(cls): - return List(Group(**g) for g in api.Groups.index()) + groups = [] + page = 1 + next_groups = api.Groups.index(page=page) + while next_groups: + groups.extend(next_groups) + page += 1 + next_groups = api.Groups.index(page=page) + return List(Group(**g) for g in groups) @classmethod def former_list(cls): - return List(Group(**g) for g in api.Groups.index(former=True)) + groups = [] + page = 1 + next_groups = api.Groups.index(former=True, page=page) + while next_groups: + groups.extend(next_groups) + page += 1 + next_groups = api.Groups.index(former=True, page=page) + return List(Group(**g) for g in groups) @staticmethod def _chunkify(text, chunk_size=450):
Fixed max groups of <I> when listed
diff --git a/app/models/renalware/letters/event.rb b/app/models/renalware/letters/event.rb index <HASH>..<HASH> 100644 --- a/app/models/renalware/letters/event.rb +++ b/app/models/renalware/letters/event.rb @@ -3,10 +3,6 @@ require_dependency "renalware/letters" module Renalware module Letters class Event < DumbDelegator - def initialize(object=nil) - super(object) - end - def description raise NotImplementedError end diff --git a/app/models/renalware/letters/event/unknown.rb b/app/models/renalware/letters/event/unknown.rb index <HASH>..<HASH> 100644 --- a/app/models/renalware/letters/event/unknown.rb +++ b/app/models/renalware/letters/event/unknown.rb @@ -3,6 +3,10 @@ require_dependency "renalware/letters/event" module Renalware module Letters class Event::Unknown < Event + def initialize(object=nil) + super(object) + end + def description end
Move initialize method into Unknown class
diff --git a/spec/mongoid_spec.rb b/spec/mongoid_spec.rb index <HASH>..<HASH> 100644 --- a/spec/mongoid_spec.rb +++ b/spec/mongoid_spec.rb @@ -93,6 +93,31 @@ describe CarrierWave::Mongoid do @doc.image.current_path.should == public_path('uploads/test.jpg') end + it "should return valid JSON when to_json is called when image is nil" do + @doc[:image] = nil + hash = JSON.parse(@doc.to_json) + hash.keys.should include("image") + hash["image"].keys.should include("url") + hash["image"]["url"].should be_nil + end + + it "should return valid JSON when to_json is called when image is present" do + @doc[:image] = 'test.jpeg' + @doc.save! + @doc.reload + + JSON.parse(@doc.to_json)["image"].should == {"url" => "/uploads/test.jpeg"} + end + + it "should return valid JSON when to_json is called on a collection containing uploader from a model" do + @doc[:image] = 'test.jpeg' + @doc.save! + @doc.reload + + JSON.parse({:data => @doc.image}.to_json).should == {"data"=>{"image"=>{"url"=>"/uploads/test.jpeg"}}} + end + + end end
Adding tests to the to_json serialization
diff --git a/soda/cmd/version.go b/soda/cmd/version.go index <HASH>..<HASH> 100644 --- a/soda/cmd/version.go +++ b/soda/cmd/version.go @@ -1,3 +1,3 @@ package cmd -const Version = "4.0.0.pre" +const Version = "v4.0.0.pre"
added a v in the tag
diff --git a/cwltool/main.py b/cwltool/main.py index <HASH>..<HASH> 100755 --- a/cwltool/main.py +++ b/cwltool/main.py @@ -502,7 +502,7 @@ def main(args=None, print_dot=args.print_dot, rdf_serializer=args.rdf_serializer) except Exception as e: - _logger.error("I'm sorry, I couldn't load this CWL file.\n%s", e, exc_info=(e if args.debug else False)) + _logger.error("I'm sorry, I couldn't load this CWL file, try again with --debug for more information.\n%s\n", e, exc_info=(e if args.debug else False)) return 1 if type(t) == int: @@ -554,7 +554,10 @@ def main(args=None, _logger.error("Input object failed validation:\n%s", e, exc_info=(e if args.debug else False)) return 1 except workflow.WorkflowException as e: - _logger.error("Workflow error:\n %s", e, exc_info=(e if args.debug else False)) + _logger.error("Workflow error, try again with --debug for more information:\n %s", e, exc_info=(e if args.debug else False)) + return 1 + except Exception as e: + _logger.error("Unhandled error, try again with --debug for more information:\n %s", e, exc_info=(e if args.debug else False)) return 1 return 0
Suggest --debug when reporting exceptions.
diff --git a/eZ/Publish/API/Repository/Repository.php b/eZ/Publish/API/Repository/Repository.php index <HASH>..<HASH> 100644 --- a/eZ/Publish/API/Repository/Repository.php +++ b/eZ/Publish/API/Repository/Repository.php @@ -158,6 +158,13 @@ interface Repository public function getRoleService(); /** + * Get SearchService + * + * @return \eZ\Publish\API\Repository\SearchService + */ + public function getSearchService(); + + /** * Begin transaction * * Begins an transaction, make sure you'll call commit or rollback when done,
Added: getSearchService to repository interface
diff --git a/cli/cmd/snapshot.go b/cli/cmd/snapshot.go index <HASH>..<HASH> 100644 --- a/cli/cmd/snapshot.go +++ b/cli/cmd/snapshot.go @@ -153,11 +153,7 @@ func (c *ServicedCli) cmdSnapshotAdd(ctx *cli.Context) { return } - description := "" - if nArgs <= 3 { - description = ctx.String("description") - } - + description := ctx.String("description") if snapshot, err := c.driver.AddSnapshot(ctx.Args().First(), description); err != nil { fmt.Fprintln(os.Stderr, err) } else if snapshot == "" {
Simplify arg check for snapshot-add
diff --git a/controllers/Oauth_PublicController.php b/controllers/Oauth_PublicController.php index <HASH>..<HASH> 100644 --- a/controllers/Oauth_PublicController.php +++ b/controllers/Oauth_PublicController.php @@ -117,7 +117,22 @@ class Oauth_PublicController extends BaseController Craft::log(__METHOD__." : User Token", LogLevel::Info, true); //die('3'); - $account = $provider->getAccount(); + try { + $account = $provider->getAccount(); + } catch (\Exception $e) { + + $referer = craft()->httpSession->get('oauthReferer'); + craft()->httpSession->remove('oauthReferer'); + + // var_dump($referer); + // die(); + + Craft::log(__METHOD__." : Could not get account, so we redirect.", LogLevel::Info, true); + Craft::log(__METHOD__." : Redirect : ".$referer, LogLevel::Info, true); + + $this->redirect($referer); + + } var_dump($account); if(isset($account->mapping)) {
try/catching $provider->getAccount in order to prevent errors
diff --git a/src/components/BaseComponent.js b/src/components/BaseComponent.js index <HASH>..<HASH> 100644 --- a/src/components/BaseComponent.js +++ b/src/components/BaseComponent.js @@ -36,7 +36,7 @@ export default function createComponent(AntdComponent, mapProps) { ); } } - InputComponent.dispayName = `Redux-form-ANTD${AntdComponent.dispayName}`; + InputComponent.displayName = `Redux-form-ANTD${AntdComponent.displayName}`; return InputComponent; }
Fix for misspelled property name 'displayName'. (#<I>) * Add support for Form.Item's 'required' attribute. * Update import statements to load only required antd components. Fix for #<I> issue. * Fix for misspelled property name 'displayName'.
diff --git a/test/orm/mongoid.rb b/test/orm/mongoid.rb index <HASH>..<HASH> 100644 --- a/test/orm/mongoid.rb +++ b/test/orm/mongoid.rb @@ -8,6 +8,6 @@ end class ActiveSupport::TestCase setup do - Mongoid.purge! + Mongoid.default_session.drop end end
Fix mongoid test failed problem
diff --git a/cake/tests/cases/libs/controller/scaffold.test.php b/cake/tests/cases/libs/controller/scaffold.test.php index <HASH>..<HASH> 100644 --- a/cake/tests/cases/libs/controller/scaffold.test.php +++ b/cake/tests/cases/libs/controller/scaffold.test.php @@ -500,7 +500,6 @@ class ScaffoldViewTest extends CakeTestCase { $this->assertPattern('/input name="data\[ScaffoldMock\]\[published\]" type="text" maxlength="1" value="Y" id="ScaffoldMockPublished"/', $result); $this->assertPattern('/textarea name="data\[ScaffoldMock\]\[body\]" cols="30" rows="6" id="ScaffoldMockBody"/', $result); $this->assertPattern('/<li><a href="\/scaffold_mock\/delete\/1"[^>]*>Delete<\/a>\s*<\/li>/', $result); - debug($result); } /**
Removing debug() from scaffold test.
diff --git a/nabu/http/renders/CNabuHTTPResponseFileRender.php b/nabu/http/renders/CNabuHTTPResponseFileRender.php index <HASH>..<HASH> 100644 --- a/nabu/http/renders/CNabuHTTPResponseFileRender.php +++ b/nabu/http/renders/CNabuHTTPResponseFileRender.php @@ -44,7 +44,9 @@ class CNabuHTTPResponseFileRender extends CNabuHTTPResponseRenderAdapter is_file($this->source_filename) ) { $this->dumpFile($this->source_filename); - unlink($this->source_filename); + if ($this->unlink_source_file_after_render) { + unlink($this->source_filename); + } } elseif ($this->contentBuilder instanceof CNabuAbstractBuilder) { echo $this->contentBuilder->create(); }
Solve issue that unlinks the file always after send it
diff --git a/grunt.js b/grunt.js index <HASH>..<HASH> 100644 --- a/grunt.js +++ b/grunt.js @@ -14,7 +14,7 @@ module.exports = function(grunt) { }, bowerOrganiser : { mapping : { - js : "js" + js : "lib" } }, diff --git a/tasks/bower-organiser.js b/tasks/bower-organiser.js index <HASH>..<HASH> 100644 --- a/tasks/bower-organiser.js +++ b/tasks/bower-organiser.js @@ -35,11 +35,13 @@ module.exports = function(grunt) { if(grunt.utils.kindOf(component.source.main) === 'array') { _.each(component.source.main, function(source) { var extension = source.split('.').pop(); - grunt.file.copy(source, extension + '/' + path.basename(source)); + var targetFolder = config.mapping[extension] || extension; + grunt.file.copy(source, targetFolder + '/' + path.basename(source)); }); } else { var extension = component.source.main.split('.').pop(); - grunt.file.copy(component.source.main, extension + '/' + path.basename(component.source.main)); + var targetFolder = config.mapping[extension] || extension; + grunt.file.copy(component.source.main, targetFolder + '/' + path.basename(component.source.main)); } });
bug fix. Mappings work properly now.
diff --git a/precise/util.py b/precise/util.py index <HASH>..<HASH> 100644 --- a/precise/util.py +++ b/precise/util.py @@ -14,6 +14,7 @@ from typing import * import numpy as np +from os.path import join from precise.params import pr @@ -68,4 +69,5 @@ def glob_all(folder: str, filt: str) -> List[str]: def find_wavs(folder: str) -> Tuple[List[str], List[str]]: """Finds wake-word and not-wake-word wavs in folder""" - return glob_all(folder + '/wake-word', '*.wav'), glob_all(folder + '/not-wake-word', '*.wav') + return (glob_all(join(folder, 'wake-word'), '*.wav'), + glob_all(join(folder, 'not-wake-word'), '*.wav'))
Fix double slash when loading from folder ending in slash
diff --git a/src/Goodby/CSV/Import/Standard/Interpreter.php b/src/Goodby/CSV/Import/Standard/Interpreter.php index <HASH>..<HASH> 100644 --- a/src/Goodby/CSV/Import/Standard/Interpreter.php +++ b/src/Goodby/CSV/Import/Standard/Interpreter.php @@ -84,8 +84,6 @@ class Interpreter implements InterpreterInterface */ private function delegate($observer, $line) { - $this->checkCallable($observer); - call_user_func($observer, $line); }
Remove redundant check of valid callable. Currently the validity of the callable is checked when the callable is added, but also on every row iteration. This PR removes the redundancy.
diff --git a/src/Database/Relationship/OneToMany.php b/src/Database/Relationship/OneToMany.php index <HASH>..<HASH> 100644 --- a/src/Database/Relationship/OneToMany.php +++ b/src/Database/Relationship/OneToMany.php @@ -118,6 +118,26 @@ class OneToMany extends Relationship } /** + * Saves all of the given models + * + * @param array $models An array of models being associated and saved + * @return array + * @since 2.0.0 + **/ + public function saveAll($models) + { + foreach ($models as $model) + { + if (!$this->associate($model)->save()) + { + return false; + } + } + + return true; + } + + /** * Deletes all rows attached to the current model * * @return bool
Add a method for saving all models in a one to many scenario
diff --git a/src/View.php b/src/View.php index <HASH>..<HASH> 100644 --- a/src/View.php +++ b/src/View.php @@ -1,4 +1,35 @@ <?php +/** + * Slim - a micro PHP 5 framework + * + * @author Josh Lockhart <info@slimframework.com> + * @copyright 2011 Josh Lockhart + * @link http://www.slimframework.com + * @license http://www.slimframework.com/license + * @version 2.4.2 + * @package Slim + * + * MIT LICENSE + * + * Permission is hereby granted, free of charge, to any person obtaining + * a copy of this software and associated documentation files (the + * "Software"), to deal in the Software without restriction, including + * without limitation the rights to use, copy, modify, merge, publish, + * distribute, sublicense, and/or sell copies of the Software, and to + * permit persons to whom the Software is furnished to do so, subject to + * the following conditions: + * + * The above copyright notice and this permission notice shall be + * included in all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE + * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION + * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION + * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + */ namespace Slender; use \Slim\Collection;
Retain Slim copyright notice as per #6 request from @codeguy
diff --git a/spikewidgets/widgets/multicompgraphwidget/multicompgraphwidget.py b/spikewidgets/widgets/multicompgraphwidget/multicompgraphwidget.py index <HASH>..<HASH> 100644 --- a/spikewidgets/widgets/multicompgraphwidget/multicompgraphwidget.py +++ b/spikewidgets/widgets/multicompgraphwidget/multicompgraphwidget.py @@ -215,6 +215,8 @@ class MultiCompAgreementBySorterWidget(BaseMultiWidget): raise RuntimeError("Number of axes is not number of sortings.") if axes is not None: BaseMultiWidget.__init__(self, figure, axes[0]) + else: + BaseMultiWidget.__init__(self, figure, axes) self.name = 'MultiCompAgreementBySorterWidget' def plot(self):
Never forget the base class :(
diff --git a/deepdish/tools/caffe/combine_scores.py b/deepdish/tools/caffe/combine_scores.py index <HASH>..<HASH> 100644 --- a/deepdish/tools/caffe/combine_scores.py +++ b/deepdish/tools/caffe/combine_scores.py @@ -15,6 +15,7 @@ if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('scores', nargs='+', type=str) parser.add_argument('-o', '--output', default='scores.h5', type=str) + parser.add_argument('-n', '--name', type=str) args = parser.parse_args() @@ -28,7 +29,10 @@ if __name__ == '__main__': #scores = [] for s in args.scores: data = dd.io.load(s) - name = str(data['name']) + if args.name: + name = args.name + else: + name = str(data['name']) if name not in scores: scores[name] = dict(scores=None, seeds=None)
Added option to override name in combine_scores.
diff --git a/pandas/tests/groupby/test_groupby.py b/pandas/tests/groupby/test_groupby.py index <HASH>..<HASH> 100644 --- a/pandas/tests/groupby/test_groupby.py +++ b/pandas/tests/groupby/test_groupby.py @@ -2055,3 +2055,17 @@ def test_groups_repr_truncates(max_seq_items, expected): result = df.groupby(np.array(df.a)).groups.__repr__() assert result == expected + + +def test_group_on_two_row_multiindex_returns_one_tuple_key(): + # GH 18451 + df = pd.DataFrame([{"a": 1, "b": 2, "c": 99}, {"a": 1, "b": 2, "c": 88}]) + df = df.set_index(["a", "b"]) + + grp = df.groupby(["a", "b"]) + result = grp.indices + expected = {(1, 2): np.array([0, 1], dtype=np.int64)} + + assert len(result) == 1 + key = (1, 2) + assert (result[key] == expected[key]).all()
adding test for #<I> (#<I>)
diff --git a/parsl/channels/local/local.py b/parsl/channels/local/local.py index <HASH>..<HASH> 100644 --- a/parsl/channels/local/local.py +++ b/parsl/channels/local/local.py @@ -144,6 +144,9 @@ class LocalChannel(Channel, RepresentationMixin): except OSError as e: raise FileCopyException(e, self.hostname) + else: + os.chmod(local_dest, 0o777) + return local_dest def close(self):
change permission of job submit file, this is needed on theta
diff --git a/source/out/azure/src/js/widgets/oxinputvalidator.js b/source/out/azure/src/js/widgets/oxinputvalidator.js index <HASH>..<HASH> 100644 --- a/source/out/azure/src/js/widgets/oxinputvalidator.js +++ b/source/out/azure/src/js/widgets/oxinputvalidator.js @@ -61,10 +61,10 @@ setTimeout(function(){ if ( $( oTrigger ).is(options.visible) ) { var oFieldSet = self.getFieldSet( oTrigger ); - if ( oFieldSet.children( '.'+options.metodValidateDate ).length <= 0 ) { + if ( oFieldSet.children( '.'+options.metodValidateDate ).length >= 0 ) { var blIsValid = self.isFieldSetValid( oFieldSet, true ); self.hideErrorMessage( oFieldSet ); - if ( blIsValid != true ){ + if ( blIsValid != true ) { self.showErrorMessage( oFieldSet, blIsValid ); } }
ESDEV-<I> #<I> bugfix The check if date options exists was incorrect.
diff --git a/tests/conftest.py b/tests/conftest.py index <HASH>..<HASH> 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -245,9 +245,9 @@ def mcache_server_actual(host, port='11211'): def mcache_server_docker(unused_port, docker, session_id): - docker.pull('memcached:latest') + docker.pull('memcached:alpine') container = docker.create_container( - image='memcached', + image='memcached:alpine', name='memcached-test-server-{}'.format(session_id), ports=[11211], detach=True,
Use memcached:alpine instead of debian image
diff --git a/python/dllib/src/bigdl/dllib/autograd.py b/python/dllib/src/bigdl/dllib/autograd.py index <HASH>..<HASH> 100644 --- a/python/dllib/src/bigdl/dllib/autograd.py +++ b/python/dllib/src/bigdl/dllib/autograd.py @@ -68,6 +68,14 @@ def epsilon(): return Variable.from_jvalue(callBigDlFunc("float", "epsilon")) +def softsign(a): + return Variable.from_jvalue(callBigDlFunc("float", "softsign", a)) + + +def softplus(a): + return Variable.from_jvalue(callBigDlFunc("float", "softplus", a)) + + class Variable(ZooKerasCreator): def __init__(self, input_shape, node=None, jvalue=None): if jvalue:
add softsign, softplus and unittests (#<I>) * update * md * md * style
diff --git a/lib/model/model.js b/lib/model/model.js index <HASH>..<HASH> 100644 --- a/lib/model/model.js +++ b/lib/model/model.js @@ -297,7 +297,12 @@ var Model = Class.extend(/** @lends Model# */ { this._deleteExecuted = false; this._inFlight = false; - _.extend(this, properties); + // cannot _.extend because it knows not to attempt to write properties that + // aren't writable. we want to write the non-writable properties, though, + // so that the proper exception is thrown. + _.forEach(properties, function(value, key) { + this[key] = value; + }, this); }, /** diff --git a/test/relations/belongs_to_tests.js b/test/relations/belongs_to_tests.js index <HASH>..<HASH> 100644 --- a/test/relations/belongs_to_tests.js +++ b/test/relations/belongs_to_tests.js @@ -170,6 +170,12 @@ describe('Model.belongsTo', function() { }).to.throw(/cannot set.*authorId/i); }); + it('does not allow use of foreign key setter via constructor', function() { + expect(function() { + Article.create({ authorId: 25 }); + }).to.throw(/cannot set.*authorId/i); + }); + it('allows create', function() { var user = article.createAuthor({ username: 'jill' }); expect(article.author).to.equal(user);
Ensuring error is thrown for using read-only property with constructor.
diff --git a/lib/setuplib.php b/lib/setuplib.php index <HASH>..<HASH> 100644 --- a/lib/setuplib.php +++ b/lib/setuplib.php @@ -782,7 +782,7 @@ function get_real_size($size=0) { */ function redirect_if_major_upgrade_required() { global $CFG; - $lastmajordbchanges = 2010050404; + $lastmajordbchanges = 2010052700; if (empty($CFG->version) or (int)$CFG->version < $lastmajordbchanges or during_initial_install() or !empty($CFG->adminsetuppending)) { try {
course-section MDL-<I> Bumped major DB change version
diff --git a/tests/containerTests.js b/tests/containerTests.js index <HASH>..<HASH> 100644 --- a/tests/containerTests.js +++ b/tests/containerTests.js @@ -124,6 +124,21 @@ describe('Container', () => { }).toThrow(); }); + it('should resolve group instances in the same order they were registered', () => { + var N_OBJS = 5; + for(var i=0; i<N_OBJS; i++){ + var Obj = createObject({index: {value : i}}); + container.register('object-' + i, Obj) + .inGroup('foo'); + } + + var objs = container.resolveGroup('foo'); + expect(objs.length).toBe(N_OBJS); + for(var i=0; i<N_OBJS; i++){ + expect(objs[i].index).toBe(i); + } + }); + it('should respect the instance lifetime settings when resolving', () => { var A = createObject(); var B = createObject();
Added a test to check group instances are resolved in the same order they were registered
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -42,7 +42,7 @@ install_requires = [ setup( name='nodeconductor', - version='0.12.0.dev0', + version='0.13.0', author='OpenNode Team', author_email='info@opennodecloud.com', url='https://github.com/opennode/nodeconductor',
Preparing new release: <I>
diff --git a/sos/report/plugins/convert2rhel.py b/sos/report/plugins/convert2rhel.py index <HASH>..<HASH> 100644 --- a/sos/report/plugins/convert2rhel.py +++ b/sos/report/plugins/convert2rhel.py @@ -21,7 +21,8 @@ class convert2rhel(Plugin, RedHatPlugin): self.add_copy_spec([ "/var/log/convert2rhel/convert2rhel.log", - "/var/log/convert2rhel/rpm_va.log" + "/var/log/convert2rhel/archive/convert2rhel-*.log", + "/var/log/convert2rhel/rpm_va.log", ])
[convert2rhel] Add archived log collection Convert2RHEL will now archive old logs to maintain the sake of simplicity, and for that, we are including the archive directory to be collected as well.
diff --git a/lib/pause/action.rb b/lib/pause/action.rb index <HASH>..<HASH> 100644 --- a/lib/pause/action.rb +++ b/lib/pause/action.rb @@ -67,6 +67,9 @@ module Pause def ok? Pause.analyzer.check(self).nil? + rescue ::Redis::CannotConnectError => e + $stderr.puts "Error connecting to redis: #{e.inspect}" + false end def analyze diff --git a/spec/pause/action_spec.rb b/spec/pause/action_spec.rb index <HASH>..<HASH> 100644 --- a/spec/pause/action_spec.rb +++ b/spec/pause/action_spec.rb @@ -60,6 +60,15 @@ describe Pause::Action do action.ok?.should be_false end + + it "should return false and silently fail if redis is not available" do + Redis.any_instance.stub(:zrange) { raise Redis::CannotConnectError } + time = period_marker(resolution, Time.now.to_i) + + action.increment! 4, time - 25 + + action.ok?.should be_false + end end describe "#analyze" do
Making ok? check on action silently fail and return false if redis is dead.
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -25,7 +25,8 @@ setup( 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', - 'Programming Language :: Python :: 2.7', + 'Programming Language :: Python :: 2', + 'Programming Language :: Python :: 3', 'Topic :: System :: Logging', ] )
Updated setup.py python version classifiers
diff --git a/openquake/utils/db/loader.py b/openquake/utils/db/loader.py index <HASH>..<HASH> 100644 --- a/openquake/utils/db/loader.py +++ b/openquake/utils/db/loader.py @@ -534,9 +534,8 @@ class SourceModelLoader(object): # for now, just skip this object continue - data = read(src) - - # not serializing on the database - results.extend(data) + results.extend( + write(self.meta, read(src), owner_id=self.owner_id, + input_id=self.input_id)) return results
re-enabled database serialization
diff --git a/gosu-lab/src/main/java/editor/BatchDocument.java b/gosu-lab/src/main/java/editor/BatchDocument.java index <HASH>..<HASH> 100644 --- a/gosu-lab/src/main/java/editor/BatchDocument.java +++ b/gosu-lab/src/main/java/editor/BatchDocument.java @@ -37,7 +37,7 @@ public class BatchDocument extends DefaultStyledDocument synchronized( _batch ) { _batch.addAll( getElementsForString( str, a ) ); - while( _batch.size() > 2000 ) + while( _batch.size() > 100 * 1024 ) { _batch.remove( 0 ); }
increase output buffer size regulator from 2k to <I>k e.g., should be able to run simple loop to print 1 - <I> and see all <I> in the console
diff --git a/linkcheck/checker/httpurl.py b/linkcheck/checker/httpurl.py index <HASH>..<HASH> 100644 --- a/linkcheck/checker/httpurl.py +++ b/linkcheck/checker/httpurl.py @@ -247,6 +247,11 @@ class HttpUrl (internpaturl.InternPatternUrl, proxysupport.ProxySupport): self.aliases.append(newurl) # XXX on redirect errors this is not printed self.add_info(_("Redirected to `%(url)s'.") % {'url': newurl}) + + # Reset extern and recalculate + self.extern = None + self.set_extern(newurl) + self.urlparts = strformat.url_unicode_split(newurl) self.build_url_parts() self.url_connection = response
When following redirections update url.extern
diff --git a/lib/android/index.js b/lib/android/index.js index <HASH>..<HASH> 100644 --- a/lib/android/index.js +++ b/lib/android/index.js @@ -12,6 +12,9 @@ const path = require('path'); class Android { constructor(options) { if (Android.instance) { + // This is hack for https://github.com/sitespeedio/browsertime/issues/1239 + // In the long run we should rework how we use the Android object. + Android.instance.port = options.devToolsPort; return Android.instance; } @@ -135,6 +138,7 @@ class Android { } async removeFw() { + this.forward = undefined; // Remove forwards are missing in the adbkit return execa('adb', [ '-s',
Hack for handling crawling on Android devices from sitespeed.io. (#<I>) How we create the Android instance object isn't optimal and we can rework that in the future.
diff --git a/lib/blocklib.php b/lib/blocklib.php index <HASH>..<HASH> 100644 --- a/lib/blocklib.php +++ b/lib/blocklib.php @@ -1285,7 +1285,10 @@ class block_manager { } else if ($data = $mform->get_data()) { $bi = new stdClass; $bi->id = $block->instance->id; + + // This may get overwritten by the special case handling below. $bi->pagetypepattern = $data->bui_pagetypepattern; + $bi->showinsubcontexts = $data->bui_contexts; if (empty($data->bui_subpagepattern) || $data->bui_subpagepattern == '%@NULL@%') { $bi->subpagepattern = null; } else {
MDL-<I> block editing: show in subcontexts does not work on category pages. Thanks to Ian David Wild for pointing to the proper fix.
diff --git a/pingparsing/_stats.py b/pingparsing/_stats.py index <HASH>..<HASH> 100644 --- a/pingparsing/_stats.py +++ b/pingparsing/_stats.py @@ -18,7 +18,7 @@ class PingStats(object): self.__rtt_mdev = kwargs.pop("rtt_mdev", None) self.__duplicates = kwargs.pop("duplicates", None) - self.__icmp_reply_list = kwargs.pop("icmp_reply_list", []) + self.__icmp_replies = kwargs.pop("icmp_reply_list", []) @property def destination(self): @@ -159,7 +159,7 @@ class PingStats(object): |list| of |dict|: """ - return self.__icmp_reply_list + return self.__icmp_replies def as_dict(self): """
Refactor: rename a private variable
diff --git a/test/lib/model.js b/test/lib/model.js index <HASH>..<HASH> 100644 --- a/test/lib/model.js +++ b/test/lib/model.js @@ -162,7 +162,7 @@ var modelBatch = function(typeName, className, testSchema, testData) { 'passed-in fields are there': function(err, created) { var prop; for (prop in testData.create) { - assert.equal(created[prop], testData.create[prop]); + assert.deepEqual(created[prop], testData.create[prop]); } }, 'and we modify it': { @@ -176,7 +176,7 @@ var modelBatch = function(typeName, className, testSchema, testData) { 'modified fields are modified': function(err, updated) { var prop; for (prop in testData.update) { - assert.equal(updated[prop], testData.update[prop]); + assert.deepEqual(updated[prop], testData.update[prop]); } }, 'and we delete it': {
use deepEqual() to check modified fields
diff --git a/osprey/search_space.py b/osprey/search_space.py index <HASH>..<HASH> 100644 --- a/osprey/search_space.py +++ b/osprey/search_space.py @@ -59,7 +59,7 @@ class SearchSpace(object): raise ValueError('variable %s: warp=%s is not supported. use ' 'None or "log",' % (name, warp)) - self.variables[name] = EnumVariable(name, list(choices)) + self.variables[name] = EnumVariable(name, choices.tolist()) def add_int(self, name, min, max, warp=None): """An integer-valued dimension bounded between `min` <= x <= `max`.
Jump variables now cast to nearest Python equivalent No test case yet
diff --git a/zeroneed.php b/zeroneed.php index <HASH>..<HASH> 100755 --- a/zeroneed.php +++ b/zeroneed.php @@ -30,4 +30,4 @@ require __DIR__ . '/vendor/autoload.php'; | */ -ZN\ZN::run('EIP', '5.7.2.3', 'Vecihi Hürkuş'); \ No newline at end of file +ZN\ZN::run('EIP', '5.7.2.4', 'Vecihi Hürkuş'); \ No newline at end of file
<I>: Updated version.
diff --git a/bundles/org.eclipse.orion.client.editor/web/orion/editor/stylers/text_x-dockerfile/syntax.js b/bundles/org.eclipse.orion.client.editor/web/orion/editor/stylers/text_x-dockerfile/syntax.js index <HASH>..<HASH> 100644 --- a/bundles/org.eclipse.orion.client.editor/web/orion/editor/stylers/text_x-dockerfile/syntax.js +++ b/bundles/org.eclipse.orion.client.editor/web/orion/editor/stylers/text_x-dockerfile/syntax.js @@ -1,6 +1,6 @@ /******************************************************************************* * @license - * Copyright (c) 2014 IBM Corporation and others. + * Copyright (c) 2014, 2017 IBM Corporation and others. * All rights reserved. This program and the accompanying materials are made * available under the terms of the Eclipse Public License v1.0 * (http://www.eclipse.org/legal/epl-v10.html), and the Eclipse Distribution @@ -12,13 +12,17 @@ /*eslint-env browser, amd*/ define("orion/editor/stylers/text_x-dockerfile/syntax", ["orion/editor/stylers/lib/syntax"], function(mLib) { var keywords = [ - "add", + "add", "arg", "cmd", "copy", "entrypoint", "env", "expose", "from", + "healthcheck", + "label", "maintainer", "onbuild", "run", + "shell", + "stopsignal", "user", "volume", "workdir"
Bug <I> - Update Dockerfile keywords Update the syntax file with new keywords that have been introduced into Docker.
diff --git a/gwpy/timeseries/core.py b/gwpy/timeseries/core.py index <HASH>..<HASH> 100644 --- a/gwpy/timeseries/core.py +++ b/gwpy/timeseries/core.py @@ -1296,4 +1296,3 @@ class TimeSeriesBaseList(list): def __getslice__(self, i, j): return type(self)(*super(TimeSeriesBaseList, self).__getslice__(i, j)) - __getslice__.__doc__ = list.__getslice__.__doc__
TimeSeriesBaseList.__getslice__: removed __doc__ in python3 `list` doesn't have `__getslice__`, and we don't really need a docstring anyway
diff --git a/template/app/view/cls/Toolbar.js b/template/app/view/cls/Toolbar.js index <HASH>..<HASH> 100644 --- a/template/app/view/cls/Toolbar.js +++ b/template/app/view/cls/Toolbar.js @@ -71,7 +71,7 @@ Ext.define('Docs.view.cls.Toolbar', { } this.items = this.items.concat([ - { width: 10 }, + { xtype: 'tbspacer', width: 10 }, this.filterField = Ext.widget("textfield", { emptyText: 'Find class members...', enableKeyEvents: true,
Make member-filter separator into spacer. It was rendered as a button before.
diff --git a/angr/analyses/cfg.py b/angr/analyses/cfg.py index <HASH>..<HASH> 100644 --- a/angr/analyses/cfg.py +++ b/angr/analyses/cfg.py @@ -605,6 +605,10 @@ class CFG(Analysis, CFGBase): new_state = current_entry.state.copy() new_state.set_mode('symbolic') new_state.options.add(simuvex.o.DO_RET_EMULATION) + # Remove bad constraints + # FIXME: This is so hackish... + new_state.se._solver.constraints = [ c for c in new_state.se.constraints if c.op != 'I' or c.args[0] is not False ] + new_state.se._solver._result = None # Swap them saved_state, current_entry.state = current_entry.state, new_state sim_run, error_occurred, _ = self._get_simrun(addr, current_entry)
Remove all False constarints before converting a state from fastpath mode to symbolic mode when generating cfg
diff --git a/cell/lrp_test.go b/cell/lrp_test.go index <HASH>..<HASH> 100644 --- a/cell/lrp_test.go +++ b/cell/lrp_test.go @@ -11,7 +11,7 @@ import ( "github.com/cloudfoundry-incubator/bbs/models" "github.com/cloudfoundry-incubator/inigo/fixtures" "github.com/cloudfoundry-incubator/inigo/helpers" - "github.com/cloudfoundry-incubator/route-emitter/cfroutes" + "github.com/cloudfoundry-incubator/routing-info/cfroutes" "github.com/cloudfoundry-incubator/stager/diego_errors" archive_helper "github.com/pivotal-golang/archiver/extractor/test_helper" "github.com/pivotal-golang/lager" diff --git a/helpers/bbs_requests.go b/helpers/bbs_requests.go index <HASH>..<HASH> 100644 --- a/helpers/bbs_requests.go +++ b/helpers/bbs_requests.go @@ -5,7 +5,7 @@ import ( "github.com/cloudfoundry-incubator/bbs" "github.com/cloudfoundry-incubator/bbs/models" - "github.com/cloudfoundry-incubator/route-emitter/cfroutes" + "github.com/cloudfoundry-incubator/routing-info/cfroutes" . "github.com/onsi/gomega" )
Move cfroutes to routing-info [#<I>]
diff --git a/libkbfs/folder_block_ops.go b/libkbfs/folder_block_ops.go index <HASH>..<HASH> 100644 --- a/libkbfs/folder_block_ops.go +++ b/libkbfs/folder_block_ops.go @@ -1327,6 +1327,10 @@ func (fbo *folderBlockOps) GetDirtyDir( return fbo.getDirtyDirLocked(ctx, lState, kmd, dir, rtype) } +var hiddenEntries = map[string]bool{ + ".kbfs_git": true, +} + // GetDirtyDirChildren returns a map of EntryInfos for the (possibly // dirty) children entries of the given directory. func (fbo *folderBlockOps) GetDirtyDirChildren( @@ -1343,6 +1347,10 @@ func (fbo *folderBlockOps) GetDirtyDirChildren( children := make(map[string]EntryInfo) for k, de := range dblock.Children { + if hiddenEntries[k] { + fbo.log.CDebugf(ctx, "Hiding entry %s", k) + continue + } children[k] = de.EntryInfo } return children, nil
folder_block_ops: don't return .kbfs_git entry We don't want it to show up in `ls`, and we don't want `rm -rf` to clobber it on accident. Issue: KBFS-<I>
diff --git a/src/Context/Argument/PageObjectArgumentResolver.php b/src/Context/Argument/PageObjectArgumentResolver.php index <HASH>..<HASH> 100644 --- a/src/Context/Argument/PageObjectArgumentResolver.php +++ b/src/Context/Argument/PageObjectArgumentResolver.php @@ -85,6 +85,6 @@ class PageObjectArgumentResolver implements ArgumentResolver */ private function getClassName(\ReflectionParameter $parameter) { - return $parameter->getClass() ? $parameter->getClass()->getName() : null; + return $parameter->getClass() ? $parameter->getClass()->name : null; } }
Fix a problem discovered by scrutinizer
diff --git a/src/Assimp/Command/CommandExecutor.php b/src/Assimp/Command/CommandExecutor.php index <HASH>..<HASH> 100644 --- a/src/Assimp/Command/CommandExecutor.php +++ b/src/Assimp/Command/CommandExecutor.php @@ -105,7 +105,10 @@ class CommandExecutor */ public function setBinary($bin) { - if (!is_file($bin) || !is_executable($bin)) { + if (!is_file($bin)) { + throw new \InvalidArgumentException('Binary file not exists: '.$bin, ErrorCodes::FILE_NOT_FOUND); + } + if (!is_executable($bin)) { throw new \InvalidArgumentException('Binary file is not executable: '.$bin, ErrorCodes::FILE_NOT_EXECUTABLE); } $this->bin = $bin;
splitted checks on assimp-binary
diff --git a/app/models/esr_record.rb b/app/models/esr_record.rb index <HASH>..<HASH> 100644 --- a/app/models/esr_record.rb +++ b/app/models/esr_record.rb @@ -4,10 +4,10 @@ class EsrRecord < ActiveRecord::Base belongs_to :booking, :dependent => :destroy belongs_to :invoice - named_scope :valid, :conditions => "state = 'valid'" - named_scope :missing, :conditions => "state = 'missing'" - named_scope :bad, :conditions => "state = 'bad'" - named_scope :invalid, :conditions => "state != 'valid'" + scope :valid, where(:state => 'valid') + scope :missing, where(:state => 'missing') + scope :bad, where(:state => 'bad') + scope :invalid, where(:state => 'valid') private def parse_date(value)
Port scope definitions in EsrRecord to Rails 3 syntax.
diff --git a/lib/init.js b/lib/init.js index <HASH>..<HASH> 100644 --- a/lib/init.js +++ b/lib/init.js @@ -66,28 +66,21 @@ class Init { await this.writeFile('package.json', JSON.stringify(packageInfo, null, 2)) } - async writeEditorConfig() { - const name = '.editorconfig' - await copyFile(packagePath(name), this.currentPath(name)) - } - - async writeESLintConfig() { - const name = '.eslintrc.js' + async writeTemplateFile(name) { await copyFile(template(name), this.currentPath(name)) } - async writeCommitlintConfig() { - const name = '.commitlintrc.js' - await copyFile(template(name), this.currentPath(name)) + async writePackageFile(name) { + await copyFile(packagePath(name), this.currentPath(name)) } } module.exports = async function init() { const cmd = new Init(process.cwd()) await cmd.updatePackageFile() - await cmd.writeEditorConfig() - await cmd.writeESLintConfig() - await cmd.writeCommitlintConfig() + await cmd.writePackageFile('.editorconfig') + await cmd.writeTemplateFile('.eslintrc.js') + await cmd.writeTemplateFile('.commitlintrc.js') } module.exports.desc = `Setup npm project:
refactor(init): reduce duplication of methods (#<I>)
diff --git a/manager.go b/manager.go index <HASH>..<HASH> 100644 --- a/manager.go +++ b/manager.go @@ -108,10 +108,7 @@ func NewCookieManager(key string) *Manager { } func (m *Manager) Multi(next http.Handler) http.Handler { - return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - ctx := context.WithValue(r.Context(), m.opts.name, m.Load(r)) - next.ServeHTTP(w, r.WithContext(ctx)) - }) + return m.Use(next) } func (m *Manager) Use(next http.Handler) http.Handler {
[refactor] Alias Multi() to Use()
diff --git a/python_modules/dagster/dagster_tests/daemon_tests/integration_tests/test_queued.py b/python_modules/dagster/dagster_tests/daemon_tests/integration_tests/test_queued.py index <HASH>..<HASH> 100644 --- a/python_modules/dagster/dagster_tests/daemon_tests/integration_tests/test_queued.py +++ b/python_modules/dagster/dagster_tests/daemon_tests/integration_tests/test_queued.py @@ -1,3 +1,4 @@ +import pytest from dagster.core.host_representation import PipelineHandle from dagster.core.storage.pipeline_run import PipelineRun from dagster.core.test_utils import create_run_for_test, poll_for_finished_run @@ -26,6 +27,7 @@ def assert_events_in_order(logs, expected_events): assert filtered_logged_events == expected_events +@pytest.mark.skip("Flaky, see https://github.com/dagster-io/dagster/issues/3771") def test_queue_from_schedule_and_sensor(tmpdir, foo_example_repo): dagster_home_path = tmpdir.strpath with setup_instance(
Disable flaky daemon test Summary: We should investigate why this is segfaulting as it likely indicates a real issue, but skip for now. Test Plan: BK Reviewers: johann, prha, alangenfeld, catherinewu Reviewed By: catherinewu Differential Revision: <URL>
diff --git a/twarc/command2.py b/twarc/command2.py index <HASH>..<HASH> 100644 --- a/twarc/command2.py +++ b/twarc/command2.py @@ -696,18 +696,6 @@ def timeline( Retrieve recent tweets for the given user. """ - tweets = _timeline_tweets( - T, - use_search, - user_id, - since_id, - until_id, - start_time, - end_time, - exclude_retweets, - exclude_replies, - ) - count = 0 pbar = tqdm @@ -726,6 +714,18 @@ def timeline( "disable": hide_progress, } + tweets = _timeline_tweets( + T, + use_search, + user_id, + since_id, + until_id, + start_time, + end_time, + exclude_retweets, + exclude_replies, + ) + with pbar(**pbar_params) as progress: for result in tweets: _write(result, outfile) @@ -896,7 +896,7 @@ def _timeline_tweets( q += " -is:retweet" if exclude_replies and "-is:reply" not in q: q += " -is:reply" - tweets = T.search_all(q, since_id, until_id, start_time, end_time) + tweets = T.search_all(q, since_id, until_id, start_time, end_time, 100) else: tweets = T.timeline( user_id,
fix _timeline_tweets search not working when start_time was not set
diff --git a/src/Field.php b/src/Field.php index <HASH>..<HASH> 100644 --- a/src/Field.php +++ b/src/Field.php @@ -55,6 +55,13 @@ class Field public $editable = true; /** + * Setting this to true will never actually store + * the field in the database. It will action as normal, + * but will be skipped by update/insert. + */ + public $never_persist = false; + + /** * Constructor. You can pass field properties as array. * * @param array $defaults diff --git a/src/Persistence_SQL.php b/src/Persistence_SQL.php index <HASH>..<HASH> 100644 --- a/src/Persistence_SQL.php +++ b/src/Persistence_SQL.php @@ -571,7 +571,7 @@ class Persistence_SQL extends Persistence // apply all fields we got from get foreach ($data as $field => $value) { $f = $m->getElement($field); - if (!$f->editable) { + if (!$f->editable || $f->never_persist) { continue; } $insert->set($f->actual ?: $f->short_name, $value); @@ -649,6 +649,9 @@ class Persistence_SQL extends Persistence $cnt = 0; foreach ($data as $field => $value) { $f = $m->getElement($field); + if ($f->never_persist) { + continue; + } $update->set($f->actual ?: $f->short_name, $value); $cnt++; }
First we need ability to avoid field persistence If model is saving field and join is saving on top, we get problem.
diff --git a/transformers/modeling_bert.py b/transformers/modeling_bert.py index <HASH>..<HASH> 100644 --- a/transformers/modeling_bert.py +++ b/transformers/modeling_bert.py @@ -633,7 +633,7 @@ class BertModel(BertPreTrainedModel): See base class PreTrainedModel """ for layer, heads in heads_to_prune.items(): - self.encoder.layer[layer].attention.prune_heads(heads) + self.encoder.layer[layer].self_attention.prune_heads(heads) def forward(self, input_ids, attention_mask=None, token_type_ids=None, position_ids=None, head_mask=None): if attention_mask is None: @@ -736,7 +736,8 @@ class BertDecoderModel(BertPreTrainedModel): See base class PreTrainedModel """ for layer, heads in heads_to_prune.items(): - self.encoder.layer[layer].attention.prune_heads(heads) + self.decoder.layer[layer].attention.prune_heads(heads) + self.decoder.layer[layer].self_attention.prune_heads(heads) def forward(self, input_ids, encoder_outputs, attention_mask=None, token_type_ids=None, position_ids=None, head_mask=None): if attention_mask is None:
prune both attention and self-attention heads
diff --git a/addons/tab_completion/tab_completion.rb b/addons/tab_completion/tab_completion.rb index <HASH>..<HASH> 100644 --- a/addons/tab_completion/tab_completion.rb +++ b/addons/tab_completion/tab_completion.rb @@ -36,20 +36,24 @@ class TabCompletion < Addon # +pre_word_context+ is the "lib/" if the user hits tab after typing "ls lib/" # because the current word will be set to "" def pre_word_context - # Work our way backwards thru the text because we can stop as soon as - # see a word break character rather than having to keep track of them. - i = @before_text.length - str = "" - loop do - i -= 1 - ch = @before_text[i] - if ch =~ filtered_work_break_characters_rgx && (i>0 && @before_text[i-1] != '\\') - break - else - str << ch + if @before_text.length == 0 + "" + else + # Work our way backwards thru the text because we can stop as soon as + # see a word break character rather than having to keep track of them. + i = @before_text.length + str = "" + loop do + i -= 1 + ch = @before_text[i] + if ch =~ filtered_work_break_characters_rgx && (i>0 && @before_text[i-1] != '\\') + break + else + str << ch + end end + str.reverse end - str.reverse end def get_filename_completion_matches
Update pre_word_context for tab completion to be an empty string if there is no before_text to choose from. Resolves issue hitting tab when nothing has been input into the REPL.
diff --git a/src/client/Client.js b/src/client/Client.js index <HASH>..<HASH> 100644 --- a/src/client/Client.js +++ b/src/client/Client.js @@ -395,8 +395,9 @@ class Client extends EventEmitter { * <warn>Bots can only fetch their own profile.</warn> * @param {Snowflake} [id='@me'] ID of application to fetch * @returns {Promise<OAuth2Application>} + * @example * client.fetchApplication() - * .then(application => console.log(`Obtained application with name: ${application.name}`) + * .then(application => console.log(`Obtained application with name: ${application.name}`)) * .catch(console.error); */ fetchApplication(id = '@me') {
docs(Client): add missing example tag and closing parenthesis (#<I>)
diff --git a/autopython/highlighter.py b/autopython/highlighter.py index <HASH>..<HASH> 100644 --- a/autopython/highlighter.py +++ b/autopython/highlighter.py @@ -130,12 +130,12 @@ else: class Token: pass - Token.Index = object() - Token.Prompt = object() - Token.Text = object() - Token.Literal = object() - Token.Literal.String = object() - Token.Literal.String.Doc = object() + Token.Index = Token() + Token.Prompt = Token() + Token.Text = Token() + Token.Literal = Token() + Token.Literal.String = Token() + Token.Literal.String.Doc = Token() Token.Generic = Token COLOR_SCHEMES = {'default': {}}
Exception when colorama or pygments were not installed
diff --git a/lib/puppet/node/facts.rb b/lib/puppet/node/facts.rb index <HASH>..<HASH> 100644 --- a/lib/puppet/node/facts.rb +++ b/lib/puppet/node/facts.rb @@ -59,7 +59,7 @@ class Puppet::Node::Facts end end - # Sanitize fact values by converting everything not a string, boolean + # Sanitize fact values by converting everything not a string, Boolean # numeric, array or hash into strings. def sanitize values.each do |fact, value|
(maint) Spell-check facts.rb.
diff --git a/lib/chef/provider/file.rb b/lib/chef/provider/file.rb index <HASH>..<HASH> 100644 --- a/lib/chef/provider/file.rb +++ b/lib/chef/provider/file.rb @@ -144,6 +144,7 @@ class Chef end def setup_acl + return if Chef::Platform.windows? acl_scanner = ScanAccessControl.new(@new_resource, @current_resource) acl_scanner.set_all! end
[CHEF-<I>] skip file metadata reporting on win for now
diff --git a/packages/babel-generator/src/printer.js b/packages/babel-generator/src/printer.js index <HASH>..<HASH> 100644 --- a/packages/babel-generator/src/printer.js +++ b/packages/babel-generator/src/printer.js @@ -367,7 +367,7 @@ export default class Printer { const loc = t.isProgram(node) || t.isFile(node) ? null : node.loc; this.withSource("start", loc, () => { - this[node.type](node, parent); + printMethod.call(this, node, parent); }); this._printTrailingComments(node);
change var name for coherence (#<I>)
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -24,6 +24,6 @@ setup( keywords="zha quirks homeassistant hass", packages=find_packages(exclude=["tests"]), python_requires=">=3", - install_requires=["zigpy>=0.28.1"], + install_requires=["zigpy>=0.28.2"], tests_require=["pytest"], ) diff --git a/zhaquirks/xiaomi/__init__.py b/zhaquirks/xiaomi/__init__.py index <HASH>..<HASH> 100644 --- a/zhaquirks/xiaomi/__init__.py +++ b/zhaquirks/xiaomi/__init__.py @@ -541,7 +541,7 @@ def handle_quick_init( if not model: return - for quirk in zigpy.quirks.get_model_quirks(model): + for quirk in zigpy.quirks.get_quirk_list(LUMI, model): if issubclass(quirk, XiaomiQuickInitDevice): sender.debug("Found '%s' quirk for '%s' model", quirk.__name__, model) try:
Syncup with zigpy regression fix (#<I>)
diff --git a/bbq-core/lib/bbq/rspec.rb b/bbq-core/lib/bbq/rspec.rb index <HASH>..<HASH> 100644 --- a/bbq-core/lib/bbq/rspec.rb +++ b/bbq-core/lib/bbq/rspec.rb @@ -21,7 +21,7 @@ module Bbq @locator = locator end - match_for_should do |page| + match do |page| if @locator page.within(@locator) do page.see? text @@ -31,7 +31,7 @@ module Bbq end end - match_for_should_not do |page| + match_when_negated do |page| if @locator page.within(@locator) do page.not_see? text @@ -41,7 +41,7 @@ module Bbq end end - failure_message_for_should do |page| + failure_message do |page| body = if @locator page.find(@locator).text else @@ -50,7 +50,7 @@ module Bbq "expected to see #{text} in #{body}" end - failure_message_for_should_not do |page| + failure_message_when_negated do |page| body = if @locator page.find(@locator).text else
Update rspec matchers to use non-deprecated methods
diff --git a/prometheus/go_collector_test.go b/prometheus/go_collector_test.go index <HASH>..<HASH> 100644 --- a/prometheus/go_collector_test.go +++ b/prometheus/go_collector_test.go @@ -44,9 +44,14 @@ func TestGoCollectorGoroutines(t *testing.T) { go func() { c.Collect(metricCh) - go func(c <-chan struct{}) { - <-c - }(endGoroutineCh) + for i := 1; i <= 10; i++ { + // Start 10 goroutines to be sure we'll detect an + // increase even if unrelated goroutines happen to + // terminate during this test. + go func(c <-chan struct{}) { + <-c + }(endGoroutineCh) + } <-waitCh c.Collect(metricCh) close(endCollectionCh) @@ -73,9 +78,8 @@ func TestGoCollectorGoroutines(t *testing.T) { continue } - if diff := int(pb.GetGauge().GetValue()) - old; diff != 1 { - // TODO: This is flaky in highly concurrent situations. - t.Errorf("want 1 new goroutine, got %d", diff) + if diff := old - int(pb.GetGauge().GetValue()); diff > -1 { + t.Errorf("want at least one new goroutine, got %d fewer", diff) } case <-time.After(1 * time.Second): t.Fatalf("expected collect timed out")
Unflake TestGoCollectorGoroutines This is not a great solution, but it's also hard to test for this moving target.
diff --git a/mods/parsoid.js b/mods/parsoid.js index <HASH>..<HASH> 100644 --- a/mods/parsoid.js +++ b/mods/parsoid.js @@ -295,8 +295,7 @@ PSP.generateAndSave = function(restbase, req, format, currentContentRes) { }, body: body }); - }) - .catch(function(e) { + }, function(e) { // Fall back to plain GET return restbase.get({ uri: pageBundleUri }); });
Don't retry failed Parsoid POST requests Only fall back to GET if the internal storage request failed, but don't do so if the Parsoid POST fails. While this catch papers over a Parsoid v3 API incompatibility right now, it would be nicer to detect such failures more quickly. Tests for this patch won't pass until <URL>
diff --git a/tests/models.py b/tests/models.py index <HASH>..<HASH> 100644 --- a/tests/models.py +++ b/tests/models.py @@ -35,7 +35,7 @@ class Example(models.Model): @property def property_should_not_index(self): - return True + return False @property def property_string(self): diff --git a/tests/test_index.py b/tests/test_index.py index <HASH>..<HASH> 100644 --- a/tests/test_index.py +++ b/tests/test_index.py @@ -254,7 +254,7 @@ class IndexTestCase(TestCase): class ExampleIndex(AlgoliaIndex): fields = 'name' - should_index = 'static_should_not_index' + should_index = 'property_should_not_index' index = ExampleIndex(Example, self.client) self.assertFalse(index._should_index(self.instance),
fix: test_should_index_property with correct negative case
diff --git a/src/browser-client/main.js b/src/browser-client/main.js index <HASH>..<HASH> 100644 --- a/src/browser-client/main.js +++ b/src/browser-client/main.js @@ -7,13 +7,13 @@ // It doesn't load "q.js" dynamically anymore -- that task has been replaced // by a script tag load in the HTML page: // -// <script src="QM_WWW_URL/homepage.js"></script> +// <script src="./homepage.js"></script> // // KNOWN ISSUES: // https://bugzilla.mozilla.org/show_bug.cgi?id=756028 // // ~~ (c) SRW, 23 May 2012 -// ~~ last updated 10 Jan 2013 +// ~~ last updated 12 Jan 2013 (function () { 'use strict'; @@ -26,10 +26,10 @@ /*properties Q, QM, activeElement, alert, avar, blur, box, call, clearTimeout, - click, console, document, error, exit, getItem, hasOwnProperty, id, is, - jQuery, join, key, keydown, localStorage, log, on, preventDefault, - prototype, ready, revive, setItem, setTimeout, stay, val, value, - volunteer, volunteer_timer, which + click, console, document, error, exit, focus, getItem, hasOwnProperty, + id, is, jQuery, join, key, keydown, localStorage, log, on, + preventDefault, prototype, ready, revive, setItem, setTimeout, stay, + val, value, volunteer, volunteer_timer, which */ // Prerequisites
Linted "main.js" and fixed an out-of-date comment
diff --git a/archive.go b/archive.go index <HASH>..<HASH> 100644 --- a/archive.go +++ b/archive.go @@ -214,6 +214,10 @@ func (v *volume) next() (*fileBlockHeader, error) { } func (v *volume) Close() error { + // may be nil if os.Open fails in next() + if v.f == nil { + return nil + } return v.f.Close() }
check for nil file handle when closing volume
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -8,7 +8,7 @@ from setuptools import setup _version_re = re.compile(r'__version__\s+=\s+(.*)') requirements = [ 'sanic-base-extension==0.1.1', - 'aioamqp==0.11.0', + 'aioamqp==0.12.0', ]
Updated version of the aioamqp package
diff --git a/test/test.js b/test/test.js index <HASH>..<HASH> 100644 --- a/test/test.js +++ b/test/test.js @@ -169,7 +169,8 @@ describe('Puppeteer', function() { await browser2.close(); rm(userDataDir); })); - it('userDataDir option should restore cookies', SX(async function() { + // @see https://github.com/GoogleChrome/puppeteer/issues/1537 + xit('userDataDir option should restore cookies', SX(async function() { const userDataDir = fs.mkdtempSync(path.join(__dirname, 'test-user-data-dir')); const options = Object.assign({userDataDir}, defaultBrowserOptions); const browser = await puppeteer.launch(options);
test: disable 'userDataDir option should restore cookies' (#<I>) References #<I>
diff --git a/lib/scripts/gen-docs.js b/lib/scripts/gen-docs.js index <HASH>..<HASH> 100755 --- a/lib/scripts/gen-docs.js +++ b/lib/scripts/gen-docs.js @@ -911,9 +911,6 @@ var generate = function (options_in, callBack) { fileName = fileName[fileName.length - 1]; fse.copySync(path.resolve(process.cwd(), file), ABS_WEBAPP + '/resources/extra/css/' + fileName); } - - return Q.all(); - }).then(function() { //generate information on the groups for the UI generateGroupManifest(groups);
Remove unneeded promise and Q.all in the end of gen-docs.generate
diff --git a/src/com/google/javascript/jscomp/GatherModuleMetadata.java b/src/com/google/javascript/jscomp/GatherModuleMetadata.java index <HASH>..<HASH> 100644 --- a/src/com/google/javascript/jscomp/GatherModuleMetadata.java +++ b/src/com/google/javascript/jscomp/GatherModuleMetadata.java @@ -261,8 +261,9 @@ public final class GatherModuleMetadata implements HotSwapCompilerPass { @Override public void visit(NodeTraversal t, Node n, Node parent) { if (processCommonJsModules && currentModule != null && currentModule.isScript()) { - if (ProcessCommonJSModules.isCommonJsExport(t, n, moduleResolutionMode) - || ProcessCommonJSModules.isCommonJsImport(n, moduleResolutionMode)) { + // A common JS import (call to "require") does not force a module to be rewritten as + // commonJS. Only an export statement. + if (ProcessCommonJSModules.isCommonJsExport(t, n, moduleResolutionMode)) { currentModule.moduleType(ModuleType.COMMON_JS, t, n); return; }
Only mark a module as CommonJS when an export statement is encountered. The CommonJS "require" call can be used as a module loader from a script tag. The presence of a "require" call does not indicate that the file is a CommonJS module. Fixes #<I> Closes <URL>
diff --git a/pycbc/inference/sampler/__init__.py b/pycbc/inference/sampler/__init__.py index <HASH>..<HASH> 100644 --- a/pycbc/inference/sampler/__init__.py +++ b/pycbc/inference/sampler/__init__.py @@ -22,18 +22,22 @@ from __future__ import absolute_import from .base import (initial_dist_from_config, create_new_output_file) from .emcee import EmceeEnsembleSampler from .emcee_pt import EmceePTSampler -from .epsie import EpsieSampler from .multinest import MultinestSampler # list of available samplers samplers = {cls.name: cls for cls in ( EmceeEnsembleSampler, EmceePTSampler, - EpsieSampler, MultinestSampler )} try: + from .epsie import EpsieSampler + samplers[EpsieSampler.name] = EpsieSampler +except ImportError: + pass + +try: from .cpnest import CPNestSampler samplers[CPNestSampler.name] = CPNestSampler except ImportError:
don't hard require epsie (#<I>)
diff --git a/integration_tests/ctesque/src/sharedTest/java/android/content/res/ResourcesTest.java b/integration_tests/ctesque/src/sharedTest/java/android/content/res/ResourcesTest.java index <HASH>..<HASH> 100644 --- a/integration_tests/ctesque/src/sharedTest/java/android/content/res/ResourcesTest.java +++ b/integration_tests/ctesque/src/sharedTest/java/android/content/res/ResourcesTest.java @@ -452,7 +452,9 @@ public class ResourcesTest { float density = resources.getDisplayMetrics().density; NinePatchDrawable ninePatchDrawable = (NinePatchDrawable) resources.getDrawable(R.drawable.nine_patch_drawable); - assertThat((float) ninePatchDrawable.getIntrinsicWidth()).isEqualTo(98.0f * density); + // Use Math.round to convert calculated float width to int, + // see NinePatchDrawable#scaleFromDensity. + assertThat(ninePatchDrawable.getIntrinsicWidth()).isEqualTo(Math.round(98.0f * density)); } @Test
Round calculated NinePatchDrawable intrinsic width for ResourcesTest See NinePatchDrawable#scaleFromDensity.