hash
stringlengths 40
40
| diff
stringlengths 131
114k
| message
stringlengths 7
980
| project
stringlengths 5
67
| split
stringclasses 1
value |
|---|---|---|---|---|
82f641d496217c28b15f246b4c6608a7c20c265b
|
diff --git a/mwtab/mwschema.py b/mwtab/mwschema.py
index <HASH>..<HASH> 100755
--- a/mwtab/mwschema.py
+++ b/mwtab/mwschema.py
@@ -129,7 +129,7 @@ collection_schema = Schema(
Optional("COLLECTION_PROTOCOL_ID"): str,
Optional("COLLECTION_PROTOCOL_FILENAME"): str,
Optional("COLLECTION_PROTOCOL_COMMENTS"): str,
- "SAMPLE_TYPE": str,
+ Optional("SAMPLE_TYPE"): str, # assumed optional due to large number of files without
Optional("COLLECTION_METHOD"): str,
Optional("COLLECTION_LOCATION"): str,
Optional("COLLECTION_FREQUENCY"): str,
@@ -267,26 +267,28 @@ chromatography_schema = Schema(
analysis_schema = Schema(
{
"ANALYSIS_TYPE": str,
- Optional("NUM_FACTORS"): str,
- Optional("ACQUISITION_TIME"): str,
- Optional("PROCESSING_PARAMETERS_FILE"): str,
- Optional("ANALYSIS_DISPLAY"): str,
- Optional("ANALYSIS_COMMENTS"): str,
Optional("LABORATORY_NAME"): str,
+ Optional("OPERATOR_NAME"): str,
Optional("DETECTOR_TYPE"): str,
Optional("SOFTWARE_VERSION"): str,
- Optional("OPERATOR_NAME"): str,
- Optional("INSTRUMENT_NAME"): str,
Optional("ACQUISITION_DATE"): str,
+ Optional("ANALYSIS_PROTOCOL_FILE"): str,
+ Optional("ACQUISITION_PARAMETERS_FILE"): str,
+ Optional("PROCESSING_PARAMETERS_FILE"): str,
Optional("DATA_FORMAT"): str,
- Optional("NUM_METABOLITES"): str,
+
+ # note specified in mwTab specification (assumed)
Optional("ACQUISITION_ID"): str,
- Optional("RAW_FILE"): str,
- Optional("PROCESSED_FILE"): str,
+ Optional("ACQUISITION_TIME"): str,
+ Optional("ANALYSIS_COMMENTS"): str,
+ Optional("ANALYSIS_DISPLAY"): str,
+ Optional("INSTRUMENT_NAME"): str,
Optional("INSTRUMENT_PARAMETERS_FILE"): str,
- Optional("ACQUISITION_PARAMETERS_FILE"): str,
- Optional("ANALYSIS_PROTOCOL_FILE"): str,
- Optional("RANDOMIZATION_ORDER"): str
+ Optional("NUM_FACTORS"): str,
+ Optional("NUM_METABOLITES"): str,
+ Optional("PROCESSED_FILE"): str,
+ Optional("RANDOMIZATION_ORDER"): str,
+ Optional("RAW_FILE"): str,
}
)
@@ -296,7 +298,7 @@ ms_schema = Schema(
"INSTRUMENT_TYPE": str,
"MS_TYPE": str,
"ION_MODE": str,
- "MS_COMMENTS": str, # changed to required
+ Optional("MS_COMMENTS"): str, # changed to required
Optional("CAPILLARY_TEMPERATURE"): str,
Optional("CAPILLARY_VOLTAGE"): str,
Optional("COLLISION_ENERGY"): str,
|
Updates mwschema to match Metabolomics Workbench's latest formatting specifications.
|
MoseleyBioinformaticsLab_mwtab
|
train
|
6878cd96d4ed33870a19006bbc688e0d2d92fbea
|
diff --git a/helpers/d.js b/helpers/d.js
index <HASH>..<HASH> 100644
--- a/helpers/d.js
+++ b/helpers/d.js
@@ -180,15 +180,6 @@ var json = function (xvalue) {
return undefined;
} else if (_.is.NaN(xvalue)) {
return undefined;
- } else if (_.is.Dictionary(xvalue)) {
- var nd = {};
- _.mapObject(xvalue, function(o, key) {
- var n = json(o);
- if (n !== undefined) {
- nd[key] = n;
- }
- });
- return nd;
} else if (_.is.Array(xvalue)) {
var ns = [];
xvalue.map(function(o) {
@@ -198,6 +189,15 @@ var json = function (xvalue) {
}
});
return ns;
+ } else if (_.is.Object(xvalue)) {
+ var nd = {};
+ _.mapObject(xvalue, function(o, key) {
+ var n = json(o);
+ if (n !== undefined) {
+ nd[key] = n;
+ }
+ });
+ return nd;
} else {
return xvalue;
}
diff --git a/tests/test_d.js b/tests/test_d.js
index <HASH>..<HASH> 100644
--- a/tests/test_d.js
+++ b/tests/test_d.js
@@ -366,6 +366,9 @@ describe('test_d:', function() {
var xd = _.d.clone.deep(d1d);
xd["sub"] = { "good": "times" };
+ // console.log("OD", od);
+ // console.log("XD", xd);
+
assert.ok(_.is.Equal(od, xd));
});
it('call - dirty array', function() {
|
fixed "d.json" which didn't behave correctly with Objects
|
dpjanes_node-iotdb
|
train
|
ebe2d3623050347fdfa4b5dbf1efc55c477e6225
|
diff --git a/RAPIDpy/helper_functions.py b/RAPIDpy/helper_functions.py
index <HASH>..<HASH> 100644
--- a/RAPIDpy/helper_functions.py
+++ b/RAPIDpy/helper_functions.py
@@ -8,13 +8,12 @@
##
from csv import reader as csvreader
from csv import writer as csvwriter
-from datetime import datetime
from netCDF4 import Dataset
from numpy import where, unique
+from numpy.ma import masked
from numpy.testing import assert_almost_equal
from os import remove
-from os.path import dirname, join
-from pytz import utc
+import time
#------------------------------------------------------------------------------
# HELPER FUNCTIONS
@@ -146,11 +145,17 @@ def write_flows_to_csv(path_to_rapid_qout_file, path_to_output_file,
reach_index = where(reach_ids==reach_id)[0][0]
nc_vars = data_nc.variables.keys()
-
+
+ time_var_valid = False
+ if 'time' in nc_vars:
+ if len(data_nc.dimensions['time'])>0:
+ if not (data_nc.variables['time'][:] == masked).any():
+ time_var_valid = True
+
#analyze and write
- if 'time' in nc_vars and len(data_nc.dimensions[id_dim_name])>0:
+ if time_var_valid:
if daily:
- current_day = datetime.fromtimestamp(data_nc.variables['time'][0], tz=utc)
+ current_day = time.gmtime(data_nc.variables['time'][0])
flow = 0
num_days = 0
@@ -158,14 +163,14 @@ def write_flows_to_csv(path_to_rapid_qout_file, path_to_output_file,
with open(path_to_output_file, 'w') as outcsv:
writer = csvwriter(outcsv)
for idx, t in enumerate(data_nc.variables['time'][:]):
- var_time = datetime.fromtimestamp(t, tz=utc)
- if current_day.day == var_time.day:
+ var_time = time.gmtime(t)
+ if current_day.tm_yday == var_time.tm_yday:
flow += qout_arr[idx]
num_days += 1
else:
if num_days > 0:
#write last average
- writer.writerow([current_day.strftime("%Y/%m/%d"), flow/num_days])
+ writer.writerow([time.strftime("%Y/%m/%d", current_day), flow/num_days])
#start new average
current_day = var_time
@@ -173,14 +178,15 @@ def write_flows_to_csv(path_to_rapid_qout_file, path_to_output_file,
flow = qout_arr[idx]
else:
qout = get_rapid_timeseries(data_nc, reach_index, id_dim_name, out_var)
- time = data_nc.variables['time'][:]
+ time_array = data_nc.variables['time'][:]
with open(path_to_output_file, 'w') as outcsv:
writer = csvwriter(outcsv)
for index in xrange(len(qout)):
- var_time = datetime.fromtimestamp(time[index], tz=utc)
- writer.writerow([var_time.strftime("%Y/%m/%d %H:00"), qout[index]])
+ var_time = time.gmtime(time_array[index])
+ writer.writerow([time.strftime("%Y/%m/%d %H:00", var_time), qout[index]])
else:
+ print "Valid time variable not found. Printing values only ..."
qout = get_rapid_timeseries(data_nc, reach_index, id_dim_name, out_var)
with open(path_to_output_file, 'w') as outcsv:
writer = csvwriter(outcsv)
|
updated code for time to help fix epoch issues
|
erdc_RAPIDpy
|
train
|
a88ab914d5cda26f7295ad1fea8e0fa5988a6c65
|
diff --git a/aionationstates/__init__.py b/aionationstates/__init__.py
index <HASH>..<HASH> 100644
--- a/aionationstates/__init__.py
+++ b/aionationstates/__init__.py
@@ -1,4 +1,5 @@
-from aionationstates.nation import Nation, NationControl
+from aionationstates.nation import Nation
+from aionationstates.nation_control import NationControl
from aionationstates.region import Region
from aionationstates.world import World
diff --git a/aionationstates/nation.py b/aionationstates/nation.py
index <HASH>..<HASH> 100644
--- a/aionationstates/nation.py
+++ b/aionationstates/nation.py
@@ -6,7 +6,7 @@ from aionationstates.shards import Census
class Nation(Census, Session):
- def __init__(self, id):
+ def __init__(self, id, *args, **kwargs):
self.id = normalize(id)
super().__init__(*args, **kwargs)
diff --git a/aionationstates/region.py b/aionationstates/region.py
index <HASH>..<HASH> 100644
--- a/aionationstates/region.py
+++ b/aionationstates/region.py
@@ -7,7 +7,7 @@ from aionationstates.shards import Census
class Region(Census, Session):
- def __init__(self, id):
+ def __init__(self, id, *args, **kwargs):
self.id = normalize(id)
super().__init__(*args, **kwargs)
|
i forgot to test things before committing again
|
micha030201_aionationstates
|
train
|
2641a35bfc6c65eb7dc71674fa023d5286a94aff
|
diff --git a/src/tablesort.js b/src/tablesort.js
index <HASH>..<HASH> 100644
--- a/src/tablesort.js
+++ b/src/tablesort.js
@@ -55,11 +55,12 @@
},
sortTable: function(header, update) {
- var that = this;
- var column = header.cellIndex;
- var sortFunction;
- var t = getParent(header, 'table');
- var item = '', i = 0;
+ var that = this,
+ column = header.cellIndex,
+ sortFunction,
+ t = getParent(header, 'table'),
+ item = '',
+ i = 0;
if (t.rows.length <= 1) {
return;
@@ -81,8 +82,8 @@
// Possible sortFunction scenarios
var sortCaseInsensitive = function (a, b) {
- var aa = getInnerText(a.cells[that.col]).toLowerCase();
- var bb = getInnerText(b.cells[that.col]).toLowerCase();
+ var aa = getInnerText(a.cells[that.col]).toLowerCase(),
+ bb = getInnerText(b.cells[that.col]).toLowerCase();
if(aa === bb) {
return 0;
@@ -96,9 +97,10 @@
};
var sortNumber = function (a, b) {
- var aa = getInnerText(a.cells[that.col]);
+ var aa = getInnerText(a.cells[that.col]),
+ bb = getInnerText(b.cells[that.col]);
+
aa = cleanNumber(aa);
- var bb = getInnerText(b.cells[that.col]);
bb = cleanNumber(bb);
return compareNumber(bb, aa);
};
@@ -119,26 +121,19 @@
}
this.col = column;
- var firstRow = [],
- newRows = [],
- k, j;
+ var newRows = [],
+ j = 0;
- for (k = 0; k < t.tBodies.length; k++) {
- for(i = 0; i < t.tBodies[k].rows[0].length; i++) {
- firstRow[i] = t.tBodies[k].rows[0][i];
- }
- }
-
- for (k = 0; k < t.tBodies.length; k++) {
+ for (i = 0; i < t.tBodies.length; i++) {
if (!that.thead) {
// skip the first row
- for(j = 1; j < t.tBodies[k].rows.length; j++) {
- newRows[j - 1] = t.tBodies[k].rows[j];
+ for(j = 1; j < t.tBodies[i].rows.length; j++) {
+ newRows[j - 1] = t.tBodies[i].rows[j];
}
} else {
// don't skip the first row
- for(j = 0; j < t.tBodies[k].rows.length; j++) {
- newRows[j] = t.tBodies[k].rows[j];
+ for(j = 0; j < t.tBodies[i].rows.length; j++) {
+ newRows[j] = t.tBodies[i].rows[j];
}
}
}
@@ -254,9 +249,10 @@
},
compareNumber = function (a, b) {
- var aa = parseFloat(a);
+ var aa = parseFloat(a),
+ bb = parseFloat(b);
+
a = isNaN(aa) ? 0 : aa;
- var bb = parseFloat(b);
b = isNaN(bb) ? 0 : bb;
return a - b;
},
|
Update src/tablesort.js
- Combine some vars into single ones
- Remove unused firstRow in sortTable
- Remove `k` variable in sortTable, and use `i` instead
|
tristen_tablesort
|
train
|
49835c3bb14e501ed71f925751e987c98d33de43
|
diff --git a/core/PaginatedList.php b/core/PaginatedList.php
index <HASH>..<HASH> 100644
--- a/core/PaginatedList.php
+++ b/core/PaginatedList.php
@@ -80,7 +80,7 @@ class PaginatedList extends SS_ListDecorator {
* @param int $page
*/
public function setCurrentPage($page) {
- $this->pageStart = ($page - 1) * $this->pageLength;
+ $this->pageStart = ($page - 1) * $this->getPageLength();
return $this;
}
@@ -91,8 +91,8 @@ class PaginatedList extends SS_ListDecorator {
*/
public function getPageStart() {
if ($this->pageStart === null) {
- if ($this->request && isset($this->request[$this->getVar])) {
- $this->pageStart = (int) $this->request[$this->getVar];
+ if ($this->request && isset($this->request[$this->getPaginationGetVar()])) {
+ $this->pageStart = (int) $this->request[$this->getPaginationGetVar()];
} else {
$this->pageStart = 0;
}
@@ -181,7 +181,7 @@ class PaginatedList extends SS_ListDecorator {
if($this->limitItems) {
$tmptList = clone $this->list;
return new IteratorIterator(
- $tmptList->limit($this->pageLength, $this->getPageStart())
+ $tmptList->limit($this->getPageLength(), $this->getPageStart())
);
} else {
return new IteratorIterator($this->list);
@@ -223,7 +223,7 @@ class PaginatedList extends SS_ListDecorator {
for ($i = $start; $i < $end; $i++) {
$result->push(new ArrayData(array(
'PageNum' => $i + 1,
- 'Link' => HTTP::setGetVar($this->getVar, $i * $this->pageLength),
+ 'Link' => HTTP::setGetVar($this->getPaginationGetVar(), $i * $this->getPageLength()),
'CurrentBool' => $this->CurrentPage() == ($i + 1)
)));
}
@@ -292,7 +292,7 @@ class PaginatedList extends SS_ListDecorator {
}
for ($i = 0; $i < $total; $i++) {
- $link = HTTP::setGetVar($this->getVar, $i * $this->pageLength);
+ $link = HTTP::setGetVar($this->getPaginationGetVar(), $i * $this->getPageLength());
$num = $i + 1;
$emptyRange = $num != 1 && $num != $total && (
@@ -321,14 +321,14 @@ class PaginatedList extends SS_ListDecorator {
* @return int
*/
public function CurrentPage() {
- return floor($this->getPageStart() / $this->pageLength) + 1;
+ return floor($this->getPageStart() / $this->getPageLength()) + 1;
}
/**
* @return int
*/
public function TotalPages() {
- return ceil($this->getTotalItems() / $this->pageLength);
+ return ceil($this->getTotalItems() / $this->getPageLength());
}
/**
@@ -369,9 +369,9 @@ class PaginatedList extends SS_ListDecorator {
*/
public function LastItem() {
if ($start = $this->getPageStart()) {
- return min($start + $this->pageLength, $this->getTotalItems());
+ return min($start + $this->getPageLength(), $this->getTotalItems());
} else {
- return min($this->pageLength, $this->getTotalItems());
+ return min($this->getPageLength(), $this->getTotalItems());
}
}
@@ -381,7 +381,7 @@ class PaginatedList extends SS_ListDecorator {
* @return string
*/
public function FirstLink() {
- return HTTP::setGetVar($this->getVar, 0);
+ return HTTP::setGetVar($this->getPaginationGetVar(), 0);
}
/**
@@ -390,7 +390,7 @@ class PaginatedList extends SS_ListDecorator {
* @return string
*/
public function LastLink() {
- return HTTP::setGetVar($this->getVar, ($this->TotalPages() - 1) * $this->pageLength);
+ return HTTP::setGetVar($this->getPaginationGetVar(), ($this->TotalPages() - 1) * $this->getPageLength());
}
/**
@@ -401,7 +401,7 @@ class PaginatedList extends SS_ListDecorator {
*/
public function NextLink() {
if ($this->NotLastPage()) {
- return HTTP::setGetVar($this->getVar, $this->getPageStart() + $this->pageLength);
+ return HTTP::setGetVar($this->getPaginationGetVar(), $this->getPageStart() + $this->getPageLength());
}
}
@@ -413,8 +413,8 @@ class PaginatedList extends SS_ListDecorator {
*/
public function PrevLink() {
if ($this->NotFirstPage()) {
- return HTTP::setGetVar($this->getVar, $this->getPageStart() - $this->pageLength);
+ return HTTP::setGetVar($this->getPaginationGetVar(), $this->getPageStart() - $this->getPageLength());
}
}
- }
+}
|
Updated calls to methods instead of firect properties in PaginatedList
|
silverstripe_silverstripe-framework
|
train
|
b5710a6a2128405f424b0f6670260b2e662fefc3
|
diff --git a/plugins/outputs/datadog/datadog.go b/plugins/outputs/datadog/datadog.go
index <HASH>..<HASH> 100644
--- a/plugins/outputs/datadog/datadog.go
+++ b/plugins/outputs/datadog/datadog.go
@@ -5,6 +5,7 @@ import (
"encoding/json"
"fmt"
"log"
+ "math"
"net/http"
"net/url"
"strings"
@@ -63,9 +64,6 @@ func (d *Datadog) Connect() error {
}
func (d *Datadog) Write(metrics []telegraf.Metric) error {
- if len(metrics) == 0 {
- return nil
- }
ts := TimeSeries{}
tempSeries := []*Metric{}
metricCounter := 0
@@ -75,6 +73,10 @@ func (d *Datadog) Write(metrics []telegraf.Metric) error {
metricTags := buildTags(m.TagList())
host, _ := m.GetTag("host")
+ if len(dogMs) == 0 {
+ continue
+ }
+
for fieldName, dogM := range dogMs {
// name of the datadog measurement
var dname string
@@ -98,6 +100,10 @@ func (d *Datadog) Write(metrics []telegraf.Metric) error {
}
}
+ if len(tempSeries) == 0 {
+ return nil
+ }
+
redactedApiKey := "****************"
ts.Series = make([]*Metric, metricCounter)
copy(ts.Series, tempSeries[0:])
@@ -166,9 +172,12 @@ func buildTags(tagList []*telegraf.Tag) []string {
}
func verifyValue(v interface{}) bool {
- switch v.(type) {
+ switch v := v.(type) {
case string:
return false
+ case float64:
+ // The payload will be encoded as JSON, which does not allow NaN or Inf.
+ return !math.IsNaN(v) && !math.IsInf(v, 0)
}
return true
}
diff --git a/plugins/outputs/datadog/datadog_test.go b/plugins/outputs/datadog/datadog_test.go
index <HASH>..<HASH> 100644
--- a/plugins/outputs/datadog/datadog_test.go
+++ b/plugins/outputs/datadog/datadog_test.go
@@ -3,15 +3,15 @@ package datadog
import (
"encoding/json"
"fmt"
+ "math"
"net/http"
"net/http/httptest"
"reflect"
"testing"
"time"
- "github.com/influxdata/telegraf/testutil"
-
"github.com/influxdata/telegraf"
+ "github.com/influxdata/telegraf/testutil"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
@@ -249,3 +249,45 @@ func TestVerifyValue(t *testing.T) {
}
}
}
+
+func TestNaNIsSkipped(t *testing.T) {
+ plugin := &Datadog{
+ Apikey: "testing",
+ URL: "", // No request will be sent because all fields are skipped
+ }
+
+ err := plugin.Connect()
+ require.NoError(t, err)
+
+ err = plugin.Write([]telegraf.Metric{
+ testutil.MustMetric(
+ "cpu",
+ map[string]string{},
+ map[string]interface{}{
+ "time_idle": math.NaN(),
+ },
+ time.Now()),
+ })
+ require.NoError(t, err)
+}
+
+func TestInfIsSkipped(t *testing.T) {
+ plugin := &Datadog{
+ Apikey: "testing",
+ URL: "", // No request will be sent because all fields are skipped
+ }
+
+ err := plugin.Connect()
+ require.NoError(t, err)
+
+ err = plugin.Write([]telegraf.Metric{
+ testutil.MustMetric(
+ "cpu",
+ map[string]string{},
+ map[string]interface{}{
+ "time_idle": math.Inf(0),
+ },
+ time.Now()),
+ })
+ require.NoError(t, err)
+}
|
Skip floats that are NaN or Inf in Datadog output. (#<I>)
|
influxdata_telegraf
|
train
|
6837de0dcbf2801822b95d3e07e6e2596276a5f6
|
diff --git a/com/linuxense/javadbf/DBFField.java b/com/linuxense/javadbf/DBFField.java
index <HASH>..<HASH> 100644
--- a/com/linuxense/javadbf/DBFField.java
+++ b/com/linuxense/javadbf/DBFField.java
@@ -4,10 +4,10 @@
This file is part of JavaDBF packege.
- author: anil@linuxense
+ author: anil@linuxense.com
license: LGPL (http://www.gnu.org/copyleft/lesser.html)
- $Id: DBFField.java,v 1.6 2004-01-08 17:47:02 anil Exp $
+ $Id: DBFField.java,v 1.7 2004-03-31 10:50:11 anil Exp $
*/
package com.linuxense.javadbf;
@@ -56,7 +56,7 @@ public class DBFField {
@return Returns the created DBFField object.
@throws IOException If any stream reading problems occures.
*/
- protected static DBFField createField( DataInputStream in)
+ protected static DBFField createField( DataInput in)
throws IOException {
DBFField field = new DBFField();
@@ -68,7 +68,7 @@ public class DBFField {
return null;
}
- in.read( field.fieldName, 1, 10); /* 1-10 */
+ in.readFully( field.fieldName, 1, 10); /* 1-10 */
field.fieldName[0] = t_byte;
for( int i=0; i<field.fieldName.length; i++) {
@@ -88,7 +88,7 @@ public class DBFField {
field.workAreaId = in.readByte(); /* 20 */
field.reserv2 = Utils.readLittleEndianShort( in); /* 21-22 */
field.setFieldsFlag = in.readByte(); /* 23 */
- in.read( field.reserv4); /* 24-30 */
+ in.readFully( field.reserv4); /* 24-30 */
field.indexFieldFlag = in.readByte(); /* 31 */
return field;
@@ -101,10 +101,10 @@ public class DBFField {
@param os OutputStream
@throws IOException if any stream related issues occur.
*/
- protected void write( OutputStream os)
+ protected void write( DataOutput out)
throws IOException {
- DataOutputStream out = new DataOutputStream( os);
+ //DataOutputStream out = new DataOutputStream( os);
// Field Name
out.write( fieldName); /* 0-10 */
@@ -130,7 +130,7 @@ public class DBFField {
*/
public String getName() {
- return new String( fieldName, 0, nameNullIndex);
+ return new String( this.fieldName, 0, nameNullIndex);
}
/**
@@ -182,11 +182,20 @@ public class DBFField {
// byte indexFieldFlag; /* 31 */
/**
+ * @deprecated This method is depricated as of version 0.3.3.1 and is replaced by {@link #setName( String)}.
+ */
+ public void setFieldName( String value) {
+
+ setName( value);
+ }
+
+ /**
Sets the name of the field.
@param name of the field as String.
+ @since 0.3.3.1
*/
- public void setFieldName( String value) {
+ public void setName( String value) {
if( value == null) {
@@ -199,6 +208,7 @@ public class DBFField {
}
this.fieldName = value.getBytes();
+ this.nameNullIndex = this.fieldName.length;
}
/**
|
Code clean up. Method name setFieldName deprecated.
|
albfernandez_javadbf
|
train
|
075fe2ccc6ce472a2e8a40181b4255ce405afab3
|
diff --git a/packages/vaex-core/setup.py b/packages/vaex-core/setup.py
index <HASH>..<HASH> 100644
--- a/packages/vaex-core/setup.py
+++ b/packages/vaex-core/setup.py
@@ -49,6 +49,7 @@ setup(name=name+'-core',
setup_requires=['numpy'],
install_requires=install_requires_core,
license=license,
- packages=['vaex', 'vaex.core', 'vaex.file', 'vaex.test'],
+ package_data={'vaex': ['test/files/*.fits', 'test/files/*.vot', 'test/files/*.hdf5']},
+ packages=['vaex', 'vaex.core', 'vaex.file', 'vaex.test', 'vaex.ext'],
ext_modules=[extension_vaexfast],
zip_safe=False,)
\ No newline at end of file
|
fix: core: include unittest and files for testing
|
vaexio_vaex
|
train
|
4e3b62e466df9eaeec881833cd4e45b5e34f7756
|
diff --git a/cake/libs/configure.php b/cake/libs/configure.php
index <HASH>..<HASH> 100644
--- a/cake/libs/configure.php
+++ b/cake/libs/configure.php
@@ -863,8 +863,6 @@ class App {
if ($name != null && !class_exists($name . $ext['class'])) {
if ($load = self::__mapped($name . $ext['class'], $type, $plugin)) {
if (self::__load($load)) {
- self::__overload($type, $name . $ext['class'], $parent);
-
if (self::$return) {
return include($load);
}
@@ -904,7 +902,6 @@ class App {
if ($directory !== null) {
self::$__cache = true;
self::__map($directory . $file, $name . $ext['class'], $type, $plugin);
- self::__overload($type, $name . $ext['class'], $parent);
if (self::$return) {
return include($directory . $file);
@@ -1047,17 +1044,6 @@ class App {
}
/**
- * Used to overload objects as needed.
- *
- * @param string $type Model or Helper
- * @param string $name Class name to overload
- * @access private
- */
- private static function __overload($type, $name, $parent) {
-
- }
-
-/**
* Loads parent classes based on $type.
* Returns a prefix or suffix needed for loading files.
*
diff --git a/cake/libs/router.php b/cake/libs/router.php
index <HASH>..<HASH> 100644
--- a/cake/libs/router.php
+++ b/cake/libs/router.php
@@ -886,10 +886,9 @@ class Router {
*
* @param array $url A url that didn't match any routes
* @return string A generated url for the array
- * @access protected
* @see Router::url()
*/
- function _handleNoRoute($url) {
+ protected static function _handleNoRoute($url) {
$named = $args = array();
$skip = array_merge(
array('bare', 'action', 'controller', 'plugin', 'prefix'),
|
Removing vestigial method in App.
Fixing E_STRICT error in Router.
|
cakephp_cakephp
|
train
|
eddd25d4b889d639fad9256ddb4b956aae0570ba
|
diff --git a/nudibranch/tests.py b/nudibranch/tests.py
index <HASH>..<HASH> 100644
--- a/nudibranch/tests.py
+++ b/nudibranch/tests.py
@@ -167,6 +167,49 @@ class FileVerifierTests(BaseAPITest):
self.assertEqual('That filename already exists for the project',
info['message'])
+ def test_create_invalid_lines(self):
+ project = Session.query(Project).first()
+ json_data = {'filename': 'File 1', 'min_lines': '10', 'max_lines': '9',
+ 'min_size': '0', 'project_id': str(project.id)}
+ request = self.make_request(json_body=json_data)
+ info = file_verifier_create(request)
+ self.assertEqual(HTTPBadRequest.code, request.response.status_code)
+ self.assertEqual('min_lines cannot be > max_lines', info['messages'])
+
+ def test_create_invalid_maxes(self):
+ project = Session.query(Project).first()
+ json_data = {'filename': 'File 1', 'min_lines': '0', 'min_size': '0',
+ 'max_lines': '10', 'max_size': '9',
+ 'project_id': str(project.id)}
+ request = self.make_request(json_body=json_data)
+ info = file_verifier_create(request)
+ self.assertEqual(HTTPBadRequest.code, request.response.status_code)
+ self.assertEqual('max_lines cannot be > max_size', info['messages'])
+
+ def test_create_invalid_mins(self):
+ project = Session.query(Project).first()
+ json_data = {'filename': 'File 1', 'min_lines': '1', 'min_size': '0',
+ 'project_id': str(project.id)}
+ request = self.make_request(json_body=json_data)
+ info = file_verifier_create(request)
+ self.assertEqual(HTTPBadRequest.code, request.response.status_code)
+ self.assertEqual('min_lines cannot be > min_size', info['messages'])
+
+ def test_create_invalid_size(self):
+ project = Session.query(Project).first()
+ json_data = {'filename': 'File 1', 'min_size': '10', 'max_size': '9',
+ 'min_lines': '0', 'project_id': str(project.id)}
+ request = self.make_request(json_body=json_data)
+ info = file_verifier_create(request)
+ self.assertEqual(HTTPBadRequest.code, request.response.status_code)
+ self.assertEqual('min_size cannot be > max_size', info['messages'])
+
+ def test_create_no_params(self):
+ request = self.make_request(json_body={})
+ info = file_verifier_create(request)
+ self.assertEqual(HTTPBadRequest.code, request.response.status_code)
+ self.assertEqual(4, len(info['messages']))
+
def test_create_valid(self):
project = Session.query(Project).first()
json_data = {'filename': 'File 2', 'min_size': '0', 'min_lines': '0',
diff --git a/nudibranch/views.py b/nudibranch/views.py
index <HASH>..<HASH> 100644
--- a/nudibranch/views.py
+++ b/nudibranch/views.py
@@ -70,6 +70,16 @@ def class_view(request):
project_id=TextNumber('project_id', min_value=0))
def file_verifier_create(request, filename, min_size, max_size, min_lines,
max_lines, project_id):
+ # Additional verification
+ if max_size is not None and max_size < min_size:
+ return http_bad_request(request, 'min_size cannot be > max_size')
+ if max_lines is not None and max_lines < min_lines:
+ return http_bad_request(request, 'min_lines cannot be > max_lines')
+ if min_size < min_lines:
+ return http_bad_request(request, 'min_lines cannot be > min_size')
+ if max_size is not None and max_lines is not None and max_size < max_lines:
+ return http_bad_request(request, 'max_lines cannot be > max_size')
+
session = Session()
project = Project.fetch_by_id(project_id)
if not project:
@@ -93,7 +103,6 @@ def file_verifier_create(request, filename, min_size, max_size, min_lines,
return http_created(request, redir_location=redir_location)
-
@view_config(route_name='home', renderer='templates/home.pt',
request_method='GET')
@site_layout('nudibranch:templates/layout.pt')
|
Add additional file_verifier_create tests.
|
ucsb-cs_submit
|
train
|
e5146c8e742a226c7c48ff403491f90dea1a97b9
|
diff --git a/Net/OpenID/OIDUtil.php b/Net/OpenID/OIDUtil.php
index <HASH>..<HASH> 100644
--- a/Net/OpenID/OIDUtil.php
+++ b/Net/OpenID/OIDUtil.php
@@ -21,6 +21,25 @@ $_Net_OpenID_digits = "0123456789";
$_Net_OpenID_punct = "!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~";
/**
+ * Convenience function for getting array values.
+ */
+function Net_OpenID_array_get($arr, $key, $fallback = null)
+{
+ if (is_array($arr)) {
+ if (array_key_exists($key, $arr)) {
+ return $arr[$key];
+ } else {
+ return $fallback;
+ }
+ } else {
+ trigger_error("Net_OpenID_array_get expected " .
+ "array as first parameter", E_USER_WARNING);
+ return false;
+ }
+}
+
+
+/**
* Prints the specified message using trigger_error(E_USER_NOTICE).
*/
function Net_OpenID_log($message, $unused_level = 0)
|
[project @ Added array_get convenience function]
|
openid_php-openid
|
train
|
c24402ebced91e7ada334e6e48ab119b494bacea
|
diff --git a/src/Bridge/Scope.php b/src/Bridge/Scope.php
index <HASH>..<HASH> 100644
--- a/src/Bridge/Scope.php
+++ b/src/Bridge/Scope.php
@@ -25,6 +25,7 @@ class Scope implements ScopeEntityInterface
*
* @return mixed
*/
+ #[\ReturnTypeWillChange]
public function jsonSerialize()
{
return $this->getIdentifier();
|
fix: Internal method return types, php <I> tests compatibility
|
laravel_passport
|
train
|
949fb754d3f213968c356c93b57f02c32afd2f2c
|
diff --git a/lib/serial.js b/lib/serial.js
index <HASH>..<HASH> 100644
--- a/lib/serial.js
+++ b/lib/serial.js
@@ -14,7 +14,7 @@
* limitations under the License.
*/
-const debug = require('debug')('canbusjs-ntg1')
+const debug = require('debug')('signalk:canbusjs')
const Transform = require('stream').Transform
const SerialPort = require('serialport')
const isArray = require('lodash').isArray
@@ -129,7 +129,7 @@ SerialStream.prototype.start = function () {
if ( this.options.app ) {
this.options.app.on('nmea2000out', msg => {
- debug('sending raw %s', msg)
+ //debug(`sending ${msg}`)
var buf = parseInput(msg)
buf = composeMessage(N2K_MSG_SEND, buf, buf.length)
that.serial.write(buf)
@@ -138,7 +138,6 @@ SerialStream.prototype.start = function () {
this.options.app.on('nmea2000JsonOut', msg => {
var data = toPgn(msg)
var actisense = encodeActisense({ pgn: msg.pgn, data, dst: msg.dst})
- debug('sending pgn %j : %s', msg, actisense)
var buf = parseInput(actisense)
buf = composeMessage(N2K_MSG_SEND, buf, buf.length)
that.serial.write(buf)
@@ -203,7 +202,7 @@ function read1Byte(that, c)
that.bufferOffset = 0
that.stat = MSG_MESSAGE;
}
- else if ((c == DLE) || ((c == ESC) && isFile) || that.noEscape)
+ else if ((c == DLE) || ((c == ESC) && that.isFile) || that.noEscape)
{
that.buffer.writeUInt8(c, that.bufferOffset)
that.bufferOffset++
@@ -393,7 +392,7 @@ SerialStream.prototype.end = function () {
}
SerialStream.prototype._transform = function (chunk, encoding, done) {
- //debug(`got data ${typeof chunk}`)
+ debug(`got data ${typeof chunk}`)
readData(this, chunk)
done()
}
|
feature: add ability to debug n2k sending to actisense (#<I>)
|
canboat_canboatjs
|
train
|
2fcb9c1d878ca083ad75fb8810a88c41e9ee3c1e
|
diff --git a/src/main/java/org/dasein/cloud/cloudstack/compute/VirtualMachines.java b/src/main/java/org/dasein/cloud/cloudstack/compute/VirtualMachines.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/dasein/cloud/cloudstack/compute/VirtualMachines.java
+++ b/src/main/java/org/dasein/cloud/cloudstack/compute/VirtualMachines.java
@@ -628,18 +628,21 @@ public class VirtualMachines extends AbstractVMSupport {
VirtualMachine vm = null;
- Document responseDoc = provider.waitForJob(doc, "Launch Server");
+ if (serverId == null) {
+ //only wait for job if we don't already have the resource id
+ Document responseDoc = provider.waitForJob(doc, "Launch Server");
- //parse vm from job completion response to capture vm passwords on initial launch.
- if (responseDoc != null){
- NodeList nodeList = responseDoc.getElementsByTagName("virtualmachine");
- if (nodeList.getLength() > 0) {
- Node virtualMachine = nodeList.item(0);
- vm = toVirtualMachine(virtualMachine);
- if( vm != null ) {
- return vm;
- }
- }
+ //parse vm from job completion response to capture vm passwords on initial launch.
+ if (responseDoc != null){
+ NodeList nodeList = responseDoc.getElementsByTagName("virtualmachine");
+ if (nodeList.getLength() > 0) {
+ Node virtualMachine = nodeList.item(0);
+ vm = toVirtualMachine(virtualMachine);
+ if( vm != null ) {
+ return vm;
+ }
+ }
+ }
}
if (vm == null){
|
bugzid <I>: speed up server launch so that pause/terminate settings takes effect
|
greese_dasein-cloud-cloudstack
|
train
|
c1ac3fd932297b566693d0bafcf74e45a17972bd
|
diff --git a/lib/surrounded/context.rb b/lib/surrounded/context.rb
index <HASH>..<HASH> 100644
--- a/lib/surrounded/context.rb
+++ b/lib/surrounded/context.rb
@@ -109,12 +109,6 @@ module Surrounded
private
- def preinitialize
- end
-
- def postinitialize
- end
-
def role_map
@role_map ||= RoleMap.new
end
diff --git a/lib/surrounded/context/initializing.rb b/lib/surrounded/context/initializing.rb
index <HASH>..<HASH> 100644
--- a/lib/surrounded/context/initializing.rb
+++ b/lib/surrounded/context/initializing.rb
@@ -3,9 +3,7 @@ module Surrounded
module Initializing
def new(*args, &block)
instance = allocate
- instance.send(:preinitialize)
instance.send(:initialize, *args, &block)
- instance.send(:postinitialize)
instance
end
@@ -18,11 +16,9 @@ module Surrounded
line = __LINE__
mod.class_eval "
def initialize(#{setup_args.join(',')})
- preinitialize
arguments = method(__method__).parameters.map{|arg| eval(arg[1].to_s) }
@role_map = RoleMap.new
map_roles(#{setup_args}.zip(arguments))
- postinitialize
end
", __FILE__, line
const_set("ContextInitializer", mod)
|
remove preinitialize and postinitialize hooks
|
saturnflyer_surrounded
|
train
|
684ae8a07ff727151c2c40ba1ae0a6dcaec10411
|
diff --git a/lib/Parser/MimeDir.php b/lib/Parser/MimeDir.php
index <HASH>..<HASH> 100644
--- a/lib/Parser/MimeDir.php
+++ b/lib/Parser/MimeDir.php
@@ -13,9 +13,11 @@ use
/**
* MimeDir parser.
*
- * This class parses iCalendar/vCard files and returns an array.
+ * This class parses iCalendar 2.0 and vCard 2.1, 3.0 and 4.0 files. This
+ * parser will return one of the following two objects from the parse method:
*
- * The array is identical to the format jCard/jCal use.
+ * Sabre\VObject\Component\VCalendar
+ * Sabre\VObject\Component\VCard
*
* @copyright Copyright (C) 2007-2014 fruux GmbH. All rights reserved.
* @author Evert Pot (http://evertpot.com/)
|
That statement hasn't been true for a long time.
|
sabre-io_vobject
|
train
|
2f43c238db6726475178c0786989909ee83f7a74
|
diff --git a/ToolStart.php b/ToolStart.php
index <HASH>..<HASH> 100755
--- a/ToolStart.php
+++ b/ToolStart.php
@@ -1,11 +1,13 @@
<?php
+// Re-ensure autoload.php for backwards-compatibility with older tools on
+// Wikimedia Tool Labs that include ToolStart.php directly.
+require_once __DIR__ . '/vendor/autoload.php';
require_once __DIR__ . '/src/defines.php';
use Krinkle\Intuition\Intuition;
use Krinkle\Intuition\Util;
-// Kept for backwards-compatibility with older tools on
-// Wikimedia Tool Labs not yet using Composer.
+// Kept for backwards-compatibility with older tools not yet using Composer.
// .. rename from TsIntuition to Intuition in v0.1.3.
class_alias( Intuition::class, 'TsIntuition' );
class_alias( Util::class, 'TsIntuitionUtil' );
|
fixup 2cf6d<I>: Ensure vendor/autoload.php is included
Otherwise the references class names are not known.
Broke <URL>
|
Krinkle_intuition
|
train
|
b4d34c6ad790020f1c0332fdcae1e0441d4d3941
|
diff --git a/src/sap.ui.mdc/src/sap/ui/mdc/condition/Operator.js b/src/sap.ui.mdc/src/sap/ui/mdc/condition/Operator.js
index <HASH>..<HASH> 100644
--- a/src/sap.ui.mdc/src/sap/ui/mdc/condition/Operator.js
+++ b/src/sap.ui.mdc/src/sap/ui/mdc/condition/Operator.js
@@ -37,16 +37,16 @@ sap.ui.define([
* If a function or property is initial, the default implementation is used.
*
* @extends sap.ui.base.Object
- * @param {object} [oConfiguration] Properties of the operator
- * @param {string} [oConfiguration.name] Name of the operator used in the condition
+ * @param {object} oConfiguration Properties of the operator
+ * @param {string} oConfiguration.name Name of the operator used in the condition
* @param {string} [oConfiguration.filterOperator] The operator's default filter operator that is created as defined in <code>sap.ui.model.FilterOperator</code>
- * @param {string} [oConfiguration.tokenParse] The string representation of the regular expression that is used by the operator to parse a value
+ * @param {string} oConfiguration.tokenParse The string representation of the regular expression that is used by the operator to parse a value
* to eliminate the operator and get the data string. A placeholder that refers to the translated tokenText can be used. <code>#tokenText#</code> refers to the
* <code>oConfiguration.tokenText</code> property if given.
- * @param {string} [oConfiguration.tokenFormat] The string representation that is used by the operator to format a value
+ * @param {string} oConfiguration.tokenFormat The string representation that is used by the operator to format a value
* into an output string. For the value placeholder <code>{0}</code> and <code>{1}</code> are used.
* A placeholder that refers to the translated tokenText can be used. <code>#tokenText#</code> refers to the <code>oConfiguration.tokenText</code> property if given.
- * @param {string[]} [oConfiguration.valueTypes] Array of type to be used. The length of the array defines the number of values that
+ * @param {string[]} oConfiguration.valueTypes Array of type to be used. The length of the array defines the number of values that
* need to be entered with the operator.
* If set to Operator.ValueType.Self the <code>Type</code> of the <code>Field</code> or <code>FilterField</code> using the <code>Operator</code> is used.
* If set to Operator.ValueType.Static a simple string type is used to display static text.
diff --git a/src/sap.ui.mdc/src/sap/ui/mdc/condition/RangeOperator.js b/src/sap.ui.mdc/src/sap/ui/mdc/condition/RangeOperator.js
index <HASH>..<HASH> 100644
--- a/src/sap.ui.mdc/src/sap/ui/mdc/condition/RangeOperator.js
+++ b/src/sap.ui.mdc/src/sap/ui/mdc/condition/RangeOperator.js
@@ -23,8 +23,8 @@ sap.ui.define([
* If a function or property is initial, the default implementation is used
*
* @extends sap.ui.mdc.condition.Operator
- * @param {object} [oConfiguration]
- * @param {string} [oConfiguration.name] Name of the operator used in the condition
+ * @param {object} oConfiguration
+ * @param {string} oConfiguration.name Name of the operator used in the condition
* @param {string} [oConfiguration.filterOperator] should not be set
* @param {string} [oConfiguration.tokenParse]
* @param {string} [oConfiguration.tokenFormat]
|
[INTERNAL] Operator, make configuration mandatory in JSDoc
Change-Id: Ibd<I>f<I>ffb1e<I>c<I>ed<I>ded9bd5a
|
SAP_openui5
|
train
|
18571af1d1aca7f43a78f01e35700c1542f90879
|
diff --git a/lib/opal/nodes/logic.rb b/lib/opal/nodes/logic.rb
index <HASH>..<HASH> 100644
--- a/lib/opal/nodes/logic.rb
+++ b/lib/opal/nodes/logic.rb
@@ -222,7 +222,7 @@ module Opal
elsif stmt?
push 'return ', return_val
else
- raise SyntaxError, 'void value expression: cannot return as an expression'
+ error 'void value expression: cannot return as an expression'
end
end
end
|
Properly Raise an Opal::SyntaxError through #error
|
opal_opal
|
train
|
3353e142e80cf934f674629ae4b9ef6132440e63
|
diff --git a/src/pyrocore/scripts/rtcontrol.py b/src/pyrocore/scripts/rtcontrol.py
index <HASH>..<HASH> 100644
--- a/src/pyrocore/scripts/rtcontrol.py
+++ b/src/pyrocore/scripts/rtcontrol.py
@@ -139,7 +139,10 @@ class RtorrentControl(ScriptBaseWithConfig):
ARGS_HELP = "<filter>..."
# additonal stuff appended after the command handler's docstring
- ADDITIONAL_HELP = ["", "", "Use --help-fields to list all fields and their description."]
+ ADDITIONAL_HELP = ["", "",
+ "Use --help to get a list of all options.",
+ "Use --help-fields to list all fields and their description.",
+ ]
# additional values for output formatting
FORMATTER_DEFAULTS = dict(
|
hint on --help if called without args
|
pyroscope_pyrocore
|
train
|
0d2e84460078502a49c3cc8f0be04b737f143e6f
|
diff --git a/src/Input/Command.php b/src/Input/Command.php
index <HASH>..<HASH> 100644
--- a/src/Input/Command.php
+++ b/src/Input/Command.php
@@ -278,9 +278,7 @@ class Command extends Parser
protected function handleUnknown(string $arg, string $value = null)
{
if ($this->_allowUnknown) {
- $this->set($this->toCamelCase($arg), $value);
-
- return;
+ return $this->set($this->toCamelCase($arg), $value);
}
$values = \array_filter($this->values(false));
|
refactor(command): set() will tell if it ate value
|
adhocore_php-cli
|
train
|
c3529cdea6dc0f569e5639467fc391db00fccef8
|
diff --git a/src/esquire-inject.js b/src/esquire-inject.js
index <HASH>..<HASH> 100644
--- a/src/esquire-inject.js
+++ b/src/esquire-inject.js
@@ -113,7 +113,7 @@
* @class Module
* @classdesc The definition of an {@link Esquire} module
*/
- function Module(name, dependencies, constructor) {
+ function Module(name, dependencies, constructor, dynamic) {
/* Normalize names to "$global/..." */
name = globalName(name);
@@ -143,8 +143,7 @@
Object.defineProperty(this, 'constructor', { enumerable: true, configurable: false, value: constructor });
/* Hidden $$script for injection and $$dynamic flag */
- var dynamic = this.$$dynamic ? this.$$dynamic : false;
- Object.defineProperty(this, "$$dynamic", { enumerable: false, configurable: false, value: dynamic });
+ Object.defineProperty(this, "$$dynamic", { enumerable: false, configurable: false, value: dynamic || false });
Object.defineProperty(this, '$$script', { enumerable: false, configurable: false, get: function() {
return 'Esquire.define(' + JSON.stringify(this.name)
+ ',' + JSON.stringify(this.dependencies)
@@ -173,7 +172,6 @@
/* A $global dynamic module */
function GlobalModule(name) {
- this.$$dynamic = true;
Module.call(this, name, ['$global'], function($global) {
/* Find a property with a prefix */
@@ -209,9 +207,14 @@
}
return find(this.name.substring(8).split('.'), $global);
- });
+ }, true);
}
+ GlobalModule.prototype = Object.create(Module.prototype);
+ GlobalModule.prototype.constructor = GlobalModule;
+ GlobalModule.prototype.name = "GlobalModule";
+
+
/* ======================================================================== */
/* Stuff exposed statically on the Exquire class */
/* ======================================================================== */
|
Better creation and extension of global modules.
|
usrz_javascript-esquire
|
train
|
c55fb091321f40975cd37cbf6355a563bcc11998
|
diff --git a/tcases-lib/src/main/java/org/cornutum/tcases/Reducer.java b/tcases-lib/src/main/java/org/cornutum/tcases/Reducer.java
index <HASH>..<HASH> 100644
--- a/tcases-lib/src/main/java/org/cornutum/tcases/Reducer.java
+++ b/tcases-lib/src/main/java/org/cornutum/tcases/Reducer.java
@@ -514,6 +514,7 @@ public class Reducer
}
File inputDir = inputDefFile.getParentFile();
+ String project = Tcases.getProjectName( inputDefFile);
// Read the system input definition.
SystemInputDef inputDef = null;
@@ -571,7 +572,7 @@ public class Reducer
File genDefFile = options.getGenDef();
if( genDefFile == null)
{
- genDefFile = new File( inputDir, Tcases.getProjectName( inputDefFile) + "-Generators.xml");
+ genDefFile = new File( inputDir, project + "-Generators.xml");
}
else if( !genDefFile.isAbsolute())
{
@@ -630,7 +631,7 @@ public class Reducer
}
// Find a seed that generates minimum test cases for the specified function(s).
- logger_.info( "Initializing test cases to be reduced");
+ logger_.info( "[{}] Initializing test cases to be reduced", project);
int initialCount = getTestCaseCount( baseDef, genDef, functionInputDefs);
int samples;
int round;
@@ -652,7 +653,7 @@ public class Reducer
round++)
{
// Perform next round of samples.
- logger_.info( "Round {}: starting next {} samples", round, samples);
+ logger_.info( "[{}] Round {}: starting next {} samples", new Object[]{ project, round, samples});
int roundCount;
long roundSeed;
int i;
@@ -673,19 +674,19 @@ public class Reducer
reducing = i < samples;
if( reducing)
{
- logger_.info( "Round {}: after {} samples, reached {} test cases with seed={}", new Object[]{ round, i+1, roundCount, roundSeed});
+ logger_.info( "[{}] Round {}: after {} samples, reached {} test cases with seed={}", new Object[]{ project, round, i+1, roundCount, roundSeed});
minCount = roundCount;
minSeed = roundSeed;
}
else
{
- logger_.info( "Round {}: after {} samples, terminating with {} test cases", new Object[]{ round, samples, minCount});
+ logger_.info( "[{}] Round {}: after {} samples, terminating with {} test cases", new Object[]{ project, round, samples, minCount});
}
}
if( minCount >= initialCount)
{
- logger_.info( "Could not reduce initial {} test cases -- generator definition not changed", initialCount);
+ logger_.info( "[{}] Could not reduce initial {} test cases -- generator definition not changed", project, initialCount);
}
else
{
@@ -694,7 +695,7 @@ public class Reducer
GeneratorSetDocWriter genWriter = null;
try
{
- logger_.info( "Reduced to {} test cases with seed={} -- updating generator definition={}", new Object[]{ minCount, minSeed, genDefFile});
+ logger_.info( "[{}] Reduced to {} test cases with seed={} -- updating generator definition", new Object[]{ project, minCount, minSeed});
genWriter = new GeneratorSetDocWriter( new FileOutputStream( genDefFile));
genWriter.write( genDef);
}
|
Reducer: Rework logging to identify the project being reduced
|
Cornutum_tcases
|
train
|
c75816c02b55be87f4249a40506e1f5997d6def8
|
diff --git a/src/Lib/MatisseEngine.php b/src/Lib/MatisseEngine.php
index <HASH>..<HASH> 100644
--- a/src/Lib/MatisseEngine.php
+++ b/src/Lib/MatisseEngine.php
@@ -1,4 +1,5 @@
<?php
+
namespace Matisse\Lib;
use Electro\Caching\Lib\CachingFileCompiler;
@@ -105,7 +106,9 @@ class MatisseEngine implements ViewEngineInterface
else if (!is_object ($data) || !$data instanceof ViewModel)
throw new MatisseException("Argument must be an array or a <kbd>ViewModel</kbd> instance",
"Invalid data for view model.");
- $compiled->getShadowDom ()->getDataBinder ()->setViewModel ($data);
+ ($compiled instanceof CompositeComponent
+ ? $compiled->getShadowDom ()
+ : $compiled)->getDataBinder ()->setViewModel ($data);
}
/** @var DocumentFragment $compiled */
|
FIX: bug on last commit.
|
electro-modules_matisse
|
train
|
38f82a295633380ed336c81ac9ca097c5cddeda3
|
diff --git a/src/Linna/Storage/ExtendedPDO.php b/src/Linna/Storage/ExtendedPDO.php
index <HASH>..<HASH> 100644
--- a/src/Linna/Storage/ExtendedPDO.php
+++ b/src/Linna/Storage/ExtendedPDO.php
@@ -32,15 +32,10 @@ class ExtendedPDO extends PDO
public function queryWithParam(string $query, array $param) : PDOStatement
{
$statment = $this->prepare($query);
-
+
foreach ($param as $value) {
- if (count($value) < 2) {
- throw new InvalidArgumentException(__METHOD__.': Parameters array must contain at least two elements with this form: [\':name\', \'value\']');
- }
- if (strpos($value[0], ':') !== 0) {
- throw new InvalidArgumentException(__METHOD__.': Parameter name will be in the form :name');
- }
+ $this->checkValue($value);
//reassign as reference
//because bindParam need it as reference
@@ -54,4 +49,21 @@ class ExtendedPDO extends PDO
return $statment;
}
+
+ /**
+ * Check values passed to queryWithParam.
+ *
+ * @param array $value
+ * @throws InvalidArgumentException
+ */
+ private function checkValue(array &$value)
+ {
+ if (count($value) < 2) {
+ throw new InvalidArgumentException(__METHOD__.': Parameters array must contain at least two elements with this form: [\':name\', \'value\']');
+ }
+
+ if (strpos($value[0], ':') !== 0) {
+ throw new InvalidArgumentException(__METHOD__.': Parameter name will be in the form :name');
+ }
+ }
}
diff --git a/tests/Storage/ExtendedPDOTest.php b/tests/Storage/ExtendedPDOTest.php
index <HASH>..<HASH> 100644
--- a/tests/Storage/ExtendedPDOTest.php
+++ b/tests/Storage/ExtendedPDOTest.php
@@ -41,7 +41,8 @@ class ExtendedPDOTest extends TestCase
}
/**
- * Correct parameters provider
+ * Correct parameters provider.
+ *
* @return array
*/
public function correctParametersProvider() : array
@@ -52,7 +53,7 @@ class ExtendedPDOTest extends TestCase
]
],
['SELECT user_id, name, email FROM user WHERE name = :name AND user_id = :id', [
- [':name', 'root', PDO::PARAM_STR],
+ [':name', 'root', PDO::PARAM_STR],
[':id', 1, PDO::PARAM_INT]
]
],
@@ -64,8 +65,8 @@ class ExtendedPDOTest extends TestCase
}
/**
- * Test query with parameter.
- *
+ * Test query with parameters.
+ *
* @dataProvider correctParametersProvider
*/
public function testQueryWithParameters(string $query, array $param)
@@ -87,7 +88,7 @@ class ExtendedPDOTest extends TestCase
*/
public function testQueryWithParameterWithWrongParameterName()
{
- $user = (new PdoStorage($this->options))
+ (new PdoStorage($this->options))
->getResource()
->queryWithParam(
'SELECT user_id, name, email FROM user WHERE name = :name',
@@ -102,7 +103,7 @@ class ExtendedPDOTest extends TestCase
*/
public function testQueryWithParameterWithTooManyParameters()
{
- $user = (new PdoStorage($this->options))
+ (new PdoStorage($this->options))
->getResource()
->queryWithParam(
'SELECT user_id, name, email FROM user WHERE name = :name',
@@ -117,7 +118,7 @@ class ExtendedPDOTest extends TestCase
*/
public function testQueryWithParameterWithoutParameters()
{
- $user = (new PdoStorage($this->options))
+ (new PdoStorage($this->options))
->getResource()
->queryWithParam(
'SELECT user_id, name, email FROM user WHERE name = :name',
|
ExtendedPDO queryWithParam checks refactor
|
linna_framework
|
train
|
98affdfd33d81d8e9b4c7cea9454d5233047233a
|
diff --git a/lib/geocoder/railtie.rb b/lib/geocoder/railtie.rb
index <HASH>..<HASH> 100644
--- a/lib/geocoder/railtie.rb
+++ b/lib/geocoder/railtie.rb
@@ -1,5 +1,4 @@
require 'geocoder'
-require 'geocoder/orms/active_record'
module Geocoder
if defined? Rails::Railtie
@@ -18,53 +17,60 @@ module Geocoder
class Railtie
def self.insert
-
return unless defined?(::ActiveRecord)
+ ::ActiveRecord::Base.extend(ModelMethods)
+ end
+ end
- ##
- # Add methods to ActiveRecord::Base so Geocoder is accessible by models.
- #
- ::ActiveRecord::Base.class_eval do
+ ##
+ # Methods available in the model class before Geocoder is invoked.
+ #
+ module ModelMethods
- ##
- # Set attribute names and include the Geocoder module.
- #
- def self.geocoded_by(address_attr, options = {}, &block)
- _geocoder_init(
- :user_address => address_attr,
- :latitude => options[:latitude] || :latitude,
- :longitude => options[:longitude] || :longitude,
- :block => block
- )
- end
+ ##
+ # Set attribute names and include the Geocoder module.
+ #
+ def geocoded_by(address_attr, options = {}, &block)
+ geocoder_init(
+ :user_address => address_attr,
+ :latitude => options[:latitude] || :latitude,
+ :longitude => options[:longitude] || :longitude,
+ :block => block
+ )
+ end
- ##
- # Set attribute names and include the Geocoder module.
- #
- def self.reverse_geocoded_by(latitude_attr, longitude_attr, options = {})
- _geocoder_init(
- :fetched_address => options[:address] || :address,
- :latitude => latitude_attr,
- :longitude => longitude_attr
- )
- end
+ ##
+ # Set attribute names and include the Geocoder module.
+ #
+ def reverse_geocoded_by(latitude_attr, longitude_attr, options = {})
+ geocoder_init(
+ :fetched_address => options[:address] || :address,
+ :latitude => latitude_attr,
+ :longitude => longitude_attr
+ )
+ end
- def self._geocoder_init(options)
- unless _geocoder_initialized?
- class_inheritable_reader :geocoder_options
- class_inheritable_hash_writer :geocoder_options
- end
- self.geocoder_options = options
- unless _geocoder_initialized?
- include Geocoder::Orm::ActiveRecord
- end
- end
+ def geocoder_options
+ @geocoder_options
+ end
- def self._geocoder_initialized?
- included_modules.include? Geocoder::Orm::ActiveRecord
- end
+
+ private # ----------------------------------------------------------------
+
+ def geocoder_init(options)
+ unless geocoder_initialized?
+ @geocoder_options = options
+ require 'geocoder/orms/active_record'
+ include Geocoder::Orm::ActiveRecord
end
+ end
+ def geocoder_initialized?
+ begin
+ included_modules.include? Geocoder::Orm::ActiveRecord
+ rescue NameError
+ false
+ end
end
end
end
|
Clean up Railtie.
Move model class methods to separate module.
|
alexreisner_geocoder
|
train
|
e96acbf85d72806d2c90ac28f3dec456e2d86886
|
diff --git a/pilot/lib/pilot/build_manager.rb b/pilot/lib/pilot/build_manager.rb
index <HASH>..<HASH> 100644
--- a/pilot/lib/pilot/build_manager.rb
+++ b/pilot/lib/pilot/build_manager.rb
@@ -3,7 +3,7 @@ module Pilot
def upload(options)
start(options)
- options[:changelog] = truncate_changelog(options[:changelog]) if options[:changelog]
+ options[:changelog] = self.class.truncate_changelog(options[:changelog]) if options[:changelog]
UI.user_error!("No ipa file given") unless config[:ipa]
|
fix broken pilot upload (#<I>)
|
fastlane_fastlane
|
train
|
4eef449ac9241eb5bbebb832da618de65a346631
|
diff --git a/src/org/mozilla/javascript/NativeDate.java b/src/org/mozilla/javascript/NativeDate.java
index <HASH>..<HASH> 100644
--- a/src/org/mozilla/javascript/NativeDate.java
+++ b/src/org/mozilla/javascript/NativeDate.java
@@ -798,8 +798,8 @@ final class NativeDate extends IdScriptableObject {
}
private static double internalUTC(Context cx, double t) {
- double LocalTZA = cx.getTimeZone().getRawOffset();
- return t - LocalTZA - DaylightSavingTA(cx, t - LocalTZA);
+ double local = t - cx.getTimeZone().getRawOffset();
+ return local - DaylightSavingTA(cx, local);
}
private static int HourFromTime(double t) {
@@ -1070,7 +1070,7 @@ final class NativeDate extends IdScriptableObject {
// browsers doing this now
if (timeSpecified) {
- date -= cx.getTimeZone().getOffset((long) date);
+ date -= cx.getTimeZone().getRawOffset() + DaylightSavingTA(cx, date);
}
} else {
date -= (tzhour * 60 + tzmin) * msPerMinute * tzmod;
diff --git a/testsrc/org/mozilla/javascript/tests/es6/NativeDateTest.java b/testsrc/org/mozilla/javascript/tests/es6/NativeDateTest.java
index <HASH>..<HASH> 100644
--- a/testsrc/org/mozilla/javascript/tests/es6/NativeDateTest.java
+++ b/testsrc/org/mozilla/javascript/tests/es6/NativeDateTest.java
@@ -51,6 +51,22 @@ public class NativeDateTest {
}
@Test
+ public void ctorDateTimeBerlinDaylightSavingTime() {
+ String js = "new Date('2021-07-18T22:23').toISOString()";
+
+ Utils.runWithAllOptimizationLevels(
+ cx -> {
+ final Scriptable scope = cx.initStandardObjects();
+ cx.setLanguageVersion(Context.VERSION_ES6);
+ cx.setTimeZone(TimeZone.getTimeZone("Europe/Berlin"));
+
+ final Object res = cx.evaluateString(scope, js, "test.js", 0, null);
+ assertEquals("2021-07-18T20:23:00.000Z", res);
+ return null;
+ });
+ }
+
+ @Test
public void ctorDateTimeNewYork() {
String js = "new Date('2021-12-18T22:23').toISOString()";
|
minor optimization and fix daylightsavingtime handling
|
mozilla_rhino
|
train
|
4b87c4f60895c9d2dea2becd6a58c3d3167dd0e1
|
diff --git a/tchannel/sync/client.py b/tchannel/sync/client.py
index <HASH>..<HASH> 100644
--- a/tchannel/sync/client.py
+++ b/tchannel/sync/client.py
@@ -21,13 +21,14 @@
from __future__ import absolute_import
from collections import namedtuple
+from concurrent.futures import TimeoutError
from threadloop import ThreadLoop
from tornado import gen
from tchannel import glossary
from tchannel import tornado as async
-from tchannel.tornado.hyperbahn import FIRST_ADVERTISE_TIME
+from tchannel.tornado.hyperbahn import FIRST_ADVERTISE_TIME, AdvertiseError
class TChannelSyncClient(object):
@@ -107,14 +108,20 @@ class TChannelSyncClient(object):
future = self.threadloop.submit(make_request)
- # we're going to wait 10% longer, or at max 1s,
- # so advertise has a chance to timeout by itself
+ # we're going to wait 1s longer than advertises
+ # timeout mechanism, so it has a chance to timeout
wait_until = timeout or FIRST_ADVERTISE_TIME
wait_until += 1
# block for advertise's first response,
# using wait_until as a fallback timeout mechanism
- result = future.result(wait_until)
+ try:
+ result = future.result(wait_until)
+ except TimeoutError:
+ raise AdvertiseError(
+ "Failed to register with Hyperbahn "
+ "(advertise did not timeout in time)"
+ )
return result
|
only ever throw AdvertiseError when unable to advertise from sync client
|
uber_tchannel-python
|
train
|
f1442e422ccf701de7da7c3298aaf8f0beee1983
|
diff --git a/src/assertions.js b/src/assertions.js
index <HASH>..<HASH> 100644
--- a/src/assertions.js
+++ b/src/assertions.js
@@ -23,6 +23,7 @@ const assertIsEnzymeWrapper = (actual) => expect.assert(
const asserted = expect();
const original = {
+ toNotBeA: asserted.toNotBeA,
toExist: asserted.toExist,
toBeAn: asserted.toBeAn,
toBeA: asserted.toBeA,
@@ -143,9 +144,29 @@ export const toBeAn = handleEnzymeActual(original.toBeAn, function (type) {
return this;
});
+/**
+ * Asserts the enzyme wrapper contains something.
+ * @return {this} - The expectation context.
+ */
export const toExist = handleEnzymeActual(original.toExist, function () {
expect.assert(
this.actual.exists(),
'Expected element to exist'
);
});
+
+/**
+ * Assert the component is not a type.
+ * @param {String|Function} type - The type you expect your element not to be.
+ * @return {this} - The expectation context.
+ */
+export const toNotBeA = handleEnzymeActual(original.toNotBeA, function (type) {
+ const element = this.actual;
+ const notEqual = !element.is(type);
+ const displayName = getDisplayName(type);
+
+ expect.assert(
+ notEqual,
+ `Expected ${element.name()} to not be a ${displayName}`
+ );
+});
diff --git a/src/test.js b/src/test.js
index <HASH>..<HASH> 100644
--- a/src/test.js
+++ b/src/test.js
@@ -208,8 +208,49 @@ describe('expect-enzyme', () => {
});
+ describe('method "toNotBeA"', () => {
+ const element = shallow(<header />);
+
+ it('throws if the type matches', () => {
+ const assertion = () => expect(element).toNotBeA('header');
+
+ expect(assertion).toThrow();
+ });
+
+ it('does not throw if the type does not match', () => {
+ const assertion = () => expect(element).toNotBeA('div');
+
+ expect(assertion).toNotThrow();
+ });
+
+ it('only operates on enzyme values', () => {
+ expect(() => expect('value').toNotBeA('function')).toNotThrow();
+ expect(() => expect('value').toNotBeA('number')).toNotThrow();
+
+ expect(() => expect('value').toNotBeA('string')).toThrow();
+ expect(() => expect(9001).toNotBeA('number')).toThrow();
+ });
+
+ it('works with components', () => {
+ const Component = () => <div />;
+ const element = shallow(<div><Component /></div>);
+ const component = element.find('Component');
+
+ expect(() => expect(component).toNotBeA(Component)).toThrow();
+ expect(() => expect(element).toNotBeA(Component)).toNotThrow();
+ });
+
+ });
+
describe('method "toExist"', () => {
+ it('only operates on enzyme values', () => {
+ expect(() => expect('stuff').toExist()).toNotThrow();
+ expect(() => expect({}).toExist()).toNotThrow();
+
+ expect(() => expect(undefined).toExist()).toThrow();
+ });
+
it('throws if the element does not exist', () => {
const noSuchElement = element.find('NoSuchElement');
const assertion = () => expect(noSuchElement).toExist();
|
Augment `.toNotBeA()` method
The expect .toNotBeA() method now supports enzyme types. Yey!
|
PsychoLlama_expect-enzyme
|
train
|
44b8e2603947cb2920fc3cfbdb8987dd63a0ef62
|
diff --git a/crnk-core/src/main/java/io/crnk/core/engine/internal/document/mapper/DocumentMapperUtil.java b/crnk-core/src/main/java/io/crnk/core/engine/internal/document/mapper/DocumentMapperUtil.java
index <HASH>..<HASH> 100644
--- a/crnk-core/src/main/java/io/crnk/core/engine/internal/document/mapper/DocumentMapperUtil.java
+++ b/crnk-core/src/main/java/io/crnk/core/engine/internal/document/mapper/DocumentMapperUtil.java
@@ -13,7 +13,6 @@ import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import io.crnk.core.boot.CrnkProperties;
import io.crnk.core.engine.document.ResourceIdentifier;
-import io.crnk.core.engine.http.HttpRequestContext;
import io.crnk.core.engine.information.resource.ResourceField;
import io.crnk.core.engine.information.resource.ResourceInformation;
import io.crnk.core.engine.internal.dispatcher.path.PathBuilder;
@@ -89,7 +88,8 @@ public class DocumentMapperUtil {
List<ResourceField> results = new ArrayList<>();
for (ResourceField field : fields) {
- if (includedFieldNames.contains(field.getJsonName())) {
+ // TODO remove use of field.getJsonName, here to maintain backward compatibility
+ if (includedFieldNames.contains(field.getUnderlyingName()) || includedFieldNames.contains(field.getJsonName())) {
results.add(field);
}
}
diff --git a/crnk-test/src/main/java/io/crnk/test/mock/repository/ScheduleRepositoryImpl.java b/crnk-test/src/main/java/io/crnk/test/mock/repository/ScheduleRepositoryImpl.java
index <HASH>..<HASH> 100644
--- a/crnk-test/src/main/java/io/crnk/test/mock/repository/ScheduleRepositoryImpl.java
+++ b/crnk-test/src/main/java/io/crnk/test/mock/repository/ScheduleRepositoryImpl.java
@@ -97,6 +97,7 @@ public class ScheduleRepositoryImpl extends ResourceRepositoryBase<Schedule, Lon
Schedule copy = new Schedule();
copy.setId(schedule.getId());
copy.setName(schedule.getName());
+ copy.setDesc(schedule.getDesc());
copy.setTasks(schedule.getTasks());
copy.setDelayed(schedule.isDelayed());
copy.setLazyTask(schedule.getLazyTask());
diff --git a/crnk-test/src/main/java/io/crnk/test/suite/BasicRepositoryAccessTestBase.java b/crnk-test/src/main/java/io/crnk/test/suite/BasicRepositoryAccessTestBase.java
index <HASH>..<HASH> 100644
--- a/crnk-test/src/main/java/io/crnk/test/suite/BasicRepositoryAccessTestBase.java
+++ b/crnk-test/src/main/java/io/crnk/test/suite/BasicRepositoryAccessTestBase.java
@@ -7,6 +7,8 @@ import java.util.List;
import io.crnk.core.engine.http.HttpHeaders;
import io.crnk.core.exception.ResourceNotFoundException;
import io.crnk.core.queryspec.Direction;
+import io.crnk.core.queryspec.FilterOperator;
+import io.crnk.core.queryspec.FilterSpec;
import io.crnk.core.queryspec.QuerySpec;
import io.crnk.core.queryspec.SortSpec;
import io.crnk.core.repository.RelationshipRepositoryV2;
@@ -347,4 +349,31 @@ public abstract class BasicRepositoryAccessTestBase {
Assert.assertEquals(0, relProjects.size());
*/
}
+
+ @Test
+ public void testRenaming() {
+ for (int i = 0; i < 10; i++) {
+ Schedule schedule = new Schedule();
+ schedule.setId((long) i);
+ schedule.setName("schedule" + i);
+ schedule.setDesc("description" + i);
+ scheduleRepo.create(schedule);
+ }
+
+ QuerySpec querySpec = new QuerySpec(Schedule.class);
+ querySpec.addSort(new SortSpec(Arrays.asList("desc"), Direction.DESC));
+ querySpec.includeField(Arrays.asList("desc"));
+ querySpec.addFilter(new FilterSpec(Arrays.asList("desc"), FilterOperator.EQ,
+ Arrays.asList("description0", "description1", "description2")));
+
+ List<Schedule> schedules = scheduleRepo.findAll(querySpec);
+ Assert.assertEquals(3, schedules.size());
+
+ for (int i = 0; i < schedules.size(); i++) {
+ Schedule schedule = schedules.get(schedules.size() - 1 - i);
+ Assert.assertEquals("description" + i, schedule.getDesc());
+ Assert.assertNull(schedule.getName());
+ }
+
+ }
}
|
support @JsonProperty for field sets #<I>
|
crnk-project_crnk-framework
|
train
|
400e3eb20112ec7a39f5590bd3ba174d759c08b7
|
diff --git a/lib/govuk_tech_docs/api_reference/api_reference.rb b/lib/govuk_tech_docs/api_reference/api_reference.rb
index <HASH>..<HASH> 100644
--- a/lib/govuk_tech_docs/api_reference/api_reference.rb
+++ b/lib/govuk_tech_docs/api_reference/api_reference.rb
@@ -22,17 +22,14 @@ module GovukTechDocs
# Is the api_path a url or path?
if uri?@config['api_path']
@api_parser = true
-
@document = Openapi3Parser.load_url(@config['api_path'])
- else
+ elsif File.exist?(@config['api_path'])
# Load api file and set existence flag.
- if File.exist?(@config['api_path'])
- @api_parser = true
- @document = Openapi3Parser.load_file(@config['api_path'])
- else
- # @TODO Throw a middleman error?
- @api_parser = false
- end
+ @api_parser = true
+ @document = Openapi3Parser.load_file(@config['api_path'])
+ else
+ # @TODO Throw a middleman error?
+ @api_parser = false
end
# Load template files
@@ -80,9 +77,8 @@ module GovukTechDocs
else
return text
end
-
else
- return text
+ text
end
end
@@ -130,7 +126,7 @@ module GovukTechDocs
operations['post'] = path.post if defined? path.post
operations['delete'] = path.delete if defined? path.delete
operations['patch'] = path.patch if defined? path.patch
- return operations
+ operations
end
def api_info
@@ -146,7 +142,7 @@ module GovukTechDocs
return nil
end
# Schema dictates that it's always components['schemas']
- text.gsub(%r{/#\/components\/schemas\//}, '')
+ text.gsub(/#\/components\/schemas\//, '')
end
end
end
diff --git a/lib/govuk_tech_docs/tech_docs_html_renderer.rb b/lib/govuk_tech_docs/tech_docs_html_renderer.rb
index <HASH>..<HASH> 100644
--- a/lib/govuk_tech_docs/tech_docs_html_renderer.rb
+++ b/lib/govuk_tech_docs/tech_docs_html_renderer.rb
@@ -4,7 +4,7 @@ module GovukTechDocs
class TechDocsHTMLRenderer < Middleman::Renderers::MiddlemanRedcarpetHTML
include Redcarpet::Render::SmartyPants
- def initialize(options={})
+ def initialize(options = {})
@local_options = options.dup
@app = @local_options[:context].app
super
diff --git a/spec/features/integration_spec.rb b/spec/features/integration_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/features/integration_spec.rb
+++ b/spec/features/integration_spec.rb
@@ -127,24 +127,24 @@ RSpec.describe "The tech docs template" do
def then_there_is_correct_api_info_content
# Title
- expect(page).to have_css('h1', :text => 'Swagger Petstore v1.0.0')
+ expect(page).to have_css('h1', text: 'Swagger Petstore v1.0.0')
# Description
- expect(page).to have_css('p', :text => 'A sample API that uses a petstore as an example to demonstrate features in the OpenAPI 3.0 specification')
+ expect(page).to have_css('p', text: 'A sample API that uses a petstore as an example to demonstrate features in the OpenAPI 3.0 specification')
# Base URL
- expect(page).to have_css('strong', :text => 'http://petstore.swagger.io/v1')
+ expect(page).to have_css('strong', text: 'http://petstore.swagger.io/v1')
end
def then_there_is_correct_api_path_content
# Path title
- expect(page).to have_css('h2#get-pets', :text => 'GET /pets')
+ expect(page).to have_css('h2#get-pets', text: 'GET /pets')
# Path parameters
- expect(page).to have_css('table', :text => /\b(How many items to return at one time)\b/)
+ expect(page).to have_css('table', text: /\b(How many items to return at one time)\b/)
# Link to schema
expect(page).to have_css('table a[href="#schema-error"]')
end
def then_there_is_correct_api_schema_content
# Schema title
- expect(page).to have_css('h3#schema-pet', :text => 'Pet')
+ expect(page).to have_css('h3#schema-pet', text: 'Pet')
end
end
|
GTD-<I>: Fixed remaining rubocop offenses
|
alphagov_tech-docs-gem
|
train
|
d83912caaaa89a2a5b1b2a2e639db8b496060776
|
diff --git a/Model/File.php b/Model/File.php
index <HASH>..<HASH> 100644
--- a/Model/File.php
+++ b/Model/File.php
@@ -22,6 +22,7 @@ class File
'pptx' => 'application/vnd.openxmlformats-officedocument.presentationml.presentation',
'potx' => 'application/vnd.openxmlformats-officedocument.presentationml.template',
'ppsx' => 'application/vnd.openxmlformats-officedocument.presentationml.slideshow',
+ 'sldx' => 'application/vnd.openxmlformats-officedocument.presentationml.slide',
];
/** @var SymfonyFile */
|
Bette support for Microsoft OOXML mime types detection
|
vaniocz_vanio-domain-bundle
|
train
|
600e145538b186505c97f61042bf07374e42ad55
|
diff --git a/src/Contracts/Auth/Access/GateAware.php b/src/Contracts/Auth/Access/GateAware.php
index <HASH>..<HASH> 100644
--- a/src/Contracts/Auth/Access/GateAware.php
+++ b/src/Contracts/Auth/Access/GateAware.php
@@ -5,29 +5,26 @@ namespace Aedart\Laravel\Helpers\Contracts\Auth\Access;
use Illuminate\Contracts\Auth\Access\Gate;
/**
- * <h1>Gate Aware</h1>
- *
- * Components are able to specify and obtain an Access Gate
- * utility component.
+ * Gate Aware
*
* @see \Illuminate\Contracts\Auth\Access\Gate
*
* @author Alin Eugen Deac <aedart@gmail.com>
- * @package Aedart\Laravel\Helpers\Traits
+ * @package Aedart\Laravel\Helpers\Contracts\Auth\Access
*/
interface GateAware
{
/**
- * Set the given gate
+ * Set gate
*
- * @param Gate $gate Instance of the Access Gate
+ * @param Gate|null $gate Access Gate instance
*
- * @return void
+ * @return self
*/
- public function setGate(Gate $gate);
+ public function setGate(?Gate $gate);
/**
- * Get the given gate
+ * Get gate
*
* If no gate has been set, this method will
* set and return a default gate, if any such
@@ -37,26 +34,19 @@ interface GateAware
*
* @return Gate|null gate or null if none gate has been set
*/
- public function getGate();
-
- /**
- * Get a default gate value, if any is available
- *
- * @return Gate|null A default gate value or Null if no default value is available
- */
- public function getDefaultGate();
+ public function getGate(): ?Gate;
/**
* Check if gate has been set
*
* @return bool True if gate has been set, false if not
*/
- public function hasGate();
+ public function hasGate(): bool;
/**
- * Check if a default gate is available or not
+ * Get a default gate value, if any is available
*
- * @return bool True of a default gate is available, false if not
+ * @return Gate|null A default gate value or Null if no default value is available
*/
- public function hasDefaultGate();
+ public function getDefaultGate(): ?Gate;
}
\ No newline at end of file
diff --git a/src/Traits/Auth/Access/GateTrait.php b/src/Traits/Auth/Access/GateTrait.php
index <HASH>..<HASH> 100644
--- a/src/Traits/Auth/Access/GateTrait.php
+++ b/src/Traits/Auth/Access/GateTrait.php
@@ -1,4 +1,5 @@
<?php
+declare(strict_types=1);
namespace Aedart\Laravel\Helpers\Traits\Auth\Access;
@@ -6,36 +7,38 @@ use Illuminate\Contracts\Auth\Access\Gate;
use Illuminate\Support\Facades\Gate as GateFacade;
/**
- * <h1>Gate Trait</h1>
+ * GateTrait
*
* @see \Aedart\Laravel\Helpers\Contracts\Auth\Access\GateAware
*
* @author Alin Eugen Deac <aedart@gmail.com>
- * @package Aedart\Laravel\Helpers\Traits
+ * @package Aedart\Laravel\Helpers\Traits\Auth\Access
*/
trait GateTrait
{
/**
- * Instance of the Access Gate
+ * Access Gate instance
*
* @var Gate|null
*/
protected $gate = null;
/**
- * Set the given gate
+ * Set gate
*
- * @param Gate $gate Instance of the Access Gate
+ * @param Gate|null $gate Access Gate instance
*
- * @return void
+ * @return self
*/
- public function setGate(Gate $gate)
+ public function setGate(?Gate $gate)
{
$this->gate = $gate;
+
+ return $this;
}
/**
- * Get the given gate
+ * Get gate
*
* If no gate has been set, this method will
* set and return a default gate, if any such
@@ -45,42 +48,31 @@ trait GateTrait
*
* @return Gate|null gate or null if none gate has been set
*/
- public function getGate()
+ public function getGate(): ?Gate
{
- if (!$this->hasGate() && $this->hasDefaultGate()) {
+ if (!$this->hasGate()) {
$this->setGate($this->getDefaultGate());
}
return $this->gate;
}
/**
- * Get a default gate value, if any is available
- *
- * @return Gate|null A default gate value or Null if no default value is available
- */
- public function getDefaultGate()
- {
- return GateFacade::getFacadeRoot();
- }
-
- /**
* Check if gate has been set
*
* @return bool True if gate has been set, false if not
*/
- public function hasGate()
+ public function hasGate(): bool
{
return isset($this->gate);
}
/**
- * Check if a default gate is available or not
+ * Get a default gate value, if any is available
*
- * @return bool True of a default gate is available, false if not
+ * @return Gate|null A default gate value or Null if no default value is available
*/
- public function hasDefaultGate()
+ public function getDefaultGate(): ?Gate
{
- $default = $this->getDefaultGate();
- return isset($default);
+ return GateFacade::getFacadeRoot();
}
}
\ No newline at end of file
|
Adapt to PHP <I>
|
aedart_laravel-helpers
|
train
|
27fd56de748eb81d2aada6c83ac0dddb4f88644a
|
diff --git a/website/config.rb b/website/config.rb
index <HASH>..<HASH> 100644
--- a/website/config.rb
+++ b/website/config.rb
@@ -5,7 +5,7 @@ set :vmware_utility_version, "1.0.1"
activate :hashicorp do |h|
h.name = "vagrant"
- h.version = "2.1.0"
+ h.version = "2.1.1"
h.github_slug = "hashicorp/vagrant"
h.website_root = "website"
end
|
Update website vagrant version <I>
|
hashicorp_vagrant
|
train
|
0b5640d108421cbdd9aaa89c11180a16c0281fd0
|
diff --git a/lib/comment-directive-parser.js b/lib/comment-directive-parser.js
index <HASH>..<HASH> 100644
--- a/lib/comment-directive-parser.js
+++ b/lib/comment-directive-parser.js
@@ -1,6 +1,7 @@
class CommentDirectiveParser {
constructor(tokens) {
- this.lastLine = tokens[tokens.length - 1].loc.end.line
+ const lastToken = tokens[tokens.length - 1]
+ this.lastLine = lastToken ? lastToken.loc.end.line : 0
this.ruleStore = new RuleStore(this.lastLine)
this.parseComments(tokens)
|
Prevent linter from failing when there are no tokens in the file
|
protofire_solhint
|
train
|
a43c3b3f7afba242bd90e527ba28a10e305d21ea
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -51,8 +51,6 @@ setup(
packages=find_packages(),
include_package_data=True,
- package_data={'': ['README.txt']},
- exclude_package_data={'': ['README.*']},
entry_points = {
'console_scripts': ['sht = sht_sensor.sensor:main'] })
|
setup: drop old README.* mangling, intended for README.md
|
kizniche_sht-sensor
|
train
|
c03a6681342ec2cc6f9fbe62b87276589880cd72
|
diff --git a/test/filter_relationship_test/belongs_to_polymorphic_test.rb b/test/filter_relationship_test/belongs_to_polymorphic_test.rb
index <HASH>..<HASH> 100644
--- a/test/filter_relationship_test/belongs_to_polymorphic_test.rb
+++ b/test/filter_relationship_test/belongs_to_polymorphic_test.rb
@@ -6,14 +6,16 @@ class BelongsToPolymorphicFilterTest < ActiveSupport::TestCase
create_table "views", force: :cascade do |t|
t.string "subject_type"
t.integer "subject_id"
+ t.integer "account_id"
end
- create_table "properties" do |t|
+ create_table "accounts" do |t|
t.string "name", limit: 255
end
-
- create_table "accounts" do |t|
+
+ create_table "properties" do |t|
t.string "name", limit: 255
+ t.integer "account_id"
end
end
@@ -26,6 +28,7 @@ class BelongsToPolymorphicFilterTest < ActiveSupport::TestCase
end
class Property < ActiveRecord::Base
+ belongs_to :account
end
test "::filter :belongs_to => {ID: VALUE}" do
@@ -50,5 +53,12 @@ class BelongsToPolymorphicFilterTest < ActiveSupport::TestCase
WHERE properties_as_subject.name = 'Name' AND accounts.name = 'Account'
SQL
end
+
+ test '::filter beyond polymorphic boundary' do
+ query = View.filter(subject: {as: "BelongsToPolymorphicFilterTest::Property", account: {name: 'Name'}})
+ assert_sql(<<-SQL, query)
+
+ SQL
+ end
end
|
add test for filtering beyond polymorphic boundary
|
malomalo_activerecord-filter
|
train
|
b4ac8072c6285a91a3c93a51cc5b257ef6cf9cd0
|
diff --git a/salt/modules/status.py b/salt/modules/status.py
index <HASH>..<HASH> 100644
--- a/salt/modules/status.py
+++ b/salt/modules/status.py
@@ -140,8 +140,7 @@ def uptime():
key/value pairs containing uptime information, instead of the output
from a ``cmd.run`` call.
.. versionchanged:: carbon
- Support for OpenBSD and Solaris-like platforms.
- Fall back to output of `uptime` when /proc/uptime is not available.
+ Support for OpenBSD, FreeBSD, and Solaris-like platforms.
CLI Example:
@@ -169,8 +168,16 @@ def uptime():
raise CommandExecutionError('The sysctl kern.boottime was not found.')
utc_time = datetime.datetime.utcfromtimestamp(float(res['stdout'].strip()))
ut_ret['seconds'] = int(time.time()-time.mktime(utc_time.timetuple()))
- elif salt.utils.which('uptime'):
- return __salt__['cmd.run']('uptime')
+ elif salt.utils.is_freebsd():
+ res = __salt__['cmd.run_all']('sysctl -n kern.boottime')
+ if res['retcode'] > 0:
+ raise CommandExecutionError('The sysctl kern.boottime was not found.')
+ # format: { sec = 1477761334, usec = 664698 } Sat Oct 29 17:15:34 2016
+ # note: sec -> unixtimestamp
+ utc_time = datetime.datetime.utcfromtimestamp(
+ float(res['stdout'][1:res['stdout'].index(',')].split('=')[1].strip())
+ )
+ ut_ret['seconds'] = int(time.time()-time.mktime(utc_time.timetuple()))
else:
raise CommandExecutionError('This platform is not supported')
|
status.uptime - want freebsd support, removed fallback once again so we are consistant
|
saltstack_salt
|
train
|
16240c0f04f3cf70eef702b72fe4929bf1daae71
|
diff --git a/lib/endpoint/find-parameters.js b/lib/endpoint/find-parameters.js
index <HASH>..<HASH> 100644
--- a/lib/endpoint/find-parameters.js
+++ b/lib/endpoint/find-parameters.js
@@ -6,14 +6,14 @@ const markdownTable = require('markdown-table')
const PAGINATION_VARIABLES = [
{
name: 'per_page',
- type: 'number',
+ type: 'integer',
required: false,
description: 'Results per page (max 100)',
default: 30
},
{
name: 'page',
- type: 'number',
+ type: 'integer',
required: false,
description: 'Page number of the results to fetch.',
default: 1
@@ -68,7 +68,7 @@ function findInRoutePath (state) {
routeParameters.push({
name,
- type: /number$/.test(name) ? 'number' : 'string',
+ type: /number$/.test(name) ? 'integer' : 'string',
required: true,
description: ''
})
|
fix: "number" -> "integer"
|
octokit_routes
|
train
|
959efde76f5976054e55e829d6520e42e707de9d
|
diff --git a/private_storage/views.py b/private_storage/views.py
index <HASH>..<HASH> 100644
--- a/private_storage/views.py
+++ b/private_storage/views.py
@@ -46,6 +46,12 @@ class PrivateStorageView(View):
"""
return self.kwargs['path']
+ def get_storage(self):
+ """
+ Tell which storage to retrieve the file from.
+ """
+ return self.storage
+
def get_private_file(self):
"""
Return all relevant data in a single object, so this is easy to extend
@@ -53,7 +59,7 @@ class PrivateStorageView(View):
"""
return PrivateFile(
request=self.request,
- storage=self.storage,
+ storage=self.get_storage(),
relative_name=self.get_path()
)
@@ -152,11 +158,15 @@ class PrivateStorageDetailView(SingleObjectMixin, PrivateStorageView):
file = getattr(self.object, self.model_file_field)
return file.name
+ def get_storage(self):
+ field = self.object._meta.get_field(self.model_file_field)
+ return field.storage
+
def get_private_file(self):
# Provide the parent object as well.
return PrivateFile(
request=self.request,
- storage=self.storage,
+ storage=self.get_storage(),
relative_name=self.get_path(),
parent_object=self.object
)
|
Make sure PrivateStorageDetailView uses the storage class from the model
|
edoburu_django-private-storage
|
train
|
d2eeaa60efc3c6f046052ec10893d3a08dad17a1
|
diff --git a/pymongo/mongo_replica_set_client.py b/pymongo/mongo_replica_set_client.py
index <HASH>..<HASH> 100644
--- a/pymongo/mongo_replica_set_client.py
+++ b/pymongo/mongo_replica_set_client.py
@@ -130,7 +130,8 @@ def _partition_node(node):
class RSState(object):
def __init__(
self, threadlocal, hosts=None, host_to_member=None, arbiters=None,
- writer=None, error_message='No primary available', exc=None):
+ writer=None, error_message='No primary available', exc=None,
+ initial=False):
"""An immutable snapshot of the client's view of the replica set state.
Stores Member instances for all members we're connected to, and a
@@ -145,6 +146,7 @@ class RSState(object):
- `writer`: Optional (host, port) of primary
- `error_message`: Optional error if `writer` is None
- `exc`: Optional error if state is unusable
+ - `initial`: Whether this is the initial client state
"""
self._threadlocal = threadlocal # threading.local or gevent local
self._arbiters = frozenset(arbiters or []) # set of (host, port)
@@ -154,6 +156,7 @@ class RSState(object):
self._hosts = frozenset(hosts or [])
self._members = frozenset(self._host_to_member.values())
self._exc = exc
+ self._initial = initial
self._primary_member = self.get(writer)
def clone_with_host_down(self, host, error_message):
@@ -250,6 +253,11 @@ class RSState(object):
"""Reason RSState is unusable, or None."""
return self._exc
+ @property
+ def initial(self):
+ """Whether this is the initial client state."""
+ return self._initial
+
def get(self, host):
"""Return a Member instance or None for the given (host, port)."""
return self._host_to_member.get(host)
@@ -619,7 +627,7 @@ class MongoReplicaSetClient(common.BaseObject):
"The gevent module is not available. "
"Install the gevent package from PyPI.")
- self.__rs_state = RSState(self.__make_threadlocal())
+ self.__rs_state = RSState(self.__make_threadlocal(), initial=True)
self.__request_counter = thread_util.Counter(self.__use_greenlets)
@@ -1570,10 +1578,12 @@ class MongoReplicaSetClient(common.BaseObject):
tag_sets = [{}]
if not rs_state.primary_member:
- # Primary was down last we checked. Start a refresh if one is not
- # already in progress. If caller requested the primary, wait to
- # see if it's up, otherwise continue with known-good members.
- sync = (mode == ReadPreference.PRIMARY)
+ # If we were initialized with _connect=False then connect now.
+ # Otherwise, the primary was down last we checked. Start a refresh
+ # if one is not already in progress. If caller requested the
+ # primary, wait to see if it's up, otherwise continue with
+ # known-good members.
+ sync = (rs_state.initial or mode == ReadPreference.PRIMARY)
self.__schedule_refresh(sync=sync)
rs_state = self.__rs_state
diff --git a/test/test_replica_set_client.py b/test/test_replica_set_client.py
index <HASH>..<HASH> 100644
--- a/test/test_replica_set_client.py
+++ b/test/test_replica_set_client.py
@@ -1163,7 +1163,14 @@ class TestReplicaSetWireVersion(unittest.TestCase):
class TestReplicaSetClientLazyConnect(
TestReplicaSetClientBase,
_TestLazyConnectMixin):
- pass
+
+ def test_read_mode_secondary(self):
+ client = MongoReplicaSetClient(
+ pair, replicaSet=self.name, _connect=False,
+ read_preference=ReadPreference.SECONDARY)
+
+ # No error.
+ client.pymongo_test.test_collection.find_one()
# Test concurrent access to a lazily-connecting RS client, with Gevent.
|
Avoid error when first operation on lazy-connecting RS client is a secondary read.
If MongoReplicaSetClient was initialized with _connect=False (for example, when
wrapped by Motor), the first read triggers a refresh and should wait for that
refresh to complete before deciding whether an appropriate member is available.
After the initial refresh completes, future queries from secondaries should
fail fast if no secondary is available.
|
mongodb_mongo-python-driver
|
train
|
b6d839cf27434ff6e5f7a6596836765166648d13
|
diff --git a/toolsrc/org/mozilla/javascript/tools/jsc/Main.java b/toolsrc/org/mozilla/javascript/tools/jsc/Main.java
index <HASH>..<HASH> 100644
--- a/toolsrc/org/mozilla/javascript/tools/jsc/Main.java
+++ b/toolsrc/org/mozilla/javascript/tools/jsc/Main.java
@@ -178,12 +178,20 @@ public class Main {
if (arg.equals("-implements") && ++i < args.length) {
// TODO: allow for multiple comma-separated interfaces.
String targetImplements = args[i];
- try {
- Class[] implementsClasses = { Class.forName(targetImplements) };
- cx.setTargetImplements(implementsClasses);
- } catch (ClassNotFoundException e) {
- throw new Error(e.toString()); // TODO: better error
+ StringTokenizer st = new StringTokenizer(targetImplements,
+ ",");
+ Vector v = new Vector();
+ while (st.hasMoreTokens()) {
+ String className = st.nextToken();
+ try {
+ v.addElement(Class.forName(className));
+ } catch (ClassNotFoundException e) {
+ throw new Error(e.toString()); // TODO: better error
+ }
}
+ Class[] implementsClasses = new Class[v.size()];
+ v.copyInto(implementsClasses);
+ cx.setTargetImplements(implementsClasses);
continue;
}
usage(arg);
|
Add missing support for multiple -implements classes
|
mozilla_rhino
|
train
|
1a34e78873b59e0d92c401b3f5ea6dc0fd70257e
|
diff --git a/src/Soluble/Japha/Bridge/Driver/Pjb62/Client.php b/src/Soluble/Japha/Bridge/Driver/Pjb62/Client.php
index <HASH>..<HASH> 100644
--- a/src/Soluble/Japha/Bridge/Driver/Pjb62/Client.php
+++ b/src/Soluble/Japha/Bridge/Driver/Pjb62/Client.php
@@ -621,9 +621,9 @@ class Client
$this->protocol->writeException($e->__java, $trace);
$this->protocol->resultEnd();
} catch (\Exception $ex) {
- error_log($ex->__toString());
- trigger_error("Unchecked exception detected in callback", E_USER_ERROR);
- die(1);
+ $msg = "Unchecked exception detected in callback (" . $ex->__toString() . ')';
+ $uncheckedException = new Exception\RuntimeException($message);
+ throw $uncheckedException;
}
$this->isAsync = $isAsync;
$this->methodCache = $methodCache;
|
Remove a dies and trigger error , #2
|
belgattitude_soluble-japha
|
train
|
04bbff3d3b5666636de94edb9c33549bb0043af6
|
diff --git a/cassandra/concurrent.py b/cassandra/concurrent.py
index <HASH>..<HASH> 100644
--- a/cassandra/concurrent.py
+++ b/cassandra/concurrent.py
@@ -166,9 +166,11 @@ class ConcurrentExecutorGenResults(_ConcurrentExecutor):
self._condition.wait()
while self._results_queue and self._results_queue[0][0] == self._current:
_, res = heappop(self._results_queue)
+ self._condition.release()
if self._fail_fast and not res[0]:
self._raise(res[1])
yield res
+ self._condition.acquire()
self._current += 1
|
Unlock while yielding execute concurrent generator results
Fixes an issue where the event thread could be held up on the executor
lock while the client thread waits for a paged result to return.
|
datastax_python-driver
|
train
|
071ee5379361b41c5a32b95a7c41d333f6c5aea6
|
diff --git a/pypmc/mix_adapt/hierarchical_test.py b/pypmc/mix_adapt/hierarchical_test.py
index <HASH>..<HASH> 100644
--- a/pypmc/mix_adapt/hierarchical_test.py
+++ b/pypmc/mix_adapt/hierarchical_test.py
@@ -1,7 +1,6 @@
"""Unit tests for the hierarchical clustering.
"""
-from __future__ import division
from .hierarchical import *
from ..density.gauss import Gauss
from ..density.mixture import MixtureDensity
diff --git a/pypmc/mix_adapt/r_value.py b/pypmc/mix_adapt/r_value.py
index <HASH>..<HASH> 100644
--- a/pypmc/mix_adapt/r_value.py
+++ b/pypmc/mix_adapt/r_value.py
@@ -2,7 +2,6 @@
'''
-from __future__ import division as _div
import numpy as _np
from ..tools._doc import _add_to_docstring
from ..tools import partition as _part
diff --git a/pypmc/sampler/markov_chain.py b/pypmc/sampler/markov_chain.py
index <HASH>..<HASH> 100644
--- a/pypmc/sampler/markov_chain.py
+++ b/pypmc/sampler/markov_chain.py
@@ -1,6 +1,5 @@
"""Collect Markov Chain"""
-from __future__ import division as _div
from copy import deepcopy as _cp
import numpy as _np
from ..tools import History as _History
diff --git a/pypmc/tools/_plot.py b/pypmc/tools/_plot.py
index <HASH>..<HASH> 100644
--- a/pypmc/tools/_plot.py
+++ b/pypmc/tools/_plot.py
@@ -1,5 +1,3 @@
-from __future__ import division
-
_max_color = 0.9
def plot_mixture(mixture, i=0, j=1, center_style=dict(s=0.15),
diff --git a/pypmc/tools/convergence.py b/pypmc/tools/convergence.py
index <HASH>..<HASH> 100644
--- a/pypmc/tools/convergence.py
+++ b/pypmc/tools/convergence.py
@@ -1,7 +1,6 @@
'''Provide functions to rate the quality of weighted samples.
'''
-from __future__ import division as _div
import numpy as _np
def perp(weights):
|
[pypmc] Remove <I> compatibility (division)
|
fredRos_pypmc
|
train
|
8d79e1a7759714a3bc8abf4d580f80e36ec14808
|
diff --git a/eztv_api.js b/eztv_api.js
index <HASH>..<HASH> 100644
--- a/eztv_api.js
+++ b/eztv_api.js
@@ -29,8 +29,16 @@ exports.getLatestShows = function() {
$('tr.forum_header_border[name="hover"]').filter(function(){
var entry = $(this);
- var show = entry.children('td').first().children('a').first().children('img').first().attr('title').replace('Show Description about ', '');
+ var show = entry.children('td').first()
+ .children('a').first()
+ .children('img').first()
+ .attr('title').replace('Show Description about ', '');
+ var tvrage_id = entry.children('td').first()
+ .children('a').last()
+ .attr('href').replace(/http:\/\/www.tvrage.com\/(.*)\/episodes\//, '');
+
+ console.log('TVRage ID: ' + tvrage_id);
console.log('Show: '+ show);
});
}
|
Get TVRage ID in show list
|
SlashmanX_eztv_api
|
train
|
6327479a0306ef819fdcdb751544b5f5f2450ed6
|
diff --git a/salt/states/ssh_auth.py b/salt/states/ssh_auth.py
index <HASH>..<HASH> 100644
--- a/salt/states/ssh_auth.py
+++ b/salt/states/ssh_auth.py
@@ -200,7 +200,7 @@ def present(
fullkey = sshre.search(name)
# if it is {key} [comment]
if not fullkey:
- key_and_comment = name.split()
+ key_and_comment = name.split(None,1)
name = key_and_comment[0]
if len(key_and_comment) == 2:
comment = key_and_comment[1]
@@ -209,7 +209,7 @@ def present(
if fullkey.group(1):
options = fullkey.group(1).split(',')
# key is of format: {enc} {key} [comment]
- comps = fullkey.group(2).split()
+ comps = fullkey.group(2).split(None,2)
enc = comps[0]
name = comps[1]
if len(comps) == 3:
|
Fix for comments containing whitespaces
When comments contain whitespaces they won't be copied over currently.
This is because the split keeps splitting along the whitespaces.
Reproduce:
```
name = "ecdsa-sha2-nistp<I> awesomlylongkey= The Loeki proposes a fix for this"
```
|
saltstack_salt
|
train
|
2bd5e374d1d573f6a8914f8be3285fe5f510ad18
|
diff --git a/lib/Rails/Console/ApplicationConsole.php b/lib/Rails/Console/ApplicationConsole.php
index <HASH>..<HASH> 100755
--- a/lib/Rails/Console/ApplicationConsole.php
+++ b/lib/Rails/Console/ApplicationConsole.php
@@ -94,11 +94,26 @@ class ApplicationConsole extends Console
$this->write($routes);
break;
+ /**
+ * Install database.
+ */
+ case 'db:create':
+ $m = new \Rails\ActiveRecord\Migration\Migrator();
+ $m->loadSchema();
+ break;
+
+ /**
+ * Run all/pending migrations.
+ * Creates migrations table as well.
+ */
case 'db:migrate':
$m = new \Rails\ActiveRecord\Migration\Migrator();
$m->run();
break;
+ /**
+ * Runs seeds.
+ */
case 'db:seed':
$m = new \Rails\ActiveRecord\Migration\Migrator();
$m->runSeeds();
|
added support to db:create
|
railsphp_railsphp
|
train
|
c3d7a3328d759fd55cf044205b88c0e8fbc488d7
|
diff --git a/src/controllers/controller.doughnut.js b/src/controllers/controller.doughnut.js
index <HASH>..<HASH> 100644
--- a/src/controllers/controller.doughnut.js
+++ b/src/controllers/controller.doughnut.js
@@ -102,7 +102,19 @@ module.exports = function(Chart) {
return '';
},
label: function(tooltipItem, data) {
- return data.labels[tooltipItem.index] + ': ' + data.datasets[tooltipItem.datasetIndex].data[tooltipItem.index];
+ var dataLabel = data.labels[tooltipItem.index];
+ var value = ': ' + data.datasets[tooltipItem.datasetIndex].data[tooltipItem.index];
+
+ if (helpers.isArray(dataLabel)) {
+ // show value on first line of multiline label
+ // need to clone because we are changing the value
+ dataLabel = dataLabel.slice();
+ dataLabel[0] += value;
+ } else {
+ dataLabel += value;
+ }
+
+ return dataLabel;
}
}
}
|
In the doughnut chart, specifically handle multiline strings.
|
chartjs_Chart.js
|
train
|
1210749e605d421413730bb4dcea11e9ad9a73bf
|
diff --git a/system_test/full_system_test.py b/system_test/full_system_test.py
index <HASH>..<HASH> 100644
--- a/system_test/full_system_test.py
+++ b/system_test/full_system_test.py
@@ -1220,15 +1220,17 @@ class TestRequestForwarding(unittest.TestCase):
self.assertEqual(j, d_("{'results': [{'last_insert_id': 1, 'rows_affected': 1}]}"))
fsmIdx = l.wait_for_all_fsm()
- j = f.execute_queued('INSERT INTO foo(name) VALUES("declan")')
- self.assertTrue(is_sequence_number(str(j)))
+ # Load up the queue!
+ for i in range(0,2000):
+ j = f.execute_queued('INSERT INTO foo(name) VALUES("declan")')
+ self.assertTrue(is_sequence_number(str(j)))
j = f.execute_queued('INSERT INTO foo(name) VALUES(?)', wait=True, params=["aoife"])
self.assertTrue(is_sequence_number(str(j)))
# Data should be ready immediately, since we waited.
j = l.query('SELECT COUNT(*) FROM foo')
- self.assertEqual(j, d_("{'results': [{'columns': ['COUNT(*)'], 'types': [''], 'values': [[3]]}]}"))
+ self.assertEqual(j, d_("{'results': [{'columns': ['COUNT(*)'], 'types': [''], 'values': [[2002]]}]}"))
class TestEndToEndNonVoter(unittest.TestCase):
def setUp(self):
|
Load the queue during end-to-end testing
|
rqlite_rqlite
|
train
|
9194ec98c86b4ef1bb1fcfcaa80b49608584beda
|
diff --git a/tests/Helpers/Builders/PaymentBuilder.php b/tests/Helpers/Builders/PaymentBuilder.php
index <HASH>..<HASH> 100644
--- a/tests/Helpers/Builders/PaymentBuilder.php
+++ b/tests/Helpers/Builders/PaymentBuilder.php
@@ -1,6 +1,7 @@
<?php
namespace Tests\Helpers\Builders;
+use Ebanx\Benjamin\Models\Currency;
use Faker;
use Ebanx\Benjamin\Models\Payment;
@@ -25,6 +26,7 @@ class PaymentBuilder extends BaseBuilder
public function boleto()
{
+ $this->instance->currencyCode = Currency::BRL;
$this->instance->dueDate = $this->faker->dateTimeBetween('+1 days', '+3 days');
return $this;
|
If it is Boleto, then the currency code is BRL
|
ebanx_benjamin
|
train
|
2dd1e5fbabe15fa4edae49d807b349c1aa8cd503
|
diff --git a/quilt/add.py b/quilt/add.py
index <HASH>..<HASH> 100644
--- a/quilt/add.py
+++ b/quilt/add.py
@@ -38,11 +38,11 @@ class Add(Command):
self.db = Db(quilt_pc)
self.series = Series(quilt_patches)
- def _file_in_patch(self, filename, patch):
+ def _file_in_patch(self, filename, patch, ignore):
""" Checks if a backup file of the filename in the current patch
exists """
file = self.quilt_pc + File(os.path.join(patch.get_name(), filename))
- if file.exists():
+ if file.exists() and not ignore:
raise QuiltError("File %s is already in patch %s" % (filename,
patch.get_name()))
@@ -68,9 +68,11 @@ class Add(Command):
backup = Backup()
backup.backup_file(filename, dest_dir, copy_empty=True)
- def add_file(self, filename, patch_name=None):
+ def add_file(self, filename, patch_name=None, ignore=False):
""" Add file to the patch with patch_name.
If patch_name is None or empty the topmost patch will be used.
+ Adding an already added patch will raise an QuiltError if ignore is
+ False.
"""
file = File(filename)
@@ -79,13 +81,9 @@ class Add(Command):
else:
patch = self.db.top_patch()
if not patch:
- patch = self.series.first_patch()
-
- if not patch:
- raise QuiltError("No patch available. Please create a new patch " \
- "before adding a file")
+ raise QuiltError("No patches applied.")
- self._file_in_patch(filename, patch)
+ self._file_in_patch(filename, patch, ignore)
self._file_in_next_patches(filename, patch)
if file.is_link():
@@ -95,8 +93,9 @@ class Add(Command):
if file.exists():
# be sure user can write original file
- os.chmod(filename, stat.S_IWUSR | stat.S_IRUSR)
+ mode = os.stat(pathname).st_mode
+ os.chmod(filename, mode | stat.S_IWUSR | stat.S_IRUSR)
- def add_files(self, filenames, patch_name=None):
+ def add_files(self, filenames, patch_name=None, ignore=False):
for filename in filenames:
- self.add_file(filename, patch_name)
+ self.add_file(filename, patch_name, ignore)
|
Add possibility to ignore already added patch
Don't raise an exception if not wanted. E.g. the edit cli command should be able
to be run several times on the same file.
|
bjoernricks_python-quilt
|
train
|
4db30ab0674b8cfdb2a48b03c3a62e2157e08bd6
|
diff --git a/src/includes/properties/class-papi-property.php b/src/includes/properties/class-papi-property.php
index <HASH>..<HASH> 100644
--- a/src/includes/properties/class-papi-property.php
+++ b/src/includes/properties/class-papi-property.php
@@ -47,7 +47,8 @@ class Papi_Property extends Papi_Core_Property {
if ( is_array( $suffix ) || is_object( $suffix ) ) {
return '_' . $this->html_name( $suffix, $row );
} else {
- $suffix = empty( $suffix ) || ! is_string( $suffix ) ? '' : '_' . $suffix;
+ $suffix = empty( $suffix ) ||
+ ! is_string( $suffix ) ? '' : '_' . $suffix;
$suffix = papi_underscorify( papi_slugify( $suffix ) );
}
@@ -81,7 +82,11 @@ class Papi_Property extends Papi_Core_Property {
}
}
- return sprintf( '%s[%s]', $base_slug, papi_remove_papi( $sub_property->get_slug() ) );
+ return sprintf(
+ '%s[%s]',
+ $base_slug,
+ papi_remove_papi( $sub_property->get_slug() )
+ );
}
/**
@@ -101,7 +106,8 @@ class Papi_Property extends Papi_Core_Property {
if ( $this->get_option( 'lang' ) === strtolower( papi_get_qs( 'lang' ) ) ) {
$render = true;
} else {
- $render = $this->get_option( 'lang' ) === false && papi_is_empty( papi_get_qs( 'lang' ) );
+ $render = $this->get_option( 'lang' ) === false &&
+ papi_is_empty( papi_get_qs( 'lang' ) );
}
if ( $this->display ) {
@@ -201,7 +207,9 @@ class Papi_Property extends Papi_Core_Property {
papi_render_html_tag( 'label', [
'for' => $this->html_id(),
- 'title' => trim( $title . ' ' . papi_require_text( $this->get_options() ) ),
+ 'title' => trim(
+ $title . ' ' . papi_require_text( $this->get_options() )
+ ),
$title,
papi_required_html( $this->get_options() )
] );
@@ -248,7 +256,7 @@ class Papi_Property extends Papi_Core_Property {
}
/**
- * Render Conditional rules as JSON.
+ * Render Conditional rules as script tag with JSON.
*/
private function render_rules_json() {
$rules = $this->get_rules();
@@ -258,10 +266,12 @@ class Papi_Property extends Papi_Core_Property {
}
$rules = $this->conditional->prepare_rules( $rules, $this );
- ?>
- <script type="application/json" data-papi-rules="true" data-papi-rule-source-slug="<?php echo $this->html_name(); ?>">
- <?php echo json_encode( $rules ); ?>
- </script>
- <?php
+
+ papi_render_html_tag( 'script', [
+ 'data-papi-rule-source-slug' => $this->html_name(),
+ 'data-papi-rules' => 'true',
+ 'type' => 'application/json',
+ json_encode( $rules )
+ ] );
}
}
|
Clean up. Render rules json with render html tag method.
|
wp-papi_papi
|
train
|
83802ed27a8a2507af9354813b9d967bb2308e87
|
diff --git a/read_closer.go b/read_closer.go
index <HASH>..<HASH> 100644
--- a/read_closer.go
+++ b/read_closer.go
@@ -1,6 +1,7 @@
package statos
import (
+ "fmt"
"io"
"syscall"
)
@@ -29,11 +30,12 @@ func (r *ReadCloserStatos) Read(p []byte) (n int, err error) {
n, err = r.iterator.Read(p)
r.prevReadV = r.curReadV
- r.curReadV += 1
+ fmt.Println(n, err)
if err != nil && err != syscall.EINTR {
r.done = true
} else if n >= 0 {
+ r.curReadV += 1
r.finished += uint64(n)
}
return
diff --git a/reader.go b/reader.go
index <HASH>..<HASH> 100644
--- a/reader.go
+++ b/reader.go
@@ -7,40 +7,28 @@ import (
// ReaderStatos implements the Read() interface
type ReaderStatos struct {
- done bool
- // Monotomically increasing number to track the number of reads
- curReadV uint64
- // Track the previous
- prevReadV uint64
- lastRead int
- finished uint64
- iterator io.Reader
+ iterator io.Reader
+ commChan chan int
}
func NewReader(rd io.Reader) *ReaderStatos {
return &ReaderStatos{
- finished: 0,
- iterator: rd,
- curReadV: 0,
- prevReadV: 0,
- lastRead: 0,
+ iterator: rd,
+ commChan: make(chan int),
}
}
func (r *ReaderStatos) Read(p []byte) (n int, err error) {
n, err = r.iterator.Read(p)
- r.prevReadV = r.curReadV
- r.lastRead = n
if err != nil && err != syscall.EINTR {
- r.done = true
+ close(r.commChan)
} else if n >= 0 {
- r.curReadV += 1
- r.finished += uint64(n)
+ r.commChan <- n
}
return
}
-func (r *ReaderStatos) Progress() (lastRead int, finished uint64, fresh, done bool) {
- return r.lastRead, r.finished, r.curReadV > r.prevReadV, r.done
+func (r *ReaderStatos) ProgressChan() chan int {
+ return r.commChan
}
diff --git a/writer.go b/writer.go
index <HASH>..<HASH> 100644
--- a/writer.go
+++ b/writer.go
@@ -8,40 +8,27 @@ import (
// WriterStatos implements the Write() interface
type WriterStatos struct {
iterator io.WriteCloser
- done bool
- finished uint64
- // Monotomically increasing number to track the number of Writes
- curWriteV uint64
- // Track the previous
- prevWriteV uint64
- nlast int
+ commChan chan int
}
func NewWriter(w io.WriteCloser) *WriterStatos {
return &WriterStatos{
- nlast: 0,
- curWriteV: 0,
- prevWriteV: 0,
- finished: 0,
- iterator: w,
+ commChan: make(chan int),
+ iterator: w,
}
}
func (w *WriterStatos) Write(p []byte) (n int, err error) {
n, err = w.iterator.Write(p)
- w.prevWriteV = w.curWriteV
- w.nlast = n
-
if err != nil && err != syscall.EINTR {
- w.done = true
+ close(w.commChan)
} else if n >= 0 {
- w.curWriteV += 1
- w.finished += uint64(n)
+ w.commChan <- n
}
return
}
-func (w *WriterStatos) Progress() (nlast int, finished uint64, fresh, done bool) {
- return w.nlast, w.finished, w.curWriteV > w.prevWriteV, w.done
+func (w *WriterStatos) ProgressChan() chan int {
+ return w.commChan
}
|
shaved off unnecessary and buggy state info
|
odeke-em_statos
|
train
|
00160aedbe0306125d459593237287232a962394
|
diff --git a/web/concrete/controllers/login.php b/web/concrete/controllers/login.php
index <HASH>..<HASH> 100644
--- a/web/concrete/controllers/login.php
+++ b/web/concrete/controllers/login.php
@@ -355,7 +355,7 @@ class LoginController extends Controller {
$this->redirect('/');
}
- public function forward($cID) {
+ public function forward($cID = 0) {
$this->set('rcID', $cID);
}
diff --git a/web/concrete/controllers/register.php b/web/concrete/controllers/register.php
index <HASH>..<HASH> 100644
--- a/web/concrete/controllers/register.php
+++ b/web/concrete/controllers/register.php
@@ -25,7 +25,7 @@ class RegisterController extends Controller {
}
- public function forward($cID) {
+ public function forward($cID = 0) {
$this->set('rcID', $cID);
}
|
fix some more issues with errors
Former-commit-id: cda1c<I>b<I>bba<I>ee<I>dcc<I>
|
concrete5_concrete5
|
train
|
1081b517f678291405db2278f29a8ac64abb1d1c
|
diff --git a/src/Model/SlugModel.php b/src/Model/SlugModel.php
index <HASH>..<HASH> 100644
--- a/src/Model/SlugModel.php
+++ b/src/Model/SlugModel.php
@@ -11,7 +11,19 @@ trait SlugModel
*/
public function slugify()
{
- return $this->vulgarize($this->__toString()) ?: $slug = $this->getTableName().'-'.$this->id;
+ return $this->vulgarize($this->getSluggable()) ?: $slug = $this->getTableName().'-'.$this->id;
+ }
+
+
+
+ /**
+ * Defines the string to use for slug generation
+ *
+ * @return string
+ */
+ public function getSluggable()
+ {
+ return '';
}
diff --git a/tests/Model/SlugModelTest.php b/tests/Model/SlugModelTest.php
index <HASH>..<HASH> 100644
--- a/tests/Model/SlugModelTest.php
+++ b/tests/Model/SlugModelTest.php
@@ -11,10 +11,10 @@ class SlugModelTest extends \PHPUnit_Framework_TestCase
*/
public function testSlugify()
{
- $model = $this->getMock('Neemzy\Patchwork\Tests\TestEntity', ['__toString', 'getTableName']);
+ $model = $this->getMock('Neemzy\Patchwork\Tests\TestEntity', ['getSluggable', 'getTableName']);
$model->expects($this->once())->method('getTableName')->will($this->returnValue('test'));
- $model->expects($this->any())->method('__toString')->will(
+ $model->expects($this->any())->method('getSluggable')->will(
$this->onConsecutiveCalls(' Sample string -representation-', '!$#@&%£?')
);
|
set up proper method to handle slug generation seed definition
|
neemzy_patchwork-core
|
train
|
c6b082a6263bbfcfab0dcc4f4c4bb2c024c84f10
|
diff --git a/fpdf.go b/fpdf.go
index <HASH>..<HASH> 100644
--- a/fpdf.go
+++ b/fpdf.go
@@ -1931,19 +1931,19 @@ func (f *Fpdf) CellFormat(w, h float64, txtStr string, borderStr string, ln int,
if len(txtStr) > 0 {
var dx, dy float64
// Horizontal alignment
- if strings.Index(alignStr, "R") != -1 {
+ if strings.Contains(alignStr, "R") {
dx = w - f.cMargin - f.GetStringWidth(txtStr)
- } else if strings.Index(alignStr, "C") != -1 {
+ } else if strings.Contains(alignStr, "C") {
dx = (w - f.GetStringWidth(txtStr)) / 2
} else {
dx = f.cMargin
}
// Vertical alignment
- if strings.Index(alignStr, "T") != -1 {
+ if strings.Contains(alignStr, "T") {
dy = (f.fontSize - h) / 2.0
- } else if strings.Index(alignStr, "B") != -1 {
+ } else if strings.Contains(alignStr, "B") {
dy = (h - f.fontSize) / 2.0
- } else if strings.Index(alignStr, "A") != -1 {
+ } else if strings.Contains(alignStr, "A") {
var descent float64
d := f.currentFont.Desc
if d.Descent == 0 {
diff --git a/template.go b/template.go
index <HASH>..<HASH> 100644
--- a/template.go
+++ b/template.go
@@ -254,9 +254,7 @@ func templateChainDependencies(template Template) []Template {
requires := template.Templates()
chain := make([]Template, len(requires)*2)
for _, req := range requires {
- for _, sub := range templateChainDependencies(req) {
- chain = append(chain, sub)
- }
+ chain = append(chain, templateChainDependencies(req)...)
}
chain = append(chain, template)
return chain
|
Replace strings.Index with strings.Contains
|
jung-kurt_gofpdf
|
train
|
d7c5fd6b490ad2edea851b5d4a6a3d1031559cac
|
diff --git a/deployutils/__init__.py b/deployutils/__init__.py
index <HASH>..<HASH> 100644
--- a/deployutils/__init__.py
+++ b/deployutils/__init__.py
@@ -22,4 +22,4 @@
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-__version__ = '0.5.14'
+__version__ = '0.5.16-dev'
|
bumps version to <I>-dev
|
djaodjin_djaodjin-deployutils
|
train
|
7097da4b7cf45dce2c1d4e7654f364ef71c0c4f7
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -1,7 +1,7 @@
from setuptools import setup, find_packages
setup(name='descent',
- version='0.0.12',
+ version='0.1.0',
description='First order optimization tools',
author='Niru Maheshwaranathan',
author_email='nirum@stanford.edu',
|
bumped version in setup.py
|
nirum_descent
|
train
|
d848ea00df080261774a798c4c12c15ad24ca94f
|
diff --git a/test/jsdom/parsing.js b/test/jsdom/parsing.js
index <HASH>..<HASH> 100644
--- a/test/jsdom/parsing.js
+++ b/test/jsdom/parsing.js
@@ -294,3 +294,12 @@ exports["should parse namespace prefixes properly"] = function (t) {
t.done();
};
+
+exports["should parse self closing tags properly (GH-863)"] = function (t) {
+ var doc = jsdom();
+ doc.body.innerHTML = "<p>hello <img src=\"test\"> world</p>";
+
+ t.strictEqual(doc.body.firstChild.childNodes.length, 3, "paragraph should contain 3 children");
+
+ t.done();
+};
|
Add test for proper parsing of in-text elements
|
jsdom_jsdom
|
train
|
9377935221b6eeb47ab3f5099836f73cd2d59edd
|
diff --git a/tests/test_30_decompressors.py b/tests/test_30_decompressors.py
index <HASH>..<HASH> 100644
--- a/tests/test_30_decompressors.py
+++ b/tests/test_30_decompressors.py
@@ -13,6 +13,10 @@ import StreamDecompressor
class GuesserTest(unittest2.TestCase):
def _check_decompressor(self,
decompressor, compressed_fileobj, decompressed_fileobj):
+ try:
+ decompressor.__checkavailability__()
+ except:
+ self.skipTest("decompressor not available")
with StreamDecompressor.open(fileobj=compressed_fileobj) as archive:
self.assertEqual(
archive.compressions,
|
Updated: tests: check decompressor availability
|
cecton_destream
|
train
|
654eb3080c67f41b45795c5c32f1bbfe15bfe4b3
|
diff --git a/internal/api/network_server_new_test.go b/internal/api/network_server_new_test.go
index <HASH>..<HASH> 100644
--- a/internal/api/network_server_new_test.go
+++ b/internal/api/network_server_new_test.go
@@ -386,6 +386,7 @@ func (ts *NetworkServerAPITestSuite) TestDevice() {
868100000,
868300000,
868500000,
+ 867100000,
},
RXDelay1: 3,
RXDROffset1: 2,
@@ -465,6 +466,7 @@ func (ts *NetworkServerAPITestSuite) TestDevice() {
t.Run("Activate", func(t *testing.T) {
assert := require.New(t)
+ assert.NoError(band.Band().AddChannel(867100000, 0, 5))
devEUI := [8]byte{1, 2, 3, 4, 5, 6, 7, 8}
devAddr := [4]byte{6, 2, 3, 4}
@@ -560,20 +562,26 @@ func (ts *NetworkServerAPITestSuite) TestDevice() {
NFCntDown: 11,
AFCntDown: 12,
SkipFCntValidation: true,
- EnabledUplinkChannels: band.Band().GetEnabledUplinkChannelIndices(),
- ChannelFrequencies: []int{868100000, 868300000, 868500000},
- ExtraUplinkChannels: map[int]loraband.Channel{},
- RXDelay: 3,
- RX1DROffset: 2,
- RX2DR: 5,
- RX2Frequency: 868900000,
- PingSlotNb: 128,
- PingSlotDR: 5,
- PingSlotFrequency: 868100000,
- NbTrans: 1,
- MACVersion: "1.0.2",
- MACCommandErrorCount: make(map[lorawan.CID]int),
- IsDisabled: true,
+ EnabledUplinkChannels: []int{0, 1, 2, 3},
+ ChannelFrequencies: []int{868100000, 868300000, 868500000, 867100000},
+ ExtraUplinkChannels: map[int]loraband.Channel{
+ 3: loraband.Channel{
+ Frequency: 867100000,
+ MinDR: 0,
+ MaxDR: 5,
+ },
+ },
+ RXDelay: 3,
+ RX1DROffset: 2,
+ RX2DR: 5,
+ RX2Frequency: 868900000,
+ PingSlotNb: 128,
+ PingSlotDR: 5,
+ PingSlotFrequency: 868100000,
+ NbTrans: 1,
+ MACVersion: "1.0.2",
+ MACCommandErrorCount: make(map[lorawan.CID]int),
+ IsDisabled: true,
}, ds)
})
diff --git a/internal/storage/device_session.go b/internal/storage/device_session.go
index <HASH>..<HASH> 100644
--- a/internal/storage/device_session.go
+++ b/internal/storage/device_session.go
@@ -265,6 +265,23 @@ func (s *DeviceSession) ResetToBootParameters(dp DeviceProfile) {
if dp.PingSlotPeriod != 0 {
s.PingSlotNb = (1 << 12) / dp.PingSlotPeriod
}
+
+ if len(dp.FactoryPresetFreqs) > len(s.EnabledUplinkChannels) {
+ for _, f := range dp.FactoryPresetFreqs[len(s.EnabledUplinkChannels):] {
+ i, err := band.Band().GetUplinkChannelIndex(int(f), false)
+ if err != nil {
+ continue
+ }
+
+ s.EnabledUplinkChannels = append(s.EnabledUplinkChannels, i)
+
+ c, err := band.Band().GetUplinkChannel(i)
+ if err != nil {
+ continue
+ }
+ s.ExtraUplinkChannels[i] = c
+ }
+ }
}
// GetRandomDevAddr returns a random DevAddr, prefixed with NwkID based on the
|
Set ExtraUplinkChannels on ABP activate for additional channels.
|
brocaar_loraserver
|
train
|
2ea680fa740cf2be6d0b5f5ddc81b160d0f8534a
|
diff --git a/src/ContaoCommunityAlliance/Contao/Bindings/Subscribers/ControllerSubscriber.php b/src/ContaoCommunityAlliance/Contao/Bindings/Subscribers/ControllerSubscriber.php
index <HASH>..<HASH> 100644
--- a/src/ContaoCommunityAlliance/Contao/Bindings/Subscribers/ControllerSubscriber.php
+++ b/src/ContaoCommunityAlliance/Contao/Bindings/Subscribers/ControllerSubscriber.php
@@ -28,6 +28,13 @@ class ControllerSubscriber
implements EventSubscriberInterface
{
/**
+ * Kill parent constructor.
+ */
+ public function __construct()
+ {
+ }
+
+ /**
* Returns an array of event names this subscriber wants to listen to.
*
* @return array
|
Remove parent constructor of Controller class.
|
contao-community-alliance_events-contao-bindings
|
train
|
f47366c659feeea4c085b40c02600a7b9b0e6362
|
diff --git a/make.go b/make.go
index <HASH>..<HASH> 100644
--- a/make.go
+++ b/make.go
@@ -891,7 +891,11 @@ func genEmbeds() error {
}
for _, embeds := range []string{"server/camlistored/ui", "pkg/server", "vendor/embed/react", "vendor/embed/less", "vendor/embed/glitch", "vendor/embed/fontawesome", "vendor/embed/leaflet", "app/publisher", "app/scanningcabinet/ui"} {
embeds := buildSrcPath(embeds)
- args := []string{"--output-files-stderr", embeds}
+ var args []string
+ if *all {
+ args = append(args, "-all")
+ }
+ args = append(args, "-output-files-stderr", embeds)
cmd := exec.Command(cmdName, args...)
cmd.Env = append(cleanGoEnv(),
"GOPATH="+buildGoPath,
diff --git a/pkg/fileembed/genfileembed/genfileembed.go b/pkg/fileembed/genfileembed/genfileembed.go
index <HASH>..<HASH> 100644
--- a/pkg/fileembed/genfileembed/genfileembed.go
+++ b/pkg/fileembed/genfileembed/genfileembed.go
@@ -163,7 +163,7 @@ func main() {
fmt.Fprintf(&b, "import \""+*fileEmbedPkgPath+"\"\n\n")
b.WriteString(imports)
fmt.Fprintf(&b, "func init() {\n\tFiles.Add(%q, %d, time.Unix(0, %d), %s(%s));\n}\n",
- fileName, fileSize, fi.ModTime().UnixNano(), byteStreamType, qb)
+ filepath.ToSlash(fileName), fileSize, fi.ModTime().UnixNano(), byteStreamType, qb)
// gofmt it
fset := token.NewFileSet()
|
genfileembed: always embed paths with slashes
Also change make.go to propagate -all to genfileembed.
Fixes #<I>
Change-Id: I<I>d5fc<I>ae0fc9c<I>fca<I>ff0a<I>b<I>c
|
perkeep_perkeep
|
train
|
52f6f1eaa89b5cfa1c5ad172254b057b634ceac8
|
diff --git a/lib/rubyx/rubyx_compiler.rb b/lib/rubyx/rubyx_compiler.rb
index <HASH>..<HASH> 100644
--- a/lib/rubyx/rubyx_compiler.rb
+++ b/lib/rubyx/rubyx_compiler.rb
@@ -4,11 +4,20 @@ module RubyX
# There are methods to go from ruby to any of the layers in the system
# (mainly for testing). ruby_to_binary creates actual binary code
# for a given platform.
+ # The compiler keeps the vool source as an instance.
+ # To compile several sources, more vool can be added, ie ruby_to_vool
+ # can be called several times.
+ #
+ # All other methods come in pairs, one takes ruby source (those are for testing)
+ # and the other uses the stored vool source for further processing.
+ #
# Only builtin is loaded, so no runtime , but the compiler
# can be used to read the runtime and then any other code
#
class RubyXCompiler
+ attr_reader :vool
+
# initialize boots Parfait and Risc (ie load Builin)
def initialize
Parfait.boot!
@@ -22,9 +31,19 @@ module RubyX
#
# A Linker is returned that may be used to create an elf binay
#
- # The compiling is done by ruby_to_risc
+ # The compiling is done by to_binary
def ruby_to_binary(ruby , platform)
- linker = ruby_to_risc(ruby, platform)
+ ruby_to_vool(ruby)
+ to_binary(platform)
+ end
+
+ # Process previously stored vool source to binary.
+ # Binary code is generated byu calling to_risc, then positioning and calling
+ # create_binary on the linker. The linker may then be used to creat a binary file.
+ # The biary the method name refers to is binary code in memory, or in BinaryCode
+ # objects to be precise.
+ def to_binary(platform)
+ linker = to_risc(platform)
linker.position_all
linker.create_binary
linker
@@ -33,40 +52,49 @@ module RubyX
# ruby_to_risc creates Risc instructions (as the name implies), but also
# translates those to the platform given
#
- # The higher level translation is done by ruby_to_mom
+ # After creating vool, we call to_risc
def ruby_to_risc(ruby, platform)
- mom = ruby_to_mom(ruby)
+ ruby_to_vool(ruby)
+ to_risc(platform)
+ end
+
+ # Process previously stored vool source. First to mom, then to platform.
+ # Translating to platform returns a linker that is returned and can be used
+ # to generate binaries
+ def to_risc(platform)
+ mom = to_mom
mom.translate(platform)
end
# ruby_to_mom does exactly that, it transform the incoming ruby source (string)
# to mom
- #
- # the method calls ruby_to_vool to compile the first layer
+ # The vool is stored using ruby_to_vool, and if there was previous source,
+ # this will also be momed
def ruby_to_mom(ruby)
- vool_tree = ruby_to_vool(ruby)
- vool_tree.to_mom(nil)
+ ruby_to_vool(ruby)
+ to_mom
+ end
+
+ # return mom for the previously stored vool source.
+ def to_mom
+ @vool.to_mom(nil)
end
# ruby_to_vool compiles the ruby to ast, and then to vool
def ruby_to_vool(ruby_source)
ruby_tree = Ruby::RubyCompiler.compile( ruby_source )
- vool_tree = ruby_tree.to_vool
- vool_tree
+ @vool = ruby_tree.to_vool
end
def self.ruby_to_binary( ruby , platform)
compiler = RubyXCompiler.new
- vool_tree = compiler.ruby_to_vool(ruby)
+ compiler.ruby_to_vool(ruby)
# integrate other sources into vool tree
- mom = vool_tree.to_mom(nil)
- linker = mom.translate(platform)
- linker.position_all
- linker.create_binary
- linker
+ compiler.to_binary(platform)
+
end
end
end
|
cleaner interfaces for rubyXcompiler
store the vool
seperate api for ruby -> X and stored vool -> X
|
ruby-x_rubyx
|
train
|
9a90ae944ec597f07ea841306c5a3bc66e319efe
|
diff --git a/storage/memory/memory.go b/storage/memory/memory.go
index <HASH>..<HASH> 100644
--- a/storage/memory/memory.go
+++ b/storage/memory/memory.go
@@ -55,7 +55,7 @@ func (s *memoryStore) Name(ctx context.Context) string {
// Version returns the version of the driver implementation.
func (s *memoryStore) Version(ctx context.Context) string {
- return "0.1.vcli"
+ return "0.2.vcli"
}
// NewGraph creates a new graph.
@@ -142,7 +142,7 @@ func (m *memory) AddTriples(ctx context.Context, ts []*triple.Triple) error {
for _, t := range ts {
suuid := UUIDToByteString(t.UUID())
sUUID := UUIDToByteString(t.Subject().UUID())
- pUUID := UUIDToByteString(t.Predicate().UUID())
+ pUUID := UUIDToByteString(t.Predicate().PartialUUID())
oUUID := UUIDToByteString(t.Object().UUID())
// Update master index
m.idx[suuid] = t
@@ -188,7 +188,7 @@ func (m *memory) RemoveTriples(ctx context.Context, ts []*triple.Triple) error {
for _, t := range ts {
suuid := UUIDToByteString(t.UUID())
sUUID := UUIDToByteString(t.Subject().UUID())
- pUUID := UUIDToByteString(t.Predicate().UUID())
+ pUUID := UUIDToByteString(t.Predicate().PartialUUID())
oUUID := UUIDToByteString(t.Object().UUID())
// Update master index
m.rwmu.Lock()
@@ -263,7 +263,7 @@ func (c *checker) CheckAndUpdate(p *predicate.Predicate) bool {
func (m *memory) Objects(ctx context.Context, s *node.Node, p *predicate.Predicate, lo *storage.LookupOptions, objs chan<- *triple.Object) error {
sUUID := UUIDToByteString(s.UUID())
- pUUID := UUIDToByteString(p.UUID())
+ pUUID := UUIDToByteString(p.PartialUUID())
spIdx := sUUID + pUUID
m.rwmu.RLock()
defer m.rwmu.RUnlock()
@@ -284,7 +284,7 @@ func (m *memory) Subjects(ctx context.Context, p *predicate.Predicate, o *triple
if subjs == nil {
return fmt.Errorf("cannot provide an empty channel")
}
- pUUID := UUIDToByteString(p.UUID())
+ pUUID := UUIDToByteString(p.PartialUUID())
oUUID := UUIDToByteString(o.UUID())
poIdx := pUUID + oUUID
m.rwmu.RLock()
@@ -386,7 +386,7 @@ func (m *memory) TriplesForPredicate(ctx context.Context, p *predicate.Predicate
if trpls == nil {
return fmt.Errorf("cannot provide an empty channel")
}
- pUUID := UUIDToByteString(p.UUID())
+ pUUID := UUIDToByteString(p.PartialUUID())
m.rwmu.RLock()
defer m.rwmu.RUnlock()
defer close(trpls)
@@ -427,7 +427,7 @@ func (m *memory) TriplesForSubjectAndPredicate(ctx context.Context, s *node.Node
return fmt.Errorf("cannot provide an empty channel")
}
sUUID := UUIDToByteString(s.UUID())
- pUUID := UUIDToByteString(p.UUID())
+ pUUID := UUIDToByteString(p.PartialUUID())
spIdx := sUUID + pUUID
m.rwmu.RLock()
defer m.rwmu.RUnlock()
@@ -448,7 +448,7 @@ func (m *memory) TriplesForPredicateAndObject(ctx context.Context, p *predicate.
if trpls == nil {
return fmt.Errorf("cannot provide an empty channel")
}
- pUUID := UUIDToByteString(p.UUID())
+ pUUID := UUIDToByteString(p.PartialUUID())
oUUID := UUIDToByteString(o.UUID())
poIdx := pUUID + oUUID
m.rwmu.RLock()
|
Upgrade the in memory model to work with partialUUIDs fro predicates
|
google_badwolf
|
train
|
4d83b5b9b731b9832dccf2b9ca5619dbba0284f6
|
diff --git a/libs/xmlforms/formdef.js b/libs/xmlforms/formdef.js
index <HASH>..<HASH> 100644
--- a/libs/xmlforms/formdef.js
+++ b/libs/xmlforms/formdef.js
@@ -7,17 +7,9 @@ exports.fetchFormDefs = function(db, pgsql) {
// unwraps the objects and decodes the base64
return db.query(pgsql.getFormDefinitionsXML())
.then(function (formlist) {
- if (!formlist) {
- return {'xmlstrs': [], 'vers': []};
- }
- return {
- 'xmlstrs': formlist.map(function (el) {
- return new Buffer(el.form, 'base64').toString('utf8');
- }),
- 'vers': formlist.map(function (el) {
- return el.version;
- })
- };
+ return formlist.map(function (el) {
+ return new Buffer(el.form, 'base64').toString('utf8');
+ });
});
};
@@ -77,8 +69,8 @@ exports.parseFormDefXML = function(xmldatalist) {
// track the leaves wrapped up in the form name
flatdefs[formname] = {};
flatdefs[formname].fields = flattaglist;
- // apply version
- flatdefs[formname].version = 'fail';
+ // apply version attribute taken from form's eponymous tag
+ flatdefs[formname].version = jsondata._Attribs.version;
}
return resolve(flatdefs);
});
|
code passes tests for version from XML
|
medic_couch2pg
|
train
|
53d97e034051bb5077345730bddc18d2fcc71678
|
diff --git a/test/ManuscriptEditor.test.js b/test/ManuscriptEditor.test.js
index <HASH>..<HASH> 100644
--- a/test/ManuscriptEditor.test.js
+++ b/test/ManuscriptEditor.test.js
@@ -5,7 +5,7 @@ import {
loadBodyFixture, getDocument, setSelection, LOREM_IPSUM,
openContextMenuAndFindTool, openMenuAndFindTool, clickUndo,
isToolEnabled, createKeyEvent, selectNode, getSelection, selectRange,
- getCurrentViewName, deleteSelection, createSurfaceEvent
+ getCurrentViewName, deleteSelection, createSurfaceEvent, canSwitchTextTypeTo, switchTextType
} from './shared/integrationTestHelpers'
import setupTestApp from './shared/setupTestApp'
import { doesNotThrowInNodejs, DOMEvent, ClipboardEventData } from './shared/testHelpers'
@@ -180,8 +180,8 @@ test('ManuscriptEditor: Switch paragraph to heading', t => {
loadBodyFixture(editor, ONE_PARAGRAPH)
setCursor(editor, 'p1.content', 0)
- t.ok(_canSwitchTo(editor, 'heading1'), 'switch to heading1 should be possible')
- _switchTo(editor, 'heading1')
+ t.ok(canSwitchTextTypeTo(editor, 'heading1'), 'switch to heading1 should be possible')
+ switchTextType(editor, 'heading1')
// ATTENTION: we do not change id, which might be confusing for others
let h1El = editor.find('.sc-surface.sm-body > h1')
t.notNil(h1El, 'there should be a <h1> element now')
@@ -196,11 +196,11 @@ test('ManuscriptEditor: Switch to heading', t => {
}
loadBodyFixture(editor, ONE_PARAGRAPH)
setCursor(editor, 'p1.content', 0)
- _switchTo(editor, 'heading1')
+ switchTextType(editor, 'heading1')
t.ok(_isHeadingDisplayed(1), 'heading level 1 should be displayed')
- _switchTo(editor, 'heading2')
+ switchTextType(editor, 'heading2')
t.ok(_isHeadingDisplayed(2), 'heading level 2 should be displayed')
- _switchTo(editor, 'heading3')
+ switchTextType(editor, 'heading3')
t.ok(_isHeadingDisplayed(3), 'heading level 3 should be displayed')
t.end()
})
@@ -211,8 +211,8 @@ test('ManuscriptEditor: Switch paragraph to preformat', t => {
loadBodyFixture(editor, ONE_PARAGRAPH)
setCursor(editor, 'p1.content', 0)
- t.ok(_canSwitchTo(editor, 'preformat'), 'switch to preformat should be possible')
- _switchTo(editor, 'preformat')
+ t.ok(canSwitchTextTypeTo(editor, 'preformat'), 'switch to preformat should be possible')
+ switchTextType(editor, 'preformat')
let preformatEl = editor.find('.sc-surface.sm-body > .sc-text-node.sm-preformat')
t.notNil(preformatEl, 'there should be a div with preformat component class now')
@@ -613,15 +613,6 @@ test('ManuscriptEditor: copy and pasting list items', t => {
t.end()
})
-function _canSwitchTo (editor, type) {
- let tool = openMenuAndFindTool(editor, 'text-types', `.sm-switch-to-${type}`)
- return tool && !tool.attr('disabled')
-}
-
-function _switchTo (editor, type) {
- return openMenuAndFindTool(editor, 'text-types', `.sm-switch-to-${type}`).el.click()
-}
-
function _getLineCount (str) {
return str.split(/\r\n|\r|\n/).length
}
diff --git a/test/shared/integrationTestHelpers.js b/test/shared/integrationTestHelpers.js
index <HASH>..<HASH> 100644
--- a/test/shared/integrationTestHelpers.js
+++ b/test/shared/integrationTestHelpers.js
@@ -309,6 +309,15 @@ export function isToolEnabled (editor, menuName, toolSelector) {
return tool && !tool.getAttribute('disabled')
}
+export function switchTextType (editor, type) {
+ return openMenuAndFindTool(editor, 'text-types', `.sm-switch-to-${type}`).el.click()
+}
+
+export function canSwitchTextTypeTo (editor, type) {
+ let tool = openMenuAndFindTool(editor, 'text-types', `.sm-switch-to-${type}`)
+ return tool && !tool.attr('disabled')
+}
+
const TOOL_SPECS = {
'bold': {
menu: 'format',
|
Keep text type switching in integration test helpers.
|
substance_texture
|
train
|
480936acaf3c80e26618c0d8ee2d137e6d47aa42
|
diff --git a/web/concrete/src/Page/Type/Type.php b/web/concrete/src/Page/Type/Type.php
index <HASH>..<HASH> 100644
--- a/web/concrete/src/Page/Type/Type.php
+++ b/web/concrete/src/Page/Type/Type.php
@@ -886,9 +886,11 @@ class Type extends Object implements \Concrete\Core\Permission\ObjectInterface
foreach ($existingDefaultTemplateIDs as $existingPageTemplateID) {
if (!in_array($existingPageTemplateID, $templateIDs)) {
$existingPageTemplate = Template::getByID($existingPageTemplateID);
- $c = $this->getPageTypePageTemplateDefaultPageObject($existingPageTemplate);
- if (is_object($c)) {
- $c->delete();
+ if (is_object($existingPageTemplate)) {
+ $c = $this->getPageTypePageTemplateDefaultPageObject($existingPageTemplate);
+ if (is_object($c)) {
+ $c->delete();
+ }
}
$db->Execute('delete from PageTypePageTemplateDefaultPages where pTemplateID = ? and ptID = ?', array($existingPageTemplateID, $this->getPageTypeID()));
}
|
Fix second part of #<I>.
Former-commit-id: 7c<I>d<I>cdea<I>b<I>bd<I>e8de1f3c7ea<I>a
Former-commit-id: 5aaf<I>a<I>d6da<I>f9cf<I>f<I>fb<I>ea<I>
|
concrete5_concrete5
|
train
|
d8217386f818b21bc85b6ab3aad531a94fb662f4
|
diff --git a/demo_parser.py b/demo_parser.py
index <HASH>..<HASH> 100644
--- a/demo_parser.py
+++ b/demo_parser.py
@@ -38,6 +38,7 @@ if __name__ == '__main__':
# 3. mixed
# print_query_and_parse_tree(r"author:ellis title:'boson'")
# print_query_and_parse_tree(r"author:ellis title:'boson'")
+ # print_query_and_parse_tree(r"find cn atlas not tc c")
# repl()
@@ -120,6 +121,8 @@ if __name__ == '__main__':
print_query_and_parse_tree(r"title:/dense ([^ $]* )?matter/")
# Nestable keywords
+ print_query_and_parse_tree(r"referstox:author:s.p.martin.1")
+ print_query_and_parse_tree(r"refersto:author:s.p.martin.1")
print_query_and_parse_tree(r"citedbyx:author:s.p.martin.1")
print_query_and_parse_tree(r"citedby:author:s.p.martin.1")
print_query_and_parse_tree(r"-refersto:recid:1374998 and citedby:(A.A.Aguilar.Arevalo.1)")
@@ -142,7 +145,7 @@ if __name__ == '__main__':
# G, GE, LT, LE, E queries
print_query_and_parse_tree(r"date > 10-2000 and title foo")
print_query_and_parse_tree(r"date after 10/2000 - title foo")
- print_query_and_parse_tree(r"date >= 2000 - author ellis")
+ print_query_and_parse_tree(r"date >= nov 2000 - author ellis")
print_query_and_parse_tree(r"date 1978+ + -ac 100+")
print_query_and_parse_tree(r"date 2010-06+ or foo")
print_query_and_parse_tree(r"date 2010-06 + or foo")
diff --git a/inspire_query_parser/config.py b/inspire_query_parser/config.py
index <HASH>..<HASH> 100644
--- a/inspire_query_parser/config.py
+++ b/inspire_query_parser/config.py
@@ -35,11 +35,17 @@ INSPIRE_PARSER_KEYWORDS = {
'bulletin-bd-no': 'reportnumber',
'eprint': 'reportnumber',
+ # Cataloguer
+ 'cataloguer': 'cataloguer',
+ 'cat': 'cataloguer',
+
# Caption
'caption': 'caption',
- # Citedby
+ # Citedby related
'citedby': 'citedby',
+ 'citedexcludingselfcites': 'citedexcludingselfcites',
+ 'cx': 'citedexcludingselfcites',
# coden
'bc': 'journal',
@@ -181,6 +187,9 @@ INSPIRE_PARSER_KEYWORDS = {
'title': 'title',
't': 'title',
+ # texkey
+ 'texkey': 'texkey',
+
# Topcite
'cited': 'cited',
'topcit': 'cited',
diff --git a/inspire_query_parser/parser.py b/inspire_query_parser/parser.py
index <HASH>..<HASH> 100644
--- a/inspire_query_parser/parser.py
+++ b/inspire_query_parser/parser.py
@@ -440,9 +440,13 @@ Expression.grammar = attr('op', [
NestedKeywordQuery.grammar = \
attr('left', [
- re.compile('refersto', re.IGNORECASE),
- re.compile('citedbyx', re.IGNORECASE),
- re.compile('citedby', re.IGNORECASE),
+ # Most specific regex must be higher.
+ re.compile(r'citedbyexcludingselfcites', re.IGNORECASE),
+ re.compile(r'citedbyx', re.IGNORECASE),
+ re.compile(r'citedby', re.IGNORECASE),
+ re.compile(r'referstoexcludingselfcites', re.IGNORECASE),
+ re.compile(r'referstox', re.IGNORECASE),
+ re.compile(r'refersto', re.IGNORECASE),
]), \
optional(omit(":")), \
attr('right', Expression)
|
parser: add more generic and nested keywords
|
inspirehep_inspire-query-parser
|
train
|
9e755c76e9d8e3a5e0a486002b3898be9d6d5d25
|
diff --git a/presto-base-jdbc/src/main/java/com/facebook/presto/plugin/jdbc/JdbcRecordCursor.java b/presto-base-jdbc/src/main/java/com/facebook/presto/plugin/jdbc/JdbcRecordCursor.java
index <HASH>..<HASH> 100644
--- a/presto-base-jdbc/src/main/java/com/facebook/presto/plugin/jdbc/JdbcRecordCursor.java
+++ b/presto-base-jdbc/src/main/java/com/facebook/presto/plugin/jdbc/JdbcRecordCursor.java
@@ -220,11 +220,10 @@ public class JdbcRecordCursor
public void close()
{
closed = true;
-
// use try with resources to close everything properly
- try (ResultSet resultSet = this.resultSet;
+ try (Connection connection = this.connection;
Statement statement = this.statement;
- Connection connection = this.connection) {
+ ResultSet resultSet = this.resultSet) {
// do nothing
}
catch (SQLException e) {
|
Fix "Connection already closed" error in JdbcRecordCursor
The resultset, statement and connection were being closed in
the wrong order.
|
prestodb_presto
|
train
|
df927ae620bf692a942a81627ae1a9647112e51d
|
diff --git a/liberty-maven-plugin/src/main/java/io/openliberty/tools/maven/applications/DeployMojoSupport.java b/liberty-maven-plugin/src/main/java/io/openliberty/tools/maven/applications/DeployMojoSupport.java
index <HASH>..<HASH> 100644
--- a/liberty-maven-plugin/src/main/java/io/openliberty/tools/maven/applications/DeployMojoSupport.java
+++ b/liberty-maven-plugin/src/main/java/io/openliberty/tools/maven/applications/DeployMojoSupport.java
@@ -106,7 +106,7 @@ public class DeployMojoSupport extends PluginConfigSupport {
private void setLooseProjectRootForContainer(MavenProject proj, LooseConfigData config) throws MojoExecutionException {
try {
// Set up the config to replace the absolute path names with ${variable}/target type references
- String projectRoot = multiModuleProjectDirectory == null ? proj.getBasedir().getCanonicalPath() : multiModuleProjectDirectory.getCanonicalPath();
+ String projectRoot = DevUtil.getLooseAppProjectRoot(proj.getBasedir(), multiModuleProjectDirectory).getCanonicalPath();
config.setProjectRoot(projectRoot);
config.setSourceOnDiskName("${"+DevUtil.DEVMODE_PROJECT_ROOT+"}");
if (copyLibsDirectory == null) { // in container mode, copy dependencies from .m2 dir to the target dir to mount in container
|
Find common project root for multi module loose app
|
WASdev_ci.maven
|
train
|
886628968ce05e1cd9073f9147b0f7e0c080a48f
|
diff --git a/hazelcast/src/main/java/com/hazelcast/internal/cluster/impl/ClusterJoinManager.java b/hazelcast/src/main/java/com/hazelcast/internal/cluster/impl/ClusterJoinManager.java
index <HASH>..<HASH> 100644
--- a/hazelcast/src/main/java/com/hazelcast/internal/cluster/impl/ClusterJoinManager.java
+++ b/hazelcast/src/main/java/com/hazelcast/internal/cluster/impl/ClusterJoinManager.java
@@ -88,9 +88,8 @@ import static java.lang.String.format;
public class ClusterJoinManager {
public static final String STALE_JOIN_PREVENTION_DURATION_PROP = "hazelcast.stale.join.prevention.duration.seconds";
+ private static final int DEFAULT_STALE_JOIN_PREVENTION_DURATION_IN_SECS = 30;
private static final int CLUSTER_OPERATION_RETRY_COUNT = 100;
- private static final int STALE_JOIN_PREVENTION_DURATION_SECONDS
- = Integer.getInteger(STALE_JOIN_PREVENTION_DURATION_PROP, 30);
private final ILogger logger;
private final Node node;
@@ -115,7 +114,7 @@ public class ClusterJoinManager {
private final ConcurrentMap<UUID, Long> leftMembersUuids = new ConcurrentHashMap<>();
private final long maxWaitMillisBeforeJoin;
private final long waitMillisBeforeJoin;
- private final long staleJoinPreventionDuration;
+ private final long staleJoinPreventionDurationInMillis;
private long firstJoinRequest;
private long timeToStartJoin;
@@ -133,7 +132,8 @@ public class ClusterJoinManager {
maxWaitMillisBeforeJoin = node.getProperties().getMillis(ClusterProperty.MAX_WAIT_SECONDS_BEFORE_JOIN);
waitMillisBeforeJoin = node.getProperties().getMillis(ClusterProperty.WAIT_SECONDS_BEFORE_JOIN);
- staleJoinPreventionDuration = TimeUnit.SECONDS.toMillis(STALE_JOIN_PREVENTION_DURATION_SECONDS);
+ staleJoinPreventionDurationInMillis = TimeUnit.SECONDS.toMillis(
+ Integer.getInteger(STALE_JOIN_PREVENTION_DURATION_PROP, DEFAULT_STALE_JOIN_PREVENTION_DURATION_IN_SECS));
}
boolean isJoinInProgress() {
@@ -364,7 +364,7 @@ public class ClusterJoinManager {
private void cleanupRecentlyJoinedMemberUuids() {
long currentTime = Clock.currentTimeMillis();
- recentlyJoinedMemberUuids.values().removeIf(joinTime -> (currentTime - joinTime) >= staleJoinPreventionDuration);
+ recentlyJoinedMemberUuids.values().removeIf(joinTime -> (currentTime - joinTime) >= staleJoinPreventionDurationInMillis);
}
private boolean authenticate(JoinRequest joinRequest, Connection connection) {
|
Convert staleJoinPreventionDuration to not static field (#<I>)
|
hazelcast_hazelcast
|
train
|
32655611d9e2775869a288ebfb3c9bb1e1fd7dcf
|
diff --git a/lib/parser.js b/lib/parser.js
index <HASH>..<HASH> 100644
--- a/lib/parser.js
+++ b/lib/parser.js
@@ -34,28 +34,21 @@ export function matchSymbolFile (line) {
}
}
-export function matchPropsColonDuo (line) {
- const regexp = /^(?<property1>.+): (?<value1>.+), # (?<property2>.+): (?<value2>.+)/
- const match = line.match(regexp)
- if (match) {
- let match1 = {}
- match1[match.groups.property1] = match.groups.value1
- let match2 = {}
- match2[match.groups.property2] = match.groups.value2
- const props = [match1, match2]
- return props
- } else {
- return null
- }
-}
-
export function matchPropsColon (line) {
- const regexp = /^(?<property>.+): (?<value>.+)$/
- const match = line.match(regexp)
- if (match) {
- let match1 = {}
- match1[match.groups.property1] = match.groups.value1
- const props = [match1]
+ const regexp = /^(?<property>.+?): (?<value>.+)$/
+ const rawProps = line.split(/, +# /)
+ const props = rawProps.map(function (raw) {
+ const match = raw.match(regexp)
+ if (match) {
+ const groups = match.groups
+ const props = {[groups.property]: groups.value}
+ return props
+ } else {
+ return null
+ }
+ }).filter((i) => (i !== null))
+ const isInfoInLine = (props.length > 0)
+ if (isInfoInLine) {
return props
} else {
return null
diff --git a/test/lib/parser.spec.js b/test/lib/parser.spec.js
index <HASH>..<HASH> 100644
--- a/test/lib/parser.spec.js
+++ b/test/lib/parser.spec.js
@@ -1,14 +1,14 @@
/* global describe, it */
import { expect } from 'chai'
-import { matchProgress, matchPropsColon, matchPropsColonDuo, matchPropsEquals, matchSymbolFile } from '../../lib/parser.js'
+import { matchProgress, matchPropsColon, matchPropsEquals, matchSymbolFile } from '../../lib/parser.js'
describe('Specification: parser.js', function () {
- it('progress should return null on non match', function () {
+ it('matchProgress() should return null on non match', function () {
const r = matchProgress('+ test/file/null')
expect(r).to.be.null
})
- it('progress should return progress info as int', function () {
+ it('matchProgress() should return progress info as int', function () {
const digit1File = matchProgress(' 1% 3')
expect(digit1File.percent).to.equal(1)
expect(digit1File.fileCount).to.equal(3)
@@ -30,4 +30,35 @@ describe('Specification: parser.js', function () {
const digit3NoFile = matchProgress('100%')
expect(digit3NoFile.percent).to.equal(100)
})
+
+ it('matchPropsColon() should return null on non match', function () {
+ const r = matchPropsColon('+ test/file/null')
+ expect(r).to.be.null
+ })
+
+ it('matchPropsColon() should return props and values', function () {
+ const basic = matchPropsColon('Prop: Data')
+ expect(basic).to.be.an('array')
+ expect(basic).to.have.lengthOf(1)
+ expect(basic[0]['Prop']).to.equal('Data')
+
+ const space = matchPropsColon('Prop of archive: 322 MB')
+ expect(space).to.be.an('array')
+ expect(space).to.have.lengthOf(1)
+ expect(space[0]['Prop of archive']).to.equal('322 MB')
+ })
+
+ it('matchPropsColon() should works with line containing 2 infos', function () {
+ const basic = matchPropsColon('Prop1: Data1, # Prop2: Data2')
+ expect(basic).to.be.an('array')
+ expect(basic).to.have.lengthOf(2)
+ expect(basic[0]['Prop1']).to.equal('Data1')
+ expect(basic[1]['Prop2']).to.equal('Data2')
+
+ const space = matchPropsColon('Prop 1: Data 1, # Prop 2: Data 2')
+ expect(space).to.be.an('array')
+ expect(space).to.have.lengthOf(2)
+ expect(space[0]['Prop 1']).to.equal('Data 1')
+ expect(space[1]['Prop 2']).to.equal('Data 2')
+ })
})
|
feat: Parse lines with colon-separated value
|
quentinrossetti_node-7z
|
train
|
960fa6ec0b25c4a9fe64765fbf683b2d4a51b9e1
|
diff --git a/spec/tree/builder/treeAdapterSpec.js b/spec/tree/builder/treeAdapterSpec.js
index <HASH>..<HASH> 100644
--- a/spec/tree/builder/treeAdapterSpec.js
+++ b/spec/tree/builder/treeAdapterSpec.js
@@ -83,6 +83,25 @@ describe( "TreeAdapter", () => {
expect( expectedStructuredNode instanceof StructuredNode ).toBe( true );
} );
+
+ it( "can create a formatting element without attributes", () => {
+ const adapter = new TreeAdapter();
+
+ const expected = adapter.createElement( "strong", "html", [] );
+
+ expect( expected ).toBeInstanceOf( FormattingElement );
+ expect( expected.attributes ).toEqual( null );
+ } );
+
+ it( "can create a formatting element with attributes", () => {
+ const adapter = new TreeAdapter();
+
+ const attributes = [ { name: "href", value: "https://www.yoast.com/" } ];
+ const expected = adapter.createElement( "a", "html", attributes );
+
+ expect( expected ).toBeInstanceOf( FormattingElement );
+ expect( expected.attributes ).toEqual( { href: "https://www.yoast.com/" } );
+ } );
} );
describe( "TreeAdapter insertText", () => {
@@ -241,4 +260,13 @@ describe( "TreeAdapter", () => {
expect( paragraph.textContainer.formatting[ 1 ].type ).toEqual( "emph" );
} );
} );
+
+ describe( "TreeAdapter detachNode", () => {
+ it( "does not detach a node from its parent when it does not have one", () => {
+ const adapter = new TreeAdapter();
+ const node = new StructuredNode( "root" );
+
+ adapter.detachNode( node );
+ } );
+ } );
} );
|
Added tests to improve coverage of TreeAdapter to <I>%.
|
Yoast_YoastSEO.js
|
train
|
98c4e1ac498830bd282e5fd8977774aef2fa448e
|
diff --git a/src/build_dotnet.py b/src/build_dotnet.py
index <HASH>..<HASH> 100644
--- a/src/build_dotnet.py
+++ b/src/build_dotnet.py
@@ -6,6 +6,13 @@ import sys
import fileinput
import shutil
+def system(cmd):
+ # copied from setup.py
+ rv = os.system(cmd)
+ rc = rv if os.name == 'nt' else os.WEXITSTATUS(rv)
+ if (rc != 0):
+ raise RuntimeError('The command "{}" exited with {}'.format(cmd, rc))
+
def methodgen(dotnetcore):
# set up args to pass to methodgen application
dir_cpp = os.getcwd() + '/librhino3dmio_native'
@@ -31,18 +38,18 @@ def methodgen(dotnetcore):
if os.path.isfile(full_path):
shutil.copy(full_path, build_dir + '/methodgen.csproj')
# compile methodgen
- os.system('dotnet build ' + './' + build_dir)
+ system('dotnet build ' + './' + build_dir)
# execute methodgen
- os.system('dotnet ./'+build_dir+'/bin/Debug/netcoreapp2.2/methodgen.dll '+ args)
+ system('dotnet ./'+build_dir+'/bin/Debug/netcoreapp2.2/methodgen.dll '+ args)
else:
# compile methodgen
- os.system('msbuild ./methodgen')
+ system('msbuild ./methodgen')
# execute methodgen for Rhino3dm
app = os.getcwd() + '/methodgen/bin/Debug/methodgen.exe'
if os.name == 'nt': # windows build
- os.system(app + args)
+ system(app + args)
else:
- os.system('mono ' + app + args)
+ system('mono ' + app + args)
def create_cpp_project(bitness, compile):
@@ -59,18 +66,18 @@ def create_cpp_project(bitness, compile):
if bitness == 64:
arch = " Win64"
args = '-G "Visual Studio 15 2017{0}"'.format(arch)
- os.system('cmake ' + args + ' ../../librhino3dmio_native')
+ system('cmake ' + args + ' ../../librhino3dmio_native')
if bitness == 64:
for line in fileinput.input("librhino3dmio_native.vcxproj", inplace=1):
print(line.replace("WIN32;", "WIN64;"))
for line in fileinput.input("opennurbs_static.vcxproj", inplace=1):
print(line.replace("WIN32;", "WIN64;"))
if compile:
- os.system("cmake --build . --config Release --target librhino3dmio_native")
+ system("cmake --build . --config Release --target librhino3dmio_native")
else:
- rv = os.system("cmake ../../librhino3dmio_native")
- if compile and int(rv) == 0:
- os.system("make")
+ system("cmake ../../librhino3dmio_native")
+ if compile:
+ system("make")
os.chdir("../..")
@@ -78,10 +85,10 @@ def create_cpp_project(bitness, compile):
def compilerhino3dm(dotnetcore):
if dotnetcore:
conf = '/p:Configuration=Release;OutDir="../build/dotnet"'
- os.system('dotnet build ./dotnet/Rhino3dm.core.csproj {}'.format(conf))
+ system('dotnet build ./dotnet/Rhino3dm.core.csproj {}'.format(conf))
else:
conf = '/p:Configuration=Release;OutDir="../build/dotnet"'
- os.system('msbuild ./dotnet/Rhino3dm.csproj {}'.format(conf))
+ system('msbuild ./dotnet/Rhino3dm.csproj {}'.format(conf))
if __name__ == '__main__':
|
Ensure dotnet build script fails when it should
|
mcneel_rhino3dm
|
train
|
95bcaaf3345132828dd706f2205a9a708d221dda
|
diff --git a/worker/upgradeseries/worker.go b/worker/upgradeseries/worker.go
index <HASH>..<HASH> 100644
--- a/worker/upgradeseries/worker.go
+++ b/worker/upgradeseries/worker.go
@@ -281,13 +281,11 @@ func (w *upgradeSeriesWorker) handleCompleteStarted() error {
return errors.Trace(w.transitionUnitsStarted(unitServices))
}
- if servicesPresent {
- // If the units have all completed their workflow, then we are done.
- // Make the final update to the lock to say the machine is completed.
- unitServices, allConfirmed, err = w.compareUnitAgentServices(w.UnitsCompleted)
- if err != nil {
- return errors.Trace(err)
- }
+ // If the units have all completed their workflow, then we are done.
+ // Make the final update to the lock to say the machine is completed.
+ unitServices, allConfirmed, err = w.compareUnitAgentServices(w.UnitsCompleted)
+ if err != nil {
+ return errors.Trace(err)
}
if allConfirmed {
@@ -363,6 +361,9 @@ func (w *upgradeSeriesWorker) compareUnitAgentServices(
}
unitServices := service.FindUnitServiceNames(services)
+ if len(unitServices) == 0 {
+ w.logger.Debugf("no unit agent services found")
+ }
if len(units) != len(unitServices) {
return unitServices, false, nil
}
diff --git a/worker/upgradeseries/worker_test.go b/worker/upgradeseries/worker_test.go
index <HASH>..<HASH> 100644
--- a/worker/upgradeseries/worker_test.go
+++ b/worker/upgradeseries/worker_test.go
@@ -183,7 +183,8 @@ func (s *workerSuite) TestMachineCompleteStartedNoUnitsProgressComplete(c *gc.C)
// Machine with no units - API calls return none, no services discovered.
exp.UnitsPrepared().Return(nil, nil)
- s.service.EXPECT().ListServices().Return(nil, nil)
+ exp.UnitsCompleted().Return(nil, nil)
+ s.service.EXPECT().ListServices().Return(nil, nil).Times(2)
// Progress directly to completed.
exp.SetMachineStatus(model.UpgradeSeriesCompleted).Return(nil)
|
Upgrade-series worker always checks for completed units.
|
juju_juju
|
train
|
3b60e813a4167a4dd5bc05e92775ec7a3bf12195
|
diff --git a/src/Generator.php b/src/Generator.php
index <HASH>..<HASH> 100644
--- a/src/Generator.php
+++ b/src/Generator.php
@@ -176,8 +176,8 @@ class Generator implements GeneratorInterface
* Returns a MODE message.
*
* @param string $target
- * @param string $mode
- * @param string $param
+ * @param string|null $mode
+ * @param string|null $param
* @return string
*/
public function ircMode($target, $mode = null, $param = null)
|
Updated PHPDoc for ircMode() in Generator
|
phergie_phergie-irc-generator
|
train
|
de49193687c18d799df1ed37743ce716e5d55ed4
|
diff --git a/bin/cloud-start b/bin/cloud-start
index <HASH>..<HASH> 100755
--- a/bin/cloud-start
+++ b/bin/cloud-start
@@ -22,10 +22,11 @@ EOS
puts "#{cld.nodes(:status => "running").size} running instances (#{cld.minimum_instances} - #{cld.maximum_instances})"
inst = cld.expand
- cld.nodes.each do |node|
- node.bootstrap!
- node.configure!(:cloud => cld)
- end
+ # cld.nodes.each do |node|
+ # vputs "Bootstrapping: #{node.instance_id}"
+ # node.bootstrap!
+ # node.configure!(:cloud => cld)
+ # end
msg = [
"Your cloud has started with the following public_ips:"
diff --git a/lib/cloud_providers/cloud_provider_instance.rb b/lib/cloud_providers/cloud_provider_instance.rb
index <HASH>..<HASH> 100644
--- a/lib/cloud_providers/cloud_provider_instance.rb
+++ b/lib/cloud_providers/cloud_provider_instance.rb
@@ -58,7 +58,7 @@ module CloudProviders
# Configure the node
def configure!(opts={})
- bootstrap! unless bootstrapped?
+ # bootstrap! unless bootstrapped?
set_vars_from_options opts
raise StandardError.new("You must pass in a cloud to configure an instance") unless cloud
cloud.compile(self)
@@ -99,7 +99,8 @@ module CloudProviders
# Determine if the node is bootstrapped
def bootstrapped?
- @bootstrapped ||= !run('if [ -f /var/poolparty/bootstrapped ]; then echo "YES"; fi').match(/YES/).nil?
+ # @bootstrapped ||= !run('if [ -f /var/poolparty/bootstrapped ]; then echo "YES"; fi').match(/YES/).nil?
+ @bootstrapped ||= !run('if [ -f /var/poolparty/bootstrapped ]; then echo "YES"; fi').chomp.empty? || false
end
# Wait for port
diff --git a/lib/cloud_providers/connections.rb b/lib/cloud_providers/connections.rb
index <HASH>..<HASH> 100644
--- a/lib/cloud_providers/connections.rb
+++ b/lib/cloud_providers/connections.rb
@@ -78,19 +78,18 @@ module CloudProviders
ddputs("Running command: #{cmd}")
Open3.popen3(cmd) do |stdout, stdin, stderr|
begin
- while (block = stdin.sysread(opts[:sysread]))
- buf << block
- $stdout.write_nonblock(block) if block
+ while (chunk = stdin.readpartial(opts[:sysread]))
+ buf << chunk
+ unless chunk.nil? || chunk.empty?
+ $stdout.write_nonblock(chunk) if debugging? || verbose?
+ end
end
- puts stderr.readlines
+ err = stderr.readlines
+ $stderr.write_nonblock(err) unless err.empty?
rescue SystemCallError => error
$stderr.write_nonblock(stderr)
rescue EOFError => error
# nothing
- ensure
- stdout.close
- stderr.close
- stdin.close
end
end
buf
diff --git a/lib/poolparty/cloud.rb b/lib/poolparty/cloud.rb
index <HASH>..<HASH> 100644
--- a/lib/poolparty/cloud.rb
+++ b/lib/poolparty/cloud.rb
@@ -95,6 +95,7 @@ module PoolParty
callback :after_launch_instance
instance.callback :before_bootstrap
instance.bootstrap!
+ instance.run("uptime")
instance.callback :after_bootstrap
instance.callback :before_configure
instance.configure!(:cloud => self)
|
Updating the expand to call bootstrap! and configure
|
auser_poolparty
|
train
|
2f06b93016bc9e4aed1ed2dfaf6ddf8b601b25aa
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100755
--- a/setup.py
+++ b/setup.py
@@ -18,7 +18,7 @@ from setuptools import setup, find_packages
import subprocess
setup(name="transit-python",
- version="0.8.279",
+ version="0.8.284",
description="Transit marshalling for Python",
author="Cognitect",
url="https://github.com/cognitect/transit-python",
|
Move setup.py to the correct version
|
cognitect_transit-python
|
train
|
749049c59632f75c6f94bb130f6018057b28446b
|
diff --git a/lib/sass/plugin.rb b/lib/sass/plugin.rb
index <HASH>..<HASH> 100644
--- a/lib/sass/plugin.rb
+++ b/lib/sass/plugin.rb
@@ -292,14 +292,14 @@ module Sass
FileUtils.mkdir_p dir
end
- result = begin
- Sass::Files.tree_for(filename, engine_options(:css_filename => css, :filename => filename)).render
- rescue Exception => e
- run_compilation_error e, filename, css
- Sass::SyntaxError.exception_to_css(e, options)
- else
- run_updating_stylesheet filename, css
- end
+ begin
+ result = Sass::Files.tree_for(filename, engine_options(:css_filename => css, :filename => filename)).render
+ rescue Exception => e
+ run_compilation_error e, filename, css
+ result = Sass::SyntaxError.exception_to_css(e, options)
+ else
+ run_updating_stylesheet filename, css
+ end
# Finally, write the file
File.open(css, 'w') {|file| file.print(result)}
|
[Sass] Fix a dumb bug.
|
sass_ruby-sass
|
train
|
0b86de6a64ccf5cba398ded0ae05615ce4182cb6
|
diff --git a/frontends/default/views/destroy.rjs b/frontends/default/views/destroy.rjs
index <HASH>..<HASH> 100644
--- a/frontends/default/views/destroy.rjs
+++ b/frontends/default/views/destroy.rjs
@@ -1,4 +1,4 @@
-if @successful
+if controller.send(:successful?)
page.remove element_row_id(:action => 'list', :id => params[:id])
page << "ActiveScaffold.toggleEmptyMessage('#{active_scaffold_tbody_id}','#{empty_message_id}');"
page << "ActiveScaffold.stripe('#{active_scaffold_tbody_id}');"
diff --git a/lib/actions/delete.rb b/lib/actions/delete.rb
index <HASH>..<HASH> 100644
--- a/lib/actions/delete.rb
+++ b/lib/actions/delete.rb
@@ -16,16 +16,15 @@ module ActiveScaffold::Actions
do_destroy
- @successful = successful?
respond_to do |type|
type.html do
flash[:info] = as_('Deleted %s', @record.to_label)
return_to_main
end
type.js { render(:action => 'destroy.rjs', :layout => false) }
- type.xml { render :xml => @successful ? "" : response_object.to_xml, :content_type => Mime::XML, :status => response_status }
- type.json { render :text => @successful ? "" : response_object.to_json, :content_type => Mime::JSON, :status => response_status }
- type.yaml { render :text => @successful ? "" : response_object.to_yaml, :content_type => Mime::YAML, :status => response_status }
+ type.xml { render :xml => successful? ? "" : response_object.to_xml, :content_type => Mime::XML, :status => response_status }
+ type.json { render :text => successful? ? "" : response_object.to_json, :content_type => Mime::JSON, :status => response_status }
+ type.yaml { render :text => successful? ? "" : response_object.to_yaml, :content_type => Mime::YAML, :status => response_status }
end
end
@@ -35,7 +34,7 @@ module ActiveScaffold::Actions
# May be overridden to customize the behavior
def do_destroy
@record = find_if_allowed(params[:id], :destroy)
- @record.destroy
+ self.successful = @record.destroy
end
# The default security delegates to ActiveRecordPermissions.
diff --git a/lib/actions/show.rb b/lib/actions/show.rb
index <HASH>..<HASH> 100644
--- a/lib/actions/show.rb
+++ b/lib/actions/show.rb
@@ -7,7 +7,7 @@ module ActiveScaffold::Actions
def show
do_show
- @successful = successful?
+ successful?
respond_to do |type|
type.html { render :action => 'show', :layout => true }
type.js { render :partial => 'show', :layout => false }
|
issue #<I>
also cleaned up some references to @successful
git-svn-id: <URL>
|
activescaffold_active_scaffold
|
train
|
8762b63b30210b87c0b1bc8505897d5b1056482d
|
diff --git a/pyres/worker.py b/pyres/worker.py
index <HASH>..<HASH> 100644
--- a/pyres/worker.py
+++ b/pyres/worker.py
@@ -128,8 +128,6 @@ class Worker(object):
that job to make sure another worker won't run it, then *forks* itself to
work on that job.
- Finally, the ``process`` method actually processes the job by eventually calling the Job instance's ``perform`` method.
-
"""
self._setproctitle("Starting")
self.startup()
@@ -142,63 +140,7 @@ class Worker(object):
job = self.reserve(interval)
if job:
- logger.debug('picked up job')
- logger.debug('job details: %s' % job)
- self.before_fork(job)
- self.child = os.fork()
- if self.child:
- self._setproctitle("Forked %s at %s" %
- (self.child,
- datetime.datetime.now()))
- logger.info('Forked %s at %s' % (self.child,
- datetime.datetime.now()))
-
- try:
- start = datetime.datetime.now()
-
- # waits for the result or times out
- while True:
- result = os.waitpid(self.child, os.WNOHANG)
- if result != (0, 0):
- break
- time.sleep(0.5)
-
- now = datetime.datetime.now()
- if self.timeout and ((now - start).seconds > self.timeout):
- os.kill(self.child, signal.SIGKILL)
- os.waitpid(-1, os.WNOHANG)
- raise TimeoutError("Timed out after %d seconds" % self.timeout)
-
- except OSError as ose:
- import errno
-
- if ose.errno != errno.EINTR:
- raise ose
-
- except TimeoutError as e:
- exceptionType, exceptionValue, exceptionTraceback = sys.exc_info()
- logger.exception("%s timed out: %s" % (job, e))
- job.fail(exceptionTraceback)
- self.failed()
- self.done_working()
-
- logger.debug('done waiting')
- else:
- self._setproctitle("Processing %s since %s" %
- (job._queue,
- datetime.datetime.now()))
- logger.info('Processing %s since %s' %
- (job._queue, datetime.datetime.now()))
- self.after_fork(job)
-
- # re-seed the Python PRNG after forking, otherwise
- # all job process will share the same sequence of
- # random numbers
- random.seed()
-
- self.process(job)
- os._exit(0)
- self.child = None
+ self.fork_worker(job)
else:
if interval == 0:
break
@@ -207,6 +149,73 @@ class Worker(object):
#time.sleep(interval)
self.unregister_worker()
+ def fork_worker(self, job):
+ """Invoked by ``work`` method. ``fork_worker`` does the actual forking to create the child
+ process that will process the job. It's also responsible for monitoring the child process
+ and handling hangs and crashes.
+
+ Finally, the ``process`` method actually processes the job by eventually calling the Job
+ instance's ``perform`` method.
+
+ """
+ logger.debug('picked up job')
+ logger.debug('job details: %s' % job)
+ self.before_fork(job)
+ self.child = os.fork()
+ if self.child:
+ self._setproctitle("Forked %s at %s" %
+ (self.child,
+ datetime.datetime.now()))
+ logger.info('Forked %s at %s' % (self.child,
+ datetime.datetime.now()))
+
+ try:
+ start = datetime.datetime.now()
+
+ # waits for the result or times out
+ while True:
+ result = os.waitpid(self.child, os.WNOHANG)
+ if result != (0, 0):
+ break
+ time.sleep(0.5)
+
+ now = datetime.datetime.now()
+ if self.timeout and ((now - start).seconds > self.timeout):
+ os.kill(self.child, signal.SIGKILL)
+ os.waitpid(-1, os.WNOHANG)
+ raise TimeoutError("Timed out after %d seconds" % self.timeout)
+
+ except OSError as ose:
+ import errno
+
+ if ose.errno != errno.EINTR:
+ raise ose
+
+ except TimeoutError as e:
+ exceptionType, exceptionValue, exceptionTraceback = sys.exc_info()
+ logger.exception("%s timed out: %s" % (job, e))
+ job.fail(exceptionTraceback)
+ self.failed()
+ self.done_working()
+
+ logger.debug('done waiting')
+ else:
+ self._setproctitle("Processing %s since %s" %
+ (job._queue,
+ datetime.datetime.now()))
+ logger.info('Processing %s since %s' %
+ (job._queue, datetime.datetime.now()))
+ self.after_fork(job)
+
+ # re-seed the Python PRNG after forking, otherwise
+ # all job process will share the same sequence of
+ # random numbers
+ random.seed()
+
+ self.process(job)
+ os._exit(0)
+ self.child = None
+
def before_fork(self, job):
"""
hook for making changes immediately before forking to process
|
Splitting the Worker.work in two methods, no behavior changes.
The purpose of this change was to make testing easier, but it also
helps with making the code easier to read.
|
binarydud_pyres
|
train
|
b131596d9beebfab4025397b5347985edf5630f9
|
diff --git a/lib/spork/run_strategy.rb b/lib/spork/run_strategy.rb
index <HASH>..<HASH> 100644
--- a/lib/spork/run_strategy.rb
+++ b/lib/spork/run_strategy.rb
@@ -28,7 +28,11 @@ class Spork::RunStrategy
protected
def self.factory(test_framework)
- Spork::RunStrategy::Forking.new(test_framework)
+ if Spork::RunStrategy::Forking.available?
+ Spork::RunStrategy::Forking.new(test_framework)
+ else
+ Spork::RunStrategy::Magazine.new(test_framework)
+ end
end
def self.inherited(subclass)
|
Use magazine strategy if forking not available.
TODO: add check for JRuby. It will NOT work with magazine.
|
sporkrb_spork
|
train
|
f5f7701258f858698562579cbfb4e1934056e009
|
diff --git a/airflow/hooks/webhdfs_hook.py b/airflow/hooks/webhdfs_hook.py
index <HASH>..<HASH> 100644
--- a/airflow/hooks/webhdfs_hook.py
+++ b/airflow/hooks/webhdfs_hook.py
@@ -61,7 +61,7 @@ class WebHDFSHook(BaseHook):
return client
except HdfsError as e:
self.log.debug(
- "Read operation on namenode {nn.host} failed witg error: {e.message}".format(**locals())
+ "Read operation on namenode {nn.host} failed with error: {e}".format(**locals())
)
nn_hosts = [c.host for c in nn_connections]
no_nn_error = "Read operations failed on the namenodes below:\n{}".format("\n".join(nn_hosts))
diff --git a/airflow/www/views.py b/airflow/www/views.py
index <HASH>..<HASH> 100644
--- a/airflow/www/views.py
+++ b/airflow/www/views.py
@@ -712,7 +712,7 @@ class Airflow(BaseView):
logs = handler.read(ti)
except AttributeError as e:
logs = ["Task log handler {} does not support read logs.\n{}\n" \
- .format(task_log_reader, e.message)]
+ .format(task_log_reader, str(e))]
for i, log in enumerate(logs):
if PY2 and not isinstance(log, unicode):
|
[AIRFLOW-<I>] Remove uses of Exception.message for Python 3
Closes #<I> from dhuang/AIRFLOW-<I>
|
apache_airflow
|
train
|
d2757d13dfffa73c2105d7e901540a4a59466028
|
diff --git a/galpy/potential/DehnenBarPotential.py b/galpy/potential/DehnenBarPotential.py
index <HASH>..<HASH> 100644
--- a/galpy/potential/DehnenBarPotential.py
+++ b/galpy/potential/DehnenBarPotential.py
@@ -142,7 +142,7 @@ class DehnenBarPotential(Potential):
indx=(t < self._tform)
smooth[indx]=0.
- indx=(t < self._tsteady)
+ indx=(t < self._tsteady) * (t >= self._tform)
deltat=t[indx]-self._tform
xi= 2.*deltat/(self._tsteady-self._tform)-1.
smooth[indx]= (3./16.*xi**5.-5./8*xi**3.+15./16.*xi+.5)
|
Fixed indx in smooth so bar does not grow for t < t_form
|
jobovy_galpy
|
train
|
004272e6f54c16e16f71ceddff44ae4d423da7c0
|
diff --git a/server/server.go b/server/server.go
index <HASH>..<HASH> 100644
--- a/server/server.go
+++ b/server/server.go
@@ -11,7 +11,6 @@ import (
"github.com/dotcloud/docker/image"
"github.com/dotcloud/docker/rcli"
"io"
- "log"
"net/http"
"net/url"
"os"
|
docker/server: no more dependency on log
|
containers_storage
|
train
|
38a532eddc745aeb2a7bcbe938ae7eaa45d1a532
|
diff --git a/src/server/pfs/pretty/pretty.go b/src/server/pfs/pretty/pretty.go
index <HASH>..<HASH> 100644
--- a/src/server/pfs/pretty/pretty.go
+++ b/src/server/pfs/pretty/pretty.go
@@ -105,7 +105,11 @@ func PrintCommitInfoHeader(w io.Writer) {
// PrintCommitInfo pretty-prints commit info.
func PrintCommitInfo(w io.Writer, commitInfo *pfs.CommitInfo, fullTimestamps bool) {
fmt.Fprintf(w, "%s\t", commitInfo.Commit.Repo.Name)
- fmt.Fprintf(w, "%s\t", commitInfo.Branch.Name)
+ if commitInfo.Branch != nil {
+ fmt.Fprintf(w, "%s\t", commitInfo.Branch.Name)
+ } else {
+ fmt.Fprintf(w, "<none>\t")
+ }
fmt.Fprintf(w, "%s\t", commitInfo.Commit.ID)
if commitInfo.ParentCommit != nil {
fmt.Fprintf(w, "%s\t", commitInfo.ParentCommit.ID)
|
don't crash if branch is nil
|
pachyderm_pachyderm
|
train
|
3045ffd9414090b18ec4116ecba958580aea0689
|
diff --git a/classes/Backtrace.php b/classes/Backtrace.php
index <HASH>..<HASH> 100644
--- a/classes/Backtrace.php
+++ b/classes/Backtrace.php
@@ -123,14 +123,38 @@ class QM_Backtrace {
$components = array();
foreach ( $this->trace as $frame ) {
+ $component = self::get_frame_component( $frame );
+
+ if ( $component ) {
+ if ( 'plugin' === $component->type ) {
+ // If the component is a plugin then it can't be anything else,
+ // so short-circuit and return early.
+ return $component;
+ }
+
+ $components[ $component->type ] = $component;
+ }
+ }
+
+ foreach ( QM_Util::get_file_dirs() as $type => $dir ) {
+ if ( isset( $components[ $type ] ) ) {
+ return $components[ $type ];
+ }
+ }
+
+ # This should not happen
+
+ }
+
+ public static function get_frame_component( array $frame ) {
try {
if ( isset( $frame['class'] ) ) {
if ( ! class_exists( $frame['class'], false ) ) {
- continue;
+ return null;
}
if ( ! method_exists( $frame['class'], $frame['function'] ) ) {
- continue;
+ return null;
}
$ref = new ReflectionMethod( $frame['class'], $frame['function'] );
$file = $ref->getFileName();
@@ -140,31 +164,14 @@ class QM_Backtrace {
} elseif ( isset( $frame['file'] ) ) {
$file = $frame['file'];
} else {
- continue;
+ return null;
}
- $comp = QM_Util::get_file_component( $file );
- $components[ $comp->type ] = $comp;
+ return QM_Util::get_file_component( $file );
- if ( 'plugin' === $comp->type ) {
- // If the component is a plugin then it can't be anything else,
- // so short-circuit and return early.
- return $comp;
- }
- // phpcs:ignore Generic.CodeAnalysis.EmptyStatement.DetectedCatch
} catch ( ReflectionException $e ) {
- # nothing
+ return null;
}
- }
-
- foreach ( QM_Util::get_file_dirs() as $type => $dir ) {
- if ( isset( $components[ $type ] ) ) {
- return $components[ $type ];
- }
- }
-
- # This should not happen
-
}
public function get_trace() {
|
Separate the component detection for the complete call stack and for individual frames from a call stack.
|
johnbillion_query-monitor
|
train
|
c8f3d64def5ee304baa8317fcde99a6c0118b036
|
diff --git a/phoebe/backend/bundle.py b/phoebe/backend/bundle.py
index <HASH>..<HASH> 100644
--- a/phoebe/backend/bundle.py
+++ b/phoebe/backend/bundle.py
@@ -451,18 +451,12 @@ class Bundle(object):
@return: dataset
@rtype: parameterSet
"""
- counter1 = 0
- counter2 = 0
if objectname is None:
# then search the whole system
return_ = []
for objname in self.get_system_structure(return_type='obj',flat=True):
- #~ counter1+=1
- #~ print("Counter1 = {}".format(counter1))
parsets = self.get_syn(objname,ref=ref)
for parset in parsets:
- #~ counter2 +=1
- #~ print("Counter1 = {} - Counter2 = {}".format(counter1,counter2))
if parset not in return_:
return_.append(parset)
return return_
@@ -472,7 +466,6 @@ class Bundle(object):
if ref is not None:
# then search for the ref by name/index
- #~ print ref
parset = obj.get_synthetic(ref=ref)
if parset != None:
return [parset]
diff --git a/phoebe/backend/plotting.py b/phoebe/backend/plotting.py
index <HASH>..<HASH> 100644
--- a/phoebe/backend/plotting.py
+++ b/phoebe/backend/plotting.py
@@ -787,6 +787,8 @@ class Axes(parameters.ParameterSet):
all kwargs will be added to the plotting:axes ParameterSet
it is suggested to at least initialize with a category (lc,rv,etc) and title
"""
+ super(Axes, self).__init__()
+
self.axesoptions = parameters.ParameterSet(context="plotting:axes")
self.plots = []
|
fixed plotting.Axes to comply with recent changes to parametersets
|
phoebe-project_phoebe2
|
train
|
eba1db722534a9bc282d50c8cc28f669e8afdecc
|
diff --git a/Module.php b/Module.php
index <HASH>..<HASH> 100644
--- a/Module.php
+++ b/Module.php
@@ -28,11 +28,14 @@ class Module
{
return array(
'invokables' => array(
- 'Zf2Forum_post_form_hydrator' => 'Zend\Stdlib\Hydrator\ClassMethods'
+ 'Zf2Forum_post_form_hydrator' => 'Zend\Stdlib\Hydrator\ClassMethods',
+ 'Zf2Forum_thread' => 'Zf2Forum\Model\Thread\Thread',
+ 'Zf2Forum_message' => 'Zf2Forum\Model\Message\Message',
+ 'Zf2Forum_form' => 'Zf2Forum\Form\PostForm',
),
'factories' => array(
- 'Zf2Forum\ModuleOptions' => 'Zf2Forum\Factory\ModuleOptionsFactory',
- 'Zf2Forum_user_mapper' => 'Zf2Forum\Factory\UserMapperFactory',
+ 'Zf2Forum\ModuleOptions' => 'Zf2Forum\Factory\ModuleOptionsFactory',
+ 'Zf2Forum_user_mapper' => 'Zf2Forum\Factory\UserMapperFactory',
'Zf2Forum_discuss_service' => function($sm) {
$service = new \Zf2Forum\Service\Discuss;
@@ -74,18 +77,6 @@ class Module
$mapper->setHydrator(new \Zend\StdLib\Hydrator\ClassMethods);
return $mapper;
},
- 'Zf2Forum_thread' => function ($sm) {
- $thread = new \Zf2Forum\Model\Thread\Thread;
- return $thread;
- },
- 'Zf2Forum_message' => function ($sm) {
- $message = new \Zf2Forum\Model\Message\Message;
- return $message;
- },
- 'Zf2Forum_form' => function ($sm) {
- $form = new \Zf2Forum\Form\PostForm;
- return $form;
- },
'Zf2Forum_visit' => function ($sm) {
$visit = new \Zf2Forum\Model\Visit\Visit;
$visit->setIpAddress($_SERVER['REMOTE_ADDR'])
@@ -95,7 +86,7 @@ class Module
),
'initializers' => array(
function($instance, $sm){
- if($instance instanceof Service\DbAdapterAwareInterface){
+ if ($instance instanceof Service\DbAdapterAwareInterface) {
$dbAdapter = $sm->get('Zf2Forum_zend_db_adapter');
return $instance->setDbAdapter($dbAdapter);
}
|
Moved factories to invakables + some factories doen't need sm so not
created for that
|
stijnhau_ZfForum
|
train
|
2f7ba80bf6ed28216bb810a50ad139be32630830
|
diff --git a/lib/avatax/client/transactions.rb b/lib/avatax/client/transactions.rb
index <HASH>..<HASH> 100644
--- a/lib/avatax/client/transactions.rb
+++ b/lib/avatax/client/transactions.rb
@@ -26,9 +26,9 @@ module AvaTax
# @param include [String] A comma separated list of child objects to return underneath the primary object.
# @param model [Object] information about the transaction and lines to be added
# @return [Object]
- def add_lines(model, options={})
+ def add_lines(model)
path = "/api/v2/companies/transactions/lines/add"
- post(path, model, options)
+ post(path, model)
end
@@ -181,9 +181,9 @@ module AvaTax
# @param include [String] A comma separated list of child objects to return underneath the primary object.
# @param model [Object] The transaction you wish to create
# @return [Object]
- def create_or_adjust_transaction(model, options={})
+ def create_or_adjust_transaction(model)
path = "/api/v2/transactions/createoradjust"
- post(path, model, options)
+ post(path, model)
end
@@ -210,9 +210,9 @@ module AvaTax
# @param include [String] A comma separated list of child objects to return underneath the primary object.
# @param model [Object] The transaction you wish to create
# @return [Object]
- def create_transaction(model, options={})
+ def create_transaction(model)
path = "/api/v2/transactions/create"
- post(path, model, options)
+ post(path, model)
end
@@ -236,9 +236,9 @@ module AvaTax
# @param include [String] A comma separated list of child objects to return underneath the primary object.
# @param model [Object] information about the transaction and lines to be removed
# @return [Object]
- def delete_lines(model, options={})
+ def delete_lines(model)
path = "/api/v2/companies/transactions/lines/delete"
- post(path, model, options)
+ post(path, model)
end
@@ -379,9 +379,9 @@ module AvaTax
# @param include [String] A comma separated list of child objects to return underneath the primary object.
# @param model [Object] Information about the refund to create
# @return [Object]
- def refund_transaction(companyCode, transactionCode, model, options={})
+ def refund_transaction(companyCode, transactionCode, model)
path = "/api/v2/companies/#{companyCode}/transactions/#{transactionCode}/refund"
- post(path, model, options)
+ post(path, model)
end
@@ -432,4 +432,4 @@ module AvaTax
end
end
-end
\ No newline at end of file
+end
diff --git a/lib/avatax/connection.rb b/lib/avatax/connection.rb
index <HASH>..<HASH> 100644
--- a/lib/avatax/connection.rb
+++ b/lib/avatax/connection.rb
@@ -24,7 +24,6 @@ module AvaTax
logger.filter(/(Authorization\:\ \"Basic\ )(\w+)\=/, '\1[REMOVED]')
end
end
- c.use Faraday::Request::UrlEncoded
c.use Faraday::Response::ParseJson
c.basic_auth(username, password)
diff --git a/lib/avatax/request.rb b/lib/avatax/request.rb
index <HASH>..<HASH> 100644
--- a/lib/avatax/request.rb
+++ b/lib/avatax/request.rb
@@ -26,8 +26,9 @@ module AvaTax
request.url(URI.encode(path), options)
when :post, :put
request.path = URI.encode(path)
- puts "BODY", options
- request.body = options unless options.empty?
+ request.headers['Content-Type'] = 'application/json'
+ puts "BODY", options.to_json
+ request.body = options.to_json unless options.empty?
end
end
|
Correct errors with POST-based methods
|
avadev_AvaTax-REST-V2-Ruby-SDK
|
train
|
622ec9e04727043b8e1571f570c615b88e45d081
|
diff --git a/tests/test_using.py b/tests/test_using.py
index <HASH>..<HASH> 100644
--- a/tests/test_using.py
+++ b/tests/test_using.py
@@ -657,9 +657,9 @@ class ExtendedTransitionImplementationTestCase(unittest2.TestCase):
def copy_from(cls, implem):
return cls(implem.transition, implem.field_name, implem.implementation)
- def _post_transition(self, instance, res, cls_kwargs, *args, **kwargs):
- super(MyImplementation, self)._post_transition(instance, res, cls_kwargs, *args, **kwargs)
- instance.blah = cls_kwargs['blah']
+ def _post_transition(self, instance, res, *args, **kwargs):
+ super(MyImplementation, self)._post_transition(instance, res, *args, **kwargs)
+ instance.blah = kwargs['blah']
# Helpers in order to use MyImplementation instead of base.TransitionImplementation
class MyImplementationList(base.ImplementationList):
@@ -680,7 +680,7 @@ class ExtendedTransitionImplementationTestCase(unittest2.TestCase):
class MyWorkflowObject(MyWorkflowEnabled):
state = self.MyWorkflow()
- def foobar(self):
+ def foobar(self, **kwargs):
return 1
def gobaz(self, blah=10):
|
Update test_implementation according to cls_kwargs removal.
|
rbarrois_xworkflows
|
train
|
01189ee94290b191c18f1f6e4e1d6f5b1bfd40ac
|
diff --git a/src/mixed.js b/src/mixed.js
index <HASH>..<HASH> 100644
--- a/src/mixed.js
+++ b/src/mixed.js
@@ -499,7 +499,7 @@ for (const method of ['validate', 'validateSync'])
options.context,
);
- return schema[method](parent[parentPath], {
+ return schema[method](parent && parent[parentPath], {
...options,
parent,
path: parentPath,
|
fix: handle empty parent in *At() methods
|
jquense_yup
|
train
|
a9d402425489a4467bb1b5ed6504b4210fa4576d
|
diff --git a/android/server/src/java/org/openqa/selenium/android/events/WebViewAction.java b/android/server/src/java/org/openqa/selenium/android/events/WebViewAction.java
index <HASH>..<HASH> 100644
--- a/android/server/src/java/org/openqa/selenium/android/events/WebViewAction.java
+++ b/android/server/src/java/org/openqa/selenium/android/events/WebViewAction.java
@@ -44,7 +44,11 @@ public class WebViewAction {
* @param webview
*/
public static void clearFocusFromCurrentElement(WebView webview) {
- Method clearTextEntry;
+ // Froyo fixed the focus issue, so no action is needed.
+ if (Platform.FROYO <= Platform.sdk()) {
+ return;
+ }
+ Method clearTextEntry;
try {
// This allows to clear the focus from the current element, despite the confusing
// method name.
diff --git a/android/server/src/java/org/openqa/selenium/android/server/JettyService.java b/android/server/src/java/org/openqa/selenium/android/server/JettyService.java
index <HASH>..<HASH> 100644
--- a/android/server/src/java/org/openqa/selenium/android/server/JettyService.java
+++ b/android/server/src/java/org/openqa/selenium/android/server/JettyService.java
@@ -34,6 +34,7 @@ import org.mortbay.jetty.nio.SelectChannelConnector;
import org.mortbay.jetty.servlet.ServletHolder;
import org.openqa.jetty.util.IO;
import org.openqa.selenium.android.AndroidDriver;
+import org.openqa.selenium.android.Platform;
import org.openqa.selenium.android.app.R;
import java.io.IOException;
@@ -49,7 +50,7 @@ public class JettyService extends Service {
private Server server;
private int port = 8080;
- PowerManager.WakeLock wakeLock;
+ private PowerManager.WakeLock wakeLock;
/**
* Android Service create
@@ -141,13 +142,13 @@ public class JettyService extends Service {
return server;
}
- protected Server newServer() {
- // TODO(berrada): This method seems a little redundant.
- return new Server();
- }
-
protected void configureConnectors() {
if (server != null) {
+ // Workaround a Froyo bug
+ // http://code.google.com/p/android/issues/detail?id=9431
+ if (Platform.FROYO == Platform.sdk()) {
+ System.setProperty("java.net.preferIPv6Addresses", "false");
+ }
SelectChannelConnector nioConnector = new SelectChannelConnector();
nioConnector.setUseDirectBuffers(false);
nioConnector.setPort(port);
@@ -194,22 +195,12 @@ public class JettyService extends Service {
}
}
- protected void configureDeployers() throws Exception {}
-
- public void configureRealm() throws IOException {}
-
protected void startJetty() throws Exception {
- // Bridge Jetty logging to Android logging
- //AndroidLog.__isDebugEnabled = false;
System.setProperty("org.mortbay.log.class", "org.mortbay.log.AndroidLog");
- //org.mortbay.log.Log.setLog(new AndroidLog());
+ server = new Server();
- server = newServer();
-
configureConnectors();
configureHandlers();
- configureDeployers();
- configureRealm();
server.start();
|
DouniaBerrada: Updating the APK to Froyo (Android <I>) is supported.
r<I>
|
SeleniumHQ_selenium
|
train
|
f0ef41e9255b118ef3d30a7495e515149e2ecb0c
|
diff --git a/salt/modules/smartos_vmadm.py b/salt/modules/smartos_vmadm.py
index <HASH>..<HASH> 100644
--- a/salt/modules/smartos_vmadm.py
+++ b/salt/modules/smartos_vmadm.py
@@ -65,7 +65,7 @@ def _create_update_from_file(mode, path):
if not os.path.isfile(path):
ret['Error'] = 'File ({0}) does not exists!'.format(path)
return ret
- # vmadm validate create [-f <filename>]
+ # vmadm validate create|update [-f <filename>]
cmd = '{vmadm} validate {mode} -f {path}'.format(
vmadm=vmadm,
mode=mode,
@@ -81,7 +81,7 @@ def _create_update_from_file(mode, path):
else:
ret['Error'] = res['stderr']
return ret
- # vmadm create [-f <filename>]
+ # vmadm create|update [-f <filename>]
cmd = '{vmadm} {mode} -f {path}'.format(
vmadm=vmadm,
mode=mode,
@@ -107,7 +107,44 @@ def _create_update_from_cfg(mode, vmcfg):
'''
Create vm from configuration
'''
- return False
+ ret = {}
+ vmadm = _check_vmadm()
+ # vmadm validate create|update [-f <filename>]
+ cmd = 'echo {vmcfg} | {vmadm} validate {mode}'.format(
+ vmadm=vmadm,
+ mode=mode,
+ vmcfg=_quote_args(json.dumps(vmcfg))
+ )
+ res = __salt__['cmd.run_all'](cmd, python_shell=True)
+ retcode = res['retcode']
+ if retcode != 0:
+ ret['Error'] = _exit_status(retcode)
+ if 'stderr' in res:
+ if res['stderr'][0] == '{':
+ ret['Error'] = json.loads(res['stderr'])
+ else:
+ ret['Error'] = res['stderr']
+ return ret
+ # vmadm create|update [-f <filename>]
+ cmd = 'echo {vmcfg} | {vmadm} {mode}'.format(
+ vmadm=vmadm,
+ mode=mode,
+ vmcfg=_quote_args(json.dumps(vmcfg))
+ )
+ res = __salt__['cmd.run_all'](cmd, python_shell=True)
+ retcode = res['retcode']
+ if retcode != 0:
+ ret['Error'] = _exit_status(retcode)
+ if 'stderr' in res:
+ if res['stderr'][0] == '{':
+ ret['Error'] = json.loads(res['stderr'])
+ else:
+ ret['Error'] = res['stderr']
+ return ret
+ else:
+ if res['stderr'].startswith('Successfully created VM'):
+ return res['stderr'][24:]
+ return True
## TODO
|
vmadm.create now works based on parameters also
|
saltstack_salt
|
train
|
f2b045f9d4493f640feb7d7247a509bad1554997
|
diff --git a/src/Controller/Component/PaginatorComponent.php b/src/Controller/Component/PaginatorComponent.php
index <HASH>..<HASH> 100644
--- a/src/Controller/Component/PaginatorComponent.php
+++ b/src/Controller/Component/PaginatorComponent.php
@@ -183,6 +183,7 @@ class PaginatorComponent extends Component {
'page' => $page,
'current' => $numResults,
'count' => $count,
+ 'perPage' => $limit,
'prevPage' => ($page > 1),
'nextPage' => ($count > ($page * $limit)),
'pageCount' => $pageCount,
diff --git a/src/View/Helper/PaginatorHelper.php b/src/View/Helper/PaginatorHelper.php
index <HASH>..<HASH> 100644
--- a/src/View/Helper/PaginatorHelper.php
+++ b/src/View/Helper/PaginatorHelper.php
@@ -533,9 +533,9 @@ class PaginatorHelper extends Helper {
}
$start = 0;
if ($paging['count'] >= 1) {
- $start = (($paging['page'] - 1) * $paging['limit']) + 1;
+ $start = (($paging['page'] - 1) * $paging['perPage']) + 1;
}
- $end = $start + $paging['limit'] - 1;
+ $end = $start + $paging['perPage'] - 1;
if ($paging['count'] < $end) {
$end = $paging['count'];
}
diff --git a/tests/TestCase/Controller/Component/PaginatorComponentTest.php b/tests/TestCase/Controller/Component/PaginatorComponentTest.php
index <HASH>..<HASH> 100644
--- a/tests/TestCase/Controller/Component/PaginatorComponentTest.php
+++ b/tests/TestCase/Controller/Component/PaginatorComponentTest.php
@@ -627,12 +627,14 @@ class PaginatorComponentTest extends TestCase {
];
$this->Paginator->paginate($table, $settings);
$this->assertEquals(100, $this->request->params['paging']['PaginatorPosts']['limit']);
+ $this->assertEquals(100, $this->request->params['paging']['PaginatorPosts']['perPage']);
$this->request->query = [
'limit' => '10'
];
$this->Paginator->paginate($table, $settings);
$this->assertEquals(10, $this->request->params['paging']['PaginatorPosts']['limit']);
+ $this->assertEquals(10, $this->request->params['paging']['PaginatorPosts']['perPage']);
}
/**
@@ -683,6 +685,8 @@ class PaginatorComponentTest extends TestCase {
$this->assertEquals(2, $result['pageCount']);
$this->assertTrue($result['nextPage']);
$this->assertFalse($result['prevPage']);
+ $this->assertEquals(2, $result['perPage']);
+ $this->assertNull($result['limit']);
}
/**
diff --git a/tests/TestCase/View/Helper/PaginatorHelperTest.php b/tests/TestCase/View/Helper/PaginatorHelperTest.php
index <HASH>..<HASH> 100644
--- a/tests/TestCase/View/Helper/PaginatorHelperTest.php
+++ b/tests/TestCase/View/Helper/PaginatorHelperTest.php
@@ -1800,6 +1800,7 @@ class PaginatorHelperTest extends TestCase {
'page' => 1,
'current' => 3,
'count' => 13,
+ 'perPage' => 3,
'prevPage' => false,
'nextPage' => true,
'pageCount' => 5,
@@ -1938,6 +1939,7 @@ class PaginatorHelperTest extends TestCase {
'page' => 0,
'current' => 0,
'count' => 0,
+ 'perPage' => 10,
'prevPage' => false,
'nextPage' => false,
'pageCount' => 0,
|
Fix incorrect value in PaginatorHelper::counter()
Now that limit is conditionally defined to only exist when the limit is
different than the default, a second perPage key is needed to correctly
generate counter() output.
Fixes #<I>
|
cakephp_cakephp
|
train
|
b32c01710f23ae296edffb0d259683f622169282
|
diff --git a/aiohttp_devtools/start/template/app/views.py b/aiohttp_devtools/start/template/app/views.py
index <HASH>..<HASH> 100644
--- a/aiohttp_devtools/start/template/app/views.py
+++ b/aiohttp_devtools/start/template/app/views.py
@@ -8,8 +8,8 @@ from aiohttp import web
# {% if example.is_message_board %}
from aiohttp.hdrs import METH_POST
+from aiohttp.web import json_response
from aiohttp.web_exceptions import HTTPFound
-from aiohttp.web_reqrep import json_response
# {% if database.is_pg_sqlalchemy %}
from .models import sa_messages
diff --git a/tests/test_runserver_main.py b/tests/test_runserver_main.py
index <HASH>..<HASH> 100644
--- a/tests/test_runserver_main.py
+++ b/tests/test_runserver_main.py
@@ -93,7 +93,7 @@ def create_app(loop):
@if_boxed
@slow
-def test_start_runserver_app_instance(tmpworkdir, caplog):
+def test_start_runserver_app_instance(tmpworkdir, loop, caplog):
mktree(tmpworkdir, {
'app.py': """\
from aiohttp import web
@@ -105,12 +105,12 @@ app = web.Application()
app.router.add_get('/', hello)
"""
})
+ asyncio.set_event_loop(loop)
aux_app, observer, aux_port = runserver(app_path='app.py')
assert len(observer._handlers) == 1
event_handlers = list(observer._handlers.values())[0]
code_event_handler = next(eh for eh in event_handlers if isinstance(eh, PyCodeEventHandler))
- loop = asyncio.get_event_loop()
try:
loop.run_until_complete(check_server_running(loop, live_reload=True))
finally:
@@ -119,7 +119,7 @@ app.router.add_get('/', hello)
@if_boxed
@slow
-def test_start_runserver_yml_no_checks(tmpworkdir, caplog):
+def test_start_runserver_yml_no_checks(tmpworkdir, caplog, loop):
mktree(tmpworkdir, {
'app.py': """\
from aiohttp import web
@@ -138,7 +138,6 @@ dev:
livereload: false
"""
})
- loop = asyncio.new_event_loop()
aux_app, observer, aux_port = runserver(app_path='settings.yml', loop=loop)
assert isinstance(aux_app, aiohttp.web.Application)
assert aux_port == 8001
@@ -214,7 +213,7 @@ def test_serve_main_app(tmpworkdir, loop, mocker):
@if_boxed
@slow
-def test_serve_main_app_app_instance(tmpworkdir, mocker):
+def test_serve_main_app_app_instance(tmpworkdir, loop, mocker):
mktree(tmpworkdir, {
'app.py': """\
from aiohttp import web
@@ -226,7 +225,7 @@ app = web.Application()
app.router.add_get('/', hello)
"""
})
- loop = asyncio.get_event_loop()
+ asyncio.set_event_loop(loop)
mocker.spy(loop, 'create_server')
mock_modify_main_app = mocker.patch('aiohttp_devtools.runserver.serve.modify_main_app')
loop.call_later(0.5, loop.stop)
|
fix issues with aiohttp <I>a
|
aio-libs_aiohttp-devtools
|
train
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.