hash (stringlengths 40-40) | diff (stringlengths 131-26.7k) | message (stringlengths 7-694) | project (stringlengths 5-67) | split (stringclasses, 1 value) | diff_languages (stringlengths 2-24) |
---|---|---|---|---|---|
ac4266833761858da71c6809a4bf9dc038de5c5f
|
diff --git a/lib/zipline/output_stream.rb b/lib/zipline/output_stream.rb
index <HASH>..<HASH> 100644
--- a/lib/zipline/output_stream.rb
+++ b/lib/zipline/output_stream.rb
@@ -17,12 +17,16 @@ module Zipline
def put_next_entry(entry_name, size)
new_entry = Zip::Entry.new(@file_name, entry_name)
- new_entry.size = size
#THIS IS THE MAGIC, tells zip to look after data for size, crc
new_entry.gp_flags = new_entry.gp_flags | 0x0008
super(new_entry)
+
+ # Uncompressed size in the local file header must be zero when bit 3
+ # of the general purpose flags is set, so set the size after the header
+ # has been written.
+ new_entry.size = size
end
# just reset state, no rewinding required
|
Set uncompressed size in local file header to 0
The ZIP spec [1] describes in section <I> that the uncompressed size
field is set to zero in the local file header if bit 3 of the general
purpose flags is set, which zipline does.
Some applications actually care about this, e.g. PowerPoint regards a
file as corrupt when both the uncompressed size field and bit 3 are set.
[1] <URL>
|
fringd_zipline
|
train
|
rb
|
26e498a21538a76421409881fd301d7a8b9278fb
|
diff --git a/lib/kleisli/maybe.rb b/lib/kleisli/maybe.rb
index <HASH>..<HASH> 100644
--- a/lib/kleisli/maybe.rb
+++ b/lib/kleisli/maybe.rb
@@ -18,6 +18,7 @@ module Kleisli
def *(other)
self >-> f {
+ f = f.to_proc
other >-> val {
Maybe.lift(f.arity > 1 ? f.curry.call(val) : f.call(val))
}
diff --git a/lib/kleisli/version.rb b/lib/kleisli/version.rb
index <HASH>..<HASH> 100644
--- a/lib/kleisli/version.rb
+++ b/lib/kleisli/version.rb
@@ -1,3 +1,3 @@
module Kleisli
- VERSION = "0.2.3"
+ VERSION = "0.2.4"
end
|
Improve appl. functor in maybe
|
txus_kleisli
|
train
|
rb,rb
|
fc8ed811841d25c69ee5b8536c98d4e681a01d28
|
diff --git a/peri/fft.py b/peri/fft.py
index <HASH>..<HASH> 100644
--- a/peri/fft.py
+++ b/peri/fft.py
@@ -54,7 +54,7 @@ def load_wisdom(wisdomfile):
return
try:
- pyfftw.import_wisdom(pickle.load(open(wisdomfile)))
+ pyfftw.import_wisdom(pickle.load(open(wisdomfile, 'rb')))
except IOError as e:
log.warn("No wisdom present, generating some at %r" % wisdomfile)
save_wisdom(wisdomfile)
|
load with rb in load_wisdom to fix EOFError.
|
peri-source_peri
|
train
|
py
|
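The fix above hinges on opening pickle files in binary mode; a minimal sketch of the same pattern, with a hypothetical file name:

```python
import pickle

# Pickle data is a byte stream; without 'rb' the file is opened in text mode, which
# can corrupt or truncate the stream (surfacing as EOFError on Python 2/Windows, or
# TypeError/UnicodeDecodeError on Python 3).
with open("wisdom.pickle", "rb") as fh:  # hypothetical file name
    wisdom = pickle.load(fh)
```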
7c9ced4a3e23ebfdea01b80e87b5a7de569dee1a
|
diff --git a/src/test/java/com/ecyrd/speed4j/MultiThreadTest.java b/src/test/java/com/ecyrd/speed4j/MultiThreadTest.java
index <HASH>..<HASH> 100644
--- a/src/test/java/com/ecyrd/speed4j/MultiThreadTest.java
+++ b/src/test/java/com/ecyrd/speed4j/MultiThreadTest.java
@@ -9,11 +9,11 @@ import com.ecyrd.speed4j.log.PeriodicalLog;
public class MultiThreadTest
{
StopWatchFactory swf;
-
+ PeriodicalLog pl;
@Before
public void setUp()
{
- PeriodicalLog pl = new PeriodicalLog();
+ pl = new PeriodicalLog();
pl.setName( "multithread" );
pl.setPeriod( 5 );
@@ -25,7 +25,7 @@ public class MultiThreadTest
@After
public void cleanUp()
{
- swf.shutdown();
+ pl.shutdown();
}
// This test fails if there's a ConcurrentModificationException or similar being thrown.
|
Now closes the PeriodicalLog properly.
|
jalkanen_speed4j
|
train
|
java
|
aec60960cef7a30b2b7269cd2183fe865b764ff0
|
diff --git a/synapse/cores/common.py b/synapse/cores/common.py
index <HASH>..<HASH> 100644
--- a/synapse/cores/common.py
+++ b/synapse/cores/common.py
@@ -321,6 +321,10 @@ class Cortex(EventBus,DataModel,Runtime,Configable):
for form,fnfo,props in modl.get('forms',()):
+ # allow forms declared without ptype if their name *is* one
+ if fnfo.get('ptype') == None:
+ fnfo['ptype'] = form
+
tufo = self.formTufoByProp('syn:form',form,**fnfo)
tufo = self.setTufoProps(tufo,**fnfo)
|
make ptype default to the same name as the property during model import
|
vertexproject_synapse
|
train
|
py
|
875d7923318233513e4b602a8dad984d10d384c0
|
diff --git a/source/js/date/VCO.Date.js b/source/js/date/VCO.Date.js
index <HASH>..<HASH> 100644
--- a/source/js/date/VCO.Date.js
+++ b/source/js/date/VCO.Date.js
@@ -102,7 +102,7 @@ VCO.Date = VCO.Class.extend({
_createDisplayType: function() {
if (Date == this.data.date_obj.constructor) {
this.data.display_text = VCO.DateFormat(this.data.date_obj, this.data.format);
- this.data.display_text_short = VCO.DateFormat(this.data.date_obj, this.data.format);
+ this.data.display_text_short = VCO.DateFormat(this.data.date_obj, this.data.format_short);
} else {
this.data.display_text = this.data.date_obj.getDisplayText();
this.data.display_text_short = this.data.date_obj.getDisplayTextShort();
|
fix typo that wasn't using the short format. but this stuff is not long for this world anyway...
|
NUKnightLab_TimelineJS3
|
train
|
js
|
a9e616d87fc3b50ba81ea2bc3599b9d4d7b7d9cb
|
diff --git a/lib/components/resource-editing/resource-form/TypeAheadInput.js b/lib/components/resource-editing/resource-form/TypeAheadInput.js
index <HASH>..<HASH> 100644
--- a/lib/components/resource-editing/resource-form/TypeAheadInput.js
+++ b/lib/components/resource-editing/resource-form/TypeAheadInput.js
@@ -359,7 +359,7 @@ var TypeAheadInput = React.createClass({
searchObject[field] = result[field];
var isSelected = _.where(this.state.fieldValue, searchObject).length > 0;
- let classes = className({
+ var classes = className({
'btn btn-flat btn-sm': ! isSelected,
'btn btn-default btn-raised btn-sm': isSelected
});
|
Switching back to ES5 variable declaration to be compatible with build process
|
MortarJS_Mortar-JS
|
train
|
js
|
195be9ef023cc582e9d75da6e8521e1efc1843df
|
diff --git a/spec/unit/shared/attributes_spec.rb b/spec/unit/shared/attributes_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/unit/shared/attributes_spec.rb
+++ b/spec/unit/shared/attributes_spec.rb
@@ -222,7 +222,7 @@ module ActiveGraph::Shared
it 'raises when setting an undefined attribute' do
expect do
model.send(method, :initials, 'BP')
- end.to raise_error ActiveGraph::UnknownAttributeError, 'unknown attribute: initials'
+ end.to raise_error ActiveGraph::UnknownAttributeError, /^unknown attribute: initials/
end
end
end
|
relaxed match in a spec due to ruby <I> changes
|
neo4jrb_neo4j
|
train
|
rb
|
902783c4dda9c5bfc73a3efe9f5daac682fb3fbe
|
diff --git a/volume/drivers/fake/fake.go b/volume/drivers/fake/fake.go
index <HASH>..<HASH> 100644
--- a/volume/drivers/fake/fake.go
+++ b/volume/drivers/fake/fake.go
@@ -322,6 +322,7 @@ func (d *driver) Set(volumeID string, locator *api.VolumeLocator, spec *api.Volu
v.Spec.Journal = spec.Journal
v.Spec.SnapshotInterval = spec.SnapshotInterval
v.Spec.IoProfile = spec.IoProfile
+ v.Spec.SnapshotSchedule = spec.SnapshotSchedule
}
return d.UpdateVol(v)
|
Fake driver needs to update spec
|
libopenstorage_openstorage
|
train
|
go
|
4a2eab84257a00f4c3e93ed8c9ec28221b1ddabb
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -28,7 +28,7 @@ test_requirements = [
setup(
name='satel_integra',
- version='0.1.0',
+ version='0.2.0',
description="Communication library and basic testing tool for Satel "
"Integra alarm system. Communication via tcpip protocol "
"published by SATEL. ",
@@ -52,9 +52,6 @@ setup(
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
- "Programming Language :: Python :: 2",
- 'Programming Language :: Python :: 2.6',
- 'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
|
Bumped version, dropped Python 2 support in setup.py.
|
c-soft_satel_integra
|
train
|
py
|
e757c3d2f10f2b19051017ccb6219251da288c22
|
diff --git a/haphilipsjs/__init__.py b/haphilipsjs/__init__.py
index <HASH>..<HASH> 100644
--- a/haphilipsjs/__init__.py
+++ b/haphilipsjs/__init__.py
@@ -315,7 +315,9 @@ class PhilipsTV(object):
try:
resp = await self.session.get(self._url(path), timeout=TIMEOUT)
if resp.status_code != 200:
+ LOG.debug("Get failed: %s -> %d %s", path, resp.status_code, resp.text)
return None
+ LOG.debug("Get succeded: %s -> %s", path, resp.text)
return decode_xtv_json(resp.text)
except httpx.HTTPError as err:
raise ConnectionFailure(repr(err)) from err
|
Log get responses when debug is on
|
danielperna84_ha-philipsjs
|
train
|
py
|
41964721309abbf521bbe84235644153aa56b268
|
diff --git a/bosh_aws_cpi/spec/unit/spot_manager_spec.rb b/bosh_aws_cpi/spec/unit/spot_manager_spec.rb
index <HASH>..<HASH> 100644
--- a/bosh_aws_cpi/spec/unit/spot_manager_spec.rb
+++ b/bosh_aws_cpi/spec/unit/spot_manager_spec.rb
@@ -102,19 +102,15 @@ describe Bosh::AwsCloud::SpotManager do
# When erroring, should cancel any pending spot requests
expect(aws_client).to receive(:cancel_spot_instance_requests)
- start_waiting = Time.now
+ expect(Bosh::Common).to receive(:retryable).
+ with(sleep: 0.01, tries: 10, on: [AWS::EC2::Errors::InvalidSpotInstanceRequestID::NotFound]).
+ and_call_original
expect {
spot_manager.create(instance_params, spot_bid_price)
}.to raise_error(Bosh::Clouds::VMCreationFailed){ |error|
expect(error.ok_to_retry).to eq true
}
-
- duration = Time.now - start_waiting
-
- # Exact duration will vary, but anything around 0.1s is correct
- expect(duration).to be > 0.08
- expect(duration).to be < 0.12
end
it 'should retry checking spot instance request state when AWS::EC2::Errors::InvalidSpotInstanceRequestID::NotFound raised' do
|
Remove time-related assertions from spot manager tests.
|
cloudfoundry_bosh
|
train
|
rb
|
9db026de95bb8f2cda6a1a61d870bbcfe51deb90
|
diff --git a/framework/core/js/forum/src/components/UserCard.js b/framework/core/js/forum/src/components/UserCard.js
index <HASH>..<HASH> 100644
--- a/framework/core/js/forum/src/components/UserCard.js
+++ b/framework/core/js/forum/src/components/UserCard.js
@@ -26,10 +26,11 @@ export default class UserCard extends Component {
view() {
const user = this.props.user;
const controls = UserControls.controls(user, this).toArray();
+ const color = user.color();
return (
<div className={'UserCard ' + (this.props.className || '')}
- style={{backgroundColor: user.color()}}>
+ style={color ? {backgroundColor: color} : ''}>
<div className="darkenBackground">
<div className="container">
|
Don't apply background color if there is none
|
flarum_core
|
train
|
js
|
7a1759af2c97c1dee091f2cc478a9c7b8d00de1a
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -11,7 +11,7 @@ setup(
name='duckling',
version='1.1.0',
description='A wrapper for wit.ai\'s Duckling',
- url='https://github.com/FraBle/duckling',
+ url='https://github.com/FraBle/python-duckling',
author='Frank Blechschmidt',
author_email='frank.blechschmidt@sap.com',
license='Apache License 2.0',
|
fixed link to repo for pypi
|
FraBle_python-duckling
|
train
|
py
|
de3a47344859f1c3f5391dec566d2ad639cbd0d8
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -76,16 +76,14 @@ class BuildWithPly(build):
tn.write(version)
def build_nt2(self):
- """
- Install NT2 from the github-generated archive
- """
+ """ Install NT2 from the github-generated archive. """
nt2_dir = 'nt2'
nt2_version = '1.2.3-pythran' # fake!
cwd = os.getcwd()
nt2_src_dir = os.path.join(cwd, self.build_temp, nt2_dir + '_src')
if not os.path.isdir(nt2_src_dir):
print('nt2 archive needed, downloading it')
- url = 'https://github.com/NumScale/nt2/archive/release.zip'
+ url = 'https://github.com/pbrunet/nt2/archive/gemv_release.zip'
location = urlopen(url)
zipfile = ZipFile(StringIO(location.read()))
zipfile.extractall(self.build_temp)
|
Ugly hack to make numpy.dot work
|
serge-sans-paille_pythran
|
train
|
py
|
c79ca7fb58cda19852baea2de9a90c703ef9aa90
|
diff --git a/benchmarks/benchmarks/compute.py b/benchmarks/benchmarks/compute.py
index <HASH>..<HASH> 100644
--- a/benchmarks/benchmarks/compute.py
+++ b/benchmarks/benchmarks/compute.py
@@ -32,14 +32,14 @@ class BenchmarkConstellation:
state = (0,) * 7
self.subsys = Subsystem(network, state, network.node_indices)
else:
- raise
+ raise ValueError(network)
if mode == 'parallel':
config.PARALLEL_CONCEPT_EVALUATION = True
elif mode == 'sequential':
config.PARALLEL_CONCEPT_EVALUATION = False
else:
- raise
+ raise ValueError(mode)
def time_constellation(self, mode, network):
clear_subsystem_caches(self.subsys)
@@ -79,7 +79,7 @@ class BenchmarkMainComplex():
self.network = examples.fig16()
self.state = (1, 0, 0, 1, 1, 1, 0)
else:
- raise
+ raise ValueError(network)
# Save config
self.default_config = copy.copy(config.__dict__)
@@ -90,7 +90,7 @@ class BenchmarkMainComplex():
elif mode == 'sequential':
config.PARALLEL_CUT_EVALUATION = False
else:
- raise
+ raise ValueError(mode)
# Cache mode
if cache == 'local':
@@ -101,7 +101,7 @@ class BenchmarkMainComplex():
# No server running
raise NotImplementedError
else:
- raise
+ raise ValueError(cache)
config.CACHE_BIGMIPS = False
|
Raise `ValueError` in `compute` benchmarks for bad params
|
wmayner_pyphi
|
train
|
py
|
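For context on why the bare `raise` statements above were replaced: outside an `except` block a bare `raise` has no active exception to re-raise. A minimal sketch with illustrative names:

```python
def set_mode(mode):
    if mode == "parallel":
        pass  # enable parallel evaluation (placeholder)
    elif mode == "sequential":
        pass  # disable parallel evaluation (placeholder)
    else:
        # A bare `raise` here would itself fail with
        # "RuntimeError: No active exception to re-raise";
        # raising ValueError(mode) reports the offending parameter instead.
        raise ValueError(mode)
```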
ce2f17b4f7631f7989c668495f93f1756312d350
|
diff --git a/src/Laravel/Cashier/StripeGateway.php b/src/Laravel/Cashier/StripeGateway.php
index <HASH>..<HASH> 100644
--- a/src/Laravel/Cashier/StripeGateway.php
+++ b/src/Laravel/Cashier/StripeGateway.php
@@ -138,6 +138,10 @@ class StripeGateway {
$this->quantity(
$customer->subscription->quantity
);
+ } else {
+ $this->quantity(
+ $quantity
+ );
}
return $this->create(null, null, $customer);
|
swap() doesn't reflect $quantity when provided
As best as I can tell $quantity is never assigned, only checked on Line <I>.
|
laravel_cashier
|
train
|
php
|
bdff45f7434bab2962b870c485f8becce7345a80
|
diff --git a/mode/xml/xml.js b/mode/xml/xml.js
index <HASH>..<HASH> 100644
--- a/mode/xml/xml.js
+++ b/mode/xml/xml.js
@@ -320,6 +320,7 @@ CodeMirror.defineMode("xml", function(config, parserConfig) {
};
});
+CodeMirror.defineMIME("text/xml", "xml");
CodeMirror.defineMIME("application/xml", "xml");
if (!CodeMirror.mimeModes.hasOwnProperty("text/html"))
CodeMirror.defineMIME("text/html", {name: "xml", htmlMode: true});
|
[xml] support mime type `text/xml` (see RFC <I>)
|
codemirror_CodeMirror
|
train
|
js
|
595c19376329bd55f9a7b35f681a32ebe87e91f2
|
diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -73,8 +73,8 @@ function createXHR(options, callback) {
xhr.setRequestHeader(key, headers[key])
}
}
- } else {
- options.headers || throw new Error("Headers cannot be set on an XDomainRequest object");
+ } else if (options.headers) {
+ throw new Error("Headers cannot be set on an XDomainRequest object");
}
if ("responseType" in options) {
|
Throw is a statement. Fix #<I>
|
naugtur_xhr
|
train
|
js
|
baeeaea4c504704de421f38c11fb279a337c4384
|
diff --git a/solr/parser.go b/solr/parser.go
index <HASH>..<HASH> 100644
--- a/solr/parser.go
+++ b/solr/parser.go
@@ -45,8 +45,8 @@ func (parser *StandardResultParser) ParseResponseHeader(response *SolrResponse,
}
func (parser *StandardResultParser) ParseError(response *SolrResponse, sr *SolrResult) {
- if error, ok := response.Response["error"].(map[string]interface{}); ok {
- sr.Error = error
+ if err, ok := response.Response["error"].(map[string]interface{}); ok {
+ sr.Error = err
}
}
@@ -132,8 +132,8 @@ func (parser *MoreLikeThisParser) Parse(response *SolrResponse) (*SolrMltResult,
ParseDocResponse(match, sr.Match)
}
} else {
- if error, ok := response.Response["error"].(map[string]interface{}); ok {
- sr.Error = error
+ if err, ok := response.Response["error"].(map[string]interface{}); ok {
+ sr.Error = err
}
}
return sr, nil
|
Avoid using "error" as a variable name since we have the type error
|
vanng822_go-solr
|
train
|
go
|
090024c297e61170e3231aa2f7ca9d1a8ce7edfc
|
diff --git a/python/ray/air/config.py b/python/ray/air/config.py
index <HASH>..<HASH> 100644
--- a/python/ray/air/config.py
+++ b/python/ray/air/config.py
@@ -1,14 +1,5 @@
from dataclasses import dataclass
-from typing import (
- TYPE_CHECKING,
- Any,
- Callable,
- Dict,
- List,
- Mapping,
- Optional,
- Union,
-)
+from typing import TYPE_CHECKING, Any, Callable, Dict, List, Mapping, Optional, Union
from ray.air.constants import WILDCARD_KEY
from ray.tune.syncer import SyncConfig
@@ -267,6 +258,7 @@ class DatasetConfig:
return new_config
+@dataclass
@PublicAPI(stability="alpha")
class FailureConfig:
"""Configuration related to failure handling of each run/trial.
|
[AIR] Fix `FailureConfig` not being a dataclass (#<I>)
|
ray-project_ray
|
train
|
py
|
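A minimal sketch of what the missing decorator changes (the field shown is illustrative, not necessarily the real `FailureConfig` API):

```python
from dataclasses import dataclass, fields

@dataclass
class FailureConfig:
    max_failures: int = 0  # without @dataclass this would stay a plain class
                           # attribute: no generated __init__, __repr__,
                           # or fields() metadata

print([f.name for f in fields(FailureConfig())])  # ['max_failures']
```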
aa3d0c6aa14e6f0970810c3f34b9bd3d0f8c35c3
|
diff --git a/astroplan/core.py b/astroplan/core.py
index <HASH>..<HASH> 100644
--- a/astroplan/core.py
+++ b/astroplan/core.py
@@ -33,7 +33,7 @@ import numpy as np
__all__ = ["Observer", "Target", "FixedTarget", "NonFixedTarget",
"Constraint", "TimeWindow", "AltitudeRange",
- "AboveAirmass", "Observation", "MAGIC_TIME"]
+ "AboveAirmass", "MAGIC_TIME"]
#__doctest_requires__ = {'*': ['scipy.integrate']}
|
Also removing Observation from __all__
|
astropy_astroplan
|
train
|
py
|
d92086568acf00f4d5553510fbe6b6b12b04f6df
|
diff --git a/src/main/java/com/redhat/victims/VictimsRecord.java b/src/main/java/com/redhat/victims/VictimsRecord.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/redhat/victims/VictimsRecord.java
+++ b/src/main/java/com/redhat/victims/VictimsRecord.java
@@ -167,7 +167,7 @@ public class VictimsRecord {
}
// we have gone through all algorithms without finding a subset match
- return false;
+ return this.hash.equals(that.hash);
}
/**
|
If no fingerprint matches were found, containsAll should rely on hash
|
victims_victims-lib-java
|
train
|
java
|
08b3d3898dff93c61765084f3dd324f68fdad3e8
|
diff --git a/src/index.js b/src/index.js
index <HASH>..<HASH> 100644
--- a/src/index.js
+++ b/src/index.js
@@ -77,8 +77,30 @@ export class TypeaheadEditor extends Editor {
}
};
+ onEscape = (e) => {
+ const { onEscape, onTypeaheadChange } = this.props;
+
+ if (!this.getTypeaheadRange()) {
+ if (onEscape) {
+ onEscape(e);
+ }
+ return;
+ }
+
+ e.preventDefault();
+ if (onTypeaheadChange) {
+ onTypeaheadChange(null);
+ }
+ };
+
render() {
- const { onChange, onTypeaheadChange, ...other } = this.props;
- return <Editor {...other} onChange={this.onChange} />;
+ const { onChange, onEscape, onTypeaheadChange, ...other } = this.props;
+ return (
+ <Editor
+ {...other}
+ onChange={this.onChange}
+ onEscape={this.onEscape}
+ />
+ );
}
};
|
Dismiss the typeahead when escape is pressed.
|
dooly-ai_draft-js-typeahead
|
train
|
js
|
532a88c95f69f7b75f60cc62f6d8520ce13227ee
|
diff --git a/agent/lib/agent/handler.rb b/agent/lib/agent/handler.rb
index <HASH>..<HASH> 100644
--- a/agent/lib/agent/handler.rb
+++ b/agent/lib/agent/handler.rb
@@ -34,7 +34,7 @@ module Bosh::Agent
klazz = Bosh::Agent::Message.const_get(c)
if klazz.respond_to?(:process)
# CamelCase -> under_score -> downcased
- processor_key = c.gsub(/(.)([A-Z])/,'\1_\2').downcase
+ processor_key = c.to_s.gsub(/(.)([A-Z])/,'\1_\2').downcase
@processors[processor_key] = klazz
end
end
|
<I> fix in handler - explicit to_s on symbol
|
cloudfoundry_bosh
|
train
|
rb
|
d4aa74f7a7fe58fe4d065843e698a12281ca1d97
|
diff --git a/src/ACacheManager.php b/src/ACacheManager.php
index <HASH>..<HASH> 100644
--- a/src/ACacheManager.php
+++ b/src/ACacheManager.php
@@ -84,7 +84,10 @@ abstract class ACacheManager
*/
protected function _createData(callable $func): array
{
- $data = (array) \call_user_func($func);
+ $data = \call_user_func($func);
+ if(!$data instanceof \Traversable) { //ignore invalid results
+ return [];
+ }
$return = [];
foreach($data as $key => $value) {
$return[$key] = [
|
Just a little improvement on last commit
|
GustavSoftware_Cache
|
train
|
php
|
8c5ebff406e83eddf23698abc5167581293e060c
|
diff --git a/utils.py b/utils.py
index <HASH>..<HASH> 100644
--- a/utils.py
+++ b/utils.py
@@ -239,7 +239,6 @@ class MessagesHandlerMixIn(object):
chkid = msg.msgid[1:3]
if not msg.may_be_emitted():
self._msgs_state[msg.msgid] = False
- continue
self._messages[msg.symbol] = msg
self._alternative_names[msg.msgid] = msg
for old_id, old_symbol in msg.old_names:
@@ -537,6 +536,8 @@ class MessagesHandlerMixIn(object):
"""output full messages list documentation in ReST format"""
msgs = sorted(self._messages.itervalues(), key=lambda msg: msg.msgid)
for msg in msgs:
+ if not msg.may_be_emitted():
+ continue
print msg.format_help(checkerref=False)
print
@@ -661,7 +662,6 @@ class PyLintASTWalker(object):
def _is_method_enabled(self, method):
if not hasattr(method, 'checks_msgs'):
return True
-
for msg_desc in method.checks_msgs:
if self.linter.is_message_enabled(msg_desc):
return True
|
Always register messages even if they may not be emitted.
This avoids several problems:
* an error is reported because the message is used in enable/disable
* check_message doesn't work properly because the message is erroneously considered
as enabled (because check_message_id raises UnknownMessage)
So, put them all in linter._messages, filtering out non-emittable messages in
list_messages.
|
PyCQA_pylint
|
train
|
py
|
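The reasoning above amounts to moving the `may_be_emitted` filter from registration time to listing time; a minimal sketch of that split, using a hypothetical registry:

```python
# Register every message unconditionally; filter only when producing the listing.
messages = {}  # symbol -> message object (hypothetical registry)

def register_message(msg):
    messages[msg.symbol] = msg  # always registered, so enable/disable can find it

def list_messages():
    for msg in sorted(messages.values(), key=lambda m: m.msgid):
        if not msg.may_be_emitted():
            continue  # hide it from the listing without unregistering it
        print(msg.format_help(checkerref=False))
```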
4dd60e3cd8bdba1812f42713dd927ce05c7e874b
|
diff --git a/lib/seahorse/model/shapes/shape.rb b/lib/seahorse/model/shapes/shape.rb
index <HASH>..<HASH> 100644
--- a/lib/seahorse/model/shapes/shape.rb
+++ b/lib/seahorse/model/shapes/shape.rb
@@ -57,7 +57,8 @@ module Seahorse
property :serialized_name, String
def serialized_name
- @serialized_name || @member_name.to_s
+ name = @serialized_name || @member_name.to_s
+ name == '' ? nil : name
end
def to_hash
|
Shape#serialized_name will no longer return the empty string.
|
aws_aws-sdk-ruby
|
train
|
rb
|
3ac3bfcede7d4d523f9f9458500dd78b5eae7df4
|
diff --git a/http/org.wso2.carbon.transport.http.netty/src/main/java/org/wso2/carbon/transport/http/netty/listener/WebSocketServerConnectorErrorHandler.java b/http/org.wso2.carbon.transport.http.netty/src/main/java/org/wso2/carbon/transport/http/netty/listener/WebSocketServerConnectorErrorHandler.java
index <HASH>..<HASH> 100644
--- a/http/org.wso2.carbon.transport.http.netty/src/main/java/org/wso2/carbon/transport/http/netty/listener/WebSocketServerConnectorErrorHandler.java
+++ b/http/org.wso2.carbon.transport.http.netty/src/main/java/org/wso2/carbon/transport/http/netty/listener/WebSocketServerConnectorErrorHandler.java
@@ -43,7 +43,7 @@ public class WebSocketServerConnectorErrorHandler implements ServerConnectorErro
@Override
public void handleError(Exception e, CarbonMessage carbonMessage, CarbonCallback carbonCallback) throws Exception {
//This debug log will be used in error debugging in the server connector.
- logger.debug("Error occurred : " + e.getMessage());
+ logger.debug("Error occurred : " + e.getMessage(), e);
}
@Override
|
Modified WebSocketServerConnectorErrorHandler to include more details about the error.
|
wso2_transport-http
|
train
|
java
|
f8408d70e3cea58343d5dabdccbec4fc6f718449
|
diff --git a/models/Assoc.py b/models/Assoc.py
index <HASH>..<HASH> 100644
--- a/models/Assoc.py
+++ b/models/Assoc.py
@@ -51,7 +51,7 @@ class Assoc:
return
- def addAssociationToGraph(self,g):
+ def addAssociationToGraph(self, g):
cu = self.cu
# first, add the direct triple
@@ -68,6 +68,7 @@ class Assoc:
g.add((node, RDF['type'], URIRef(cu.get_uri('Annotation:'))))
g.add((node, self.BASE['hasSubject'], s))
g.add((node, self.BASE['hasObject'], o))
+ g.add((node, self.BASE['hasPredicate'], p))
# this is handling the occasional messy pubs that are sometimes literals
if self.pub_id is not None:
|
added MONARCH:hasPredicate to assoc class
|
monarch-initiative_dipper
|
train
|
py
|
bf3641311b6baaef760bee0df4bbeae2e778538a
|
diff --git a/client/me/concierge/shared/upsell.js b/client/me/concierge/shared/upsell.js
index <HASH>..<HASH> 100644
--- a/client/me/concierge/shared/upsell.js
+++ b/client/me/concierge/shared/upsell.js
@@ -30,7 +30,7 @@ class Upsell extends Component {
</CompactCard>
<CompactCard>
<p>
- { translate( 'Only sites on a Business Plan are eligible for a site setup chat.' ) }
+ { translate( 'Only sites on a Business plan are eligible for a Concierge session.' ) }
</p>
<Button href={ `/plans/${ this.props.site.slug }` } primary>
{ translate( 'Upgrade to Business' ) }
|
Update copy (#<I>)
|
Automattic_wp-calypso
|
train
|
js
|
8758ca89a18fafd50be8105a41640f48cb766089
|
diff --git a/nitro.js b/nitro.js
index <HASH>..<HASH> 100644
--- a/nitro.js
+++ b/nitro.js
@@ -336,7 +336,7 @@ function processPid(host,path,api_key,pid) {
.add(api.mProgrammesGenreGroupings)
if (upcoming || children) {
- query.add(api.fProgrammesChildrenOf,pid)
+ query.add(api.fProgrammesDescendantsOf,pid)
.add(api.fProgrammesAvailabilityPending);
}
else {
@@ -363,6 +363,9 @@ function processPid(host,path,api_key,pid) {
else {
query.add(api.fProgrammesAvailabilityEntityTypeEpisode);
}
+ if (embargoed) {
+ query.add(api.xProgrammesEmbargoed,embargoed);
+ }
nitro.make_request(host,path,api_key,query,{},function(obj){
return dispatch(obj);
});
@@ -383,6 +386,9 @@ function processVpid(host,path,api_key,vpid) {
query.add(api.fVersionsAvailabilityAvailable);
}
}
+ if (embargoed) {
+ query.add(api.xProgrammesEmbargoed,embargoed);
+ }
nitro.make_request(host,api.nitroVersions,api_key,query,{},function(obj){
for (var i in obj.nitro.results.items) {
var item = obj.nitro.results.items[i];
|
nitro; allow -z and -p together. -k now descendants
|
MikeRalphson_bbcparse
|
train
|
js
|
8a8a04d88609acf735db0a7b176f6bfe9efa506e
|
diff --git a/lib/rika.rb b/lib/rika.rb
index <HASH>..<HASH> 100644
--- a/lib/rika.rb
+++ b/lib/rika.rb
@@ -4,7 +4,7 @@ raise "You need to run JRuby to use Rika" unless RUBY_PLATFORM =~ /java/
require "rika/version"
require 'uri'
-require 'net/http'
+require 'open-uri'
require 'java'
Dir[File.join(File.dirname(__FILE__), "../target/dependency/*.jar")].each do |jar|
@@ -104,7 +104,7 @@ module Rika
def get_input_type
if File.exists?(@uri) && File.directory?(@uri) == false
:file
- elsif URI(@uri).is_a?(URI::HTTP) && Net::HTTP.get_response(URI(@uri)).is_a?(Net::HTTPSuccess)
+ elsif URI(@uri).is_a?(URI::HTTP) && open(@uri)
:http
else
raise IOError, "Input (#{@uri}) is neither file nor http."
|
Changed Net::HTTP to OpenURI
|
ricn_rika
|
train
|
rb
|
e50d33308a910db54158ea438282f09621769e74
|
diff --git a/src/Symfony/Component/HttpKernel/Profiler/FileProfilerStorage.php b/src/Symfony/Component/HttpKernel/Profiler/FileProfilerStorage.php
index <HASH>..<HASH> 100644
--- a/src/Symfony/Component/HttpKernel/Profiler/FileProfilerStorage.php
+++ b/src/Symfony/Component/HttpKernel/Profiler/FileProfilerStorage.php
@@ -39,7 +39,7 @@ class FileProfilerStorage implements ProfilerStorageInterface
$this->folder = substr($dsn, 5);
if (!is_dir($this->folder)) {
- mkdir($this->folder);
+ mkdir($this->folder, 0777, true);
}
}
|
[HttpKernel] fixed the creation of the Profiler directory
|
symfony_symfony
|
train
|
php
|
b252468ca293b1a72fc7af4184533b556e84df18
|
diff --git a/telethon/telegram_bare_client.py b/telethon/telegram_bare_client.py
index <HASH>..<HASH> 100644
--- a/telethon/telegram_bare_client.py
+++ b/telethon/telegram_bare_client.py
@@ -299,6 +299,13 @@ class TelegramBareClient:
self.disconnect()
return self.connect()
+ def set_proxy(self, proxy):
+ """Change the proxy used by the connections.
+ """
+ if self.is_connected():
+ raise RuntimeError("You can't change the proxy while connected.")
+ self._sender.connection.conn.proxy = proxy
+
# endregion
# region Working with different connections/Data Centers
|
TelegramBareClient: Add set_proxy() method
This allows changing the proxy without recreating the client instance.
|
LonamiWebs_Telethon
|
train
|
py
|
be2492f2528258f12865b62ae6695587e60bd716
|
diff --git a/writer/src/main/java/com/ning/metrics/serialization/writer/DiskSpoolEventWriter.java b/writer/src/main/java/com/ning/metrics/serialization/writer/DiskSpoolEventWriter.java
index <HASH>..<HASH> 100644
--- a/writer/src/main/java/com/ning/metrics/serialization/writer/DiskSpoolEventWriter.java
+++ b/writer/src/main/java/com/ning/metrics/serialization/writer/DiskSpoolEventWriter.java
@@ -127,6 +127,12 @@ public class DiskSpoolEventWriter implements EventWriter
}
}
+ public void shutdown() throws InterruptedException
+ {
+ executor.shutdown();
+ executor.awaitTermination(15, TimeUnit.SECONDS);
+ }
+
private void scheduleFlush()
{
executor.schedule(new Runnable()
|
writer: add shutdown method for executor
The DiskSpoolEventWriter periodically flushes events via an executor
service. Add a shutdown method for shutdown hooks.
|
pierre_serialization
|
train
|
java
|
5796908aba8d96e10578eb69d1bffc2dd745c79d
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -99,11 +99,7 @@ for accessing Google's Cloud Platform services such as Google BigQuery.
'plotly==1.12.5',
'httplib2==0.10.3',
'oauth2client==2.2.0',
- # TODO(rajivpb): Pinning pandas to 0.19.1 is a temporary work-around for
- # https://issues.apache.org/jira/browse/AIRFLOW-1179, because the version of airflow on PyPI
- # does not seem to have the fix. We should unpin after installing a version of airflow that's
- # built from HEAD.
- 'pandas==0.19.1',
+ 'pandas==0.22.0',
'google_auth_httplib2==0.0.2',
'pandas-profiling>=1.0.0a2',
'python-dateutil==2.5.0',
|
Upgrade pandas (#<I>)
* Upgrade pandas and add pandas-gbq since apache-airflow wants these
* Remove pandas-gbq since this is only required by apache-airflow
|
googledatalab_pydatalab
|
train
|
py
|
a659e0f8a5dad714a6acf427149726edf248b054
|
diff --git a/lib/router.js b/lib/router.js
index <HASH>..<HASH> 100644
--- a/lib/router.js
+++ b/lib/router.js
@@ -293,7 +293,8 @@ mergeInto(Router.prototype, {
},
/**
- * Renders this router's component to the given DOM node.
+ * Renders this router's component to the given DOM node and returns a
+ * reference to the rendered component.
*/
renderComponent: function (node) {
if (!this._component) {
@@ -306,6 +307,8 @@ mergeInto(Router.prototype, {
var component = this.handler(this.getComponentProps(urlStore.getCurrentPath()));
this._component = React.renderComponent(component, node);
+
+ return this._component;
},
_handleRouteChange: function () {
|
Return a reference to the rendered component
|
taion_rrtr
|
train
|
js
|
b8db85e45d64457f92899341e431198208184923
|
diff --git a/lib/plugin.js b/lib/plugin.js
index <HASH>..<HASH> 100644
--- a/lib/plugin.js
+++ b/lib/plugin.js
@@ -56,7 +56,14 @@ ManifestPlugin.prototype.apply = function(compiler) {
var seed = this.opts.seed || {};
var publicPath = this.opts.publicPath != null ? this.opts.publicPath : compilation.options.output.publicPath;
- var stats = compilation.getStats().toJson();
+ var stats = compilation.getStats().toJson({
+ // Disable data generation of everything we don't use
+ all: false,
+ // Add asset Information
+ assets: true,
+ // Show cached assets (setting this to `false` only shows emitted files)
+ cachedAssets: true,
+ });
var files = compilation.chunks.reduce(function(files, chunk) {
return chunk.files.reduce(function (files, path) {
|
Fix high memory usage (#<I>)
closes #<I>
|
danethurber_webpack-manifest-plugin
|
train
|
js
|
aba469e01bb4cae739035c370efbd5cfbe7ff71b
|
diff --git a/docs/source/conf.py b/docs/source/conf.py
index <HASH>..<HASH> 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -133,7 +133,7 @@ html_theme = 'default'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ['_static']
+html_static_path = []
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
|
Workaround for sphinx warning
Fixes the following warning when generating documentation.
WARNING: html_static_path entry u'.../_static' does not exist
More info: <URL>
|
product-definition-center_pdc-client
|
train
|
py
|
cd6e9d84fc78fe1f9c809ab78d13cfd0374e8ac6
|
diff --git a/Tests/TestCase.php b/Tests/TestCase.php
index <HASH>..<HASH> 100644
--- a/Tests/TestCase.php
+++ b/Tests/TestCase.php
@@ -15,7 +15,7 @@ class TestCase extends \PHPUnit_Framework_TestCase
{
protected function setUp()
{
- if (!class_exists('Monolog\Logger')) {
+ if (!class_exists('Monolog\\Logger')) {
$this->markTestSkipped('Monolog is not available.');
}
}
|
Skip tests if some vendors are not available (prevents fatal errors)
|
symfony_monolog-bundle
|
train
|
php
|
d3eda26bc4efb44c8794c662577bd4c949eeb7be
|
diff --git a/opal/corelib/regexp.rb b/opal/corelib/regexp.rb
index <HASH>..<HASH> 100644
--- a/opal/corelib/regexp.rb
+++ b/opal/corelib/regexp.rb
@@ -194,6 +194,10 @@ class Regexp
return result;
}
end
+
+ def casefold?
+ `self.ignoreCase`
+ end
alias to_s source
end
diff --git a/spec/filters/bugs/regexp.rb b/spec/filters/bugs/regexp.rb
index <HASH>..<HASH> 100644
--- a/spec/filters/bugs/regexp.rb
+++ b/spec/filters/bugs/regexp.rb
@@ -1,5 +1,4 @@
opal_filter "regular_expressions" do
- fails "Regexp#casefold? returns the value of the case-insensitive flag"
fails "Regexp#hash is based on the text and options of Regexp"
fails "Regexp#hash returns the same value for two Regexps differing only in the /n option"
fails "Regexp#initialize is a private method"
|
Implement Regexp#casefold?
|
opal_opal
|
train
|
rb,rb
|
3a1c3e5f7e7c619ef64e06448ff9a55cd10f6362
|
diff --git a/actionbarsherlock/src/com/actionbarsherlock/internal/ResourcesCompat.java b/actionbarsherlock/src/com/actionbarsherlock/internal/ResourcesCompat.java
index <HASH>..<HASH> 100644
--- a/actionbarsherlock/src/com/actionbarsherlock/internal/ResourcesCompat.java
+++ b/actionbarsherlock/src/com/actionbarsherlock/internal/ResourcesCompat.java
@@ -23,7 +23,7 @@ public final class ResourcesCompat {
* can use to simulate filtering based on width and smallest width
* qualifiers on pre-3.2.
*
- * @param context Context to load booleans from on 3.2+ and to fetch the
+ * @param context Context to load booleans from on 4.0+ and to fetch the
* display metrics.
* @param id Id of boolean to load.
* @return Associated boolean value as reflected by the current display
|
Update docs to reflect behavior.
|
JakeWharton_ActionBarSherlock
|
train
|
java
|
11072ba940d4351ce6ed0c9ea1dfd432e29565a3
|
diff --git a/scripts/build-dev-server-client.js b/scripts/build-dev-server-client.js
index <HASH>..<HASH> 100644
--- a/scripts/build-dev-server-client.js
+++ b/scripts/build-dev-server-client.js
@@ -29,7 +29,7 @@ async function bundleDevServerClient() {
banner: [
'<meta charset="utf-8">',
'💎 Stencil Dev Server',
- '<script type="module">',
+ '<script>',
'/* Dev Server Client */'
].join('\n'),
|
fix(hmr): main script needs to be sync
|
ionic-team_stencil
|
train
|
js
|
ec38dffbd548bee4bd129b8bd907994c3cb68e2e
|
diff --git a/penaltymodel_maxgap/tests/test_interface.py b/penaltymodel_maxgap/tests/test_interface.py
index <HASH>..<HASH> 100644
--- a/penaltymodel_maxgap/tests/test_interface.py
+++ b/penaltymodel_maxgap/tests/test_interface.py
@@ -56,12 +56,3 @@ class TestInterface(unittest.TestCase):
else:
self.assertGreaterEqual(energy, widget.ground_energy + widget.classical_gap - 10**-6)
- def test_nonzero_configuration(self):
- """MaxGap is currently not supporting non-zero feasible states. This is checking that
- non-zero feasible state problems don't get run.
- """
- graph = nx.complete_graph(3)
- spec = pm.Specification(graph, [0, 1], {(-1, 1): 0, (-1, -1): -2}, dimod.SPIN)
-
- with self.assertRaises(ImpossiblePenaltyModel):
- maxgap.get_penalty_model(spec)
|
Remove test that checks the non-zero feasible state filter
Since MaxGap now supports non-zero feasible states, we no longer need to filter out non-zero feasible state problems. Since the filter has been removed, this test is no longer valid.
|
dwavesystems_penaltymodel
|
train
|
py
|
9851579ef6514edf05c6af19aa5fd1f029405cbb
|
diff --git a/geocoder/gisgraphy.py b/geocoder/gisgraphy.py
index <HASH>..<HASH> 100644
--- a/geocoder/gisgraphy.py
+++ b/geocoder/gisgraphy.py
@@ -54,7 +54,7 @@ class GisgraphyQuery(MultipleResultsQuery):
API Reference
-------------
- http://www.gisgraphy.com/documentation/api/
+ http://www.gisgraphy.com/documentation/user-guide.php
"""
provider = 'gisgraphy'
method = 'geocode'
diff --git a/geocoder/gisgraphy_reverse.py b/geocoder/gisgraphy_reverse.py
index <HASH>..<HASH> 100644
--- a/geocoder/gisgraphy_reverse.py
+++ b/geocoder/gisgraphy_reverse.py
@@ -23,7 +23,7 @@ class GisgraphyReverse(GisgraphyQuery):
API Reference
-------------
- http://www.gisgraphy.com/documentation/api/
+ http://www.gisgraphy.com/documentation/user-guide.php
"""
provider = 'gisgraphy'
method = 'reverse'
|
Change documentation to url provided by David Masclet
|
DenisCarriere_geocoder
|
train
|
py,py
|
d9e8c0afd5deb6c60bfdc8aa600438fefacd84ee
|
diff --git a/urlfetch.py b/urlfetch.py
index <HASH>..<HASH> 100644
--- a/urlfetch.py
+++ b/urlfetch.py
@@ -677,16 +677,6 @@ class ObjectDict(dict):
self[name] = value
-def _flatten(lst):
- """Flatten nested list/tuple/set.
-
- modified from https://gist.github.com/1308410
- """
- return reduce(lambda l, i: l + _flatten(i)
- if isinstance(i, (list, tuple, set))
- else l + [i], lst, [])
-
-
def decode_gzip(data):
"""Decode gzipped content."""
import gzip
@@ -791,8 +781,7 @@ def random_useragent(filename=None, *filenames):
else:
filenames.append(filename)
- filenames = set(_flatten(filenames))
- for filename in filenames:
+ for filename in set(filenames):
try:
st = os.stat(filename)
if stat.S_ISREG(st.st_mode) and os.access(filename, os.R_OK):
|
_flatten_list did so much, too. Remove it.
|
ifduyue_urlfetch
|
train
|
py
|
9a7a420b648f9eca890704d85268a4bd71afaaa8
|
diff --git a/lib/restify/context.rb b/lib/restify/context.rb
index <HASH>..<HASH> 100644
--- a/lib/restify/context.rb
+++ b/lib/restify/context.rb
@@ -40,7 +40,7 @@ module Restify
processor.new(context, response).resource
end
- def request(method, uri, data = nil, **kwargs)
+ def request(method, uri, data = nil, opts = {})
request = Request.new \
method: method,
uri: join(uri),
|
Fix wrong request data handling
The last hash passed to the request method was handled as an empty
keyword argument list instead of the data argument.
|
jgraichen_restify
|
train
|
rb
|
e2c64edd50ac177db6856fc9930c5e71a48b9a62
|
diff --git a/lib/openapi3_parser/nodes/example.rb b/lib/openapi3_parser/nodes/example.rb
index <HASH>..<HASH> 100644
--- a/lib/openapi3_parser/nodes/example.rb
+++ b/lib/openapi3_parser/nodes/example.rb
@@ -8,19 +8,19 @@ module Openapi3Parser
include Node::Object
def summary
- fields["summary"]
+ node_data["summary"]
end
def description
- fields["description"]
+ node_data["description"]
end
def value
- fields["value"]
+ node_data["value"]
end
def external_value
- fields["externalValue"]
+ node_data["externalValue"]
end
end
end
|
Fix using old accessor in Example node
This still referred to fields, which was the old means of referring to
a node's data.
|
kevindew_openapi3_parser
|
train
|
rb
|
f936128f5b63ac563512e80c6802876f97857110
|
diff --git a/lib/editor/tinymce/plugins/managefiles/tinymce/editor_plugin.js b/lib/editor/tinymce/plugins/managefiles/tinymce/editor_plugin.js
index <HASH>..<HASH> 100644
--- a/lib/editor/tinymce/plugins/managefiles/tinymce/editor_plugin.js
+++ b/lib/editor/tinymce/plugins/managefiles/tinymce/editor_plugin.js
@@ -93,7 +93,9 @@
var managefiles = ed.getParam('managefiles', {});
// Get draft area id from filepicker options.
- if (!managefiles.itemid && M.editor_tinymce.filepicker_options && M.editor_tinymce.filepicker_options[ed.id]) {
+ if (!managefiles.itemid && M.editor_tinymce.filepicker_options
+ && M.editor_tinymce.filepicker_options[ed.id]
+ && M.editor_tinymce.filepicker_options[ed.id].image) {
managefiles.itemid = M.editor_tinymce.filepicker_options[ed.id].image.itemid;
ed.settings['managefiles'].itemid = managefiles.itemid;
}
|
MDL-<I> fix TinyMCE managefiles plugin for textareas that do not support file uploads
The current managefiles plugin assumes that every file manager will be created
by either calling use_editor without filepicker options ($fpoptions) or with
filepicker options that include options for the moodleimage plugin.
|
moodle_moodle
|
train
|
js
|
fc6844cca6bac20a8e4c79cd04dd71a937afc980
|
diff --git a/src/File/File.php b/src/File/File.php
index <HASH>..<HASH> 100644
--- a/src/File/File.php
+++ b/src/File/File.php
@@ -13,7 +13,7 @@ use function Siler\Functional\concat;
*
* @param string $dirname
* @param string $regex
- * @param int $mode
+ * @param int(0)|int(1)|int(2)|int(3)|int(4) $mode
*
* @psalm-return list<SplFileInfo>
* @return SplFileInfo[]
|
types: fix finite set of ints
|
leocavalcante_siler
|
train
|
php
|
26400c8c4da9cb51d16fcebb3ad1a47498fd3f8c
|
diff --git a/lib/dynamoid/adapter_plugin/aws_sdk_v2.rb b/lib/dynamoid/adapter_plugin/aws_sdk_v2.rb
index <HASH>..<HASH> 100644
--- a/lib/dynamoid/adapter_plugin/aws_sdk_v2.rb
+++ b/lib/dynamoid/adapter_plugin/aws_sdk_v2.rb
@@ -587,7 +587,7 @@ module Dynamoid
expected = Hash.new { |h,k| h[k] = {} }
return expected unless conditions
- conditions[:unless_exists].try(:each) do |col|
+ conditions.delete(:unless_exists).try(:each) do |col|
expected[col.to_s][:exists] = false
end
conditions[:if].try(:each) do |col,val|
|
remove :unless_exists option as it is no longer accepted by aws sdk v2
|
Dynamoid_dynamoid
|
train
|
rb
|
917fbfaf368f2274f0928f8703b15a85a0c7bd6b
|
diff --git a/lib/fetch/index.js b/lib/fetch/index.js
index <HASH>..<HASH> 100644
--- a/lib/fetch/index.js
+++ b/lib/fetch/index.js
@@ -1597,13 +1597,10 @@ function httpNetworkFetch (
// 2. If aborted, then:
function onResponseAborted () {
- // 1. Finalize response for fetchParams and response.
- finalizeResponse(fetchParams, response)
-
- // 2. Let aborted be the termination’s aborted flag.
+ // 1. Let aborted be the termination’s aborted flag.
const aborted = this.terminated.aborted
- // 3. If aborted is set, then:
+ // 2. If aborted is set, then:
if (aborted) {
// 1. Set response’s aborted flag.
response.aborted = true
@@ -1613,14 +1610,14 @@ function httpNetworkFetch (
this.controller.error(new AbortError())
}
} else {
- // 4. Otherwise, if stream is readable, error stream with a TypeError.
+ // 3. Otherwise, if stream is readable, error stream with a TypeError.
if (isReadable(stream)) {
this.controller.error(new TypeError('terminated'))
}
}
- // 5. If connection uses HTTP/2, then transmit an RST_STREAM frame.
- // 6. Otherwise, the user agent should close connection unless it would be bad for performance to do so.
+ // 4. If connection uses HTTP/2, then transmit an RST_STREAM frame.
+ // 5. Otherwise, the user agent should close connection unless it would be bad for performance to do so.
this.connection.destroy()
}
|
fix: fetch don't finalize on abort
|
mcollina_undici
|
train
|
js
|
36663add9f53da925f1d29c8c567ab30a1f33139
|
diff --git a/tests/api_resources/checkout/test_session.py b/tests/api_resources/checkout/test_session.py
index <HASH>..<HASH> 100644
--- a/tests/api_resources/checkout/test_session.py
+++ b/tests/api_resources/checkout/test_session.py
@@ -3,7 +3,7 @@ from __future__ import absolute_import, division, print_function
import stripe
-TEST_RESOURCE_ID = "loc_123"
+TEST_RESOURCE_ID = "cs_123"
class TestSession(object):
@@ -27,3 +27,10 @@ class TestSession(object):
)
request_mock.assert_requested("post", "/v1/checkout/sessions")
assert isinstance(resource, stripe.checkout.Session)
+
+ def test_is_retrievable(self, request_mock):
+ resource = stripe.checkout.Session.retrieve(TEST_RESOURCE_ID)
+ request_mock.assert_requested(
+ "get", "/v1/checkout/sessions/%s" % TEST_RESOURCE_ID
+ )
+ assert isinstance(resource, stripe.checkout.Session)
|
Add support for retrieving a Checkout Session
|
stripe_stripe-python
|
train
|
py
|
8f827456d240489ad8c8deaf95170c0ea41e0cf8
|
diff --git a/lib/chalk-log.rb b/lib/chalk-log.rb
index <HASH>..<HASH> 100644
--- a/lib/chalk-log.rb
+++ b/lib/chalk-log.rb
@@ -36,6 +36,13 @@ module Chalk::Log
# add.)
LEVELS = [:debug, :info, :warn, :error, :fatal].freeze
+ module Clevels
+ Sheddable = 'sheddable'
+ SheddablePlus = 'sheddableplus'
+ Critical = 'critical'
+ CriticalPlus = 'criticalplus'
+ end
+
@included = Set.new
# Method which goes through heroic efforts to ensure that the whole
|
Add statically-defined constants for criticality levels
|
stripe-archive_chalk-log
|
train
|
rb
|
eff9fbb3d3e0cea4a02821e46aee630ae73293b5
|
diff --git a/dockerclient/client.go b/dockerclient/client.go
index <HASH>..<HASH> 100644
--- a/dockerclient/client.go
+++ b/dockerclient/client.go
@@ -25,6 +25,11 @@ import (
"io/ioutil"
)
+// NewClientFromEnv is exposed to simplify getting a client when vendoring this library.
+func NewClientFromEnv() (*docker.Client, error) {
+ return docker.NewClientFromEnv()
+}
+
// Mount represents a binding between the current system and the destination client
type Mount struct {
SourcePath string
|
Add a helper method for getting client library
Simplifies using the version of the docker client that is vendored with
this package (due to drastic changes in Docker client library
dependencies).
|
openshift_imagebuilder
|
train
|
go
|
5fb000f7b458ff807985d9f00089bc7992e9e9dc
|
diff --git a/tests/TestCase/Database/Driver/SqlserverTest.php b/tests/TestCase/Database/Driver/SqlserverTest.php
index <HASH>..<HASH> 100644
--- a/tests/TestCase/Database/Driver/SqlserverTest.php
+++ b/tests/TestCase/Database/Driver/SqlserverTest.php
@@ -143,6 +143,7 @@ class SqlserverTest extends TestCase
$expected['failoverPartner'] = null;
$expected['loginTimeout'] = null;
$expected['multiSubnetFailover'] = null;
+ $expected['port'] = '';
$connection = $this->getMockBuilder('stdClass')
->setMethods(['exec', 'quote'])
@@ -206,6 +207,7 @@ class SqlserverTest extends TestCase
$expected['failoverPartner'] = null;
$expected['loginTimeout'] = null;
$expected['multiSubnetFailover'] = null;
+ $expected['port'] = '';
$driver->expects($this->once())->method('_connect')
->with($dsn, $expected);
|
add 'port' to expected result
|
cakephp_cakephp
|
train
|
php
|
d04f4b794ec2b27725223c2f807fc7c2e8cfcfa3
|
diff --git a/metamorphosis-server/src/main/java/com/taobao/metamorphosis/server/stats/StatsManager.java b/metamorphosis-server/src/main/java/com/taobao/metamorphosis/server/stats/StatsManager.java
index <HASH>..<HASH> 100644
--- a/metamorphosis-server/src/main/java/com/taobao/metamorphosis/server/stats/StatsManager.java
+++ b/metamorphosis-server/src/main/java/com/taobao/metamorphosis/server/stats/StatsManager.java
@@ -152,6 +152,10 @@ public class StatsManager implements Service {
else if ("help".equals(item)) {
this.appendHelp(sb);
}
+ else if ("reset".equals(item)) {
+ this.realTimeStat.resetStat();
+ this.append(sb, "reset", "ok");
+ }
else {
// ������topic
this.appendTopic(item, sb);
|
Fixed issue <I>: added stats reset protocol to reset realtime statistics
|
killme2008_Metamorphosis
|
train
|
java
|
aeb28073e95557122d7325f7addac9eca7b07e7f
|
diff --git a/paramiko/transport.py b/paramiko/transport.py
index <HASH>..<HASH> 100644
--- a/paramiko/transport.py
+++ b/paramiko/transport.py
@@ -1534,7 +1534,9 @@ class Transport (threading.Thread, ClosingContextManager):
m.add_bytes(self.H)
m.add_byte(b(id))
m.add_bytes(self.session_id)
- hash_algo = self.kex_engine.hash_algo
+ # Fallback to SHA1 for kex engines that fail to specify a hex
+ # algorithm, or for e.g. transport tests that don't run kexinit.
+ hash_algo = getattr(self.kex_engine, 'hex_algo', sha1)
out = sofar = hash_algo(m.asbytes()).digest()
while len(out) < nbytes:
m = Message()
|
Fallback to sha1 in _compute_key
|
paramiko_paramiko
|
train
|
py
|
d730d4809273fe51a10274e6ac04432cbf5e47a5
|
diff --git a/Tests/LocaleTest.php b/Tests/LocaleTest.php
index <HASH>..<HASH> 100644
--- a/Tests/LocaleTest.php
+++ b/Tests/LocaleTest.php
@@ -36,7 +36,7 @@ class LocaleTest extends \PHPUnit_Framework_TestCase
public function testGetDisplayCountriesForSwitzerland()
{
- IntlTestHelper::requireFullIntl($this);
+ IntlTestHelper::requireFullIntl($this, false);
$countries = Locale::getDisplayCountries('de_CH');
$this->assertEquals('Schweiz', $countries['CH']);
|
[ci] Testing with UTC hides bugs
|
symfony_locale
|
train
|
php
|
07ecaa614b127a1a0b319c5f8eab3d6cad630ddc
|
diff --git a/activestorage/app/models/active_storage/blob.rb b/activestorage/app/models/active_storage/blob.rb
index <HASH>..<HASH> 100644
--- a/activestorage/app/models/active_storage/blob.rb
+++ b/activestorage/app/models/active_storage/blob.rb
@@ -205,8 +205,8 @@ class ActiveStorage::Blob < ActiveRecord::Base
# blobs. Note, though, that deleting the file off the service will initiate a HTTP connection to the service, which may
# be slow or prevented, so you should not use this method inside a transaction or in callbacks. Use #purge_later instead.
def purge
- delete
destroy
+ delete
end
# Enqueues an ActiveStorage::PurgeJob to call #purge. This is the recommended way to purge blobs from a transaction,
|
Destroy blob record before deleting stored data
|
rails_rails
|
train
|
rb
|
151ac861ed7c49d82ca5eae03ccd627123861853
|
diff --git a/src/Keboola/Syrup/Controller/ApiController.php b/src/Keboola/Syrup/Controller/ApiController.php
index <HASH>..<HASH> 100644
--- a/src/Keboola/Syrup/Controller/ApiController.php
+++ b/src/Keboola/Syrup/Controller/ApiController.php
@@ -49,10 +49,7 @@ class ApiController extends BaseController
$this->checkMappingParams($params);
// Create new job
- /** @var JobFactory $jobFactory */
- $jobFactory = $this->container->get('syrup.job_factory');
- $jobFactory->setStorageApiClient($this->storageApi);
- $job = $jobFactory->create('run', $params);
+ $job = $this->createJob('run', $params);
// Add job to Elasticsearch
try {
@@ -131,7 +128,6 @@ class ApiController extends BaseController
}
/**
- * @deprecated
* @param string $command
* @param array $params
* @return JobInterface
|
fix: wrongly deprecated method
|
keboola_syrup
|
train
|
php
|
0e6c5058cbd2a74d1606070f11ed3dd8c797adf8
|
diff --git a/Lib/ufo2ft/featureWriters/kernFeatureWriter.py b/Lib/ufo2ft/featureWriters/kernFeatureWriter.py
index <HASH>..<HASH> 100644
--- a/Lib/ufo2ft/featureWriters/kernFeatureWriter.py
+++ b/Lib/ufo2ft/featureWriters/kernFeatureWriter.py
@@ -69,6 +69,8 @@ class KernFeatureWriter(BaseFeatureWriter):
ctx.rightClassKerning = {}
ctx.classPairKerning = {}
+ return ctx
+
def _write(self):
self._collectFeaClasses()
|
[kernFeatureWriter] set_context must return updated namespace
so that subclasses can call the super's method and assign the returned
value to a variable and keep updating the context namespace.
|
googlefonts_ufo2ft
|
train
|
py
|
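The contract described above, with subclasses calling the parent's set_context and extending the returned namespace, in a minimal sketch with hypothetical signatures:

```python
from types import SimpleNamespace

class BaseFeatureWriter:
    def set_context(self, font=None):
        ctx = SimpleNamespace(font=font)  # hypothetical context namespace
        return ctx                        # returning it is the whole contract

class KernFeatureWriter(BaseFeatureWriter):
    def set_context(self, font=None):
        ctx = super().set_context(font)   # relies on the parent returning ctx
        ctx.classPairKerning = {}         # subclass keeps extending the namespace
        return ctx
```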
1dc2c9964b3af2f089d6c4a81b3a8249584c34eb
|
diff --git a/src/Tokens/Element.php b/src/Tokens/Element.php
index <HASH>..<HASH> 100644
--- a/src/Tokens/Element.php
+++ b/src/Tokens/Element.php
@@ -211,7 +211,7 @@ class Element extends AbstractToken
$token = TokenFactory::buildFromHtml(
$remainingHtml,
- null,
+ $this,
$this->getThrowOnError()
);
diff --git a/tests/Tokens/ElementTest.php b/tests/Tokens/ElementTest.php
index <HASH>..<HASH> 100644
--- a/tests/Tokens/ElementTest.php
+++ b/tests/Tokens/ElementTest.php
@@ -232,7 +232,9 @@ class ElementTest extends \PHPUnit_Framework_TestCase
*/
public function testImpliedClosingTag($html, $expectedArray)
{
-
+ $element = new Element();
+ $element->parse($html);
+ $this->assertEquals($expectedArray, $element->toArray());
}
public function impliedClosingTagDataProvider()
|
Added element tests and element bug fix.
|
kevintweber_HtmlTokenizer
|
train
|
php,php
|
086083db0eb4dc505702217a32905968016a6e2b
|
diff --git a/spec/unit/parser_spec.rb b/spec/unit/parser_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/unit/parser_spec.rb
+++ b/spec/unit/parser_spec.rb
@@ -33,9 +33,18 @@ describe Metar::Parser do
parser.date. should == Date.new(2010, 2, 6)
end
- it 'observer_real' do
- parser = setup_parser('PAIL', "2010/02/06 16:10\nPAIL 061610Z 24006KT 1 3/4SM -SN BKN016 OVC030 M17/M20 A2910 RMK AO2 P0000")
- parser.observer. should == :real
+ context '.observer' do
+
+ it 'real' do
+ parser = setup_parser('PAIL', "2010/02/06 16:10\nPAIL 061610Z 24006KT 1 3/4SM -SN BKN016 OVC030 M17/M20 A2910 RMK AO2 P0000")
+ parser.observer. should == :real
+ end
+
+ it 'corrected' do
+ parser = setup_parser('PAIL', "2010/02/06 16:10\nPAIL 061610Z COR 24006KT 1 3/4SM -SN BKN016 OVC030 M17/M20 A2910 RMK AO2 P0000")
+ parser.observer. should == :corrected
+ end
+
end
it 'wind' do
|
Example for parsing of observer 'corrected'
|
joeyates_metar-parser
|
train
|
rb
|
e2dea14e0573d9b4293d2eef13b03ec038c203ea
|
diff --git a/src/Illuminate/Foundation/Console/EventListCommand.php b/src/Illuminate/Foundation/Console/EventListCommand.php
index <HASH>..<HASH> 100644
--- a/src/Illuminate/Foundation/Console/EventListCommand.php
+++ b/src/Illuminate/Foundation/Console/EventListCommand.php
@@ -4,7 +4,6 @@ namespace Illuminate\Foundation\Console;
use Closure;
use Illuminate\Console\Command;
-use Illuminate\Foundation\Support\Providers\EventServiceProvider;
use Illuminate\Support\Str;
use ReflectionFunction;
@@ -58,12 +57,6 @@ class EventListCommand extends Command
{
$events = [];
- foreach ($this->laravel->getProviders(EventServiceProvider::class) as $provider) {
- $providerEvents = array_merge_recursive($provider->shouldDiscoverEvents() ? $provider->discoverEvents() : [], $provider->listens());
-
- $events = array_merge_recursive($events, $providerEvents);
- }
-
$events = $this->addListenersOnDispatcher($events);
if ($this->filteringByEvent()) {
|
Remove extra code from event list (#<I>)
|
laravel_framework
|
train
|
php
|
7b80400de3b1670b6349a1616df98a7fa0578e81
|
diff --git a/src/Prooph/EventStore/Aggregate/AggregateRepository.php b/src/Prooph/EventStore/Aggregate/AggregateRepository.php
index <HASH>..<HASH> 100644
--- a/src/Prooph/EventStore/Aggregate/AggregateRepository.php
+++ b/src/Prooph/EventStore/Aggregate/AggregateRepository.php
@@ -66,6 +66,7 @@ class AggregateRepository
$this->aggregateTranslator = $aggregateTranslator;
$this->streamStrategy = $streamStrategy;
+ $this->aggregateType = $aggregateType;
}
/**
|
Fix AggregateType is ignored in repository
|
prooph_event-store
|
train
|
php
|
afc18c7479e0d454b002f05d463b4f8778de02e1
|
diff --git a/api_ai/schema_handlers.py b/api_ai/schema_handlers.py
index <HASH>..<HASH> 100644
--- a/api_ai/schema_handlers.py
+++ b/api_ai/schema_handlers.py
@@ -194,9 +194,9 @@ class IntentGenerator(SchemaHandler):
mapping = {}
for a in [a for a in annotations if a]:
for annotation, entity in a.items():
- mapping.update({annotation:entity})
+ mapping.update({str(annotation):str(entity)})
for synonym in self.get_synonyms(annotation, entity):
- mapping.update({synonym:entity})
+ mapping.update({str(synonym):str(entity)})
for phrase in [p for p in phrases if p]:
if phrase != '':
|
Fix. Issue #<I>. Treat entities and synonyms as strings when building entity_map.
|
treethought_flask-assistant
|
train
|
py
|
877a78da4faa3cff819159a6869ba67c86d23f7e
|
diff --git a/src/Service/Config/Validation.php b/src/Service/Config/Validation.php
index <HASH>..<HASH> 100644
--- a/src/Service/Config/Validation.php
+++ b/src/Service/Config/Validation.php
@@ -375,10 +375,10 @@ class Validation extends Fallback
// Check if the combination of class and method is blacklisted.
foreach ($this->methodBlacklist as $classname => $debugMethod) {
- if (is_a($data, $classname) === true) {
- if (in_array($method, $debugMethod, true)) {
- return false;
- }
+ if (is_a($data, $classname) === true &&
+ in_array($method, $debugMethod, true) === true
+ ) {
+ return false;
}
}
|
Fixed a SonarQube finding in the configuration validation class.
|
brainworxx_kreXX
|
train
|
php
|
cceedcac4e84827316b0c7f7ce8857427a26d577
|
diff --git a/tests/unittests/test_commands.py b/tests/unittests/test_commands.py
index <HASH>..<HASH> 100644
--- a/tests/unittests/test_commands.py
+++ b/tests/unittests/test_commands.py
@@ -218,11 +218,11 @@ class TestText(ShoebotTestCase):
"""
font("Inconsolata", var_wdth=100, var_wght=200)
fontstr = font()
- self.assertEquals(fontstr, "Inconsolata @wdth=100,wght=200")
+ self.assertEqual(fontstr, "Inconsolata @wdth=100,wght=200")
font("Inconsolata", vars={"wdth": 50, "wght": 400})
fontstr = font()
- self.assertEquals(fontstr, "Inconsolata @wdth=50,wght=400")
+ self.assertEqual(fontstr, "Inconsolata @wdth=50,wght=400")
class TestFontUtils(ShoebotTestCase):
@@ -233,7 +233,7 @@ class TestFontUtils(ShoebotTestCase):
"""
output = fontnames()
self.assertIsInstance(output, list)
- self.assertRegexpMatches(output[0], r"(.*)\s(.*)")
+ self.assertRegex(output[0], r"(.*)\s(.*)")
if __name__ == "__main__":
|
Fix use of deprecated assertions in tests.
|
shoebot_shoebot
|
train
|
py
|
e4a4fe97bc404b0a6c12f43f6a3788e34f378279
|
diff --git a/grimoire_elk/elk/discourse.py b/grimoire_elk/elk/discourse.py
index <HASH>..<HASH> 100644
--- a/grimoire_elk/elk/discourse.py
+++ b/grimoire_elk/elk/discourse.py
@@ -206,9 +206,10 @@ class DiscourseEnrich(Enrich):
# The first post is the first published, and it is the question
first_post = topic['post_stream']['posts'][0]
eitem['category_id'] = topic['category_id']
- eitem['category_name'] = self.categories[topic['category_id']]
eitem['categories'] = self.__related_categories(topic['category_id'])
- eitem['categories'] += [eitem['category_name']]
+ if topic['category_id'] in self.categories:
+ eitem['category_name'] = self.categories[topic['category_id']]
+ eitem['categories'] += [eitem['category_name']]
eitem['url'] = eitem['origin'] + "/t/" + first_post['topic_slug']
eitem['url'] += "/" + str(first_post['topic_id']) + "/" + str(first_post['post_number'])
eitem['display_username'] = first_post['display_username']
|
[enrich][discourse] Support category ids that do not appear in the category names list.
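The added membership check is the usual way to avoid a `KeyError` when an id has no entry in the lookup table. A small standalone sketch of the same pattern, with made-up category data rather than the real enricher:

```python
categories = {1: "support", 2: "development"}  # id -> name lookup


def enrich(topic):
    item = {"category_id": topic["category_id"], "categories": []}
    # Only resolve the name when the id is actually known; otherwise
    # leave the name out instead of raising KeyError.
    if topic["category_id"] in categories:
        item["category_name"] = categories[topic["category_id"]]
        item["categories"].append(item["category_name"])
    return item


print(enrich({"category_id": 2}))   # resolves to "development"
print(enrich({"category_id": 99}))  # unknown id, no category_name key
```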
|
chaoss_grimoirelab-elk
|
train
|
py
|
d945672b89be7957b268e01df1dc6425de5932b1
|
diff --git a/src/touch.js b/src/touch.js
index <HASH>..<HASH> 100644
--- a/src/touch.js
+++ b/src/touch.js
@@ -57,6 +57,8 @@
cancelLongTap()
touch.x2 = e.touches[0].pageX
touch.y2 = e.touches[0].pageY
+ if (Math.abs(touch.x1 - touch.x2) > 10)
+ e.preventDefault()
})
.bind('touchend', function(e){
cancelLongTap()
|
Prevent scrolling when swipe up/down is detected
|
madrobby_zepto
|
train
|
js
|
736347b337c239fcd6d592db5b29e819f753c1ba
|
diff --git a/exchangelib/version.py b/exchangelib/version.py
index <HASH>..<HASH> 100644
--- a/exchangelib/version.py
+++ b/exchangelib/version.py
@@ -37,6 +37,7 @@ VERSIONS = {
'Exchange2015': ('Exchange2015', 'Microsoft Exchange Server 2015'),
'Exchange2015_SP1': ('Exchange2015_SP1', 'Microsoft Exchange Server 2015 SP1'),
'Exchange2016': ('Exchange2016', 'Microsoft Exchange Server 2016'),
+ 'Exchange2019': ('Exchange2019', 'Microsoft Exchange Server 2019'),
}
# Build a list of unique API versions, used when guessing API version supported by the server. Use reverse order so we
@@ -67,6 +68,7 @@ class Build(object):
15: {
0: 'Exchange2013', # Minor builds starting from 847 are Exchange2013_SP1, see api_version()
1: 'Exchange2016',
+ 2: 'Exchange2019',
20: 'Exchange2016', # This is Office365. See issue #221
},
}
@@ -164,6 +166,7 @@ EXCHANGE_2010_SP2 = Build(14, 2)
EXCHANGE_2013 = Build(15, 0)
EXCHANGE_2013_SP1 = Build(15, 0, 847)
EXCHANGE_2016 = Build(15, 1)
+EXCHANGE_2019 = Build(15, 2)
@python_2_unicode_compatible
|
Support Exchange Server version <I> (#<I>)
|
ecederstrand_exchangelib
|
train
|
py
|
380d82b0782986a272dca89ce70d668a7cdede38
|
diff --git a/meta/store.go b/meta/store.go
index <HASH>..<HASH> 100644
--- a/meta/store.go
+++ b/meta/store.go
@@ -256,6 +256,7 @@ func (s *Store) Open() error {
// Wait for a leader to be elected so we know the raft log is loaded
// and up to date
+ <-s.ready
return s.WaitForLeader(0)
}
|
Fix race in local node creation
It was possible for the metastore Open call to return before it actually created its local node.
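The fix is conceptually language-agnostic: Open must block on a readiness signal before it reports success. A rough Python analogue of the idea using `threading.Event` — class and field names are illustrative, not InfluxDB's:

```python
import threading
import time


class Store:
    def __init__(self):
        self._ready = threading.Event()

    def _init_local_node(self):
        time.sleep(0.1)          # simulate slow setup work
        self.local_node = "node-1"
        self._ready.set()        # signal that initialisation finished

    def open(self):
        threading.Thread(target=self._init_local_node).start()
        # Without this wait, open() could return before local_node exists.
        self._ready.wait()
        return self.local_node


print(Store().open())
```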
|
influxdata_influxdb
|
train
|
go
|
b3b5412ea72034743524f448e8be2da9c450ae11
|
diff --git a/lib/generators/rails/active_record/active_record_generator.rb b/lib/generators/rails/active_record/active_record_generator.rb
index <HASH>..<HASH> 100644
--- a/lib/generators/rails/active_record/active_record_generator.rb
+++ b/lib/generators/rails/active_record/active_record_generator.rb
@@ -44,5 +44,5 @@ if defined?(Rails::Generators::ModelGenerator) && defined?(ActiveRecord)
end
else
- raise "ignore this file"
+ raise LoadError.new "ignore this file #{__FILE__.sub(/.rb$/, '')}"
end
|
Suppress the warning when Rails tries to find the generator
|
mkristian_ixtlan-generators
|
train
|
rb
|
195a0686d3b1d75ca0899ad8fdc65fea118f49ee
|
diff --git a/pandas/tests/io/parser/test_network.py b/pandas/tests/io/parser/test_network.py
index <HASH>..<HASH> 100644
--- a/pandas/tests/io/parser/test_network.py
+++ b/pandas/tests/io/parser/test_network.py
@@ -22,14 +22,11 @@ from pandas.io.parsers import read_csv
@pytest.mark.network
-@pytest.mark.parametrize(
- "compress_type, extension",
- icom._compression_to_extension.items(),
-)
@pytest.mark.parametrize("mode", ["explicit", "infer"])
@pytest.mark.parametrize("engine", ["python", "c"])
-def test_compressed_urls(salaries_table, compress_type, extension, mode, engine):
- check_compressed_urls(salaries_table, compress_type, extension, mode, engine)
+def test_compressed_urls(salaries_table, mode, engine, compression_only):
+ extension = icom._compression_to_extension[compression_only]
+ check_compressed_urls(salaries_table, compression_only, extension, mode, engine)
@tm.network
|
Fix Zstandard compression unit test (#<I>)
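The change replaces a hand-rolled parametrize list with a shared fixture, so the test follows whatever compression types the fixture provides. A generic pytest sketch of that pattern — the fixture name and values below are illustrative, not the pandas ones:

```python
import pytest


# A parametrized fixture: every test that requests it runs once per value,
# and new values only need to be added here.
@pytest.fixture(params=["gzip", "bz2", "zstd"])
def compression(request):
    return request.param


def test_reports_compression(compression):
    extension = {"gzip": ".gz", "bz2": ".bz2", "zstd": ".zst"}[compression]
    assert extension.startswith(".")
```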
|
pandas-dev_pandas
|
train
|
py
|
ae7791d30bdcde97332f6777102a6cfdf9a4d66e
|
diff --git a/salt/log/handlers/__init__.py b/salt/log/handlers/__init__.py
index <HASH>..<HASH> 100644
--- a/salt/log/handlers/__init__.py
+++ b/salt/log/handlers/__init__.py
@@ -178,7 +178,7 @@ if sys.version_info < (3, 2):
self.queue.put_nowait(record)
except self.queue.Full:
sys.stderr.write('[WARNING ] Message queue is full, '
- 'unable to write "{0}" to log', record
+ 'unable to write "{0}" to log'.format(record)
)
def prepare(self, record):
|
Missing `format` in the call to write.
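The bug was passing the format argument to `write()` itself, which accepts only a single string; `format()` has to be applied to the string first. A tiny sketch of the difference, with a made-up record value:

```python
import sys

record = "minion-01 status"

# Broken: write() takes exactly one string argument, so passing the record
# as a second argument raises TypeError instead of interpolating it.
# sys.stderr.write('unable to write "{0}" to log', record)

# Correct: interpolate first, then hand write() the finished string.
sys.stderr.write('unable to write "{0}" to log\n'.format(record))
```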
|
saltstack_salt
|
train
|
py
|
22ebd9e5feab3ef6c818a30b0981600589002139
|
diff --git a/storage/ctab/src/main/java/org/openscience/cdk/io/MDLV3000Writer.java b/storage/ctab/src/main/java/org/openscience/cdk/io/MDLV3000Writer.java
index <HASH>..<HASH> 100644
--- a/storage/ctab/src/main/java/org/openscience/cdk/io/MDLV3000Writer.java
+++ b/storage/ctab/src/main/java/org/openscience/cdk/io/MDLV3000Writer.java
@@ -627,8 +627,8 @@ public final class MDLV3000Writer extends DefaultChemObjectWriter {
final Point2d p1 = bracket.getFirstPoint();
final Point2d p2 = bracket.getSecondPoint();
writer.write("9");
- writer.write(' ').write(p1.x).write(' ').write(DECIMAL_FORMAT.format(p1.y)).write(" 0");
- writer.write(' ').write(p2.x).write(' ').write(DECIMAL_FORMAT.format(p2.y)).write(" 0");
+ writer.write(' ').write(DECIMAL_FORMAT.format(p1.x)).write(' ').write(DECIMAL_FORMAT.format(p1.y)).write(" 0");
+ writer.write(' ').write(DECIMAL_FORMAT.format(p2.x)).write(' ').write(DECIMAL_FORMAT.format(p2.y)).write(" 0");
writer.write(" 0 0 0");
writer.write(")");
}
|
X coordinates should also be formatted using the same format
|
cdk_cdk
|
train
|
java
|
4b6ba4951488c3c52a0cad31d1f112c6458589ba
|
diff --git a/cucumber/ios/features/step_definitions/shared_steps.rb b/cucumber/ios/features/step_definitions/shared_steps.rb
index <HASH>..<HASH> 100644
--- a/cucumber/ios/features/step_definitions/shared_steps.rb
+++ b/cucumber/ios/features/step_definitions/shared_steps.rb
@@ -1,4 +1,4 @@
-Given(/^I see the (controls|gestures|scrolls|special|tapping) tab$/) do |tab|
+Given(/^I see the (controls|gestures|scrolls|special) tab$/) do |tab|
wait_for_view('tabBarButton')
case tab
when 'controls'
@@ -9,8 +9,6 @@ Given(/^I see the (controls|gestures|scrolls|special|tapping) tab$/) do |tab|
index = 2
when 'special'
index = 3
- when 'tapping'
- index = 4
end
tap("tabBarButton index:#{index}")
expected_view = "#{tab} page"
|
iOS Cucumber: remove tapping from shared_steps
|
calabash_calabash
|
train
|
rb
|
4148d847c15320ad048c2afe04a3bb5a10351e4a
|
diff --git a/lib/geminabox.rb b/lib/geminabox.rb
index <HASH>..<HASH> 100644
--- a/lib/geminabox.rb
+++ b/lib/geminabox.rb
@@ -55,7 +55,7 @@ class Geminabox < Sinatra::Base
end
get '/api/v1/dependencies' do
- query_gems = (params[:gems] or '').split(',')
+ query_gems = params[:gems].to_s.split(',')
deps = query_gems.inject([]){|memo, query_gem| memo + gem_dependencies(query_gem) }
Marshal.dump(deps)
end
diff --git a/test/integration/dependency_api/dependencies_api_test.rb b/test/integration/dependency_api/dependencies_api_test.rb
index <HASH>..<HASH> 100644
--- a/test/integration/dependency_api/dependencies_api_test.rb
+++ b/test/integration/dependency_api/dependencies_api_test.rb
@@ -65,6 +65,11 @@ class DependenciesApiTest < Geminabox::TestCase
assert_equal expected, deps
end
+ test "dependency api with empty params" do
+ deps = Marshal.load HTTPClient.new.get_content(url_for("api/v1/dependencies"))
+ assert_equal [], deps
+ end
+
protected
def fetch_deps(*gems)
Marshal.load HTTPClient.new.get_content(url_for("api/v1/dependencies?gems=#{gems.join(",")}"))
|
Test the blank API request bundler now makes.
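The Ruby change (`params[:gems].to_s.split(',')`) lets the endpoint tolerate a request with no `gems` parameter at all. The same defensive pattern written in Python for comparison, with a hypothetical params dict:

```python
def dependencies(params):
    # A missing or None "gems" value becomes an empty string, so split()
    # yields an empty list instead of raising AttributeError.
    query_gems = (params.get("gems") or "").split(",")
    return [g for g in query_gems if g]


print(dependencies({"gems": "rack,rails"}))  # ['rack', 'rails']
print(dependencies({}))                      # []
```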
|
geminabox_geminabox
|
train
|
rb,rb
|
f02aef884897ec446c57331f19a1092185cd741e
|
diff --git a/lib/convert_ids.js b/lib/convert_ids.js
index <HASH>..<HASH> 100644
--- a/lib/convert_ids.js
+++ b/lib/convert_ids.js
@@ -10,7 +10,7 @@ module.exports = async (property, ids, fromWdIds) => {
ids = uniq(ids)
if (ids.length === 0) return output({})
const sparqlBuilder = fromWdIds ? fromWbIds : fromExternalIds
- const sparqlRequests = chunk(ids, 100).map(sparqlBuilder(property))
+ const sparqlRequests = chunk(ids, 1000).map(sparqlBuilder(property))
const results = initResultsObj(ids)
getResultsSequentially(sparqlRequests, results, fromWdIds)
|
convert_ids: increase batch size
as it seems to get faster results
|
maxlath_wikidata-cli
|
train
|
js
|
a75c99eb12ce3c68c07a7a3ed7ed4090a9d30e79
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -4,7 +4,7 @@ import sys
VERSION = '1.46'
-deps = ['psutil', 'colorama', 'six']
+install_requires = ['psutil', 'colorama', 'six']
if sys.version_info < (3,4):
deps.append('pathlib')
@@ -20,7 +20,7 @@ setup(name='thefuck',
'tests', 'release']),
include_package_data=True,
zip_safe=False,
- install_requires=deps,
+ install_requires=install_requires,
entry_points={'console_scripts': [
'thefuck = thefuck.main:main',
'thefuck-alias = thefuck.shells:app_alias']})
|
change deps to install_requires
|
nvbn_thefuck
|
train
|
py
|
1ceb8c7c074e84249737c1f2674e73e4c6d83b86
|
diff --git a/src/nu/validator/json/Serializer.java b/src/nu/validator/json/Serializer.java
index <HASH>..<HASH> 100644
--- a/src/nu/validator/json/Serializer.java
+++ b/src/nu/validator/json/Serializer.java
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2007 Mozilla Foundation
+ * Copyright (c) 2007-2018 Mozilla Foundation
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
@@ -222,6 +222,7 @@ public class Serializer implements JsonHandler {
switch (state) {
case OBJECT:
writer.write('}');
+ writer.flush();
pop();
first = false;
if (peek() == State.VALUE) {
|
Flush JSON output after emitting each JSON object
This change causes the output stream the checker uses for JSON output to
be flushed after emitting each JSON object. That prevents a problem
where the output can get truncated unexpectedly when it exceeds 8KB.
Fixes <URL>
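The same truncation symptom shows up with any buffered writer that is never flushed: whatever sits past the last full buffer stays in memory. A short Python illustration of flushing after each serialized object — the file name and objects are made up:

```python
import json

events = [{"type": "info", "message": f"object {i}"} for i in range(3)]

with open("out.jsonl", "w", buffering=8192) as fh:
    for event in events:
        fh.write(json.dumps(event) + "\n")
        # Flush after every object so a reader tailing the stream never
        # sees output cut off at a buffer boundary.
        fh.flush()
```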
|
validator_validator
|
train
|
java
|
486a3ca14f0a24a491272e64a915190ddf7b3c1a
|
diff --git a/src/mongo/util/IndexUtils.class.php b/src/mongo/util/IndexUtils.class.php
index <HASH>..<HASH> 100644
--- a/src/mongo/util/IndexUtils.class.php
+++ b/src/mongo/util/IndexUtils.class.php
@@ -38,7 +38,7 @@ class IndexUtils
foreach ($indexes as $indexName=>$fields)
{
$indexName = substr($indexName,0,127); // ensure max 128 chars
- if (is_int($indexName))
+ if (is_numeric($indexName))
{
// no name
$db->selectCollection($collectionName)->ensureIndex($fields,array("background"=>$background));
|
Updated is_int to is_numeric
|
talis_tripod-php
|
train
|
php
|
637b7e9623a16a2ae81c02a1826abc818a21db5a
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -13,7 +13,7 @@ setup(name='RPLCD',
url='https://github.com/dbrgn/RPLCD',
license='MIT',
keywords='raspberry, raspberry pi, lcd, liquid crystal, hitachi, hd44780',
- packages=['RPLCD', 'RPLCD_Tests'],
+ packages=['RPLCD', 'RPLCD.codecs', 'RPLCD_Tests'],
entry_points={
'console_scripts': ['rplcd-tests=RPLCD_Tests.entrypoint:run'],
},
|
Fix packages in setup.py (#<I>)
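setuptools installs only the packages named in `packages`, so a subpackage such as `RPLCD.codecs` must be listed explicitly (as the fix does) or discovered automatically. A hedged alternative sketch using `find_packages()` — not what the project chose, just the other common option, with an invented package name:

```python
from setuptools import setup, find_packages

setup(
    name="example-pkg",   # illustrative metadata, not RPLCD's
    version="0.1",
    # find_packages() walks the source tree and picks up every directory
    # with an __init__.py, so new subpackages cannot be forgotten.
    packages=find_packages(exclude=["tests", "tests.*"]),
)
```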
|
dbrgn_RPLCD
|
train
|
py
|
1a3ceeb4305da20f11d07ec37597477149d7fa17
|
diff --git a/lib/ansiblelint/skip_utils.py b/lib/ansiblelint/skip_utils.py
index <HASH>..<HASH> 100644
--- a/lib/ansiblelint/skip_utils.py
+++ b/lib/ansiblelint/skip_utils.py
@@ -1,4 +1,4 @@
-# (c) 2012-2019, Ansible by Red Hat
+# (c) 2019, Ansible by Red Hat
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
@@ -18,13 +18,12 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
-"""This module contains utils related to inline skiping of rules."""
-
+"""Utils related to inline skipping of rules."""
from itertools import product
import ruamel.yaml
-INLINE_SKIP_FLAG = '# noqa'
+INLINE_SKIP_FLAG = '# noqa '
def get_rule_skips_from_line(line):
@@ -138,7 +137,7 @@ def _get_tasks_from_blocks(task_blocks):
def _get_rule_skips_from_yaml(yaml_input):
- """Travese yaml for comments with rule skips and return list of rules."""
+ """Traverse yaml for comments with rule skips and return list of rules."""
def traverse_yaml(obj):
yaml_comment_obj_strs.append(str(obj.ca.items))
if isinstance(obj, dict):
|
Fix a few nitpicks in skip_utils.py
|
ansible_ansible-lint
|
train
|
py
|
3c1700860c498fe6405f6620890411c95e17c456
|
diff --git a/lib/delfos/code_location/call_site.rb b/lib/delfos/code_location/call_site.rb
index <HASH>..<HASH> 100644
--- a/lib/delfos/code_location/call_site.rb
+++ b/lib/delfos/code_location/call_site.rb
@@ -15,10 +15,6 @@ module Delfos
@called_method = called_method
end
- def paths
- [raw_path, container_method_path, called_method_path]
- end
-
def container_method_path
container_method.raw_path
end
diff --git a/lib/delfos/method_trace/handler.rb b/lib/delfos/method_trace/handler.rb
index <HASH>..<HASH> 100644
--- a/lib/delfos/method_trace/handler.rb
+++ b/lib/delfos/method_trace/handler.rb
@@ -16,7 +16,7 @@ module Delfos
end
def relevant?
- AppDirectories.include_files?(*call_site.paths)
+ AppDirectories.include_files?(call_site.called_method_path)
end
STACK_OFFSET = 5
|
only make logging dependent on call_site.called_method_path
|
ruby-analysis_delfos
|
train
|
rb,rb
|
8dae5e07e347153c693b9cf243fadf8229c08fff
|
diff --git a/Command/GenerateDoctrineFormCommand.php b/Command/GenerateDoctrineFormCommand.php
index <HASH>..<HASH> 100644
--- a/Command/GenerateDoctrineFormCommand.php
+++ b/Command/GenerateDoctrineFormCommand.php
@@ -66,9 +66,8 @@ EOT
$entityClass = $this->getContainer()->get('doctrine')->getAliasNamespace($bundle).'\\'.$entity;
$metadata = $this->getEntityMetadata($entityClass);
$bundle = $this->getApplication()->getKernel()->getBundle($bundle);
+ $generator = $this->getGenerator($bundle);
- $generator = new DoctrineFormGenerator($this->getContainer()->get('filesystem'));
- $generator->setSkeletonDirs($this->getSkeletonDirs($bundle));
$generator->generate($bundle, $entity, $metadata[0]);
$output->writeln(sprintf(
|
Fixed FormCommand to use unified generator interface
|
sensiolabs_SensioGeneratorBundle
|
train
|
php
|
bccd3f85ed7c13331c8a1a0225266023927effb5
|
diff --git a/app/models/user.rb b/app/models/user.rb
index <HASH>..<HASH> 100644
--- a/app/models/user.rb
+++ b/app/models/user.rb
@@ -62,6 +62,14 @@ class User < ActiveRecord::Base
end
end
+ # destroy own role for user
+ before_destroy do |u|
+ u.own_role.destroy
+ unless u.own_role.destroyed?
+ Rails.logger.error error.to_s
+ end
+ end
+
# support for session (thread-local) variables
include Katello::ThreadSession::UserModel
include Ldap
|
<I> - Role - delete self-role on user delete
This commit is to ensure that a user's self-role is deleted when
the user is deleted.
|
Katello_katello
|
train
|
rb
|
2f476160a0cb824a9c81c7148bf4439953ccbb0d
|
diff --git a/restcomm/restcomm.dao/src/main/java/org/restcomm/connect/dao/mybatis/MybatisCallDetailRecordsDao.java b/restcomm/restcomm.dao/src/main/java/org/restcomm/connect/dao/mybatis/MybatisCallDetailRecordsDao.java
index <HASH>..<HASH> 100644
--- a/restcomm/restcomm.dao/src/main/java/org/restcomm/connect/dao/mybatis/MybatisCallDetailRecordsDao.java
+++ b/restcomm/restcomm.dao/src/main/java/org/restcomm/connect/dao/mybatis/MybatisCallDetailRecordsDao.java
@@ -40,7 +40,6 @@ import org.restcomm.connect.commons.annotations.concurrency.ThreadSafe;
import org.restcomm.connect.commons.dao.Sid;
import org.restcomm.connect.dao.CallDetailRecordsDao;
import org.restcomm.connect.dao.DaoUtils;
-import org.restcomm.connect.dao.common.SortDirection;
import org.restcomm.connect.dao.entities.CallDetailRecord;
import org.restcomm.connect.dao.entities.CallDetailRecordFilter;
|
FS-<I>: Fixed code style issue
|
RestComm_Restcomm-Connect
|
train
|
java
|
1b0db1a92b5798ed3ba061240dea143e14f90cff
|
diff --git a/Gruntfile.js b/Gruntfile.js
index <HASH>..<HASH> 100644
--- a/Gruntfile.js
+++ b/Gruntfile.js
@@ -78,7 +78,6 @@ module.exports = function(grunt) {
// These plugins provide necessary tasks.
grunt.loadNpmTasks('grunt-contrib-jshint');
grunt.loadNpmTasks('grunt-contrib-clean');
- grunt.loadNpmTasks('grunt-contrib-copy');
grunt.loadNpmTasks('grunt-mocha-istanbul');
grunt.loadNpmTasks('grunt-mocha-test');
|
Remove unnecessary task load from Gruntfile
|
Wirecloud_grunt-wirecloud
|
train
|
js
|
ba2926caad6c33c4ba4a0db73beace4fb787d006
|
diff --git a/src/main/java/io/airlift/slice/Slice.java b/src/main/java/io/airlift/slice/Slice.java
index <HASH>..<HASH> 100644
--- a/src/main/java/io/airlift/slice/Slice.java
+++ b/src/main/java/io/airlift/slice/Slice.java
@@ -659,7 +659,7 @@ public final class Slice
compareLength--;
}
- return Integer.compare(size, that.size);
+ return Integer.compare(length, otherLength);
}
/**
|
Fix bug in Slice.compareTo(int, int, Slice, int, int)
|
airlift_slice
|
train
|
java
|
0f8176235e21379bb683b0e35b2fc25de63be1dc
|
diff --git a/lib/statistrano.rb b/lib/statistrano.rb
index <HASH>..<HASH> 100644
--- a/lib/statistrano.rb
+++ b/lib/statistrano.rb
@@ -47,11 +47,15 @@ module Statistrano
# @return [Statistrano::Deployment::Base]
def define_deployment name, type=:base
- @deployment = Statistrano::Deployment.const_get(type.to_s.capitalize).new( name )
+ begin
+ @deployment = Statistrano::Deployment.const_get(type.to_s.capitalize).new( name )
+ rescue NameError
+ LOG.error "The deployment type '#{type}' is not defined"
+ end
yield(@deployment.config) if block_given?
-
return @deployment
+
end
end
|
add error handling for undefined deployment types
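The Ruby fix wraps `const_get` in a rescue so an unknown deployment type is logged instead of crashing with a `NameError`. A rough Python analogue of the same guard, with module and class names invented for illustration:

```python
import logging


class Base:
    def __init__(self, name):
        self.name = name


DEPLOYMENT_TYPES = {"base": Base}


def define_deployment(name, type_="base"):
    try:
        cls = DEPLOYMENT_TYPES[type_]
    except KeyError:
        logging.error("The deployment type '%s' is not defined", type_)
        return None
    return cls(name)


print(define_deployment("site", "base"))   # creates a Base deployment
print(define_deployment("site", "rsync"))  # logs an error, returns None
```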
|
mailchimp_statistrano
|
train
|
rb
|
6ffcc4e9ae18ac2df63c24cff68f02be3d140889
|
diff --git a/preview/preview.js b/preview/preview.js
index <HASH>..<HASH> 100644
--- a/preview/preview.js
+++ b/preview/preview.js
@@ -179,7 +179,8 @@
tileLayerCode += "'" + escapeHtml(options[option]) + "'";
//jshint quotmark:single
} else {
- tileLayerCode += options[option];
+ /* global JSON:true */
+ tileLayerCode += JSON.stringify(options[option]);
}
}
tileLayerCode += '\n});\n';
|
fixed source generation for arrays and objects
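The underlying issue is generic: splicing an array or object into generated source needs a real serializer, not the language's default string conversion. A Python version of the contrast — the option values are made up:

```python
import json

options = {"detectRetina": True, "subdomains": ["a", "b", "c"]}

for key, value in options.items():
    naive = "{0}: {1}".format(key, value)               # str() gives True / ['a', 'b', 'c']
    proper = "{0}: {1}".format(key, json.dumps(value))  # gives true / ["a", "b", "c"]
    print(naive, "|", proper)
```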
|
leaflet-extras_leaflet-providers
|
train
|
js
|
78a555bca051581f26b830d7c7183adc75fc939f
|
diff --git a/addon/components/select-2.js b/addon/components/select-2.js
index <HASH>..<HASH> 100644
--- a/addon/components/select-2.js
+++ b/addon/components/select-2.js
@@ -333,7 +333,7 @@ var Select2Component = Ember.Component.extend({
break;
}
}
- } else {
+ } else {
// ...or flat data structure: try to match simple item
matchIndex = values.indexOf("" + get(item, optionValuePath));
if (matchIndex !== -1) {
|
Fix an issue where, when compiling for production, a strange character is inserted in this whitespace
|
iStefo_ember-select-2
|
train
|
js
|
0dbc26e352b770a7d029e82cee4ed86dd34265ec
|
diff --git a/logger.go b/logger.go
index <HASH>..<HASH> 100644
--- a/logger.go
+++ b/logger.go
@@ -462,6 +462,7 @@ func (l *Logger) Fprint(logLevel level, calldepth int,
var ok bool
pgmC, file, line, ok = runtime.Caller(calldepth)
+ // FIXME: NOT SURE HOW TO TEST THIS!
if !ok {
file = "???"
line = 0
@@ -694,6 +695,7 @@ func (l *Logger) Write(p []byte) (n int, err error) {
} else {
n, err = w.Write(p)
}
+ // FIXME: NOT SURE HOW TO TEST THIS!
if err != nil {
return
}
|
Add FIXME for missing tests
|
demizer_go-logs
|
train
|
go
|
5101b5a0c7fc0b39800b442aaa75ef74821e1247
|
diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -63,6 +63,13 @@ exports.attach = function (server) {
return shell.setVar('newCookies', newCookies);
};
+ // Create a shell helper function for retrieving a cookie.
+ shell.getCookie = function (name) {
+ var cookies = shell.getVar(name);
+ if (!cookies) return;
+ if (cookies.hasOwnProperty(name)) return cookies[name];
+ };
+
// Setup socket.io namespace for the current shell.
sockets.of('/' + shell.settings.namespace)
.on('connection', function (socket) {
|
Added shell helper for retrieving cookies.
|
chevex-archived_shotgun-client
|
train
|
js
|
b6c3e51f4f36e71a2a4afc5aad92c4ea448a5aab
|
diff --git a/src/ErrorHandler/UnknownReferenceIgnoringErrorHandler.php b/src/ErrorHandler/UnknownReferenceIgnoringErrorHandler.php
index <HASH>..<HASH> 100644
--- a/src/ErrorHandler/UnknownReferenceIgnoringErrorHandler.php
+++ b/src/ErrorHandler/UnknownReferenceIgnoringErrorHandler.php
@@ -37,7 +37,10 @@ class UnknownReferenceIgnoringErrorHandler implements ErrorHandlerInterface
$innerException = $exception->getPrevious();
if (
$innerException instanceof SyntaxError
- && strpos($innerException->getMessage(), 'Unknown ') === 0
+ && (
+ strpos($innerException->getMessage(), 'Unknown ') === 0
+ || strpos($innerException->getMessage(), ' does not exist') !== false
+ )
) {
$this->logger->warning((string)$innerException);
} else {
|
Fix for unknown references in older Twig versions
|
mariusbalcytis_webpack-bundle
|
train
|
php
|
69ef4bacb62de1ec7304d1658bd320c8b597c14d
|
diff --git a/agrona/src/main/java/org/agrona/concurrent/HighResolutionTimer.java b/agrona/src/main/java/org/agrona/concurrent/HighResolutionTimer.java
index <HASH>..<HASH> 100644
--- a/agrona/src/main/java/org/agrona/concurrent/HighResolutionTimer.java
+++ b/agrona/src/main/java/org/agrona/concurrent/HighResolutionTimer.java
@@ -20,7 +20,7 @@ package org.agrona.concurrent;
*/
public class HighResolutionTimer
{
- private static Thread thread;
+ private static volatile Thread thread;
/**
* Has the high resolution timer been enabled?
|
[Java] thread field should be volatile.
|
real-logic_agrona
|
train
|
java
|
19d1ef63112c0bce0fab53ff7bc64d7804336cf1
|
diff --git a/NavigationReactNative/sample/twitter/App.js b/NavigationReactNative/sample/twitter/App.js
index <HASH>..<HASH> 100644
--- a/NavigationReactNative/sample/twitter/App.js
+++ b/NavigationReactNative/sample/twitter/App.js
@@ -29,7 +29,6 @@ var getSceneTitle = (state, data) => {
var notificationsNavigator = new StateNavigator(stateNavigator);
stateNavigator.navigate('home');
-notificationsNavigator.navigate('notifications');
export default () => (
Platform.OS === 'ios' ? (
@@ -40,7 +39,10 @@ export default () => (
</NavigationStack>
</NavigationHandler>
</TabBarItemIOS>
- <TabBarItemIOS title="Notifications">
+ <TabBarItemIOS title="Notifications" onPress={() => {
+ if (!notificationsNavigator.stateContext.state)
+ notificationsNavigator.navigate('notifications');
+ }}>
<NavigationHandler stateNavigator={notificationsNavigator}>
<NavigationStack title={getSceneTitle}>
</NavigationStack>
|
Waited to load notifications until the tab is pressed
|
grahammendick_navigation
|
train
|
js
|