Dataset schema: column, dtype, and min/max (value for the int64 column, string length for string columns)

Unnamed: 0      int64    0 to 2.44k
repo            string   length 32 to 81
hash            string   length 40 to 40
diff            string   length 113 to 1.17k
old_path        string   length 5 to 84
rewrite         string   length 34 to 79
initial_state   string   length 75 to 980
final_state     string   length 76 to 980
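The rows below follow this schema. As a minimal sketch of loading and sanity-checking the table (assuming it has been exported to a CSV file; `bug_fixes.csv` is a hypothetical name, not one given in this dump), one could use pandas:

```python
import pandas as pd

# Hypothetical export path; the dump itself does not name a file.
df = pd.read_csv("bug_fixes.csv")

# Columns and dtypes as described in the schema above:
# "Unnamed: 0" is int64, every other column is a string.
print(df.dtypes)
print(df[["repo", "old_path", "rewrite"]].head())

# Each hash is a full 40-character git commit SHA (length 40 to 40 above).
assert df["hash"].str.len().eq(40).all()
```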
2,300
https://:@github.com/yunojuno/django-s3-upload.git
65e49f3277f987c47d189105c6d979d666bb3004
@@ -45,7 +45,7 @@ def create_upload_data(content_type, source_filename, upload_to): bucket_url = "https://%s/%s" % (endpoint, bucket) return { - "policy": policy, + "policy": encoded, "signature": signature_b64, "key": key, "AWSAccessKeyId": access_key,
s3direct/utils.py
ReplaceText(target='encoded' @(48,18)->(48,24))
def create_upload_data(content_type, source_filename, upload_to): bucket_url = "https://%s/%s" % (endpoint, bucket) return { "policy": policy, "signature": signature_b64, "key": key, "AWSAccessKeyId": access_key,
def create_upload_data(content_type, source_filename, upload_to): bucket_url = "https://%s/%s" % (endpoint, bucket) return { "policy": encoded, "signature": signature_b64, "key": key, "AWSAccessKeyId": access_key,
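Each record pairs a one-hunk diff with a rewrite descriptor such as `ReplaceText(target='encoded' @(48,18)->(48,24))` or `ArgSwap(idxs=0<->1 @(26,11)->(26,18))`, naming the replacement and the (line, column) span it applies to. A minimal sketch of parsing the `ReplaceText` form follows; the regex is written against the examples shown in this dump, not a canonical grammar, and the `ArgSwap` form is not handled.

```python
import re

# Matches e.g. "ReplaceText(target='encoded' @(48,18)->(48,24))"
REPLACE_TEXT = re.compile(
    r"ReplaceText\(target='(?P<target>.*)' "
    r"@\((?P<line1>\d+),(?P<col1>\d+)\)->\((?P<line2>\d+),(?P<col2>\d+)\)\)"
)

def parse_replace_text(rewrite: str):
    """Return (target, start, end) for a ReplaceText descriptor, or None if it does not match."""
    m = REPLACE_TEXT.fullmatch(rewrite)
    if m is None:
        return None
    return (
        m.group("target"),
        (int(m.group("line1")), int(m.group("col1"))),
        (int(m.group("line2")), int(m.group("col2"))),
    )

print(parse_replace_text("ReplaceText(target='encoded' @(48,18)->(48,24))"))
# ('encoded', (48, 18), (48, 24))
```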
2,301
https://:@github.com/Manticore-attic/pyfft.git
01cdc24797472eec5114b68f02ce8c68fa348853
@@ -53,7 +53,7 @@ def clFFT_ExecuteInterleaved(plan, batchSize, dir, data_in, data_out): currWrite = 1 if numKernelsOdd else 2 for kInfo in kernelInfo: - if isInPlace and numKernelsOdd and not inPlaceDone and kernelInfo.in_place_possible: + if isInPlace and numKernelsOdd and not inPlaceDone and kInfo.in_place_possible: currWrite = currRead inPlaceDone = True
pycudafft/fft_execute.py
ReplaceText(target='kInfo' @(56,58)->(56,68))
def clFFT_ExecuteInterleaved(plan, batchSize, dir, data_in, data_out): currWrite = 1 if numKernelsOdd else 2 for kInfo in kernelInfo: if isInPlace and numKernelsOdd and not inPlaceDone and kernelInfo.in_place_possible: currWrite = currRead inPlaceDone = True
def clFFT_ExecuteInterleaved(plan, batchSize, dir, data_in, data_out): currWrite = 1 if numKernelsOdd else 2 for kInfo in kernelInfo: if isInPlace and numKernelsOdd and not inPlaceDone and kInfo.in_place_possible: currWrite = currRead inPlaceDone = True
2,302
https://:@github.com/sepandhaghighi/pyshutdown.git
8b1b84e10d5acc1b1ec07ae271fdd3f61a414c87
@@ -12,7 +12,7 @@ def get_method(): get_method=input("Please Enter Method , Shutdown[1] , Hibernate[2] , Restart[3]") if get_method=="2": flag="-h" - elif flag=="3": + elif get_method=="3": flag="-r" else: flag="-s"
main.py
ReplaceText(target='get_method' @(15,9)->(15,13))
def get_method(): get_method=input("Please Enter Method , Shutdown[1] , Hibernate[2] , Restart[3]") if get_method=="2": flag="-h" elif flag=="3": flag="-r" else: flag="-s"
def get_method(): get_method=input("Please Enter Method , Shutdown[1] , Hibernate[2] , Restart[3]") if get_method=="2": flag="-h" elif get_method=="3": flag="-r" else: flag="-s"
2,303
https://:@github.com/cfhamlet/os-m3-engine.git
b6a8890df08b38e6201e3b8cc18e870a7f61fda4
@@ -89,7 +89,7 @@ def create(frontend_cls='os_m3_engine.ootb.StdinFrontend', default_engine_transport_config = ENGINE_TRANSPORT_BRIDGE_CONFIG \ if backend_cls is not None else ENGINE_TRANSPORT_CONFIG - e_transport_config = engine_backend_config + e_transport_config = engine_transport_config if engine_transport_config in (ENGINE_TRANSPORT_CONFIG, ENGINE_TRANSPORT_BRIDGE_CONFIG): e_transport_config = None
src/os_m3_engine/launcher.py
ReplaceText(target='engine_transport_config' @(92,25)->(92,46))
def create(frontend_cls='os_m3_engine.ootb.StdinFrontend', default_engine_transport_config = ENGINE_TRANSPORT_BRIDGE_CONFIG \ if backend_cls is not None else ENGINE_TRANSPORT_CONFIG e_transport_config = engine_backend_config if engine_transport_config in (ENGINE_TRANSPORT_CONFIG, ENGINE_TRANSPORT_BRIDGE_CONFIG): e_transport_config = None
def create(frontend_cls='os_m3_engine.ootb.StdinFrontend', default_engine_transport_config = ENGINE_TRANSPORT_BRIDGE_CONFIG \ if backend_cls is not None else ENGINE_TRANSPORT_CONFIG e_transport_config = engine_transport_config if engine_transport_config in (ENGINE_TRANSPORT_CONFIG, ENGINE_TRANSPORT_BRIDGE_CONFIG): e_transport_config = None
2,304
https://:@github.com/ssi-dk/bifrost.git
d05050feff39d057a8db6b64d31645efbf741200
@@ -143,7 +143,7 @@ def query_ncbi_species(species_entry): if result is not None: return result["ncbi_species"] elif group_result is not None: - return result["ncbi_species"] + return group_result["ncbi_species"] else: return None except Exception as e:
lib/bifrostlib/bifrostlib/mongo_interface.py
ReplaceText(target='group_result' @(146,23)->(146,29))
def query_ncbi_species(species_entry): if result is not None: return result["ncbi_species"] elif group_result is not None: return result["ncbi_species"] else: return None except Exception as e:
def query_ncbi_species(species_entry): if result is not None: return result["ncbi_species"] elif group_result is not None: return group_result["ncbi_species"] else: return None except Exception as e:
2,305
https://:@github.com/ssi-dk/bifrost.git
6f801b0adf5cf2209dec1052337b3eff413d65b0
@@ -268,7 +268,7 @@ def update_run_report(run, n_intervals): ) def update_rerun_form(run_name): run_name = run_name.split("/")[0] - if run_name == "" or hasattr(keys, "rerun"): + if run_name == "" or not hasattr(keys, "rerun"): return None run_data = import_data.get_run(run_name)
reporter/run_checker.py
ReplaceText(target='not ' @(271,25)->(271,25))
def update_run_report(run, n_intervals): ) def update_rerun_form(run_name): run_name = run_name.split("/")[0] if run_name == "" or hasattr(keys, "rerun"): return None run_data = import_data.get_run(run_name)
def update_run_report(run, n_intervals): ) def update_rerun_form(run_name): run_name = run_name.split("/")[0] if run_name == "" or not hasattr(keys, "rerun"): return None run_data = import_data.get_run(run_name)
2,306
https://:@github.com/ayharano/pppipam.git
13ee95b04c1ccf5febedcd9568e15543b8c66365
@@ -190,7 +190,7 @@ class AddressSpace_description_TestCase(unittest.TestCase): description_str = "address 0 for ipv4" self.address_space.describe( description=description_str, - ip_parameter=network, + ip_parameter=zero_ipv4, ) self.assertEqual( self.address_space.description(
tests/test_description.py
ReplaceText(target='zero_ipv4' @(193,25)->(193,32))
class AddressSpace_description_TestCase(unittest.TestCase): description_str = "address 0 for ipv4" self.address_space.describe( description=description_str, ip_parameter=network, ) self.assertEqual( self.address_space.description(
class AddressSpace_description_TestCase(unittest.TestCase): description_str = "address 0 for ipv4" self.address_space.describe( description=description_str, ip_parameter=zero_ipv4, ) self.assertEqual( self.address_space.description(
2,307
https://:@github.com/ayharano/pppipam.git
6d57637be02e5d8597a63ba7fc3fd8525a5c2952
@@ -263,7 +263,7 @@ class AddressSpace: self.__parent_supernet[child] = as_network children_of_as_network.add(child) children_of_supernet.remove(child) - children_of_supernet.add(as_address) + children_of_supernet.add(as_network) else: raise TypeError("ip_parameter must be a valid IP parameter")
pppipam/pppipam.py
ReplaceText(target='as_network' @(266,37)->(266,47))
class AddressSpace: self.__parent_supernet[child] = as_network children_of_as_network.add(child) children_of_supernet.remove(child) children_of_supernet.add(as_address) else: raise TypeError("ip_parameter must be a valid IP parameter")
class AddressSpace: self.__parent_supernet[child] = as_network children_of_as_network.add(child) children_of_supernet.remove(child) children_of_supernet.add(as_network) else: raise TypeError("ip_parameter must be a valid IP parameter")
2,308
https://:@github.com/ayharano/pppipam.git
12fedde026bcd8de6562f0d3c6e3224e529c557e
@@ -289,7 +289,7 @@ class AddressSpace: self.__description[as_network] = description described = True - self.__parent_supernet[as_address] = supernet + self.__parent_supernet[as_network] = supernet children_of_as_network = ( self.__children_ip_object.setdefault(as_network, set()) )
pppipam/pppipam.py
ReplaceText(target='as_network' @(292,35)->(292,45))
class AddressSpace: self.__description[as_network] = description described = True self.__parent_supernet[as_address] = supernet children_of_as_network = ( self.__children_ip_object.setdefault(as_network, set()) )
class AddressSpace: self.__description[as_network] = description described = True self.__parent_supernet[as_network] = supernet children_of_as_network = ( self.__children_ip_object.setdefault(as_network, set()) )
2,309
https://:@github.com/alx-k/flask-jerify.git
cf3942ac578dbcb0deb358bf7bfb7c230a5d1f34
@@ -23,7 +23,7 @@ def jerror_handler(e): """http://jsonapi.org/format/#errors """ - if not hasattr('name', e): + if not hasattr(e, 'name'): raise InternalServerError(e.description) app.logger.error(e.description)
flask_jerify/flask_jerify.py
ArgSwap(idxs=0<->1 @(26,11)->(26,18))
def jerror_handler(e): """http://jsonapi.org/format/#errors """ if not hasattr('name', e): raise InternalServerError(e.description) app.logger.error(e.description)
def jerror_handler(e): """http://jsonapi.org/format/#errors """ if not hasattr(e, 'name'): raise InternalServerError(e.description) app.logger.error(e.description)
2,310
https://:@github.com/chenliangomc/RTFMaker.git
f82b42a9366a5ed2cbc83ebbe25dc6f0668f75c6
@@ -139,7 +139,7 @@ class RTable(object): self._table_elements['body'].append(new_row) html_foot = getattr(obj, 'tfoot') if html_foot: - for a_foot in html_body.find_all('td'): + for a_foot in html_foot.find_all('td'): foot_cell = { 'value': a_foot.get_text(strip=True), }
RTFMaker/utils.py
ReplaceText(target='html_foot' @(142,30)->(142,39))
class RTable(object): self._table_elements['body'].append(new_row) html_foot = getattr(obj, 'tfoot') if html_foot: for a_foot in html_body.find_all('td'): foot_cell = { 'value': a_foot.get_text(strip=True), }
class RTable(object): self._table_elements['body'].append(new_row) html_foot = getattr(obj, 'tfoot') if html_foot: for a_foot in html_foot.find_all('td'): foot_cell = { 'value': a_foot.get_text(strip=True), }
2,311
https://:@github.com/jianlins/PyFastNER.git
442227d122a7feee9766e2676cee1702c7ba645b
@@ -324,7 +324,7 @@ class FastCNER: self.logger.debug( 'try add matched rule ({}-{})\t{}'.format(match_begin, match_end, str(self.rule_store[rule_id]))) current_span.rule_id = rule_id - if key in matches: + if key in overlap_checkers: current_spans_list = matches[key] overlap_checker = overlap_checkers[key] overlapped_pos = overlap_checker.search(current_span.begin, current_span.end)
PyFastNER/FastCNER.py
ReplaceText(target='overlap_checkers' @(327,22)->(327,29))
class FastCNER: self.logger.debug( 'try add matched rule ({}-{})\t{}'.format(match_begin, match_end, str(self.rule_store[rule_id]))) current_span.rule_id = rule_id if key in matches: current_spans_list = matches[key] overlap_checker = overlap_checkers[key] overlapped_pos = overlap_checker.search(current_span.begin, current_span.end)
class FastCNER: self.logger.debug( 'try add matched rule ({}-{})\t{}'.format(match_begin, match_end, str(self.rule_store[rule_id]))) current_span.rule_id = rule_id if key in overlap_checkers: current_spans_list = matches[key] overlap_checker = overlap_checkers[key] overlapped_pos = overlap_checker.search(current_span.begin, current_span.end)
2,312
https://:@github.com/groupe-conseil-nutshimit-nippour/django-geoprisma.git
a3a4dc9a0142e237dfac5961107b7338e9ef6298
@@ -142,7 +142,7 @@ class FeatureServerProxyFactory(object): def isCreate(self): data_id = self.featureServerProxy.getID() - return data_id is not None and self.request.body != "" and self.request.method == "POST" + return data_id is None and self.request.body != "" and self.request.method == "POST" def isUpdate(self): data_id = self.featureServerProxy.getID()
geoprisma/core/proxies/featureserverproxy.py
ReplaceText(target=' is ' @(145,22)->(145,30))
class FeatureServerProxyFactory(object): def isCreate(self): data_id = self.featureServerProxy.getID() return data_id is not None and self.request.body != "" and self.request.method == "POST" def isUpdate(self): data_id = self.featureServerProxy.getID()
class FeatureServerProxyFactory(object): def isCreate(self): data_id = self.featureServerProxy.getID() return data_id is None and self.request.body != "" and self.request.method == "POST" def isUpdate(self): data_id = self.featureServerProxy.getID()
2,313
https://:@github.com/collective/mr.poe.git
dedca8e5c98124f6a43a18986e142e8cb7ecc3cf
@@ -59,7 +59,7 @@ def setup_logging(handler, exclude=['raven', 'sentry.errors']): Returns a boolean based on if logging was configured or not. """ logger = logging.getLogger() - if handler.__class__ not in map(type, logger.handlers): + if handler.__class__ in map(type, logger.handlers): return False logger.addHandler(handler)
raven/conf/__init__.py
ReplaceText(target=' in ' @(62,24)->(62,32))
def setup_logging(handler, exclude=['raven', 'sentry.errors']): Returns a boolean based on if logging was configured or not. """ logger = logging.getLogger() if handler.__class__ not in map(type, logger.handlers): return False logger.addHandler(handler)
def setup_logging(handler, exclude=['raven', 'sentry.errors']): Returns a boolean based on if logging was configured or not. """ logger = logging.getLogger() if handler.__class__ in map(type, logger.handlers): return False logger.addHandler(handler)
2,314
https://:@github.com/espenmn/medialog.mobilethemeTwo.git
21b37308d028659d0a540289ba2e1f30340a9481
@@ -39,7 +39,7 @@ class Scrape(BrowserView): parts = url.split('//', 1) this_base_url = parts[0]+'//'+parts[1].split('/', 1)[0] - if url not in scrape_whitelist: + if this_base_url not in scrape_whitelist: return "URL domain is not in whitelist" #get html from the requested url
medialog/mobilethemeTwo/views.py
ReplaceText(target='this_base_url' @(42,11)->(42,14))
class Scrape(BrowserView): parts = url.split('//', 1) this_base_url = parts[0]+'//'+parts[1].split('/', 1)[0] if url not in scrape_whitelist: return "URL domain is not in whitelist" #get html from the requested url
class Scrape(BrowserView): parts = url.split('//', 1) this_base_url = parts[0]+'//'+parts[1].split('/', 1)[0] if this_base_url not in scrape_whitelist: return "URL domain is not in whitelist" #get html from the requested url
2,315
https://:@github.com/pyrated/vinyl.git
dec097819ed4e5635c3d8f64754fa6f3531278cc
@@ -18,7 +18,7 @@ import os import sphinx_rtd_theme # We cannot install llvmlite on READTHEDOCS -if os.environ.get('READTHEDOCS') != 'True': +if os.environ.get('READTHEDOCS') == 'True': from unittest.mock import MagicMock class MockModule(MagicMock): @classmethod
docs/source/conf.py
ReplaceText(target='==' @(21,33)->(21,35))
import os import sphinx_rtd_theme # We cannot install llvmlite on READTHEDOCS if os.environ.get('READTHEDOCS') != 'True': from unittest.mock import MagicMock class MockModule(MagicMock): @classmethod
import os import sphinx_rtd_theme # We cannot install llvmlite on READTHEDOCS if os.environ.get('READTHEDOCS') == 'True': from unittest.mock import MagicMock class MockModule(MagicMock): @classmethod
2,316
https://:@github.com/marcofinalist/weathervane.git
4ddc4538762f2bb439b55019f2fea94480654988
@@ -169,6 +169,6 @@ class WeatherVaneInterface(object): bits = self.spi.read_pin(self.station_bits) result = 0 for index, value in enumerate(bits): - result = value * 2**index + result += value * 2**index return self.STATIONS[result] \ No newline at end of file
weathervane/weathervaneinterface.py
ReplaceText(target='+=' @(172,19)->(172,20))
class WeatherVaneInterface(object): bits = self.spi.read_pin(self.station_bits) result = 0 for index, value in enumerate(bits): result = value * 2**index return self.STATIONS[result] \ No newline at end of file
class WeatherVaneInterface(object): bits = self.spi.read_pin(self.station_bits) result = 0 for index, value in enumerate(bits): result += value * 2**index return self.STATIONS[result] \ No newline at end of file
2,317
https://:@github.com/ofgulban/compoda.git
610dfad6de73410b4b99e8e9260b06dbe4ab91b7
@@ -49,7 +49,7 @@ def closure(data, k=1.0): out = np.copy(data) for i in range(data.shape[1]): out[:, i] = np.divide(out[:, i], data_sum) - out = data * k + out = out * k return out
compoda/core.py
ReplaceText(target='out' @(52,10)->(52,14))
def closure(data, k=1.0): out = np.copy(data) for i in range(data.shape[1]): out[:, i] = np.divide(out[:, i], data_sum) out = data * k return out
def closure(data, k=1.0): out = np.copy(data) for i in range(data.shape[1]): out[:, i] = np.divide(out[:, i], data_sum) out = out * k return out
2,318
https://:@github.com/monashbiomedicalimaging/arcana.git
bf8c927f1d0f7c4af2175ddec836c6ffea5a3858
@@ -88,7 +88,7 @@ class XNATSource(ArchiveSource, XNATMixin): proj_summ_sess_name) = XNATArchive.project_summary_name( project.id) try: - proc_session = xnat_login.experiments[ + proc_session = subject.experiments[ self.session_id + XNATArchive.PROCESSED_SUFFIX] proc_datasets = dict( (s.type, s) for s in proc_session.scans.itervalues())
nianalysis/archive/xnat.py
ReplaceText(target='subject' @(91,31)->(91,41))
class XNATSource(ArchiveSource, XNATMixin): proj_summ_sess_name) = XNATArchive.project_summary_name( project.id) try: proc_session = xnat_login.experiments[ self.session_id + XNATArchive.PROCESSED_SUFFIX] proc_datasets = dict( (s.type, s) for s in proc_session.scans.itervalues())
class XNATSource(ArchiveSource, XNATMixin): proj_summ_sess_name) = XNATArchive.project_summary_name( project.id) try: proc_session = subject.experiments[ self.session_id + XNATArchive.PROCESSED_SUFFIX] proc_datasets = dict( (s.type, s) for s in proc_session.scans.itervalues())
2,319
https://:@github.com/monashbiomedicalimaging/arcana.git
31946410e68317edea4ee76c6c24441065fc93ae
@@ -417,7 +417,7 @@ class TestProjectInfo(BaseMultiSubjectTestCase): proj_dir, subject.id, SUMMARY_NAME, dataset.filename) for session in subject.sessions: - for dataset in subject.datasets: + for dataset in session.datasets: dataset.path = os.path.join( proj_dir, session.subject_id, session.visit_id, dataset.filename)
test/unittests/archive/test_local.py
ReplaceText(target='session' @(420,35)->(420,42))
class TestProjectInfo(BaseMultiSubjectTestCase): proj_dir, subject.id, SUMMARY_NAME, dataset.filename) for session in subject.sessions: for dataset in subject.datasets: dataset.path = os.path.join( proj_dir, session.subject_id, session.visit_id, dataset.filename)
class TestProjectInfo(BaseMultiSubjectTestCase): proj_dir, subject.id, SUMMARY_NAME, dataset.filename) for session in subject.sessions: for dataset in session.datasets: dataset.path = os.path.join( proj_dir, session.subject_id, session.visit_id, dataset.filename)
2,320
https://:@github.com/monashbiomedicalimaging/arcana.git
6bf29a9a339f7985d310d567a66cfca085cc4708
@@ -268,7 +268,7 @@ class BaseArchiveSink(BaseArchiveNode): PATH_TRAIT) # Add input fields for field in fields: - assert isinstance(dataset, FieldSpec) + assert isinstance(field, FieldSpec) self._add_trait(self.inputs, field.name + FIELD_SUFFIX, field.dtype)
nianalysis/archive/base.py
ReplaceText(target='field' @(271,30)->(271,37))
class BaseArchiveSink(BaseArchiveNode): PATH_TRAIT) # Add input fields for field in fields: assert isinstance(dataset, FieldSpec) self._add_trait(self.inputs, field.name + FIELD_SUFFIX, field.dtype)
class BaseArchiveSink(BaseArchiveNode): PATH_TRAIT) # Add input fields for field in fields: assert isinstance(field, FieldSpec) self._add_trait(self.inputs, field.name + FIELD_SUFFIX, field.dtype)
2,321
https://:@github.com/monashbiomedicalimaging/arcana.git
8bf472a77f50efc4d28d38d4aca46200c35e87e2
@@ -353,7 +353,7 @@ class LocalArchive(Archive): Dataset.from_path( os.path.join(session_path, dname), multiplicity=multiplicity)) - if FIELDS_FNAME in dname: + if FIELDS_FNAME in dnames: fields = self.fields_from_json(os.path.join( session_path, FIELDS_FNAME), multiplicity=multiplicity)
nianalysis/archive/local.py
ReplaceText(target='dnames' @(356,31)->(356,36))
class LocalArchive(Archive): Dataset.from_path( os.path.join(session_path, dname), multiplicity=multiplicity)) if FIELDS_FNAME in dname: fields = self.fields_from_json(os.path.join( session_path, FIELDS_FNAME), multiplicity=multiplicity)
class LocalArchive(Archive): Dataset.from_path( os.path.join(session_path, dname), multiplicity=multiplicity)) if FIELDS_FNAME in dnames: fields = self.fields_from_json(os.path.join( session_path, FIELDS_FNAME), multiplicity=multiplicity)
2,322
https://:@github.com/monashbiomedicalimaging/arcana.git
b3a6e4ec34cbfc92ef62dde78f995c4075feaa46
@@ -424,7 +424,7 @@ class Study(object): "is not a valid option ('{}')".format( "', '".join(unrecognised_values), name, self._param_error_location, - "', '".join(switch.choices))) + "', '".join(spec.choices))) if self._referenced_switches is not None: self._referenced_switches.add(name) return switch.value in values
arcana/study/base.py
ReplaceText(target='spec' @(427,32)->(427,38))
class Study(object): "is not a valid option ('{}')".format( "', '".join(unrecognised_values), name, self._param_error_location, "', '".join(switch.choices))) if self._referenced_switches is not None: self._referenced_switches.add(name) return switch.value in values
class Study(object): "is not a valid option ('{}')".format( "', '".join(unrecognised_values), name, self._param_error_location, "', '".join(spec.choices))) if self._referenced_switches is not None: self._referenced_switches.add(name) return switch.value in values
2,323
https://:@github.com/monashbiomedicalimaging/arcana.git
be154e732593c6c8dd36391db9048315ad74fabc
@@ -214,7 +214,7 @@ class Study(object): "to\n{}.".format(e, bound_inpt, spec)) raise e else: - if inpt.format not in spec.valid_formats: + if bound_inpt.format not in spec.valid_formats: raise ArcanaUsageError( "Cannot pass {} as an input to {} as it is" " not in one of the valid formats ('{}')"
arcana/study/base.py
ReplaceText(target='bound_inpt' @(217,31)->(217,35))
class Study(object): "to\n{}.".format(e, bound_inpt, spec)) raise e else: if inpt.format not in spec.valid_formats: raise ArcanaUsageError( "Cannot pass {} as an input to {} as it is" " not in one of the valid formats ('{}')"
class Study(object): "to\n{}.".format(e, bound_inpt, spec)) raise e else: if bound_inpt.format not in spec.valid_formats: raise ArcanaUsageError( "Cannot pass {} as an input to {} as it is" " not in one of the valid formats ('{}')"
2,324
https://:@github.com/monashbiomedicalimaging/arcana.git
21199db4a62a164140eef9c7a45966056c069541
@@ -116,7 +116,7 @@ class ModulesEnvironment(BaseEnvironment): .format(req.name, local_name)) avail_versions = [] for local_ver_name in version_names: - ver_name = self.map_version(req_range, local_ver_name) + ver_name = self.map_version(req, local_ver_name) try: avail_versions.append( req.v(ver_name, local_name=local_name,
arcana/environment/modules.py
ReplaceText(target='req' @(119,44)->(119,53))
class ModulesEnvironment(BaseEnvironment): .format(req.name, local_name)) avail_versions = [] for local_ver_name in version_names: ver_name = self.map_version(req_range, local_ver_name) try: avail_versions.append( req.v(ver_name, local_name=local_name,
class ModulesEnvironment(BaseEnvironment): .format(req.name, local_name)) avail_versions = [] for local_ver_name in version_names: ver_name = self.map_version(req, local_ver_name) try: avail_versions.append( req.v(ver_name, local_name=local_name,
2,325
https://:@github.com/monashbiomedicalimaging/arcana.git
35edaede784a97ddf4c0f961a4d6aac1cf3fb878
@@ -683,7 +683,7 @@ class Study(object): in_branch = switch.value in values if not in_branch: try: - in_branch = switch.fallbacks[switch.value] in values + in_branch = spec.fallbacks[switch.value] in values except KeyError: pass return in_branch
arcana/study/base.py
ReplaceText(target='spec' @(686,32)->(686,38))
class Study(object): in_branch = switch.value in values if not in_branch: try: in_branch = switch.fallbacks[switch.value] in values except KeyError: pass return in_branch
class Study(object): in_branch = switch.value in values if not in_branch: try: in_branch = spec.fallbacks[switch.value] in values except KeyError: pass return in_branch
2,326
https://:@github.com/agartland/metadataVis.git
469bf0c8e514d09c966c73d21b9ff0c335d12255
@@ -39,7 +39,7 @@ def _generateWideform(longform_df, rx=None): for entry in rowmeta_columns: rowmeta_dict[entry] = longform_df[entry] - if (rx is None): + if (rx is not None): ptid_md = pd.DataFrame(data=rowmeta_dict, columns=rowmeta_dict.keys()) ptid_md = ptid_md.drop_duplicates()
LongformReader.py
ReplaceText(target=' is not ' @(42,10)->(42,14))
def _generateWideform(longform_df, rx=None): for entry in rowmeta_columns: rowmeta_dict[entry] = longform_df[entry] if (rx is None): ptid_md = pd.DataFrame(data=rowmeta_dict, columns=rowmeta_dict.keys()) ptid_md = ptid_md.drop_duplicates()
def _generateWideform(longform_df, rx=None): for entry in rowmeta_columns: rowmeta_dict[entry] = longform_df[entry] if (rx is not None): ptid_md = pd.DataFrame(data=rowmeta_dict, columns=rowmeta_dict.keys()) ptid_md = ptid_md.drop_duplicates()
2,327
https://:@github.com/combatopera/pyven.git
2deb919498af4943aa18a7281037468809bda2d0
@@ -22,7 +22,7 @@ class BinMix(Node): self.blockbuf.copybuf(self.tone(self.block)) if not noiseflag: self.blockbuf.orbuf(self.noise(self.block)) - elif noiseflag: + elif not noiseflag: self.blockbuf.copybuf(self.noise(self.block)) else: self.blockbuf.fill(0)
pym2149/mix.py
ReplaceText(target='not ' @(25,9)->(25,9))
class BinMix(Node): self.blockbuf.copybuf(self.tone(self.block)) if not noiseflag: self.blockbuf.orbuf(self.noise(self.block)) elif noiseflag: self.blockbuf.copybuf(self.noise(self.block)) else: self.blockbuf.fill(0)
class BinMix(Node): self.blockbuf.copybuf(self.tone(self.block)) if not noiseflag: self.blockbuf.orbuf(self.noise(self.block)) elif not noiseflag: self.blockbuf.copybuf(self.noise(self.block)) else: self.blockbuf.fill(0)
2,328
https://:@github.com/rolurq/flask-gulp.git
615013477dcfcf53b64e5cba0858f3b01e66b8fb
@@ -61,7 +61,7 @@ def cjsx(filename, data): command = "%s -c -s" % (executable or 'cjsx') if bare: - command = ' '.join((executable, '-b')) + command = ' '.join((command, '-b')) return runner(command, filename, data, '.js')
flask_static/extensions.py
ReplaceText(target='command' @(64,28)->(64,38))
def cjsx(filename, data): command = "%s -c -s" % (executable or 'cjsx') if bare: command = ' '.join((executable, '-b')) return runner(command, filename, data, '.js')
def cjsx(filename, data): command = "%s -c -s" % (executable or 'cjsx') if bare: command = ' '.join((command, '-b')) return runner(command, filename, data, '.js')
2,329
https://:@github.com/jiep/unicode.git
fe2f6fe46f14778cfcb74852d817d1876d171352
@@ -92,7 +92,7 @@ def main(): print('Similar domains to {}'.format(dom)) domains.difference_update(set(dom)) for d in domains: - print_diff(d, args.domain) + print_diff(args.domain, d) if write: f.write(d + "\n") if (args.check):
v2d/main.py
ArgSwap(idxs=0<->1 @(95,16)->(95,26))
def main(): print('Similar domains to {}'.format(dom)) domains.difference_update(set(dom)) for d in domains: print_diff(d, args.domain) if write: f.write(d + "\n") if (args.check):
def main(): print('Similar domains to {}'.format(dom)) domains.difference_update(set(dom)) for d in domains: print_diff(args.domain, d) if write: f.write(d + "\n") if (args.check):
2,330
https://:@gitlab.com/admintotal/django-cfdi.git
9f131df38460abaafb00566867ce47b522fce7fd
@@ -882,7 +882,7 @@ def get_xml_object(xml_text): nominas.append(nomina_object) - if nomina_object: + if nominas: xml.complemento.nominas = nominas xml.complemento.nomina = nominas[0] else:
cfdi/utils.py
ReplaceText(target='nominas' @(885,15)->(885,28))
def get_xml_object(xml_text): nominas.append(nomina_object) if nomina_object: xml.complemento.nominas = nominas xml.complemento.nomina = nominas[0] else:
def get_xml_object(xml_text): nominas.append(nomina_object) if nominas: xml.complemento.nominas = nominas xml.complemento.nomina = nominas[0] else:
2,331
https://:@github.com/ludeeus/addonupdater.git
fe325d28af7bcade5a806009c943fc0afbca63b1
@@ -378,7 +378,7 @@ class AddonUpdater(): remote_buildfile = self.get_file_obj(buildfile) buildfile_content = self.get_file_content(remote_buildfile) - used_file = remote_dockerfile.split('BUILD_FROM=hassioaddons/')[1] + used_file = dockerfile_content.split('BUILD_FROM=hassioaddons/')[1] used_file = used_file.split('\n')[0] base = used_file.split(':')[1]
addonupdater/updater.py
ReplaceText(target='dockerfile_content' @(381,20)->(381,37))
class AddonUpdater(): remote_buildfile = self.get_file_obj(buildfile) buildfile_content = self.get_file_content(remote_buildfile) used_file = remote_dockerfile.split('BUILD_FROM=hassioaddons/')[1] used_file = used_file.split('\n')[0] base = used_file.split(':')[1]
class AddonUpdater(): remote_buildfile = self.get_file_obj(buildfile) buildfile_content = self.get_file_content(remote_buildfile) used_file = dockerfile_content.split('BUILD_FROM=hassioaddons/')[1] used_file = used_file.split('\n')[0] base = used_file.split(':')[1]
2,332
https://:@github.com/VictorPavlushin/netbox-netdev-inventory.git
00111f3155731e4bf1b380744519a89576a96a49
@@ -161,7 +161,7 @@ class DeviceImporter(ContextDecorator): self._search_key_case_insensitive(interfaces, lag) ) except KeyError: - logger.error("%s not exist in polled interfaces", ifname) + logger.error("%s not exist in polled interfaces", lag) continue interfaces[ifname]["lag"] = real_lag_name
netbox_netprod_importer/importer.py
ReplaceText(target='lag' @(164,66)->(164,72))
class DeviceImporter(ContextDecorator): self._search_key_case_insensitive(interfaces, lag) ) except KeyError: logger.error("%s not exist in polled interfaces", ifname) continue interfaces[ifname]["lag"] = real_lag_name
class DeviceImporter(ContextDecorator): self._search_key_case_insensitive(interfaces, lag) ) except KeyError: logger.error("%s not exist in polled interfaces", lag) continue interfaces[ifname]["lag"] = real_lag_name
2,333
https://:@github.com/tswicegood/cbv_utils.git
25d2e1ce328e485ec26f4debd8f7aebc1ee6a623
@@ -96,7 +96,7 @@ class ProcessInlineFormsetView(ProcessFormView): obj = form.save(commit=False) inline_formset = self.get_inline_formset() if inline_formset.is_valid(): - form.save() + obj.save() inline_formset.save() return self.form_valid(form, inline_formset) return self.form_invalid(form=form, inline_formset=inline_formset)
cbv_utils/views.py
ReplaceText(target='obj' @(99,16)->(99,20))
class ProcessInlineFormsetView(ProcessFormView): obj = form.save(commit=False) inline_formset = self.get_inline_formset() if inline_formset.is_valid(): form.save() inline_formset.save() return self.form_valid(form, inline_formset) return self.form_invalid(form=form, inline_formset=inline_formset)
class ProcessInlineFormsetView(ProcessFormView): obj = form.save(commit=False) inline_formset = self.get_inline_formset() if inline_formset.is_valid(): obj.save() inline_formset.save() return self.form_valid(form, inline_formset) return self.form_invalid(form=form, inline_formset=inline_formset)
2,334
https://:@github.com/romeric/florence.git
aa75d1bb83ec9de5e8ee60d9a87d2a86d7293aeb
@@ -6010,7 +6010,7 @@ class Mesh(object): else: quality_func = lambda mesh: mesh.Lengths() elif quality_assessor == "aspect_ratio": - quality_assessor = lambda mesh: mesh.AspectRatios() + quality_func = lambda mesh: mesh.AspectRatios() elif quality_assessor == "angle": quality_func = lambda mesh: mesh.Angles() else:
Florence/MeshGeneration/Mesh.py
ReplaceText(target='quality_func' @(6013,16)->(6013,32))
class Mesh(object): else: quality_func = lambda mesh: mesh.Lengths() elif quality_assessor == "aspect_ratio": quality_assessor = lambda mesh: mesh.AspectRatios() elif quality_assessor == "angle": quality_func = lambda mesh: mesh.Angles() else:
class Mesh(object): else: quality_func = lambda mesh: mesh.Lengths() elif quality_assessor == "aspect_ratio": quality_func = lambda mesh: mesh.AspectRatios() elif quality_assessor == "angle": quality_func = lambda mesh: mesh.Angles() else:
2,335
https://:@github.com/DomainGroupOSS/ml-recsys-tools.git
467f8e1d859af1109bb830b3c35a752baeddbb67
@@ -81,7 +81,7 @@ class FactorizationRecommender(BaseDFSparseRecommender): all_metrics.plot() self.early_stop_metrics_df = all_metrics - self._set_epochs(epochs=epochs_max) + self._set_epochs(epochs=max_epoch) if not refit_on_all: simple_logger.info('Loading best model from checkpoint at %d epochs' % max_epoch) self.model, self.model_checkpoint = self.model_checkpoint, None
ml_recsys_tools/recommenders/factorization_base.py
ReplaceText(target='max_epoch' @(84,32)->(84,42))
class FactorizationRecommender(BaseDFSparseRecommender): all_metrics.plot() self.early_stop_metrics_df = all_metrics self._set_epochs(epochs=epochs_max) if not refit_on_all: simple_logger.info('Loading best model from checkpoint at %d epochs' % max_epoch) self.model, self.model_checkpoint = self.model_checkpoint, None
class FactorizationRecommender(BaseDFSparseRecommender): all_metrics.plot() self.early_stop_metrics_df = all_metrics self._set_epochs(epochs=max_epoch) if not refit_on_all: simple_logger.info('Loading best model from checkpoint at %d epochs' % max_epoch) self.model, self.model_checkpoint = self.model_checkpoint, None
2,336
https://:@github.com/south-coast-science/scs_core.git
6ca2ac668b486924816ed461e8f40a87d82136da
@@ -25,7 +25,7 @@ class Filesystem(object): if head and not os.path.exists(head): cls.mkdir(head) - if os.path.exists(path): # handles case of trailing / + if not os.path.exists(path): # handles case of trailing / os.mkdir(path)
src/scs_core/sys/filesystem.py
ReplaceText(target='not ' @(28,11)->(28,11))
class Filesystem(object): if head and not os.path.exists(head): cls.mkdir(head) if os.path.exists(path): # handles case of trailing / os.mkdir(path)
class Filesystem(object): if head and not os.path.exists(head): cls.mkdir(head) if not os.path.exists(path): # handles case of trailing / os.mkdir(path)
2,337
https://:@github.com/south-coast-science/scs_core.git
653bf53c4d76b1b422d8aa11174c9a5351bbd1f6
@@ -81,7 +81,7 @@ class ExegeteRenderingTRhRow(JSONable): @classmethod def construct(cls, gas, rh, t_min, t_max, t_delta, exegete: Exegete): - cells = [ExegeteRenderingTRhCell(t, exegete.error(gas, t, rh)) + cells = [ExegeteRenderingTRhCell(t, exegete.error(gas, rh, t)) for t in range(t_min, t_max + 1, t_delta)] return ExegeteRenderingTRhRow(rh, cells)
src/scs_core/gas/exegesis/exegete_rendering_t_rh.py
ArgSwap(idxs=1<->2 @(84,44)->(84,57))
class ExegeteRenderingTRhRow(JSONable): @classmethod def construct(cls, gas, rh, t_min, t_max, t_delta, exegete: Exegete): cells = [ExegeteRenderingTRhCell(t, exegete.error(gas, t, rh)) for t in range(t_min, t_max + 1, t_delta)] return ExegeteRenderingTRhRow(rh, cells)
class ExegeteRenderingTRhRow(JSONable): @classmethod def construct(cls, gas, rh, t_min, t_max, t_delta, exegete: Exegete): cells = [ExegeteRenderingTRhCell(t, exegete.error(gas, rh, t)) for t in range(t_min, t_max + 1, t_delta)] return ExegeteRenderingTRhRow(rh, cells)
2,338
https://:@github.com/south-coast-science/scs_core.git
653bf53c4d76b1b422d8aa11174c9a5351bbd1f6
@@ -43,7 +43,7 @@ print("-") for rh in range(10, 91, 5): for t in range(0, 46, 5): - interpretation = exegete.interpretation('NO2', text, t, rh) + interpretation = exegete.interpretation('NO2', text, rh, t) print("rh: %2d t: %2d text: %3.1f interpretation: %3.1f" % (rh, t, text, interpretation)) print("-")
tests/gas/exegesis/sbl1/sbl1_no2_v1_test.py
ArgSwap(idxs=2<->3 @(46,25)->(46,47))
print("-") for rh in range(10, 91, 5): for t in range(0, 46, 5): interpretation = exegete.interpretation('NO2', text, t, rh) print("rh: %2d t: %2d text: %3.1f interpretation: %3.1f" % (rh, t, text, interpretation)) print("-")
print("-") for rh in range(10, 91, 5): for t in range(0, 46, 5): interpretation = exegete.interpretation('NO2', text, rh, t) print("rh: %2d t: %2d text: %3.1f interpretation: %3.1f" % (rh, t, text, interpretation)) print("-")
2,339
https://:@github.com/south-coast-science/scs_core.git
73c3c80fcc88fce844e0e387499753667dac811e
@@ -68,7 +68,7 @@ class S3Manager(object): bucket_list.append(str(inters), bucket["Name"]) inters += 1 - return bucket_list + return response def retrieve_from_bucket(self, bucket_name, resource_name):
src/scs_core/aws/manager/s3_manager.py
ReplaceText(target='response' @(71,15)->(71,26))
class S3Manager(object): bucket_list.append(str(inters), bucket["Name"]) inters += 1 return bucket_list def retrieve_from_bucket(self, bucket_name, resource_name):
class S3Manager(object): bucket_list.append(str(inters), bucket["Name"]) inters += 1 return response def retrieve_from_bucket(self, bucket_name, resource_name):
2,340
https://:@github.com/south-coast-science/scs_core.git
ac11b0e941914f83f984d183811a83bb0a5df544
@@ -59,7 +59,7 @@ class AccessKey(PersistentJSONable): @classmethod def persistence_location(cls, host): - return host.aws_dir(), cls.__FILENAME + return cls.aws_dir(), cls.__FILENAME @classmethod
src/scs_core/aws/client/access_key.py
ReplaceText(target='cls' @(62,15)->(62,19))
class AccessKey(PersistentJSONable): @classmethod def persistence_location(cls, host): return host.aws_dir(), cls.__FILENAME @classmethod
class AccessKey(PersistentJSONable): @classmethod def persistence_location(cls, host): return cls.aws_dir(), cls.__FILENAME @classmethod
2,341
https://:@github.com/south-coast-science/scs_core.git
e6293b84e0b7594738b85db5a7365fdbb5770461
@@ -53,7 +53,7 @@ class DeviceTester(object): delta = now - latest_pub elapsed_minutes = delta.total_seconds() / 60 - return elapsed_minutes > self.__config.unresponsive_minutes_allowed + return elapsed_minutes < self.__config.unresponsive_minutes_allowed def has_status_changed(self, s3_device_status_list):
src/scs_core/aws/monitor/device_tester.py
ReplaceText(target='<' @(56,35)->(56,36))
class DeviceTester(object): delta = now - latest_pub elapsed_minutes = delta.total_seconds() / 60 return elapsed_minutes > self.__config.unresponsive_minutes_allowed def has_status_changed(self, s3_device_status_list):
class DeviceTester(object): delta = now - latest_pub elapsed_minutes = delta.total_seconds() / 60 return elapsed_minutes < self.__config.unresponsive_minutes_allowed def has_status_changed(self, s3_device_status_list):
2,342
https://:@github.com/davidfstr/notifymail.git
585ae19eba35db57e969941ea2c340bd3be499d8
@@ -8,7 +8,7 @@ def is_older_than(file1, file2): return os.path.getmtime(file1) < os.path.getmtime(file2) # Generate README.rst if missing or out of date -if not os.path.exists('README.rst') and is_older_than('README.rst', 'README.md'): +if not os.path.exists('README.rst') or is_older_than('README.rst', 'README.md'): os.system('pandoc --from=markdown --to=rst --output=README.rst README.md') with open('README.rst') as file: long_description = file.read()
setup.py
ReplaceText(target='or' @(11,36)->(11,39))
def is_older_than(file1, file2): return os.path.getmtime(file1) < os.path.getmtime(file2) # Generate README.rst if missing or out of date if not os.path.exists('README.rst') and is_older_than('README.rst', 'README.md'): os.system('pandoc --from=markdown --to=rst --output=README.rst README.md') with open('README.rst') as file: long_description = file.read()
def is_older_than(file1, file2): return os.path.getmtime(file1) < os.path.getmtime(file2) # Generate README.rst if missing or out of date if not os.path.exists('README.rst') or is_older_than('README.rst', 'README.md'): os.system('pandoc --from=markdown --to=rst --output=README.rst README.md') with open('README.rst') as file: long_description = file.read()
2,343
https://:@github.com/hile/oodi.git
169b7db094c64a5c2ca695bf93782fce0b50945f
@@ -26,7 +26,7 @@ class Command(ScriptCommand): iterators = [] for path in paths: try: - iterators.append(IterableTrackPaths(self.script.configuration, path)) + iterators.append(IterableTrackPaths(path, self.script.configuration)) except LibraryError as e: self.error(e) return iterators
oodi/bin/commands/base.py
ArgSwap(idxs=0<->1 @(29,33)->(29,51))
class Command(ScriptCommand): iterators = [] for path in paths: try: iterators.append(IterableTrackPaths(self.script.configuration, path)) except LibraryError as e: self.error(e) return iterators
class Command(ScriptCommand): iterators = [] for path in paths: try: iterators.append(IterableTrackPaths(path, self.script.configuration)) except LibraryError as e: self.error(e) return iterators
2,344
https://:@github.com/tpm2-software/tpm2-pytss.git
675d845e04810a4129e9e5b21889a0258fcad7b2
@@ -141,7 +141,7 @@ class BaseContextMetaClass(type): and "uint8_t" in docstring.split() ): return_value.append( - to_bytearray(value.value, args[i + 1].value) + to_bytearray(args[i + 1].value, value.value) ) skip = True continue
tpm2_pytss/context.py
ArgSwap(idxs=0<->1 @(144,36)->(144,48))
class BaseContextMetaClass(type): and "uint8_t" in docstring.split() ): return_value.append( to_bytearray(value.value, args[i + 1].value) ) skip = True continue
class BaseContextMetaClass(type): and "uint8_t" in docstring.split() ): return_value.append( to_bytearray(args[i + 1].value, value.value) ) skip = True continue
2,345
https://:@github.com/josesho/bootstrap_contrast.git
1d55129dee9130374774bb1234b577e061974d93
@@ -1089,7 +1089,7 @@ def pairedcontrast(data, x, y, idcol, hue = None, linestyle = 'dotted') # Set xlimit to appropriate limits.. - newxlim = (ax_left.get_xlim()[0], xpos + 0.25) + newxlim = (ax_left.get_xlim()[0], xposPlusViolin + 0.25) ax_left.set_xlim(newxlim) # Remove left axes x-axis title.
bootstrapContrast/bootstrapContrast.py
ReplaceText(target='xposPlusViolin' @(1092,38)->(1092,42))
def pairedcontrast(data, x, y, idcol, hue = None, linestyle = 'dotted') # Set xlimit to appropriate limits.. newxlim = (ax_left.get_xlim()[0], xpos + 0.25) ax_left.set_xlim(newxlim) # Remove left axes x-axis title.
def pairedcontrast(data, x, y, idcol, hue = None, linestyle = 'dotted') # Set xlimit to appropriate limits.. newxlim = (ax_left.get_xlim()[0], xposPlusViolin + 0.25) ax_left.set_xlim(newxlim) # Remove left axes x-axis title.
2,346
https://:@github.com/josesho/bootstrap_contrast.git
5875648efb0994fdac3eae216fe36d51cc0f629c
@@ -88,7 +88,7 @@ def plotbootstrap_hubspoke(bslist, ax, violinWidth, violinOffset, for i in range(0, len(bslist)): bsi=bslist[i] # array=list(bsi.items())[7][1] # Pull out the bootstrapped array. - array=bslist['diffarray'] + array=bsi['diffarray'] ylims.append(array) # Then plot as violinplot.
bootstrapContrast/plot_bootstrap_tools.py
ReplaceText(target='bsi' @(91,14)->(91,20))
def plotbootstrap_hubspoke(bslist, ax, violinWidth, violinOffset, for i in range(0, len(bslist)): bsi=bslist[i] # array=list(bsi.items())[7][1] # Pull out the bootstrapped array. array=bslist['diffarray'] ylims.append(array) # Then plot as violinplot.
def plotbootstrap_hubspoke(bslist, ax, violinWidth, violinOffset, for i in range(0, len(bslist)): bsi=bslist[i] # array=list(bsi.items())[7][1] # Pull out the bootstrapped array. array=bsi['diffarray'] ylims.append(array) # Then plot as violinplot.
2,347
https://:@gitlab.com/harry.sky.vortex/melodiam.git
bbc45aeb762242382c68cfe6fa9a32e600eb3630
@@ -106,7 +106,7 @@ class SpotifyAPI(object): # Update ETag if song's playback was manipulated if song["progress_ms"] < self.current_song["progress"] or song["progress_ms"] - 10000 > self.current_song["progress"]: self.current_song_json_updated = str(time()) - LISTEN_ALONG_API.set_current_playing_song(song_uri=song["uri"], position_ms=song["progress_ms"]) + LISTEN_ALONG_API.set_current_playing_song(song_uri=item["uri"], position_ms=song["progress_ms"]) self.current_song["progress"] = song["progress_ms"] self.current_song_json = json.dumps(self.current_song)
backend/spotify.py
ReplaceText(target='item' @(109,71)->(109,75))
class SpotifyAPI(object): # Update ETag if song's playback was manipulated if song["progress_ms"] < self.current_song["progress"] or song["progress_ms"] - 10000 > self.current_song["progress"]: self.current_song_json_updated = str(time()) LISTEN_ALONG_API.set_current_playing_song(song_uri=song["uri"], position_ms=song["progress_ms"]) self.current_song["progress"] = song["progress_ms"] self.current_song_json = json.dumps(self.current_song)
class SpotifyAPI(object): # Update ETag if song's playback was manipulated if song["progress_ms"] < self.current_song["progress"] or song["progress_ms"] - 10000 > self.current_song["progress"]: self.current_song_json_updated = str(time()) LISTEN_ALONG_API.set_current_playing_song(song_uri=item["uri"], position_ms=song["progress_ms"]) self.current_song["progress"] = song["progress_ms"] self.current_song_json = json.dumps(self.current_song)
2,348
https://:@gitlab.com/harry.sky.vortex/melodiam.git
db126e963e784dee9d11f2331622ef9eca5baf9d
@@ -32,7 +32,7 @@ async def get_listen_along_users_endpoint(request: StarletteRequest) -> PlainTex for user in ListenAlong.users: users_json += user.public_json + "," - users_json = ']' + users_json += ']' return PlainTextResponse(content=users_json, media_type="application/json") @SERVER.route('/get_current_song', methods=['GET'])
backend/music/main.py
ReplaceText(target='+=' @(35,15)->(35,16))
async def get_listen_along_users_endpoint(request: StarletteRequest) -> PlainTex for user in ListenAlong.users: users_json += user.public_json + "," users_json = ']' return PlainTextResponse(content=users_json, media_type="application/json") @SERVER.route('/get_current_song', methods=['GET'])
async def get_listen_along_users_endpoint(request: StarletteRequest) -> PlainTex for user in ListenAlong.users: users_json += user.public_json + "," users_json += ']' return PlainTextResponse(content=users_json, media_type="application/json") @SERVER.route('/get_current_song', methods=['GET'])
2,349
https://:@gitlab.com/harry.sky.vortex/melodiam.git
9850cc42634b01312cc8754a03df8abccd2054ce
@@ -21,7 +21,7 @@ class ListenAlong(): @staticmethod def _set_song(user: ListenAlongUser, song_json: str) -> None: if user.tokens: - status = SpotifyWebAPI.set_current_playing_song(song_json, user.tokens.access) + status = SpotifyWebAPI.set_current_playing_song(user.tokens.access, song_json) if user.public.status != status: user.public.status = status user.public_json = json.dumps(asdict(user.public))
backend/music/features/listen_along.py
ArgSwap(idxs=0<->1 @(24,21)->(24,59))
class ListenAlong(): @staticmethod def _set_song(user: ListenAlongUser, song_json: str) -> None: if user.tokens: status = SpotifyWebAPI.set_current_playing_song(song_json, user.tokens.access) if user.public.status != status: user.public.status = status user.public_json = json.dumps(asdict(user.public))
class ListenAlong(): @staticmethod def _set_song(user: ListenAlongUser, song_json: str) -> None: if user.tokens: status = SpotifyWebAPI.set_current_playing_song(user.tokens.access, song_json) if user.public.status != status: user.public.status = status user.public_json = json.dumps(asdict(user.public))
2,350
https://:@github.com/ladybug-tools/honeybee-radiance-command.git
fe62562a0b228b8e46309aa88c116882247c89b9
@@ -18,7 +18,7 @@ def run_command(input_command, env=None, cwd=None): if platform.system() == 'Windows': command = input_command.replace('\'', '"') else: - command = command.replace('"', '\'') + command = input_command.replace('"', '\'') # change cwd - Popen cwd input simply doesn't work. cur_dir = os.getcwd()
honeybee_radiance_command/_command_util.py
ReplaceText(target='input_command' @(21,18)->(21,25))
def run_command(input_command, env=None, cwd=None): if platform.system() == 'Windows': command = input_command.replace('\'', '"') else: command = command.replace('"', '\'') # change cwd - Popen cwd input simply doesn't work. cur_dir = os.getcwd()
def run_command(input_command, env=None, cwd=None): if platform.system() == 'Windows': command = input_command.replace('\'', '"') else: command = input_command.replace('"', '\'') # change cwd - Popen cwd input simply doesn't work. cur_dir = os.getcwd()
2,351
https://:@bitbucket.org/bertrandboichon/pi.hifi.git
bf0316f181d6e6597722b0f3023f70a592c49d04
@@ -11,7 +11,7 @@ class post_install(install): install.run(self) print("*** Executing post install actions:") # update mpd configuration if necessary - if '/tmp/mpd.fifo' in open('/etc/mpd.conf').read(): + if '/tmp/mpd.fifo' not in open('/etc/mpd.conf').read(): os.system("sudo cat /etc/fifo-mpd.conf >> /etc/mpd.conf") os.system("sudo service mpd restart") # update music display init script
setup.py
ReplaceText(target=' not in ' @(14,26)->(14,30))
class post_install(install): install.run(self) print("*** Executing post install actions:") # update mpd configuration if necessary if '/tmp/mpd.fifo' in open('/etc/mpd.conf').read(): os.system("sudo cat /etc/fifo-mpd.conf >> /etc/mpd.conf") os.system("sudo service mpd restart") # update music display init script
class post_install(install): install.run(self) print("*** Executing post install actions:") # update mpd configuration if necessary if '/tmp/mpd.fifo' not in open('/etc/mpd.conf').read(): os.system("sudo cat /etc/fifo-mpd.conf >> /etc/mpd.conf") os.system("sudo service mpd restart") # update music display init script
2,352
https://:@github.com/hotoffthehamster/dob.git
9615cad9da920842a23b9a124b1d39356ebe7d2e
@@ -43,7 +43,7 @@ def echo_copyright(): cur_year = str(datetime.now().year) year_range = '2018' if cur_year != year_range: - year_range = '2018-{}'.format(year_range) + year_range = '2018-{}'.format(cur_year) gpl3_notice_2018 = [ '{app_name} {version}'.format( app_name=__BigName__,
dob/copyright.py
ReplaceText(target='cur_year' @(46,38)->(46,48))
def echo_copyright(): cur_year = str(datetime.now().year) year_range = '2018' if cur_year != year_range: year_range = '2018-{}'.format(year_range) gpl3_notice_2018 = [ '{app_name} {version}'.format( app_name=__BigName__,
def echo_copyright(): cur_year = str(datetime.now().year) year_range = '2018' if cur_year != year_range: year_range = '2018-{}'.format(cur_year) gpl3_notice_2018 = [ '{app_name} {version}'.format( app_name=__BigName__,
2,353
https://:@github.com/superadm1n/CiscoAutomationFramework.git
2ffcf5da63e3479f7bcae3d552f04695b36d9466
@@ -594,7 +594,7 @@ class IOS(TerminalCommands, CommandGetMethods): ) #mac_table_list.append(line.split()) - if len(line.split()) >= 1: + if len(line.split()) > 1: if '--' in line.split()[0]: flag = 1
CiscoAutomationFramework/CiscoIOS.py
ReplaceText(target='>' @(597,33)->(597,35))
class IOS(TerminalCommands, CommandGetMethods): ) #mac_table_list.append(line.split()) if len(line.split()) >= 1: if '--' in line.split()[0]: flag = 1
class IOS(TerminalCommands, CommandGetMethods): ) #mac_table_list.append(line.split()) if len(line.split()) > 1: if '--' in line.split()[0]: flag = 1
2,354
https://:@github.com/bruth/ipipe.git
dc24d06d645ab03e988a066267787aff98baee32
@@ -20,7 +20,7 @@ class Parser(object): output.extend(parsed) else: output.append(parsed) - return parsed + return output class FileParser(Parser):
pipes/parser.py
ReplaceText(target='output' @(23,15)->(23,21))
class Parser(object): output.extend(parsed) else: output.append(parsed) return parsed class FileParser(Parser):
class Parser(object): output.extend(parsed) else: output.append(parsed) return output class FileParser(Parser):
2,355
https://:@github.com/la-mar/permian-frac-exchange.git
3d0d86bfbb6871abdded1c5357dac5ca1c3ed756
@@ -249,7 +249,7 @@ class Parser(object): ) -> pd.Series: try: apply_to = apply_to or apply_on - self.df[apply_on] = self.df[apply_on].apply(func) + self.df[apply_to] = self.df[apply_on].apply(func) except KeyError as ke: logger.debug( MSG_PARSER_CHECK.format(op_name=self.operator.name, col_name=apply_on)
src/fsec/parser.py
ReplaceText(target='apply_to' @(252,20)->(252,28))
class Parser(object): ) -> pd.Series: try: apply_to = apply_to or apply_on self.df[apply_on] = self.df[apply_on].apply(func) except KeyError as ke: logger.debug( MSG_PARSER_CHECK.format(op_name=self.operator.name, col_name=apply_on)
class Parser(object): ) -> pd.Series: try: apply_to = apply_to or apply_on self.df[apply_to] = self.df[apply_on].apply(func) except KeyError as ke: logger.debug( MSG_PARSER_CHECK.format(op_name=self.operator.name, col_name=apply_on)
2,356
https://:@bitbucket.org/sambowers/biota.git
b9c2b1b28e5a5fb57b30e7c474ecf2e6f729edeb
@@ -304,9 +304,9 @@ def calculateTWC(tile, patch_size = 'auto', output = False, show = False): # Extract the data WC = woody_cover[ymin:ymax, xmin:xmax] - + # If at least 50 % of data is present... - if TWC.mask.sum() <= ((patch_size ** 2) * 0.5): + if WC.mask.sum() <= ((patch_size ** 2) * 0.5): # Calculate proportion of woody cover in patch TWC.data[n, m] = int(round((float(WC.sum()) / ((patch_size ** 2) - WC.mask.sum())) * 100))
biota/indices.py
ReplaceText(target='WC' @(309,11)->(309,14))
def calculateTWC(tile, patch_size = 'auto', output = False, show = False): # Extract the data WC = woody_cover[ymin:ymax, xmin:xmax] # If at least 50 % of data is present... if TWC.mask.sum() <= ((patch_size ** 2) * 0.5): # Calculate proportion of woody cover in patch TWC.data[n, m] = int(round((float(WC.sum()) / ((patch_size ** 2) - WC.mask.sum())) * 100))
def calculateTWC(tile, patch_size = 'auto', output = False, show = False): # Extract the data WC = woody_cover[ymin:ymax, xmin:xmax] # If at least 50 % of data is present... if WC.mask.sum() <= ((patch_size ** 2) * 0.5): # Calculate proportion of woody cover in patch TWC.data[n, m] = int(round((float(WC.sum()) / ((patch_size ** 2) - WC.mask.sum())) * 100))
2,357
https://:@github.com/yedhrab/YInstabot.git
c9d3f3b18656d7b19eb7b179fc7adb171bb6efe8
@@ -106,7 +106,7 @@ def main(): DEBUG, WAIT, NO_REFRESH, PATHS = not args.quite, args.wait, args.noRefresh, args.paths for PATH in PATHS: - if not os.path.isfile(PATHS): + if not os.path.isfile(PATH): print(f"`{PATH}` dosyaya ait değil.") continue
yinstabot/workspace.py
ReplaceText(target='PATH' @(109,30)->(109,35))
def main(): DEBUG, WAIT, NO_REFRESH, PATHS = not args.quite, args.wait, args.noRefresh, args.paths for PATH in PATHS: if not os.path.isfile(PATHS): print(f"`{PATH}` dosyaya ait değil.") continue
def main(): DEBUG, WAIT, NO_REFRESH, PATHS = not args.quite, args.wait, args.noRefresh, args.paths for PATH in PATHS: if not os.path.isfile(PATH): print(f"`{PATH}` dosyaya ait değil.") continue
2,358
https://:@github.com/fladi/pyrc522.git
83bd4bd1c169259a8e4c3e7736e2aa610f3a8691
@@ -48,7 +48,7 @@ class RFID(object): self.spi.max_speed_hz = speed GPIO.setmode(pin_mode) - if pin_rst is None: + if pin_rst is not None: GPIO.setup(pin_rst, GPIO.OUT) GPIO.output(pin_rst, 1) GPIO.setup(pin_irq, GPIO.IN, pull_up_down=GPIO.PUD_UP)
pirc522/rfid.py
ReplaceText(target=' is not ' @(51,18)->(51,22))
class RFID(object): self.spi.max_speed_hz = speed GPIO.setmode(pin_mode) if pin_rst is None: GPIO.setup(pin_rst, GPIO.OUT) GPIO.output(pin_rst, 1) GPIO.setup(pin_irq, GPIO.IN, pull_up_down=GPIO.PUD_UP)
class RFID(object): self.spi.max_speed_hz = speed GPIO.setmode(pin_mode) if pin_rst is not None: GPIO.setup(pin_rst, GPIO.OUT) GPIO.output(pin_rst, 1) GPIO.setup(pin_irq, GPIO.IN, pull_up_down=GPIO.PUD_UP)
2,359
https://:@github.com/ac-tuwien/pymhlib.git
07e69d451d8e2f665c23f31c780cfa58f583cf4f
@@ -63,7 +63,7 @@ def run_optimization(problem_name: str, instance_class, solution_class, default_ :param iter_cb: optional callback function that is called each iteration by some of the algorithms :param seed: optional seed value for the random number generators; 0: random initialization """ - if embedded: + if not embedded: add_general_arguments_and_parse_settings(default_inst_file, seed) init_logger()
pymhlib/demos/common.py
ReplaceText(target='not ' @(66,7)->(66,7))
def run_optimization(problem_name: str, instance_class, solution_class, default_ :param iter_cb: optional callback function that is called each iteration by some of the algorithms :param seed: optional seed value for the random number generators; 0: random initialization """ if embedded: add_general_arguments_and_parse_settings(default_inst_file, seed) init_logger()
def run_optimization(problem_name: str, instance_class, solution_class, default_ :param iter_cb: optional callback function that is called each iteration by some of the algorithms :param seed: optional seed value for the random number generators; 0: random initialization """ if not embedded: add_general_arguments_and_parse_settings(default_inst_file, seed) init_logger()
2,360
https://:@github.com/trevorparker/vane.git
8b9eb3a87c10bce414017216fbd7ef333e124597
@@ -140,7 +140,7 @@ def _fetch_weather_json( if (with_forecast): forecast_url = forecast_urls[provider] r = requests.get( - forecast_url.format(location, units, api_key)) + forecast_url.format(loc_parsed, units, api_key)) f = json.loads(r.text) if (c['response']['features']['forecast'] != 1): return {'e': 'Unable to load forecast'}
vane/utils.py
ReplaceText(target='loc_parsed' @(143,44)->(143,52))
def _fetch_weather_json( if (with_forecast): forecast_url = forecast_urls[provider] r = requests.get( forecast_url.format(location, units, api_key)) f = json.loads(r.text) if (c['response']['features']['forecast'] != 1): return {'e': 'Unable to load forecast'}
def _fetch_weather_json( if (with_forecast): forecast_url = forecast_urls[provider] r = requests.get( forecast_url.format(loc_parsed, units, api_key)) f = json.loads(r.text) if (c['response']['features']['forecast'] != 1): return {'e': 'Unable to load forecast'}
2,361
https://:@github.com/juancgvazquez/MODApy.git
7c64bc5452715160f8767c891f04d7a0a4848ebc
@@ -227,7 +227,7 @@ class Pipeline(object): logger2.info(step.name) args = step.args.replace( - 'patientname', tmpdir + patientname).replace('reference', ref).replace('samplename', samplename) + 'patientname', tmpdir + patientname).replace('reference', ref).replace('samplename', patientname) cmdver = step.version.replace('.', '_') javacmds = ['GATK', 'picard', 'SnpSift', 'snpEff'] if any(javacmd in step.command for javacmd in javacmds):
MODApy/pipeline.py
ReplaceText(target='patientname' @(230,101)->(230,111))
class Pipeline(object): logger2.info(step.name) args = step.args.replace( 'patientname', tmpdir + patientname).replace('reference', ref).replace('samplename', samplename) cmdver = step.version.replace('.', '_') javacmds = ['GATK', 'picard', 'SnpSift', 'snpEff'] if any(javacmd in step.command for javacmd in javacmds):
class Pipeline(object): logger2.info(step.name) args = step.args.replace( 'patientname', tmpdir + patientname).replace('reference', ref).replace('samplename', patientname) cmdver = step.version.replace('.', '_') javacmds = ['GATK', 'picard', 'SnpSift', 'snpEff'] if any(javacmd in step.command for javacmd in javacmds):
2,362
https://:@github.com/linhd-postdata/averell.git
538e7f13b3b57170d94241111b416c31deb75d5c
@@ -99,7 +99,7 @@ def download_corpora(corpus_indices=None, else: url = CORPORA_SOURCES[index]["properties"]["url"] filename = download_corpus(url, f"{folder_name}.zip") - folder_list.append(uncompress_corpus(filename, output_folder)) + folder_list.append(uncompress_corpus(filename, folder_path)) else: logging.error("No corpus selected. Nothing will be downloaded") return folder_list
src/averell/utils.py
ReplaceText(target='folder_path' @(102,63)->(102,76))
def download_corpora(corpus_indices=None, else: url = CORPORA_SOURCES[index]["properties"]["url"] filename = download_corpus(url, f"{folder_name}.zip") folder_list.append(uncompress_corpus(filename, output_folder)) else: logging.error("No corpus selected. Nothing will be downloaded") return folder_list
def download_corpora(corpus_indices=None, else: url = CORPORA_SOURCES[index]["properties"]["url"] filename = download_corpus(url, f"{folder_name}.zip") folder_list.append(uncompress_corpus(filename, folder_path)) else: logging.error("No corpus selected. Nothing will be downloaded") return folder_list
2,363
https://:@github.com/poqweur/ctec-utils.git
28470709205c35754325af5e817fded28921a389
@@ -122,7 +122,7 @@ class OraclePool(object): result_db = cursor.execute(sql, param) if commit: conn.commit() - result = result_db.rowcount + result = cursor.rowcount else: result = result_db.fetchall() except Exception as e:
ctec_utils/Database.py
ReplaceText(target='cursor' @(125,25)->(125,34))
class OraclePool(object): result_db = cursor.execute(sql, param) if commit: conn.commit() result = result_db.rowcount else: result = result_db.fetchall() except Exception as e:
class OraclePool(object): result_db = cursor.execute(sql, param) if commit: conn.commit() result = cursor.rowcount else: result = result_db.fetchall() except Exception as e:
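The fix reads the affected-row count from the cursor instead of from whatever `execute()` returned, which is not portable across DB-API drivers. The sqlite3 sketch below illustrates the same DB-API idiom; sqlite3 is only a stand-in for the Oracle pool in the record.

```python
import sqlite3

conn = sqlite3.connect(":memory:")
cur = conn.cursor()
cur.execute("CREATE TABLE items (name TEXT)")
cur.execute("INSERT INTO items VALUES (?)", ("widget",))
conn.commit()

# The number of affected rows lives on the cursor object; relying on
# the return value of execute() is driver-dependent.
print(cur.rowcount)   # 1
```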
2,364
https://:@github.com/fsepy/sfeprapy.git
ffdf8d512d6ca26a685f58fceac853d9ac9241b6
@@ -137,7 +137,7 @@ def dict_flatten(dict_in: dict): else: dict_out[k] = dict_in[k] - return dict_in + return dict_out def main(x: dict, num_samples: int):
sfeprapy/func/mcs_gen.py
ReplaceText(target='dict_out' @(140,11)->(140,18))
def dict_flatten(dict_in: dict): else: dict_out[k] = dict_in[k] return dict_in def main(x: dict, num_samples: int):
def dict_flatten(dict_in: dict): else: dict_out[k] = dict_in[k] return dict_out def main(x: dict, num_samples: int):
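Here the function built a flattened copy in `dict_out` but returned the untouched input. A hypothetical re-creation of the shape — the key-joining scheme is invented and not sfeprapy's actual one.

```python
def dict_flatten(dict_in: dict) -> dict:
    dict_out = {}
    for k, v in dict_in.items():
        if isinstance(v, dict):
            # One level of flattening is enough to show the pattern.
            for sub_k, sub_v in v.items():
                dict_out[f"{k}:{sub_k}"] = sub_v
        else:
            dict_out[k] = v
    # Return the accumulator; returning dict_in (the original bug)
    # silently discards all the work above.
    return dict_out

print(dict_flatten({"a": 1, "b": {"c": 2, "d": 3}}))
# {'a': 1, 'b:c': 2, 'b:d': 3}
```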
2,365
https://:@github.com/galias11/nlp_model_gen.git
7854eb5aac80b1f6d24bbdd7d319bb9ebb4e429a
@@ -173,7 +173,7 @@ class ModelManagerController: try: Logger.log('L-0021') custom_model = self.__initialize_custom_model() - new_model = Model(model_id, model_name, description, author, model_name, analyzer_rule_set) + new_model = Model(model_id, model_name, description, author, model_id, analyzer_rule_set) new_model.set_reference(custom_model) Logger.log('L-0022') self.__apply_tokenizer_exceptions(new_model, tokenizer_exceptions_path)
nlp_model_gen/packages/modelManager/ModelManagerController.py
ReplaceText(target='model_id' @(176,73)->(176,83))
class ModelManagerController: try: Logger.log('L-0021') custom_model = self.__initialize_custom_model() new_model = Model(model_id, model_name, description, author, model_name, analyzer_rule_set) new_model.set_reference(custom_model) Logger.log('L-0022') self.__apply_tokenizer_exceptions(new_model, tokenizer_exceptions_path)
class ModelManagerController: try: Logger.log('L-0021') custom_model = self.__initialize_custom_model() new_model = Model(model_id, model_name, description, author, model_id, analyzer_rule_set) new_model.set_reference(custom_model) Logger.log('L-0022') self.__apply_tokenizer_exceptions(new_model, tokenizer_exceptions_path)
2,366
https://:@github.com/galias11/nlp_model_gen.git
f03a84b2eaa77db12a4d7698bb982a2be062566b
@@ -104,7 +104,7 @@ class Model: token_analyzer = Analyzer(self.__analyzer_rules_set) for sent in doc.sents: for token in sent: - generated_token = Token(token.lemma_, token.is_oov, token.pos_, token.sent, token.sentiment, token.tag_, sent.text) + generated_token = Token(token.lemma_, token.is_oov, token.pos_, token.sent, token.sentiment, token.tag_, token.text) token_analyzer.analyze_token(generated_token) if not only_positives or generated_token.is_positive(): results.append(generated_token)
nlp_model_gen/packages/modelManager/model/Model.py
ReplaceText(target='token' @(107,121)->(107,125))
class Model: token_analyzer = Analyzer(self.__analyzer_rules_set) for sent in doc.sents: for token in sent: generated_token = Token(token.lemma_, token.is_oov, token.pos_, token.sent, token.sentiment, token.tag_, sent.text) token_analyzer.analyze_token(generated_token) if not only_positives or generated_token.is_positive(): results.append(generated_token)
class Model: token_analyzer = Analyzer(self.__analyzer_rules_set) for sent in doc.sents: for token in sent: generated_token = Token(token.lemma_, token.is_oov, token.pos_, token.sent, token.sentiment, token.tag_, token.text) token_analyzer.analyze_token(generated_token) if not only_positives or generated_token.is_positive(): results.append(generated_token)
2,367
https://:@github.com/axelfahy/bff.git
0023bab225d0c2571fb47e12a1edf19d61396a5d
@@ -602,7 +602,7 @@ def plot_series(df: pd.DataFrame, column: str, groupby: str = '1S', .mean() .resample(groupby) .apply(sem) - if groupby == 'S' and groupby != '1S' else + if groupby != 'S' and groupby != '1S' else df[column].groupby('datetime').apply(sem)) ax.fill_between(x, df_plot - df_sem, df_plot + df_sem,
bff/fancy.py
ReplaceText(target='!=' @(605,33)->(605,35))
def plot_series(df: pd.DataFrame, column: str, groupby: str = '1S', .mean() .resample(groupby) .apply(sem) if groupby == 'S' and groupby != '1S' else df[column].groupby('datetime').apply(sem)) ax.fill_between(x, df_plot - df_sem, df_plot + df_sem,
def plot_series(df: pd.DataFrame, column: str, groupby: str = '1S', .mean() .resample(groupby) .apply(sem) if groupby != 'S' and groupby != '1S' else df[column].groupby('datetime').apply(sem)) ax.fill_between(x, df_plot - df_sem, df_plot + df_sem,
2,368
https://:@github.com/GearPlug/mercadolibre-python.git
3b0fa8eb47a81093e884a5699c2d25a1c700b1d0
@@ -401,7 +401,7 @@ class Client(object): _params = {'access_token': self.access_token} if params: _params.update(params) - response = requests.request(method, self.BASE_URL + endpoint, params=params, **kwargs) + response = requests.request(method, self.BASE_URL + endpoint, params=_params, **kwargs) return self._parse(response) def _parse(self, response):
mercadolibre/client.py
ReplaceText(target='_params' @(404,77)->(404,83))
class Client(object): _params = {'access_token': self.access_token} if params: _params.update(params) response = requests.request(method, self.BASE_URL + endpoint, params=params, **kwargs) return self._parse(response) def _parse(self, response):
class Client(object): _params = {'access_token': self.access_token} if params: _params.update(params) response = requests.request(method, self.BASE_URL + endpoint, params=_params, **kwargs) return self._parse(response) def _parse(self, response):
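The client merged the access token into `_params` and then sent the unmerged `params` anyway. A network-free sketch of just the merge step; the field names are illustrative.

```python
def build_request_params(access_token, params=None):
    _params = {"access_token": access_token}
    if params:
        _params.update(params)
    # Hand the merged dict onwards; passing the caller's `params`
    # instead (the original bug) loses the access token.
    return _params

print(build_request_params("tok-123", {"status": "active"}))
# {'access_token': 'tok-123', 'status': 'active'}
```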
2,369
https://:@bitbucket.org/jairhul/pytransport.git
1b8a3ebc993ea9e74429198fa3607e1bfe537601
@@ -270,7 +270,7 @@ def RemoveIllegals(line): """ illegal = ['"', '', '(', ')'] - linelist = [element for element in line if element in illegal] + linelist = [element for element in line if element not in illegal] line = _np.array(linelist) return line
pytransport/_General.py
ReplaceText(target=' not in ' @(273,54)->(273,58))
def RemoveIllegals(line): """ illegal = ['"', '', '(', ')'] linelist = [element for element in line if element in illegal] line = _np.array(linelist) return line
def RemoveIllegals(line): """ illegal = ['"', '', '(', ')'] linelist = [element for element in line if element not in illegal] line = _np.array(linelist) return line
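One flipped membership test turns "strip illegal characters" into "keep only the illegal ones". A tiny standalone version of the corrected comprehension, with made-up sample data:

```python
illegal = ['"', '', '(', ')']
line = ['DRIFT', '(', '1.0', ')']

# Keep the elements that are NOT illegal; with `in` the comprehension
# returned exactly the junk it was meant to remove.
cleaned = [element for element in line if element not in illegal]
print(cleaned)   # ['DRIFT', '1.0']
```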
2,370
https://:@github.com/lsst-sqre/jupyterlabdemo.git
f6240ef0aeebdfe6e62c3f1bcf79a0fb085febd6
@@ -480,7 +480,7 @@ class LSSTSpawner(namespacedkubespawner.NamespacedKubeSpawner): for vol in vollist: volname = self._get_volume_name_for_mountpoint(vol["mountpoint"]) shortname = vol["mountpoint"][1:].replace("/", "-") - if volname in already_vols: + if shortname in already_vols: self.log.info( "Volume '{}' already exists for pod.".format(volname)) continue
jupyterhub/sample_configs/20-spawner.py
ReplaceText(target='shortname' @(483,15)->(483,22))
class LSSTSpawner(namespacedkubespawner.NamespacedKubeSpawner): for vol in vollist: volname = self._get_volume_name_for_mountpoint(vol["mountpoint"]) shortname = vol["mountpoint"][1:].replace("/", "-") if volname in already_vols: self.log.info( "Volume '{}' already exists for pod.".format(volname)) continue
class LSSTSpawner(namespacedkubespawner.NamespacedKubeSpawner): for vol in vollist: volname = self._get_volume_name_for_mountpoint(vol["mountpoint"]) shortname = vol["mountpoint"][1:].replace("/", "-") if shortname in already_vols: self.log.info( "Volume '{}' already exists for pod.".format(volname)) continue
2,371
https://:@github.com/pyfarm/pyfarm-core.git
66dacd9725338a5f49d12ccee0e0ec8f9e5f8068
@@ -68,7 +68,7 @@ class Task(TaskModel): def __init__(self, job, frame, parent_task=None, state=None, priority=None, attempts=None, agent=None): # build parent job id - if not modelfor(job, TABLE_JOB): + if modelfor(job, TABLE_JOB): jobid = job.jobid if jobid is None: raise ValueError("`job` with null id provided")
models/task.py
ReplaceText(target='' @(71,11)->(71,15))
class Task(TaskModel): def __init__(self, job, frame, parent_task=None, state=None, priority=None, attempts=None, agent=None): # build parent job id if not modelfor(job, TABLE_JOB): jobid = job.jobid if jobid is None: raise ValueError("`job` with null id provided")
class Task(TaskModel): def __init__(self, job, frame, parent_task=None, state=None, priority=None, attempts=None, agent=None): # build parent job id if modelfor(job, TABLE_JOB): jobid = job.jobid if jobid is None: raise ValueError("`job` with null id provided")
2,372
https://:@github.com/pyfarm/pyfarm-core.git
47a4cc9232a09974dea7f246b96d0338a4a4339b
@@ -116,5 +116,5 @@ class Task(TaskModel): if priority is not None: self.priority = priority - if attempts is None: + if attempts is not None: self.attempts = attempts
models/task.py
ReplaceText(target=' is not ' @(119,19)->(119,23))
class Task(TaskModel): if priority is not None: self.priority = priority if attempts is None: self.attempts = attempts
class Task(TaskModel): if priority is not None: self.priority = priority if attempts is not None: self.attempts = attempts
2,373
https://:@github.com/amarouane-ABDLHAK/cumulus-process-py.git
3955f6f5628f0b5233ad19cf54303bd164f981f1
@@ -85,7 +85,7 @@ class Granule(object): m = re.match(self.inputs[f], os.path.basename(filename)) if m is not None: # does the file exist locally - if os.path.exists(f): + if os.path.exists(filename): self.local_in[f] = filename else: self.remote_in[f] = filename
cumulus/granule.py
ReplaceText(target='filename' @(88,34)->(88,35))
class Granule(object): m = re.match(self.inputs[f], os.path.basename(filename)) if m is not None: # does the file exist locally if os.path.exists(f): self.local_in[f] = filename else: self.remote_in[f] = filename
class Granule(object): m = re.match(self.inputs[f], os.path.basename(filename)) if m is not None: # does the file exist locally if os.path.exists(filename): self.local_in[f] = filename else: self.remote_in[f] = filename
2,374
https://:@github.com/CodeClubLux/TopCompiler.git
ec1cbd020e522f8e478000d7d898003972e11490
@@ -14,7 +14,7 @@ class Enum(Node): args = self.const[name] names = [codegen.getName() for _ in args] codegen.inFunction() - if len(args) > 0: + if len(names) > 0: codegen.append("function "+self.package+"_"+name+"(") codegen.append(",".join(names)) codegen.append("){return ["+str(count)+","+",".join(names)+"]}")
AST/Enum.py
ReplaceText(target='names' @(17,19)->(17,23))
class Enum(Node): args = self.const[name] names = [codegen.getName() for _ in args] codegen.inFunction() if len(args) > 0: codegen.append("function "+self.package+"_"+name+"(") codegen.append(",".join(names)) codegen.append("){return ["+str(count)+","+",".join(names)+"]}")
class Enum(Node): args = self.const[name] names = [codegen.getName() for _ in args] codegen.inFunction() if len(names) > 0: codegen.append("function "+self.package+"_"+name+"(") codegen.append(",".join(names)) codegen.append("){return ["+str(count)+","+",".join(names)+"]}")
2,375
https://:@github.com/GIScience/openpoiservice.git
5a686db7a201b52f836e824910d9218bd2ff790b
@@ -171,7 +171,7 @@ class QueryBuilder(object): if tag in filters: - filters.append(query.c.key == tag.lower()) + filters_list.append(query.c.key == tag.lower()) if settings['filterable'] == 'like': filters_list.append(query.c.value.like('%' + filters[tag].lower() + '%'))
openpoiservice/server/api/query_builder.py
ReplaceText(target='filters_list' @(174,16)->(174,23))
class QueryBuilder(object): if tag in filters: filters.append(query.c.key == tag.lower()) if settings['filterable'] == 'like': filters_list.append(query.c.value.like('%' + filters[tag].lower() + '%'))
class QueryBuilder(object): if tag in filters: filters_list.append(query.c.key == tag.lower()) if settings['filterable'] == 'like': filters_list.append(query.c.value.like('%' + filters[tag].lower() + '%'))
2,376
https://:@github.com/theblackcat102/jieba-tw.git
5270ed66ff64b2001c1bf5c4ba927fec09189e33
@@ -366,7 +366,7 @@ class Tokenizer(object): f = open(f, 'rb') for lineno, ln in enumerate(f, 1): line = ln.strip() - if not isinstance(f, text_type): + if not isinstance(line, text_type): try: line = line.decode('utf-8').lstrip('\ufeff') except UnicodeDecodeError:
jieba/__init__.py
ReplaceText(target='line' @(369,30)->(369,31))
class Tokenizer(object): f = open(f, 'rb') for lineno, ln in enumerate(f, 1): line = ln.strip() if not isinstance(f, text_type): try: line = line.decode('utf-8').lstrip('\ufeff') except UnicodeDecodeError:
class Tokenizer(object): f = open(f, 'rb') for lineno, ln in enumerate(f, 1): line = ln.strip() if not isinstance(line, text_type): try: line = line.decode('utf-8').lstrip('\ufeff') except UnicodeDecodeError:
2,377
https://:@github.com/mozilla/measure-noise.git
b100399b2d650a794f50c897dfb2ec3462ad814f
@@ -103,7 +103,7 @@ def process( # EG https://treeherder.mozilla.org/perf.html#/graphs?highlightAlerts=1&series=mozilla-central,fee739b45f7960e4a520d8e0bd781dd9d0a3bec4,1,10&timerange=31536000 url = "https://treeherder.mozilla.org/perf.html#/graphs?" + value2url_param({ "highlightAlerts": 1, - "series": [sig.repository, sig.id, 1, coalesce(sig.framework, sig.framework_id)], + "series": [sig.repository, sig.id, 1, coalesce(sig.framework_id, sig.framework)], "timerange": 31536000, })
measure_noise/analysis.py
ArgSwap(idxs=0<->1 @(106,46)->(106,54))
def process( # EG https://treeherder.mozilla.org/perf.html#/graphs?highlightAlerts=1&series=mozilla-central,fee739b45f7960e4a520d8e0bd781dd9d0a3bec4,1,10&timerange=31536000 url = "https://treeherder.mozilla.org/perf.html#/graphs?" + value2url_param({ "highlightAlerts": 1, "series": [sig.repository, sig.id, 1, coalesce(sig.framework, sig.framework_id)], "timerange": 31536000, })
def process( # EG https://treeherder.mozilla.org/perf.html#/graphs?highlightAlerts=1&series=mozilla-central,fee739b45f7960e4a520d8e0bd781dd9d0a3bec4,1,10&timerange=31536000 url = "https://treeherder.mozilla.org/perf.html#/graphs?" + value2url_param({ "highlightAlerts": 1, "series": [sig.repository, sig.id, 1, coalesce(sig.framework_id, sig.framework)], "timerange": 31536000, })
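An argument swap only matters when it changes behaviour; for a COALESCE-style helper the first non-None argument wins, so the order decides which field takes precedence. The helper below is a generic stand-in, not the actual implementation used by measure-noise.

```python
def coalesce(*values):
    # SQL-style COALESCE: return the first argument that is not None.
    for value in values:
        if value is not None:
            return value
    return None

framework_id, framework = 1, "talos"
# Swapping the arguments changes which field is preferred when both are set.
print(coalesce(framework_id, framework))   # 1
print(coalesce(framework, framework_id))   # talos
```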
2,378
https://:@github.com/USGS-WiM/WIMLib.git
f9f74b29ed1dfc901b31e3df81f9f2459918dc4e
@@ -93,7 +93,7 @@ class MapLayer(object): raise Exception(datasetPath +" doesn't exist") #test for schema lock, before continue trys=0 - while arcpy.TestSchemaLock(datasetPath) or trys>6: + while arcpy.TestSchemaLock(datasetPath) or trys<6: time.sleep(10) trys+=1 #next
WIMLib/MapLayer.py
ReplaceText(target='<' @(96,59)->(96,60))
class MapLayer(object): raise Exception(datasetPath +" doesn't exist") #test for schema lock, before continue trys=0 while arcpy.TestSchemaLock(datasetPath) or trys>6: time.sleep(10) trys+=1 #next
class MapLayer(object): raise Exception(datasetPath +" doesn't exist") #test for schema lock, before continue trys=0 while arcpy.TestSchemaLock(datasetPath) or trys<6: time.sleep(10) trys+=1 #next
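The flipped comparison is what bounds the wait: with `trys>6` the second term becomes permanently true once the counter passes six, so the `or` keeps the loop alive instead of cutting it off. The sketch below is a generic bounded-retry loop under that reading; it does not reproduce the arcpy schema-lock logic, and the probe and delay are stand-ins.

```python
import time

def wait_until(probe, max_tries=6, delay=0.0):
    tries = 0
    ok = probe()
    # Retry while the probe still fails AND we are under the budget;
    # an unbounded or mis-ordered condition never limits the loop.
    while not ok and tries < max_tries:
        time.sleep(delay)
        tries += 1
        ok = probe()
    return ok

results = iter([False, False, True])
print(wait_until(lambda: next(results)))   # True, after two retries
```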
2,379
https://:@github.com/xiawu/newchain-web3.py.git
b253f8a8d55a087800e8e5b0947e7972a1f8258d
@@ -31,7 +31,7 @@ def pad_right(string, chars, filler="0"): def is_prefixed(value, prefix): return value.startswith( - force_bytes(prefix) if is_bytes(prefix) else force_text(prefix) + force_bytes(prefix) if is_bytes(value) else force_text(prefix) )
web3/utils/formatting.py
ReplaceText(target='value' @(34,40)->(34,46))
def pad_right(string, chars, filler="0"): def is_prefixed(value, prefix): return value.startswith( force_bytes(prefix) if is_bytes(prefix) else force_text(prefix) )
def pad_right(string, chars, filler="0"): def is_prefixed(value, prefix): return value.startswith( force_bytes(prefix) if is_bytes(value) else force_text(prefix) )
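The prefix has to be coerced to match the type of the value being tested, so the type check must look at `value`, not at the prefix itself. A simplified sketch of that idea — it is not web3's real helper, which goes through force_bytes/force_text.

```python
def is_prefixed(value, prefix):
    # Decide bytes-vs-text from the *value*; keying off the prefix's own
    # type (the original bug) mixes bytes and str inside startswith().
    if isinstance(value, bytes):
        prefix = prefix if isinstance(prefix, bytes) else prefix.encode()
    else:
        prefix = prefix if isinstance(prefix, str) else prefix.decode()
    return value.startswith(prefix)

print(is_prefixed(b"0xdeadbeef", "0x"))   # True
print(is_prefixed("0xdeadbeef", b"0x"))   # True
```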
2,380
https://:@github.com/xiawu/newchain-web3.py.git
6c2e459fbb1c3e9cf665b8138744510f2f797149
@@ -143,7 +143,7 @@ def outputBlockFormatter(block): if is_array(block.get("transactions")): for item in block["transactions"]: - if is_string(item): + if not is_string(item): item = outputTransactionFormatter(item) return block
web3/formatters.py
ReplaceText(target='not ' @(146,15)->(146,15))
def outputBlockFormatter(block): if is_array(block.get("transactions")): for item in block["transactions"]: if is_string(item): item = outputTransactionFormatter(item) return block
def outputBlockFormatter(block): if is_array(block.get("transactions")): for item in block["transactions"]: if not is_string(item): item = outputTransactionFormatter(item) return block
2,381
https://:@github.com/xiawu/newchain-web3.py.git
9f8282b202f17f1e98a305b49657b7ff2387b85a
@@ -611,7 +611,7 @@ def call_contract_function(contract=None, if transaction is None: call_transaction = {} else: - call_transaction = dict(**call_transaction) + call_transaction = dict(**transaction) if not arguments: arguments = []
web3/contract.py
ReplaceText(target='transaction' @(614,34)->(614,50))
def call_contract_function(contract=None, if transaction is None: call_transaction = {} else: call_transaction = dict(**call_transaction) if not arguments: arguments = []
def call_contract_function(contract=None, if transaction is None: call_transaction = {} else: call_transaction = dict(**transaction) if not arguments: arguments = []
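In the else-branch the code copied a name that had not been bound yet; the copy has to come from the incoming `transaction`. A minimal reproduction of the corrected shape — the default field is invented for illustration.

```python
def prepare_call_transaction(transaction=None):
    if transaction is None:
        call_transaction = {}
    else:
        # Copy the caller's dict so later mutations don't leak back;
        # dict(**call_transaction) here (the original bug) reads a name
        # that does not exist yet in this branch.
        call_transaction = dict(**transaction)
    call_transaction.setdefault("value", 0)
    return call_transaction

original = {"to": "0xabc"}
print(prepare_call_transaction(original))   # {'to': '0xabc', 'value': 0}
print(original)                             # {'to': '0xabc'}  (unchanged)
```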
2,382
https://:@github.com/Elizafox/taillight.git
731c28874dd7aa29f59a862e6b01f4ea8010978e
@@ -131,7 +131,7 @@ class Signal: with self._slots_lock: for slot in self.slots: if slot.function is function: - ret.append(function) + ret.append(slot) if ret: return ret
taillight/signal.py
ReplaceText(target='slot' @(134,31)->(134,39))
class Signal: with self._slots_lock: for slot in self.slots: if slot.function is function: ret.append(function) if ret: return ret
class Signal: with self._slots_lock: for slot in self.slots: if slot.function is function: ret.append(slot) if ret: return ret
2,383
https://:@github.com/Fak3/minidjango.git
659ab9846e81d95bb75dbb3c00147324bf0d6541
@@ -22,7 +22,7 @@ def login(request): else: errors = {} response = HttpResponse() - response.session.set_test_cookie() + request.session.set_test_cookie() t = template_loader.get_template('registration/login') c = Context(request, { 'form': formfields.FormWrapper(manipulator, request.POST, errors),
django/views/auth/login.py
ReplaceText(target='request' @(25,4)->(25,12))
def login(request): else: errors = {} response = HttpResponse() response.session.set_test_cookie() t = template_loader.get_template('registration/login') c = Context(request, { 'form': formfields.FormWrapper(manipulator, request.POST, errors),
def login(request): else: errors = {} response = HttpResponse() request.session.set_test_cookie() t = template_loader.get_template('registration/login') c = Context(request, { 'form': formfields.FormWrapper(manipulator, request.POST, errors),
2,384
https://:@github.com/Fak3/minidjango.git
34655a3e7816d6a8e5da6b3fd613b49b454a4691
@@ -227,7 +227,7 @@ class DateFormat(TimeFormat): week_number = 1 else: j = day_of_year + (7 - weekday) + (jan1_weekday - 1) - week_number = j / 7 + week_number = j // 7 if jan1_weekday > 4: week_number -= 1 return week_number
django/utils/dateformat.py
ReplaceText(target='//' @(230,32)->(230,33))
class DateFormat(TimeFormat): week_number = 1 else: j = day_of_year + (7 - weekday) + (jan1_weekday - 1) week_number = j / 7 if jan1_weekday > 4: week_number -= 1 return week_number
class DateFormat(TimeFormat): week_number = 1 else: j = day_of_year + (7 - weekday) + (jan1_weekday - 1) week_number = j // 7 if jan1_weekday > 4: week_number -= 1 return week_number
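Under Python 3, `/` is true division, so the week-number arithmetic needs `//` to stay an integer. A two-line illustration with a made-up intermediate value:

```python
j = 17  # day-of-year style intermediate, for illustration only
print(j / 7)    # 2.428... -> a float, wrong as a week number
print(j // 7)   # 2        -> floor division keeps it an integer
```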
2,385
https://:@github.com/Fak3/minidjango.git
a97648a7e03fb95b09e888e5d59d82d57fb289b7
@@ -105,7 +105,7 @@ class DecoratorsTest(TestCase): """ def my_view(request): return "response" - my_view_cached = cache_page(123, my_view) + my_view_cached = cache_page(my_view, 123) self.assertEqual(my_view_cached(HttpRequest()), "response") class MethodDecoratorAdapterTests(TestCase):
tests/regressiontests/decorators/tests.py
ArgSwap(idxs=0<->1 @(108,25)->(108,35))
class DecoratorsTest(TestCase): """ def my_view(request): return "response" my_view_cached = cache_page(123, my_view) self.assertEqual(my_view_cached(HttpRequest()), "response") class MethodDecoratorAdapterTests(TestCase):
class DecoratorsTest(TestCase): """ def my_view(request): return "response" my_view_cached = cache_page(my_view, 123) self.assertEqual(my_view_cached(HttpRequest()), "response") class MethodDecoratorAdapterTests(TestCase):
2,386
https://:@github.com/Fak3/minidjango.git
b2050ff546da4164f90a795e55d7d8c55981783d
@@ -169,7 +169,7 @@ class SQLCompiler(object): if isinstance(col, (list, tuple)): alias, column = col table = self.query.alias_map[alias][TABLE_NAME] - if table in only_load and col not in only_load[table]: + if table in only_load and column not in only_load[table]: continue r = '%s.%s' % (qn(alias), qn(column)) if with_aliases:
django/db/models/sql/compiler.py
ReplaceText(target='column' @(172,46)->(172,49))
class SQLCompiler(object): if isinstance(col, (list, tuple)): alias, column = col table = self.query.alias_map[alias][TABLE_NAME] if table in only_load and col not in only_load[table]: continue r = '%s.%s' % (qn(alias), qn(column)) if with_aliases:
class SQLCompiler(object): if isinstance(col, (list, tuple)): alias, column = col table = self.query.alias_map[alias][TABLE_NAME] if table in only_load and column not in only_load[table]: continue r = '%s.%s' % (qn(alias), qn(column)) if with_aliases:
2,387
https://:@github.com/Fak3/minidjango.git
cfba2460370a6d1808b78e2ba0709ea5c8b7e773
@@ -42,7 +42,7 @@ def check_settings(base_url=None): Checks if the staticfiles settings have sane values. """ - if base_url is not None: + if base_url is None: base_url = settings.STATIC_URL if not base_url: raise ImproperlyConfigured(
django/contrib/staticfiles/utils.py
ReplaceText(target=' is ' @(45,15)->(45,23))
def check_settings(base_url=None): Checks if the staticfiles settings have sane values. """ if base_url is not None: base_url = settings.STATIC_URL if not base_url: raise ImproperlyConfigured(
def check_settings(base_url=None): Checks if the staticfiles settings have sane values. """ if base_url is None: base_url = settings.STATIC_URL if not base_url: raise ImproperlyConfigured(
2,388
https://:@github.com/Fak3/minidjango.git
d72d5ce8274992ce01e39f866a7a250bc459eefe
@@ -37,7 +37,7 @@ class GeoSQLCompiler(compiler.SQLCompiler): if isinstance(col, (list, tuple)): alias, column = col table = self.query.alias_map[alias][TABLE_NAME] - if table in only_load and col not in only_load[table]: + if table in only_load and column not in only_load[table]: continue r = self.get_field_select(field, alias, column) if with_aliases:
django/contrib/gis/db/models/sql/compiler.py
ReplaceText(target='column' @(40,46)->(40,49))
class GeoSQLCompiler(compiler.SQLCompiler): if isinstance(col, (list, tuple)): alias, column = col table = self.query.alias_map[alias][TABLE_NAME] if table in only_load and col not in only_load[table]: continue r = self.get_field_select(field, alias, column) if with_aliases:
class GeoSQLCompiler(compiler.SQLCompiler): if isinstance(col, (list, tuple)): alias, column = col table = self.query.alias_map[alias][TABLE_NAME] if table in only_load and column not in only_load[table]: continue r = self.get_field_select(field, alias, column) if with_aliases:
2,389
https://:@github.com/Fak3/minidjango.git
6ecbac21a9017a53fe18ac81c9c1d2f28185a292
@@ -111,5 +111,5 @@ class OSMWidget(BaseGeometryWidget): return 900913 def render(self, name, value, attrs=None): - return super(self, OSMWidget).render(name, value, + return super(OSMWidget, self).render(name, value, {'default_lon': self.default_lon, 'default_lat': self.default_lat})
django/contrib/gis/forms/widgets.py
ArgSwap(idxs=0<->1 @(114,15)->(114,20))
class OSMWidget(BaseGeometryWidget): return 900913 def render(self, name, value, attrs=None): return super(self, OSMWidget).render(name, value, {'default_lon': self.default_lon, 'default_lat': self.default_lat})
class OSMWidget(BaseGeometryWidget): return 900913 def render(self, name, value, attrs=None): return super(OSMWidget, self).render(name, value, {'default_lon': self.default_lon, 'default_lat': self.default_lat})
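In the two-argument form of `super()` the class comes first and the instance second; swapping them fails at call time. A self-contained sketch with invented widget classes:

```python
class BaseWidget:
    def render(self, name):
        return f"<widget name={name}>"

class OSMLikeWidget(BaseWidget):
    def render(self, name):
        # super(Class, instance): class first, then the instance.
        # super(self, OSMLikeWidget) (the original bug) raises TypeError.
        return super(OSMLikeWidget, self).render(name) + " [osm]"

print(OSMLikeWidget().render("map"))   # <widget name=map> [osm]
```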
2,390
https://:@github.com/Fak3/minidjango.git
86c248aa646183ef4a1cb407bb3e4cb597272f63
@@ -575,7 +575,7 @@ class SQLCompiler(object): for order, order_params in ordering_group_by: # Even if we have seen the same SQL string, it might have # different params, so, we add same SQL in "has params" case. - if order not in seen or params: + if order not in seen or order_params: result.append(order) params.extend(order_params) seen.add(order)
django/db/models/sql/compiler.py
ReplaceText(target='order_params' @(578,44)->(578,50))
class SQLCompiler(object): for order, order_params in ordering_group_by: # Even if we have seen the same SQL string, it might have # different params, so, we add same SQL in "has params" case. if order not in seen or params: result.append(order) params.extend(order_params) seen.add(order)
class SQLCompiler(object): for order, order_params in ordering_group_by: # Even if we have seen the same SQL string, it might have # different params, so, we add same SQL in "has params" case. if order not in seen or order_params: result.append(order) params.extend(order_params) seen.add(order)
2,391
https://:@github.com/Fak3/minidjango.git
fddb0131d37109c809ec391e1a134ef1d9e442a7
@@ -57,7 +57,7 @@ def check_password(password, encoded, setter=None, preferred='default'): must_update = hasher.algorithm != preferred.algorithm if not must_update: - must_update = hasher.must_update(encoded) + must_update = preferred.must_update(encoded) is_correct = hasher.verify(password, encoded) if setter and is_correct and must_update: setter(password)
django/contrib/auth/hashers.py
ReplaceText(target='preferred' @(60,22)->(60,28))
def check_password(password, encoded, setter=None, preferred='default'): must_update = hasher.algorithm != preferred.algorithm if not must_update: must_update = hasher.must_update(encoded) is_correct = hasher.verify(password, encoded) if setter and is_correct and must_update: setter(password)
def check_password(password, encoded, setter=None, preferred='default'): must_update = hasher.algorithm != preferred.algorithm if not must_update: must_update = preferred.must_update(encoded) is_correct = hasher.verify(password, encoded) if setter and is_correct and must_update: setter(password)
2,392
https://:@github.com/Fak3/minidjango.git
e8223b889aab3b5ac0c2312eb9ee2307ea635c97
@@ -228,7 +228,7 @@ class GenericRelationTests(TestCase): # then wrong results are produced here as the link to b will also match # (b and hs1 have equal pks). self.assertEqual(qs.count(), 1) - self.assertEqual(qs[0].links__sum, l.id) + self.assertEqual(qs[0].links__sum, hs1.id) l.delete() # Now if we don't have proper left join, we will not produce any # results at all here.
tests/generic_relations_regress/tests.py
ReplaceText(target='hs1' @(231,43)->(231,44))
class GenericRelationTests(TestCase): # then wrong results are produced here as the link to b will also match # (b and hs1 have equal pks). self.assertEqual(qs.count(), 1) self.assertEqual(qs[0].links__sum, l.id) l.delete() # Now if we don't have proper left join, we will not produce any # results at all here.
class GenericRelationTests(TestCase): # then wrong results are produced here as the link to b will also match # (b and hs1 have equal pks). self.assertEqual(qs.count(), 1) self.assertEqual(qs[0].links__sum, hs1.id) l.delete() # Now if we don't have proper left join, we will not produce any # results at all here.
2,393
https://:@github.com/Fak3/minidjango.git
3074c5b19e2da5f7a5359c3cf3c5308eb194cdf9
@@ -112,7 +112,7 @@ class ClassDecoratedTestCase(ClassDecoratedTestCaseSuper): @classmethod def setUpClass(cls): - super(cls, ClassDecoratedTestCase).setUpClass() + super(ClassDecoratedTestCase, cls).setUpClass() cls.foo = getattr(settings, 'TEST', 'BUG') def test_override(self):
tests/settings_tests/tests.py
ArgSwap(idxs=0<->1 @(115,8)->(115,13))
class ClassDecoratedTestCase(ClassDecoratedTestCaseSuper): @classmethod def setUpClass(cls): super(cls, ClassDecoratedTestCase).setUpClass() cls.foo = getattr(settings, 'TEST', 'BUG') def test_override(self):
class ClassDecoratedTestCase(ClassDecoratedTestCaseSuper): @classmethod def setUpClass(cls): super(ClassDecoratedTestCase, cls).setUpClass() cls.foo = getattr(settings, 'TEST', 'BUG') def test_override(self):
2,394
https://:@github.com/Fak3/minidjango.git
c2b4967e76fd671e6199e4dd54d2a2c1f096b8eb
@@ -23,7 +23,7 @@ def import_string(dotted_path): return getattr(module, class_name) except AttributeError: msg = 'Module "%s" does not define a "%s" attribute/class' % ( - dotted_path, class_name) + module_path, class_name) six.reraise(ImportError, ImportError(msg), sys.exc_info()[2])
django/utils/module_loading.py
ReplaceText(target='module_path' @(26,12)->(26,23))
def import_string(dotted_path): return getattr(module, class_name) except AttributeError: msg = 'Module "%s" does not define a "%s" attribute/class' % ( dotted_path, class_name) six.reraise(ImportError, ImportError(msg), sys.exc_info()[2])
def import_string(dotted_path): return getattr(module, class_name) except AttributeError: msg = 'Module "%s" does not define a "%s" attribute/class' % ( module_path, class_name) six.reraise(ImportError, ImportError(msg), sys.exc_info()[2])
2,395
https://:@github.com/Fak3/minidjango.git
abcdb237bb313d116ce2ac8e90f79f61429afc70
@@ -31,7 +31,7 @@ class DatabaseCreation(BaseDatabaseCreation): try: if verbosity >= 1: print("Destroying old test database for alias %s..." % ( - self._get_database_display_str(target_database_name, verbosity), + self._get_database_display_str(verbosity, target_database_name), )) cursor.execute("DROP DATABASE %s" % qn(target_database_name)) cursor.execute("CREATE DATABASE %s" % qn(target_database_name))
django/db/backends/mysql/creation.py
ArgSwap(idxs=0<->1 @(34,28)->(34,58))
class DatabaseCreation(BaseDatabaseCreation): try: if verbosity >= 1: print("Destroying old test database for alias %s..." % ( self._get_database_display_str(target_database_name, verbosity), )) cursor.execute("DROP DATABASE %s" % qn(target_database_name)) cursor.execute("CREATE DATABASE %s" % qn(target_database_name))
class DatabaseCreation(BaseDatabaseCreation): try: if verbosity >= 1: print("Destroying old test database for alias %s..." % ( self._get_database_display_str(verbosity, target_database_name), )) cursor.execute("DROP DATABASE %s" % qn(target_database_name)) cursor.execute("CREATE DATABASE %s" % qn(target_database_name))
2,396
https://:@github.com/Fak3/minidjango.git
542b7f6c50df18f2aa201cf1de81577c1bee643c
@@ -50,7 +50,7 @@ class SeparateDatabaseAndState(Operation): to_state = base_state.clone() for dbop in self.database_operations[:-(pos + 1)]: dbop.state_forwards(app_label, to_state) - from_state = base_state.clone() + from_state = to_state.clone() database_operation.state_forwards(app_label, from_state) database_operation.database_backwards(app_label, schema_editor, from_state, to_state)
django/db/migrations/operations/special.py
ReplaceText(target='to_state' @(53,25)->(53,35))
class SeparateDatabaseAndState(Operation): to_state = base_state.clone() for dbop in self.database_operations[:-(pos + 1)]: dbop.state_forwards(app_label, to_state) from_state = base_state.clone() database_operation.state_forwards(app_label, from_state) database_operation.database_backwards(app_label, schema_editor, from_state, to_state)
class SeparateDatabaseAndState(Operation): to_state = base_state.clone() for dbop in self.database_operations[:-(pos + 1)]: dbop.state_forwards(app_label, to_state) from_state = to_state.clone() database_operation.state_forwards(app_label, from_state) database_operation.database_backwards(app_label, schema_editor, from_state, to_state)
2,397
https://:@github.com/Fak3/minidjango.git
d5088f838d837fc9e3109c828f18511055f20bea
@@ -383,7 +383,7 @@ class CombinedExpression(Expression): return DurationExpression(self.lhs, self.connector, self.rhs).as_sql(compiler, connection) if (lhs_output and rhs_output and self.connector == self.SUB and lhs_output.get_internal_type() in {'DateField', 'DateTimeField', 'TimeField'} and - lhs_output.get_internal_type() == lhs_output.get_internal_type()): + lhs_output.get_internal_type() == rhs_output.get_internal_type()): return TemporalSubtraction(self.lhs, self.rhs).as_sql(compiler, connection) expressions = [] expression_params = []
django/db/models/expressions.py
ReplaceText(target='rhs_output' @(386,50)->(386,60))
class CombinedExpression(Expression): return DurationExpression(self.lhs, self.connector, self.rhs).as_sql(compiler, connection) if (lhs_output and rhs_output and self.connector == self.SUB and lhs_output.get_internal_type() in {'DateField', 'DateTimeField', 'TimeField'} and lhs_output.get_internal_type() == lhs_output.get_internal_type()): return TemporalSubtraction(self.lhs, self.rhs).as_sql(compiler, connection) expressions = [] expression_params = []
class CombinedExpression(Expression): return DurationExpression(self.lhs, self.connector, self.rhs).as_sql(compiler, connection) if (lhs_output and rhs_output and self.connector == self.SUB and lhs_output.get_internal_type() in {'DateField', 'DateTimeField', 'TimeField'} and lhs_output.get_internal_type() == rhs_output.get_internal_type()): return TemporalSubtraction(self.lhs, self.rhs).as_sql(compiler, connection) expressions = [] expression_params = []
2,398
https://:@github.com/Fak3/minidjango.git
67a6ba391bbcf1a4c6bb0c42cb17e4fc0530f6d2
@@ -42,7 +42,7 @@ class PasswordResetTokenGenerator: return False # Check the timestamp is within limit - if (self._num_days(self._today()) - ts) >= settings.PASSWORD_RESET_TIMEOUT_DAYS: + if (self._num_days(self._today()) - ts) > settings.PASSWORD_RESET_TIMEOUT_DAYS: return False return True
django/contrib/auth/tokens.py
ReplaceText(target='>' @(45,48)->(45,50))
class PasswordResetTokenGenerator: return False # Check the timestamp is within limit if (self._num_days(self._today()) - ts) >= settings.PASSWORD_RESET_TIMEOUT_DAYS: return False return True
class PasswordResetTokenGenerator: return False # Check the timestamp is within limit if (self._num_days(self._today()) - ts) > settings.PASSWORD_RESET_TIMEOUT_DAYS: return False return True
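The change relaxes an off-by-one boundary: a token aged exactly the timeout value should still be accepted. A tiny check of the two comparisons — the constant is an illustrative value, not Django's default.

```python
PASSWORD_RESET_TIMEOUT_DAYS = 3   # illustrative value only

def is_expired(age_days, inclusive_boundary):
    # inclusive_boundary=True mimics the old ">=" check,
    # False mimics the fixed ">" check.
    if inclusive_boundary:
        return age_days >= PASSWORD_RESET_TIMEOUT_DAYS
    return age_days > PASSWORD_RESET_TIMEOUT_DAYS

print(is_expired(3, inclusive_boundary=True))    # True  -> token rejected
print(is_expired(3, inclusive_boundary=False))   # False -> token still valid
```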
2,399
https://:@github.com/Fak3/minidjango.git
acc8dd4142ec81def9a73507120c0262ba6b1264
@@ -60,7 +60,7 @@ class RWLock: def writer_enters(self): with self.mutex: if self.active_writers == 0 and self.waiting_writers == 0 and self.active_readers == 0: - self.active_writers += 1 + self.active_writers = 1 self.can_write.release() else: self.waiting_writers += 1
django/utils/synch.py
ReplaceText(target='=' @(63,36)->(63,38))
class RWLock: def writer_enters(self): with self.mutex: if self.active_writers == 0 and self.waiting_writers == 0 and self.active_readers == 0: self.active_writers += 1 self.can_write.release() else: self.waiting_writers += 1
class RWLock: def writer_enters(self): with self.mutex: if self.active_writers == 0 and self.waiting_writers == 0 and self.active_readers == 0: self.active_writers = 1 self.can_write.release() else: self.waiting_writers += 1