Column schema of this dump (name, dtype, per-column value or string-length range):

    Unnamed: 0     int64    values 0 to 2.44k (row index)
    repo           string   lengths 32 to 81
    hash           string   lengths 40 to 40 (full commit SHA)
    diff           string   lengths 113 to 1.17k
    old_path       string   lengths 5 to 84
    rewrite        string   lengths 34 to 79
    initial_state  string   lengths 75 to 980
    final_state    string   lengths 76 to 980
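Each record below lists its fields in this column order: row index, repo URL, commit hash, a unified-diff hunk, old_path, a rewrite descriptor (ReplaceText or ArgSwap with character coordinates), then the flattened pre-fix snippet (initial_state) and the post-fix snippet (final_state). As a minimal sketch, assuming the rows are stored in a CSV named bugfix_pairs.csv (a placeholder; the actual storage format is not shown here), they could be loaded and sanity-checked with pandas:

    import pandas as pd

    # Placeholder path; the dataset's real file name and format are not given in this dump.
    df = pd.read_csv("bugfix_pairs.csv")

    expected_columns = ["Unnamed: 0", "repo", "hash", "diff",
                        "old_path", "rewrite", "initial_state", "final_state"]
    assert list(df.columns) == expected_columns

    # Every hash is a full 40-character commit SHA, and every record pairs
    # a pre-fix snippet with a distinct post-fix snippet.
    assert df["hash"].str.len().eq(40).all()
    assert (df["initial_state"] != df["final_state"]).all()

    row = df.iloc[0]
    print(row["repo"], row["hash"])
    print(row["old_path"])
    print(row["rewrite"])   # e.g. ReplaceText(target='pdb_inp2' @(192,31)->(192,39))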
2,000
https://:@github.com/cctbx/cctbx_project.git
0a50a4f43be57400f7aed41055b7ae401954724d
@@ -189,7 +189,7 @@ def exercise(): pdb_inp1 = iotbx.pdb.input(source_info=None, lines=test_pdb) pdb_inp2 = iotbx.pdb.input(source_info=None, lines=test_cif) model1 = mmtbx.model.manager(pdb_inp1) - model2 = mmtbx.model.manager(pdb_inp1) + model2 = mmtbx.model.manager(pdb_inp2) trans_obj1 = iotbx.ncs.input(hierarchy=model1.get_hierarchy()) trans_obj2 = iotbx.ncs.input(hierarchy=model2.get_hierarchy())
iotbx/pdb/tst_read_mtrix_records_from_cif.py
ReplaceText(target='pdb_inp2' @(192,31)->(192,39))
def exercise(): pdb_inp1 = iotbx.pdb.input(source_info=None, lines=test_pdb) pdb_inp2 = iotbx.pdb.input(source_info=None, lines=test_cif) model1 = mmtbx.model.manager(pdb_inp1) model2 = mmtbx.model.manager(pdb_inp1) trans_obj1 = iotbx.ncs.input(hierarchy=model1.get_hierarchy()) trans_obj2 = iotbx.ncs.input(hierarchy=model2.get_hierarchy())
def exercise(): pdb_inp1 = iotbx.pdb.input(source_info=None, lines=test_pdb) pdb_inp2 = iotbx.pdb.input(source_info=None, lines=test_cif) model1 = mmtbx.model.manager(pdb_inp1) model2 = mmtbx.model.manager(pdb_inp2) trans_obj1 = iotbx.ncs.input(hierarchy=model1.get_hierarchy()) trans_obj2 = iotbx.ncs.input(hierarchy=model2.get_hierarchy())
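The rewrite descriptor above, ReplaceText(target='pdb_inp2' @(192,31)->(192,39)), encodes the fix as a single textual substitution. Judging from this and the later records, the coordinates name a 1-based line number and a 0-based, end-exclusive column span on that line, and target is the replacement text. A minimal sketch of applying such a descriptor under that inferred convention (the helper and the usage snippet are illustrative, not part of the dataset or its tooling):

    import re

    _PATTERN = re.compile(
        r"ReplaceText\(target='(.*)' @\((\d+),(\d+)\)->\(\d+,(\d+)\)\)")

    def apply_replace_text(lines, descriptor):
        """Apply a ReplaceText descriptor to a list of source lines in place.

        Assumes 1-based line numbers and [start, end) column offsets, an
        inference from the records in this dump rather than a documented spec.
        """
        target, line_no, start, end = _PATTERN.match(descriptor).groups()
        line_no, start, end = int(line_no), int(start), int(end)
        old = lines[line_no - 1]
        lines[line_no - 1] = old[:start] + target + old[end:]
        return lines

    # Hypothetical one-line example (not taken from the records above):
    print(apply_replace_text(["x = foo(a, b)"],
                             "ReplaceText(target='bar' @(1,4)->(1,7))"))
    # -> ['x = bar(a, b)']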
2,001
https://:@github.com/cctbx/cctbx_project.git
46af355112348574b808a6b026a7f3cafdc8c745
@@ -965,7 +965,7 @@ class manager(object): atoms = atoms.select(~ias_selection) grm_geometry = self.get_restraints_manager().geometry grm_geometry.pair_proxies(sites_cart) - struct_conn_loop = grm_geometry.get_struct_conn_mmcif(atoms) + struct_conn_loop = grm_geometry.get_struct_conn_mmcif(hierarchy_to_output) cif_block.add_loop(struct_conn_loop) self.get_model_statistics_info() # outputting HELIX/SHEET records
mmtbx/model/model.py
ReplaceText(target='hierarchy_to_output' @(968,60)->(968,65))
class manager(object): atoms = atoms.select(~ias_selection) grm_geometry = self.get_restraints_manager().geometry grm_geometry.pair_proxies(sites_cart) struct_conn_loop = grm_geometry.get_struct_conn_mmcif(atoms) cif_block.add_loop(struct_conn_loop) self.get_model_statistics_info() # outputting HELIX/SHEET records
class manager(object): atoms = atoms.select(~ias_selection) grm_geometry = self.get_restraints_manager().geometry grm_geometry.pair_proxies(sites_cart) struct_conn_loop = grm_geometry.get_struct_conn_mmcif(hierarchy_to_output) cif_block.add_loop(struct_conn_loop) self.get_model_statistics_info() # outputting HELIX/SHEET records
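In each record the diff field is a single unified-diff hunk with exactly one removed and one added line (these are all one-token fixes); initial_state and final_state are the surrounding context with the old and new line respectively. Assuming the hunks keep their newlines in the underlying data (this dump collapses them onto one line), the changed pair could be pulled out with a few lines of Python; the helper name is illustrative:

    def changed_line_pair(diff_text):
        """Return (removed_line, added_line) from a one-change unified-diff hunk.

        Assumes the hunk text still contains its newlines, which this flattened
        preview does not show, and that it holds exactly one '-'/'+' pair.
        """
        lines = diff_text.splitlines()
        removed = [l[1:] for l in lines if l.startswith("-") and not l.startswith("---")]
        added = [l[1:] for l in lines if l.startswith("+") and not l.startswith("+++")]
        return removed[0], added[0]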
2,002
https://:@github.com/cctbx/cctbx_project.git
fb3e93f9dc70a7d6027cdf4fcf906f78ad39ccf6
@@ -244,7 +244,7 @@ def run(args, log=None, ccp4_map=None, mtz_dataset.add_miller_array( miller_array = f_obs.generate_r_free_flags(), column_root_label = "R-free-flags") - if not nohl and params.k_blur is not None and params.b_blur is None: + if not nohl and params.k_blur is not None and params.b_blur is not None: # convert phases into HL coefficeints broadcast(m="Convert phases into HL coefficients:", log=log) hl = get_hl(f_obs_cmpl=f_obs_cmpl, k_blur=params.k_blur, b_blur=params.b_blur)
mmtbx/command_line/map_to_structure_factors.py
ReplaceText(target=' is not ' @(247,61)->(247,65))
def run(args, log=None, ccp4_map=None, mtz_dataset.add_miller_array( miller_array = f_obs.generate_r_free_flags(), column_root_label = "R-free-flags") if not nohl and params.k_blur is not None and params.b_blur is None: # convert phases into HL coefficeints broadcast(m="Convert phases into HL coefficients:", log=log) hl = get_hl(f_obs_cmpl=f_obs_cmpl, k_blur=params.k_blur, b_blur=params.b_blur)
def run(args, log=None, ccp4_map=None, mtz_dataset.add_miller_array( miller_array = f_obs.generate_r_free_flags(), column_root_label = "R-free-flags") if not nohl and params.k_blur is not None and params.b_blur is not None: # convert phases into HL coefficeints broadcast(m="Convert phases into HL coefficients:", log=log) hl = get_hl(f_obs_cmpl=f_obs_cmpl, k_blur=params.k_blur, b_blur=params.b_blur)
2,003
https://:@github.com/cctbx/cctbx_project.git
534896eb6cac83fd73eec3cbc32a391e77ecdedc
@@ -512,7 +512,7 @@ def select_crystal_symmetry( if cs and not cs.is_nonsense() and not cs.is_empty(): is_similar_cs = cs0.is_similar_symmetry(cs, absolute_angle_tolerance=absolute_angle_tolerance, - absolute_length_tolerance=absolute_angle_tolerance) + absolute_length_tolerance=absolute_length_tolerance) if(not is_similar_cs): msg = "Crystal symmetry mismatch between different files.\n" msg += "%s %s\n" % (cs0.unit_cell(), cs0.space_group_info())
cctbx/crystal/__init__.py
ReplaceText(target='absolute_length_tolerance' @(515,37)->(515,61))
def select_crystal_symmetry( if cs and not cs.is_nonsense() and not cs.is_empty(): is_similar_cs = cs0.is_similar_symmetry(cs, absolute_angle_tolerance=absolute_angle_tolerance, absolute_length_tolerance=absolute_angle_tolerance) if(not is_similar_cs): msg = "Crystal symmetry mismatch between different files.\n" msg += "%s %s\n" % (cs0.unit_cell(), cs0.space_group_info())
def select_crystal_symmetry( if cs and not cs.is_nonsense() and not cs.is_empty(): is_similar_cs = cs0.is_similar_symmetry(cs, absolute_angle_tolerance=absolute_angle_tolerance, absolute_length_tolerance=absolute_length_tolerance) if(not is_similar_cs): msg = "Crystal symmetry mismatch between different files.\n" msg += "%s %s\n" % (cs0.unit_cell(), cs0.space_group_info())
2,004
https://:@github.com/youngershen/django-super-cache.git
89da4b6ee32fe55c483b3919f6f1ee307da37f7a
@@ -29,7 +29,7 @@ class FileBackend(BaseBackend): cache_file = self.cache_dir + key with open(cache_file, 'w') as f: - f.write(cache_file) + f.write(content) f.flush() def get(self, key):
django_super_cache/backends.py
ReplaceText(target='content' @(32,20)->(32,30))
class FileBackend(BaseBackend): cache_file = self.cache_dir + key with open(cache_file, 'w') as f: f.write(cache_file) f.flush() def get(self, key):
class FileBackend(BaseBackend): cache_file = self.cache_dir + key with open(cache_file, 'w') as f: f.write(content) f.flush() def get(self, key):
2,005
https://:@github.com/T-Eberle/tgbot.git
26a4dbd36902a1554c22f52f13f335a2b9e9bbe4
@@ -59,4 +59,4 @@ def singleradiocommand(wrapped): logger.exception(typo) MessageController.hide_keyboard(message, message.chat_id(), "Witzbold.") deleteconv(message) - return wrapped + return _wrapped
telegram/bot/decorators/singleradiocommand.py
ReplaceText(target='_wrapped' @(62,15)->(62,22))
def singleradiocommand(wrapped): logger.exception(typo) MessageController.hide_keyboard(message, message.chat_id(), "Witzbold.") deleteconv(message) return wrapped
def singleradiocommand(wrapped): logger.exception(typo) MessageController.hide_keyboard(message, message.chat_id(), "Witzbold.") deleteconv(message) return _wrapped
2,006
https://:@github.com/Azure-Developments/ezzybot.git
0ebe7d860712b23bde58ba10bf4933277fa1db9b
@@ -305,7 +305,7 @@ class ezzybot(Socket): if regex._thread: regex_thread = threading.Thread(target=self.run_trigger, args=(regex, wrappers.connection_wrapper(self), self.info)) regex_thread.daemon = True - plugin_thread.start() + regex_thread.start() else: self.run_trigger(regex, wrappers.connection_wrapper(self), self.info) if self.nick not in self.db['users'].keys():
ezzybot/bot.py
ReplaceText(target='regex_thread' @(308,32)->(308,45))
class ezzybot(Socket): if regex._thread: regex_thread = threading.Thread(target=self.run_trigger, args=(regex, wrappers.connection_wrapper(self), self.info)) regex_thread.daemon = True plugin_thread.start() else: self.run_trigger(regex, wrappers.connection_wrapper(self), self.info) if self.nick not in self.db['users'].keys():
class ezzybot(Socket): if regex._thread: regex_thread = threading.Thread(target=self.run_trigger, args=(regex, wrappers.connection_wrapper(self), self.info)) regex_thread.daemon = True regex_thread.start() else: self.run_trigger(regex, wrappers.connection_wrapper(self), self.info) if self.nick not in self.db['users'].keys():
2,007
https://:@bitbucket.org/shiumachi/sphinxcontrib-recentpages.git
22c35dbac880b95d121bedb3b9e255c5ce67e654
@@ -83,7 +83,7 @@ def get_file_list_ordered_by_mtime(target_dir, env): for docname in env.found_docs: abspath = env.doc2path(docname) mtime = os.path.getmtime(abspath) - res.append((abspath,mtime)) + res.append((docname,mtime)) res = list(set(res)) res.sort(cmp=lambda x,y: cmp(x[1], y[1]), reverse=True)
sphinx.recentpages/recentpages.py
ReplaceText(target='docname' @(86,20)->(86,27))
def get_file_list_ordered_by_mtime(target_dir, env): for docname in env.found_docs: abspath = env.doc2path(docname) mtime = os.path.getmtime(abspath) res.append((abspath,mtime)) res = list(set(res)) res.sort(cmp=lambda x,y: cmp(x[1], y[1]), reverse=True)
def get_file_list_ordered_by_mtime(target_dir, env): for docname in env.found_docs: abspath = env.doc2path(docname) mtime = os.path.getmtime(abspath) res.append((docname,mtime)) res = list(set(res)) res.sort(cmp=lambda x,y: cmp(x[1], y[1]), reverse=True)
2,008
https://:@github.com/frkhit/bl_wxpy.git
e2e191f226e74c9ea3547b94a5f4cf4bef2120dc
@@ -85,7 +85,7 @@ class SentMessage(object): """ from wxpy import Group - if isinstance(Group, self.receiver): + if isinstance(self.receiver, Group): return self.receiver.self @property
wxpy/api/messages/sent_message.py
ArgSwap(idxs=0<->1 @(88,11)->(88,21))
class SentMessage(object): """ from wxpy import Group if isinstance(Group, self.receiver): return self.receiver.self @property
class SentMessage(object): """ from wxpy import Group if isinstance(self.receiver, Group): return self.receiver.self @property
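Records 2,008 and 2,009 use the other descriptor seen in this dump, ArgSwap. Here ArgSwap(idxs=0<->1 @(88,11)->(88,21)) appears to mark the span of the callable being invoked (isinstance on line 88), and idxs names the two positional arguments to exchange. A rough sketch of that interpretation using Python's ast module (an illustrative stand-in that locates the call by name rather than by line/column, and not the tooling behind these records; requires Python 3.9+ for ast.unparse):

    import ast

    def swap_call_args(source, func_name, i, j):
        """Swap positional arguments i and j of the first call to func_name."""
        tree = ast.parse(source)
        for node in ast.walk(tree):
            if (isinstance(node, ast.Call)
                    and isinstance(node.func, ast.Name)
                    and node.func.id == func_name):
                node.args[i], node.args[j] = node.args[j], node.args[i]
                break
        return ast.unparse(tree)

    # Hypothetical usage mirroring the fix in record 2,008:
    print(swap_call_args("if isinstance(Group, self.receiver):\n    pass",
                         "isinstance", 0, 1))
    # -> if isinstance(self.receiver, Group):
    #        pass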
2,009
https://:@github.com/frkhit/bl_wxpy.git
c782a43af904ea346f4482f2e9e6617bece06166
@@ -148,7 +148,7 @@ class Chat(object): :param friend_or_mp: 好友对象或公众号对象 """ - card_name = friend_or_mp.nickname if isinstance(Chat, friend_or_mp) else friend_or_mp + card_name = friend_or_mp.nickname if isinstance(friend_or_mp, Chat) else friend_or_mp logger.info('sending {} to {}: {}'.format(CARD, self, card_name)) return self.core.send(
wxpy/api/chats/chat.py
ArgSwap(idxs=0<->1 @(151,45)->(151,55))
class Chat(object): :param friend_or_mp: 好友对象或公众号对象 """ card_name = friend_or_mp.nickname if isinstance(Chat, friend_or_mp) else friend_or_mp logger.info('sending {} to {}: {}'.format(CARD, self, card_name)) return self.core.send(
class Chat(object): :param friend_or_mp: 好友对象或公众号对象 """ card_name = friend_or_mp.nickname if isinstance(friend_or_mp, Chat) else friend_or_mp logger.info('sending {} to {}: {}'.format(CARD, self, card_name)) return self.core.send(
2,010
https://:@github.com/kmarilleau/pytest-django-models.git
31fa2011c76bd1581d24bef19f422cebf644b03d
@@ -237,7 +237,7 @@ class ModelGenerator: # Ignore Special Methods. or is_dunder(attr) # Ignore Functions. - or inspect.isfunction(attr) + or inspect.isfunction(value) # Ignore Django Model Attributes. or attr in ("objects", "id", "_meta") # Ignore Fields.
pytest_django_model/objects.py
ReplaceText(target='value' @(240,34)->(240,38))
class ModelGenerator: # Ignore Special Methods. or is_dunder(attr) # Ignore Functions. or inspect.isfunction(attr) # Ignore Django Model Attributes. or attr in ("objects", "id", "_meta") # Ignore Fields.
class ModelGenerator: # Ignore Special Methods. or is_dunder(attr) # Ignore Functions. or inspect.isfunction(value) # Ignore Django Model Attributes. or attr in ("objects", "id", "_meta") # Ignore Fields.
2,011
https://:@github.com/fixstars/clpy.git
d751a0614598bf05b9bbfd15eb0d05e26e562649
@@ -151,7 +151,7 @@ class BatchNormalization(function.Function): def check_type_backward(self, in_types, out_types): type_check.expect(out_types.size() == 1) - x_type, = out_types + x_type, = in_types y_type, = out_types type_check.expect(
chainer/functions/batch_normalization.py
ReplaceText(target='in_types' @(154,18)->(154,27))
class BatchNormalization(function.Function): def check_type_backward(self, in_types, out_types): type_check.expect(out_types.size() == 1) x_type, = out_types y_type, = out_types type_check.expect(
class BatchNormalization(function.Function): def check_type_backward(self, in_types, out_types): type_check.expect(out_types.size() == 1) x_type, = in_types y_type, = out_types type_check.expect(
2,012
https://:@github.com/fixstars/clpy.git
447e1e6aaf5590b7bdce63afb826a754d03274d8
@@ -208,7 +208,7 @@ class Optimizer(object): with cuda.get_device(g_dst): if (isinstance(g_src, cuda.ndarray) and g_dst.gpudata.device != g_src.gpudata.device): - g_dst += cuda.copy(g_src, out_device=g_src.gpudata.device) + g_dst += cuda.copy(g_src, out_device=g_dst.gpudata.device) else: g_dst += cuda.to_gpu(g_src)
chainer/optimizer.py
ReplaceText(target='g_dst' @(211,57)->(211,62))
class Optimizer(object): with cuda.get_device(g_dst): if (isinstance(g_src, cuda.ndarray) and g_dst.gpudata.device != g_src.gpudata.device): g_dst += cuda.copy(g_src, out_device=g_src.gpudata.device) else: g_dst += cuda.to_gpu(g_src)
class Optimizer(object): with cuda.get_device(g_dst): if (isinstance(g_src, cuda.ndarray) and g_dst.gpudata.device != g_src.gpudata.device): g_dst += cuda.copy(g_src, out_device=g_dst.gpudata.device) else: g_dst += cuda.to_gpu(g_src)
2,013
https://:@github.com/fixstars/clpy.git
2cd7ddd29647184e00e79a551c37d6e975141e83
@@ -10,7 +10,7 @@ class Contrastive(function.Function): """Contrastive loss function.""" def __init__(self, margin): - if margin < 0: + if margin <= 0: raise Exception("margin should be positive value.") self.margin = margin
chainer/functions/loss/contrastive.py
ReplaceText(target='<=' @(13,18)->(13,19))
class Contrastive(function.Function): """Contrastive loss function.""" def __init__(self, margin): if margin < 0: raise Exception("margin should be positive value.") self.margin = margin
class Contrastive(function.Function): """Contrastive loss function.""" def __init__(self, margin): if margin <= 0: raise Exception("margin should be positive value.") self.margin = margin
2,014
https://:@github.com/fixstars/clpy.git
1f29bf157ef8289a6a16cfc19ea8737473623a7e
@@ -32,7 +32,7 @@ def array_split(ary, indices_or_sections, axis=0): for index in indices: ret.append(ary[skip + (slice(i, index),)]) i = index - ret.append(ary[skip + (slice(index, size),)]) + ret.append(ary[skip + (slice(i, size),)]) return ret
cupy/manipulation/split.py
ReplaceText(target='i' @(35,33)->(35,38))
def array_split(ary, indices_or_sections, axis=0): for index in indices: ret.append(ary[skip + (slice(i, index),)]) i = index ret.append(ary[skip + (slice(index, size),)]) return ret
def array_split(ary, indices_or_sections, axis=0): for index in indices: ret.append(ary[skip + (slice(i, index),)]) i = index ret.append(ary[skip + (slice(i, size),)]) return ret
2,015
https://:@github.com/fixstars/clpy.git
74b521b9ec3a8b0ceda86041d9e9f78ffdd8d5a1
@@ -123,7 +123,7 @@ def hstack(tup): axis = 1 if arrs[0].ndim == 1: axis = 0 - return concatenate(tup, axis) + return concatenate(arrs, axis) def vstack(tup):
cupy/manipulation/join.py
ReplaceText(target='arrs' @(126,23)->(126,26))
def hstack(tup): axis = 1 if arrs[0].ndim == 1: axis = 0 return concatenate(tup, axis) def vstack(tup):
def hstack(tup): axis = 1 if arrs[0].ndim == 1: axis = 0 return concatenate(arrs, axis) def vstack(tup):
2,016
https://:@github.com/fixstars/clpy.git
4faf0402e83dcb3e2486fff862142f92f61cf75f
@@ -54,7 +54,7 @@ def exec_ultima(source, _clpy_header=''): proc.kill() source, errstream = proc.communicate() - if proc.returncode != 0 and len(errstream) > 0: + if proc.returncode != 0 or len(errstream) > 0: raise clpy.backend.ultima.exceptions.UltimaRuntimeError( proc.returncode, errstream)
tests/clpy_tests/opencl_tests/ultima_tests/utility.py
ReplaceText(target='or' @(57,32)->(57,35))
def exec_ultima(source, _clpy_header=''): proc.kill() source, errstream = proc.communicate() if proc.returncode != 0 and len(errstream) > 0: raise clpy.backend.ultima.exceptions.UltimaRuntimeError( proc.returncode, errstream)
def exec_ultima(source, _clpy_header=''): proc.kill() source, errstream = proc.communicate() if proc.returncode != 0 or len(errstream) > 0: raise clpy.backend.ultima.exceptions.UltimaRuntimeError( proc.returncode, errstream)
2,017
https://:@github.com/ashleysommer/sanic-oauthlib.git
ae9f946b2b2739bb352961c200b2e4eeaba67044
@@ -323,7 +323,7 @@ class OAuthRemoteApp(object): if attr: return attr if default is not False and not self.app_key: - return attr + return default app = self.oauth.app or current_app config = app.config[self.app_key] if default is not False:
flask_oauthlib/client.py
ReplaceText(target='default' @(326,19)->(326,23))
class OAuthRemoteApp(object): if attr: return attr if default is not False and not self.app_key: return attr app = self.oauth.app or current_app config = app.config[self.app_key] if default is not False:
class OAuthRemoteApp(object): if attr: return attr if default is not False and not self.app_key: return default app = self.oauth.app or current_app config = app.config[self.app_key] if default is not False:
2,018
https://:@github.com/arangb/isbnlib-dnb.git
929ff7c071c452c316769df0d236af29f92d5cbf
@@ -51,7 +51,7 @@ def parser_dnb(data): #</td><td class='yellow'>Kindergartenblock - Verbinden, vergleichen, Fehler finden ab 4 Jahre / Linda Bayerl</td></tr> elif re.search(r"<strong>Titel</strong", line): title = re.findall('td .*>(.*)/.*</td', line)[0] - publisher = u(title.replace('td >', '').replace('</td', '')) + title = u(title.replace('td >', '').replace('</td', '')) recs['Title'] = u(title) # Publication year: #<td width="25%" class='yellow'><strong>Zeitliche Einordnung</strong>
isbnlib_dnb/_dnb.py
ReplaceText(target='title' @(54,16)->(54,25))
def parser_dnb(data): #</td><td class='yellow'>Kindergartenblock - Verbinden, vergleichen, Fehler finden ab 4 Jahre / Linda Bayerl</td></tr> elif re.search(r"<strong>Titel</strong", line): title = re.findall('td .*>(.*)/.*</td', line)[0] publisher = u(title.replace('td >', '').replace('</td', '')) recs['Title'] = u(title) # Publication year: #<td width="25%" class='yellow'><strong>Zeitliche Einordnung</strong>
def parser_dnb(data): #</td><td class='yellow'>Kindergartenblock - Verbinden, vergleichen, Fehler finden ab 4 Jahre / Linda Bayerl</td></tr> elif re.search(r"<strong>Titel</strong", line): title = re.findall('td .*>(.*)/.*</td', line)[0] title = u(title.replace('td >', '').replace('</td', '')) recs['Title'] = u(title) # Publication year: #<td width="25%" class='yellow'><strong>Zeitliche Einordnung</strong>
2,019
https://:@github.com/parantapa/xactor.git
c4e136ac827da189b19fe8f9be1e54f426dba777
@@ -133,7 +133,7 @@ class MPIRankActor: while not self.stopping: actor_id, message = self.acomm.recv() if actor_id not in self.local_actors: - raise RuntimeError("Message received for non-local actor: %r" % message) + raise RuntimeError("Message received for non-local actor: %r" % actor_id) actor = self.local_actors[actor_id] try:
xactor/mpi_actor.py
ReplaceText(target='actor_id' @(136,80)->(136,87))
class MPIRankActor: while not self.stopping: actor_id, message = self.acomm.recv() if actor_id not in self.local_actors: raise RuntimeError("Message received for non-local actor: %r" % message) actor = self.local_actors[actor_id] try:
class MPIRankActor: while not self.stopping: actor_id, message = self.acomm.recv() if actor_id not in self.local_actors: raise RuntimeError("Message received for non-local actor: %r" % actor_id) actor = self.local_actors[actor_id] try:
2,020
https://:@github.com/openlmi/openlmi-scripts.git
da316f00c9b29f4bc58fc5bc38274551e0f3aea3
@@ -510,7 +510,7 @@ def set_autoconnect(ns, setting, device=None, state=True): # Set IsNext = 2 (Is Not Next), don't change IsCurrent mode = service.ApplySettingToIPNetworkConnection.ModeValues.Mode5 - if device is not None: + if device is None: result = service.SyncApplySettingToIPNetworkConnection(SettingData=setting, Mode=mode) else: result = service.SyncApplySettingToIPNetworkConnection(SettingData=setting, IPNetworkConnection=device, Mode=mode)
commands/networking/lmi/scripts/networking/__init__.py
ReplaceText(target=' is ' @(513,13)->(513,21))
def set_autoconnect(ns, setting, device=None, state=True): # Set IsNext = 2 (Is Not Next), don't change IsCurrent mode = service.ApplySettingToIPNetworkConnection.ModeValues.Mode5 if device is not None: result = service.SyncApplySettingToIPNetworkConnection(SettingData=setting, Mode=mode) else: result = service.SyncApplySettingToIPNetworkConnection(SettingData=setting, IPNetworkConnection=device, Mode=mode)
def set_autoconnect(ns, setting, device=None, state=True): # Set IsNext = 2 (Is Not Next), don't change IsCurrent mode = service.ApplySettingToIPNetworkConnection.ModeValues.Mode5 if device is None: result = service.SyncApplySettingToIPNetworkConnection(SettingData=setting, Mode=mode) else: result = service.SyncApplySettingToIPNetworkConnection(SettingData=setting, IPNetworkConnection=device, Mode=mode)
2,021
https://:@github.com/socek/hatak.git
68f03e13ae99fd2769e5bfeb5cd49bcea218d9f4
@@ -62,7 +62,7 @@ class ControllerFixture(RequestFixture): request.registry['controller_plugins'] = app.controller_plugins controller = self._get_controller_class()(root_tree, request) controller.data = data - controller.matchdict = matchdict + request.matchdict = matchdict return controller
src/hatak/testing.py
ReplaceText(target='request' @(65,8)->(65,18))
class ControllerFixture(RequestFixture): request.registry['controller_plugins'] = app.controller_plugins controller = self._get_controller_class()(root_tree, request) controller.data = data controller.matchdict = matchdict return controller
class ControllerFixture(RequestFixture): request.registry['controller_plugins'] = app.controller_plugins controller = self._get_controller_class()(root_tree, request) controller.data = data request.matchdict = matchdict return controller
2,022
https://:@github.com/DaniFdezAlvarez/shexerp3.git
913462770932107c1230a56677029e7cccd5957a
@@ -125,7 +125,7 @@ def get_instance_tracker(instances_file_input=None, graph_file_input=None, selectors_tracker = ShapeMapInstanceTracker(shape_map=valid_shape_map) if _are_there_some_target_classes(target_classes, file_target_classes, all_classes_mode, shape_qualifiers_mode): model_classes = None - if all_classes_mode or target_classes is not None: + if file_target_classes or target_classes is not None: list_of_str_target_classes = tune_target_classes_if_needed( target_classes) if target_classes is not None else read_target_classes_from_file(file_target_classes) model_classes = get_list_of_model_classes(list_of_str_target_classes)
shexer/utils/factories/instance_tracker_factory.py
ReplaceText(target='file_target_classes' @(128,11)->(128,27))
def get_instance_tracker(instances_file_input=None, graph_file_input=None, selectors_tracker = ShapeMapInstanceTracker(shape_map=valid_shape_map) if _are_there_some_target_classes(target_classes, file_target_classes, all_classes_mode, shape_qualifiers_mode): model_classes = None if all_classes_mode or target_classes is not None: list_of_str_target_classes = tune_target_classes_if_needed( target_classes) if target_classes is not None else read_target_classes_from_file(file_target_classes) model_classes = get_list_of_model_classes(list_of_str_target_classes)
def get_instance_tracker(instances_file_input=None, graph_file_input=None, selectors_tracker = ShapeMapInstanceTracker(shape_map=valid_shape_map) if _are_there_some_target_classes(target_classes, file_target_classes, all_classes_mode, shape_qualifiers_mode): model_classes = None if file_target_classes or target_classes is not None: list_of_str_target_classes = tune_target_classes_if_needed( target_classes) if target_classes is not None else read_target_classes_from_file(file_target_classes) model_classes = get_list_of_model_classes(list_of_str_target_classes)
2,023
https://:@github.com/remykarem/mixed-naive-bayes.git
669a9c2aacc6c3a75e0dc54ce06db57e1db7b672
@@ -429,7 +429,7 @@ def _validate_training_data(X_raw, y_raw, categorical_features, max_categories): if not np.array_equal(X[:, feature_no], X[:, feature_no].astype(int)): warnings.warn(f"Feature no. {feature_no} is continuous data. " + "Casting data to integer.") - if max_categories is not None: + if max_categories is None: uniques = np.unique(X[:, feature_no]).astype(int) if not np.array_equal(uniques, np.arange(0, np.max(uniques)+1)): raise ValueError(f"Expected feature no. {feature_no} to have " +
mixed_naive_bayes/mixed_naive_bayes.py
ReplaceText(target=' is ' @(432,29)->(432,37))
def _validate_training_data(X_raw, y_raw, categorical_features, max_categories): if not np.array_equal(X[:, feature_no], X[:, feature_no].astype(int)): warnings.warn(f"Feature no. {feature_no} is continuous data. " + "Casting data to integer.") if max_categories is not None: uniques = np.unique(X[:, feature_no]).astype(int) if not np.array_equal(uniques, np.arange(0, np.max(uniques)+1)): raise ValueError(f"Expected feature no. {feature_no} to have " +
def _validate_training_data(X_raw, y_raw, categorical_features, max_categories): if not np.array_equal(X[:, feature_no], X[:, feature_no].astype(int)): warnings.warn(f"Feature no. {feature_no} is continuous data. " + "Casting data to integer.") if max_categories is None: uniques = np.unique(X[:, feature_no]).astype(int) if not np.array_equal(uniques, np.arange(0, np.max(uniques)+1)): raise ValueError(f"Expected feature no. {feature_no} to have " +
2,024
https://:@github.com/remykarem/mixed-naive-bayes.git
e5f53782e0edba232d4468a267274524c08ec180
@@ -153,7 +153,7 @@ class MixedNB(): if len(self.priors) != num_classes: raise ValueError( 'Number of priors must match number of classes.') - if np.isclose(self.priors.sum(), 1.0): + if not np.isclose(self.priors.sum(), 1.0): raise ValueError("The sum of priors should be 1.") if (self.priors < 0).any(): raise ValueError('Priors must be non-negative.')
mixed_naive_bayes/mixed_naive_bayes.py
ReplaceText(target='not ' @(156,15)->(156,15))
class MixedNB(): if len(self.priors) != num_classes: raise ValueError( 'Number of priors must match number of classes.') if np.isclose(self.priors.sum(), 1.0): raise ValueError("The sum of priors should be 1.") if (self.priors < 0).any(): raise ValueError('Priors must be non-negative.')
class MixedNB(): if len(self.priors) != num_classes: raise ValueError( 'Number of priors must match number of classes.') if not np.isclose(self.priors.sum(), 1.0): raise ValueError("The sum of priors should be 1.") if (self.priors < 0).any(): raise ValueError('Priors must be non-negative.')
2,025
https://:@github.com/hprid/adblockeval.git
89311b75f3460835e764947dd794f62e7a95d031
@@ -167,7 +167,7 @@ class Rule: self.is_exception = False def match(self, url, netloc, domain, origin=None): - if self.options and not self.options.can_apply_rule(netloc, origin): + if self.options and not self.options.can_apply_rule(domain, origin): return False return True
adblockeval/rules.py
ReplaceText(target='domain' @(170,60)->(170,66))
class Rule: self.is_exception = False def match(self, url, netloc, domain, origin=None): if self.options and not self.options.can_apply_rule(netloc, origin): return False return True
class Rule: self.is_exception = False def match(self, url, netloc, domain, origin=None): if self.options and not self.options.can_apply_rule(domain, origin): return False return True
2,026
https://:@github.com/mbkupfer/bls-datasets.git
3ffd9be073ca6ed9591d1633c1b4ef15b6e32c24
@@ -151,7 +151,7 @@ def get_data(year=CUR_YEAR, cut_by='national', area_focus=None, if filename == None: raise ValueError('"{}" is not a valid area focus\n' \ 'valid options include:\n{}' \ - .format(cut_by, ['metros', 'metros-divisions', 'non-metros'])) + .format(area_focus, ['metros', 'metros-divisions', 'non-metros'])) else: filename = OES_FILENAMES.get(cut_by)
bls_datasets/oes.py
ReplaceText(target='area_focus' @(154,24)->(154,30))
def get_data(year=CUR_YEAR, cut_by='national', area_focus=None, if filename == None: raise ValueError('"{}" is not a valid area focus\n' \ 'valid options include:\n{}' \ .format(cut_by, ['metros', 'metros-divisions', 'non-metros'])) else: filename = OES_FILENAMES.get(cut_by)
def get_data(year=CUR_YEAR, cut_by='national', area_focus=None, if filename == None: raise ValueError('"{}" is not a valid area focus\n' \ 'valid options include:\n{}' \ .format(area_focus, ['metros', 'metros-divisions', 'non-metros'])) else: filename = OES_FILENAMES.get(cut_by)
2,027
https://:@github.com/krrr/wstan.git
52b8e527af3bbaeadc25c796a960bd46b573f63e
@@ -24,7 +24,7 @@ def _get_digest(dat): def _on_pushToTunTaskDone(task): # suppress annoying "CancelledError exception not retrieved" error on Py3.5+ try: - if not isinstance(task.exception(), CancelledError): + if isinstance(task.exception(), CancelledError): logging.error("pushToTunTask exception: %s" % type(task.exception())) except CancelledError: # doc says it will raise this if canceled, but... pass
wstan/relay.py
ReplaceText(target='' @(27,11)->(27,15))
def _get_digest(dat): def _on_pushToTunTaskDone(task): # suppress annoying "CancelledError exception not retrieved" error on Py3.5+ try: if not isinstance(task.exception(), CancelledError): logging.error("pushToTunTask exception: %s" % type(task.exception())) except CancelledError: # doc says it will raise this if canceled, but... pass
def _get_digest(dat): def _on_pushToTunTaskDone(task): # suppress annoying "CancelledError exception not retrieved" error on Py3.5+ try: if isinstance(task.exception(), CancelledError): logging.error("pushToTunTask exception: %s" % type(task.exception())) except CancelledError: # doc says it will raise this if canceled, but... pass
2,028
https://:@github.com/rmarkello/snfpy.git
c777b0fb8b2ddb88f3a2ddd97b6d7bacc6e82055
@@ -408,7 +408,7 @@ def group_predict(train, test, labels, *, K=20, mu=0.4, t=20): # generate affinity matrices for stacked train/test data sets affinities = [] for (tr, te) in zip(train, test): - if len(tr.T) == len(te.T): + if len(tr.T) != len(te.T): raise ValueError('Train and test data must have same number of ' 'features for each data type. Make sure to ' 'supply data types in the same order.')
snf/compute.py
ReplaceText(target='!=' @(411,21)->(411,23))
def group_predict(train, test, labels, *, K=20, mu=0.4, t=20): # generate affinity matrices for stacked train/test data sets affinities = [] for (tr, te) in zip(train, test): if len(tr.T) == len(te.T): raise ValueError('Train and test data must have same number of ' 'features for each data type. Make sure to ' 'supply data types in the same order.')
def group_predict(train, test, labels, *, K=20, mu=0.4, t=20): # generate affinity matrices for stacked train/test data sets affinities = [] for (tr, te) in zip(train, test): if len(tr.T) != len(te.T): raise ValueError('Train and test data must have same number of ' 'features for each data type. Make sure to ' 'supply data types in the same order.')
2,029
https://:@github.com/PartnershipOnAI/safelife.git
be6c06c82569bde428c986399d8fa3fe159deb26
@@ -283,7 +283,7 @@ class SafeLifeLogger(BaseLogger): log_data['level_name'] = game.title log_data['length'] = length.tolist() log_data['reward'] = reward.tolist() - log_data['completed'] = reward.tolist() + log_data['completed'] = completed.tolist() log_data['reward_possible'] = reward_possible.tolist() log_data['reward_needed'] = required_points.tolist() log_data['time'] = datetime.utcnow().isoformat()
safelife/safelife_logger.py
ReplaceText(target='completed' @(286,32)->(286,38))
class SafeLifeLogger(BaseLogger): log_data['level_name'] = game.title log_data['length'] = length.tolist() log_data['reward'] = reward.tolist() log_data['completed'] = reward.tolist() log_data['reward_possible'] = reward_possible.tolist() log_data['reward_needed'] = required_points.tolist() log_data['time'] = datetime.utcnow().isoformat()
class SafeLifeLogger(BaseLogger): log_data['level_name'] = game.title log_data['length'] = length.tolist() log_data['reward'] = reward.tolist() log_data['completed'] = completed.tolist() log_data['reward_possible'] = reward_possible.tolist() log_data['reward_needed'] = required_points.tolist() log_data['time'] = datetime.utcnow().isoformat()
2,030
https://:@github.com/PartnershipOnAI/safelife.git
5c9ddd2bb0d304bb159c5b452ce028e515c7c4cc
@@ -745,7 +745,7 @@ def _summarize_run(logfile, wandb_run=None, artifact=None): """)) - if wandb_run is not None and bare_name == 'benchmark-data': + if wandb_run is not None and file_name == 'benchmark-data': wandb_run.summary['success'] = np.average(success) wandb_run.summary['avg_length'] = np.average(length) wandb_run.summary['side_effects'] = np.average(side_effects)
safelife/safelife_logger.py
ReplaceText(target='file_name' @(748,33)->(748,42))
def _summarize_run(logfile, wandb_run=None, artifact=None): """)) if wandb_run is not None and bare_name == 'benchmark-data': wandb_run.summary['success'] = np.average(success) wandb_run.summary['avg_length'] = np.average(length) wandb_run.summary['side_effects'] = np.average(side_effects)
def _summarize_run(logfile, wandb_run=None, artifact=None): """)) if wandb_run is not None and file_name == 'benchmark-data': wandb_run.summary['success'] = np.average(success) wandb_run.summary['avg_length'] = np.average(length) wandb_run.summary['side_effects'] = np.average(side_effects)
2,031
https://:@github.com/rymurr/dremio_client.git
3119593dbb6db992a34b7008130a16ff4717aa0d
@@ -410,7 +410,7 @@ def delete_catalog(args, cid, path): warning, this process is destructive and permanent """ base_url, token, verify = get_base_url_token(args) - x = _delete_catalog(token, base_url, verify, cid, path) + x = _delete_catalog(base_url, token, verify, cid, path) click.echo(json.dumps(x))
dremio_client/cli.py
ArgSwap(idxs=0<->1 @(413,8)->(413,23))
def delete_catalog(args, cid, path): warning, this process is destructive and permanent """ base_url, token, verify = get_base_url_token(args) x = _delete_catalog(token, base_url, verify, cid, path) click.echo(json.dumps(x))
def delete_catalog(args, cid, path): warning, this process is destructive and permanent """ base_url, token, verify = get_base_url_token(args) x = _delete_catalog(base_url, token, verify, cid, path) click.echo(json.dumps(x))
2,032
https://:@github.com/kubostech/kubos-cli.git
128e898be2aa04eb1e7b60203dd348122dc2c9b9
@@ -83,7 +83,7 @@ def main(): 'all dependencies, run:\n yotta build all_tests\n\n', 'Build the current module.' ) - add_yotta_command('link', 'link', + add_kubos_command('link', 'link', 'Symlink a module to be used into another module.\n\n'+ 'Use: "yotta link" in a module to link it globally, then use "yotta '+ 'link <modulename>" to link it into the module where you want to use '+
kubos/main.py
ReplaceText(target='add_kubos_command' @(86,4)->(86,21))
def main(): 'all dependencies, run:\n yotta build all_tests\n\n', 'Build the current module.' ) add_yotta_command('link', 'link', 'Symlink a module to be used into another module.\n\n'+ 'Use: "yotta link" in a module to link it globally, then use "yotta '+ 'link <modulename>" to link it into the module where you want to use '+
def main(): 'all dependencies, run:\n yotta build all_tests\n\n', 'Build the current module.' ) add_kubos_command('link', 'link', 'Symlink a module to be used into another module.\n\n'+ 'Use: "yotta link" in a module to link it globally, then use "yotta '+ 'link <modulename>" to link it into the module where you want to use '+
2,033
https://:@github.com/bsdphk/PyReveng3.git
cdd2a273a00c008999a6b38f75f6c33e029c3c7e
@@ -489,7 +489,7 @@ class vector(data.Data): super().__init__(asp, adr, adr + 4) self.ws = asp.bu16(adr) self.dstadr = asp.bu16(adr + 2) - cx.disass(asp, self.dstadr) + cx.disass(self.dstadr, asp) def render(self): return "WP=0x%04x,IP=%s" % (self.ws, self.aspace.adr(self.dstadr))
pyreveng/cpu/tms9900.py
ArgSwap(idxs=0<->1 @(492,8)->(492,17))
class vector(data.Data): super().__init__(asp, adr, adr + 4) self.ws = asp.bu16(adr) self.dstadr = asp.bu16(adr + 2) cx.disass(asp, self.dstadr) def render(self): return "WP=0x%04x,IP=%s" % (self.ws, self.aspace.adr(self.dstadr))
class vector(data.Data): super().__init__(asp, adr, adr + 4) self.ws = asp.bu16(adr) self.dstadr = asp.bu16(adr + 2) cx.disass(self.dstadr, asp) def render(self): return "WP=0x%04x,IP=%s" % (self.ws, self.aspace.adr(self.dstadr))
2,034
https://:@github.com/ehickox2012/bitraider.git
0499f84ed9c06dfc72ef604cffe0dd5105c34a13
@@ -67,7 +67,7 @@ class strategy(object): print("Times sold: "+str(self.exchange.times_sold)) print("The Market's performance: "+str(market_performance)+" %") print("Strategy's performance: "+str(strategy_performance)+" %") - print("Account's ending value if no trades were made: "+str(start_amt)+" BTC") + print("Account's ending value if no trades were made: "+str(end_amt_no_trades)+" BTC") print("Account's ending value with this strategy: "+str(end_amt)+" BTC") strategy_performance_vs_market = strategy_performance - market_performance if strategy_performance > market_performance:
bitraider/strategy.py
ReplaceText(target='end_amt_no_trades' @(70,68)->(70,77))
class strategy(object): print("Times sold: "+str(self.exchange.times_sold)) print("The Market's performance: "+str(market_performance)+" %") print("Strategy's performance: "+str(strategy_performance)+" %") print("Account's ending value if no trades were made: "+str(start_amt)+" BTC") print("Account's ending value with this strategy: "+str(end_amt)+" BTC") strategy_performance_vs_market = strategy_performance - market_performance if strategy_performance > market_performance:
class strategy(object): print("Times sold: "+str(self.exchange.times_sold)) print("The Market's performance: "+str(market_performance)+" %") print("Strategy's performance: "+str(strategy_performance)+" %") print("Account's ending value if no trades were made: "+str(end_amt_no_trades)+" BTC") print("Account's ending value with this strategy: "+str(end_amt)+" BTC") strategy_performance_vs_market = strategy_performance - market_performance if strategy_performance > market_performance:
2,035
https://:@github.com/Yatoom/Optimus.git
f33e600b5e873e08cedb864d1c4fb3ecf07e9d93
@@ -98,7 +98,7 @@ def decode_params(params, prefix="!", remove_prefixes=True): # Make a copy params_copy = copy(params) - for key in params_copy: + for key in params: # Check if key starts with prefix if key[0:len(prefix)] == prefix:
vault/decoder.py
ReplaceText(target='params' @(101,15)->(101,26))
def decode_params(params, prefix="!", remove_prefixes=True): # Make a copy params_copy = copy(params) for key in params_copy: # Check if key starts with prefix if key[0:len(prefix)] == prefix:
def decode_params(params, prefix="!", remove_prefixes=True): # Make a copy params_copy = copy(params) for key in params: # Check if key starts with prefix if key[0:len(prefix)] == prefix:
2,036
https://:@github.com/Yatoom/Optimus.git
6c1faf98462d128189e4e52dfe50d585311dec12
@@ -113,7 +113,7 @@ class Benchmark: for i in range(0, len(results["best_score"])): iteration = { "task": self.task_id, - "method": "{} (EI: {}, RT: {})".format(method.name, time_regressor, score_regressor), + "method": "{} (EI: {}, RT: {})".format(method.name, score_regressor, time_regressor), "iteration": i, "score": results["mean_test_score"][i], "best_score": results["best_score"][i],
benchmarks/benchmark.py
ArgSwap(idxs=1<->2 @(116,26)->(116,54))
class Benchmark: for i in range(0, len(results["best_score"])): iteration = { "task": self.task_id, "method": "{} (EI: {}, RT: {})".format(method.name, time_regressor, score_regressor), "iteration": i, "score": results["mean_test_score"][i], "best_score": results["best_score"][i],
class Benchmark: for i in range(0, len(results["best_score"])): iteration = { "task": self.task_id, "method": "{} (EI: {}, RT: {})".format(method.name, score_regressor, time_regressor), "iteration": i, "score": results["mean_test_score"][i], "best_score": results["best_score"][i],
2,037
https://:@github.com/nicholasturner1/Synaptor.git
661e148c09be514e874af3c5a2f2c2a6a401b1ba
@@ -23,7 +23,7 @@ def main(psd_cvname, cc_cvname, proc_dir_path, #Processing - dil_ccs = s.dilated_components(psd_output, cc_thresh, dil_param) + dil_ccs = s.dilated_components(psd_output, dil_param, cc_thresh) continuations = s.extract_continuations(dil_ccs) cont_ids = set(cont.segid for cont in continuations)
tasks/chunk_ccs.py
ArgSwap(idxs=1<->2 @(26,14)->(26,34))
def main(psd_cvname, cc_cvname, proc_dir_path, #Processing dil_ccs = s.dilated_components(psd_output, cc_thresh, dil_param) continuations = s.extract_continuations(dil_ccs) cont_ids = set(cont.segid for cont in continuations)
def main(psd_cvname, cc_cvname, proc_dir_path, #Processing dil_ccs = s.dilated_components(psd_output, dil_param, cc_thresh) continuations = s.extract_continuations(dil_ccs) cont_ids = set(cont.segid for cont in continuations)
2,038
https://:@github.com/nicholasturner1/Synaptor.git
8f529876b19ee670791f5860b9808f7ce12f2528
@@ -31,7 +31,7 @@ def read_network(proc_dir_path): local_model = io.pull_file(model_fname) local_chkpt = io.pull_file(chkpt_fname) - model = imp.load_source("Model",model_fname).InstantiatedModel + model = imp.load_source("Model",local_model).InstantiatedModel model.load_state_dict(torch.load(local_chkpt)) return model
synaptor/edges/io.py
ReplaceText(target='local_model' @(34,36)->(34,47))
def read_network(proc_dir_path): local_model = io.pull_file(model_fname) local_chkpt = io.pull_file(chkpt_fname) model = imp.load_source("Model",model_fname).InstantiatedModel model.load_state_dict(torch.load(local_chkpt)) return model
def read_network(proc_dir_path): local_model = io.pull_file(model_fname) local_chkpt = io.pull_file(chkpt_fname) model = imp.load_source("Model",local_model).InstantiatedModel model.load_state_dict(torch.load(local_chkpt)) return model
2,039
https://:@github.com/metrasynth/radiant-voices.git
b472a30772349e0f274629a4a516ded96cd80567
@@ -121,4 +121,4 @@ class Module(object, metaclass=ModuleMeta): def load_options(self, chunk): for i, name in enumerate(self.options.keys()): value = chunk.chdt[i] - setattr(self, name, i) + setattr(self, name, value)
rv/modules/module.py
ReplaceText(target='value' @(124,32)->(124,33))
class Module(object, metaclass=ModuleMeta): def load_options(self, chunk): for i, name in enumerate(self.options.keys()): value = chunk.chdt[i] setattr(self, name, i)
class Module(object, metaclass=ModuleMeta): def load_options(self, chunk): for i, name in enumerate(self.options.keys()): value = chunk.chdt[i] setattr(self, name, value)
2,040
https://:@github.com/Lursun/p2p_grpc_blockchain_package.git
964fada4f1d9b0c065c327d9f190c5310f9d8f37
@@ -82,6 +82,6 @@ class Transaction(): print ("=> unixtime:%s\tbody:%s" % (pb2tx.unixtime,pb2tx.body)) tx=Transaction() tx.pb2=pb2tx - Transaction.Transactions[tx.pb2.txhash]=pb2tx + Transaction.Transactions[tx.pb2.txhash]=tx threading.Thread(target=Transaction.sync).start() \ No newline at end of file
p2p_grpc_blockchain/transaction/transaction.py
ReplaceText(target='tx' @(85,48)->(85,53))
class Transaction(): print ("=> unixtime:%s\tbody:%s" % (pb2tx.unixtime,pb2tx.body)) tx=Transaction() tx.pb2=pb2tx Transaction.Transactions[tx.pb2.txhash]=pb2tx threading.Thread(target=Transaction.sync).start() \ No newline at end of file
class Transaction(): print ("=> unixtime:%s\tbody:%s" % (pb2tx.unixtime,pb2tx.body)) tx=Transaction() tx.pb2=pb2tx Transaction.Transactions[tx.pb2.txhash]=tx threading.Thread(target=Transaction.sync).start() \ No newline at end of file
2,041
https://:@github.com/wbsoft/livelex.git
928383697438c2c14ec530eeec25eb00422ff70f
@@ -899,7 +899,7 @@ class TreeDocumentMixin: def contents_changed(self, start, removed, added): """Called after modification of the text, retokenizes the modified part.""" if self._tree.lexicon: - start, end = self._builder().rebuild(self._tree, self.text(), start, added, removed) + start, end = self._builder().rebuild(self._tree, self.text(), start, removed, added) else: end = start + added self.set_modified_range(start, end)
livelex/tree.py
ArgSwap(idxs=3<->4 @(902,25)->(902,48))
class TreeDocumentMixin: def contents_changed(self, start, removed, added): """Called after modification of the text, retokenizes the modified part.""" if self._tree.lexicon: start, end = self._builder().rebuild(self._tree, self.text(), start, added, removed) else: end = start + added self.set_modified_range(start, end)
class TreeDocumentMixin: def contents_changed(self, start, removed, added): """Called after modification of the text, retokenizes the modified part.""" if self._tree.lexicon: start, end = self._builder().rebuild(self._tree, self.text(), start, removed, added) else: end = start + added self.set_modified_range(start, end)
2,042
https://:@github.com/griffithlab/civicpy.git
13f1945d0fcbb4e1bbf9fcc555710edab4ff4f71
@@ -99,7 +99,7 @@ class CivicRecord: try: data['type'] = data.get('type', singularize(field)) except AttributeError: # if data has no 'get' method, i.e. not a Dict - result.append(v) + result.append(data) else: result.append(cls(partial=True, **data)) self.__setattr__(field, result)
pycivic/civic.py
ReplaceText(target='data' @(102,38)->(102,39))
class CivicRecord: try: data['type'] = data.get('type', singularize(field)) except AttributeError: # if data has no 'get' method, i.e. not a Dict result.append(v) else: result.append(cls(partial=True, **data)) self.__setattr__(field, result)
class CivicRecord: try: data['type'] = data.get('type', singularize(field)) except AttributeError: # if data has no 'get' method, i.e. not a Dict result.append(data) else: result.append(cls(partial=True, **data)) self.__setattr__(field, result)
2,043
https://:@github.com/JoeriHermans/dist-keras.git
7b9f4110efd7470daa8e1ad9e93ab92fc3c135ea
@@ -63,7 +63,7 @@ class LabelVectorTransformerUDF(Transformer): v = to_dense_vector(label, self.output_dim) new_row = new_dataframe_row_fast(row, self.output_column, v) - return row + return new_row def transform(self, data): return data.map(self._transform).toDF()
distkeras/distributed.py
ReplaceText(target='new_row' @(66,15)->(66,18))
class LabelVectorTransformerUDF(Transformer): v = to_dense_vector(label, self.output_dim) new_row = new_dataframe_row_fast(row, self.output_column, v) return row def transform(self, data): return data.map(self._transform).toDF()
class LabelVectorTransformerUDF(Transformer): v = to_dense_vector(label, self.output_dim) new_row = new_dataframe_row_fast(row, self.output_column, v) return new_row def transform(self, data): return data.map(self._transform).toDF()
2,044
https://:@github.com/JoeriHermans/dist-keras.git
b2c8dee25e9687403108fbeb31efc2b309a51675
@@ -158,7 +158,7 @@ class AsynchronousDistributedTrainer(DistributedTrainer): def __init__(self, keras_model, worker_optimizer, loss, num_workers=2, batch_size=32, features_col="features", label_col="label", num_epoch=1): - super(AsynchronousDistributedTrainer, self).__init__(keras_model, loss, worker_optimizer, + super(AsynchronousDistributedTrainer, self).__init__(keras_model, worker_optimizer, loss, num_workers, batch_size, features_col, label_col, num_epoch) # Initialize asynchronous methods variables.
distkeras/trainers.py
ArgSwap(idxs=1<->2 @(161,8)->(161,60))
class AsynchronousDistributedTrainer(DistributedTrainer): def __init__(self, keras_model, worker_optimizer, loss, num_workers=2, batch_size=32, features_col="features", label_col="label", num_epoch=1): super(AsynchronousDistributedTrainer, self).__init__(keras_model, loss, worker_optimizer, num_workers, batch_size, features_col, label_col, num_epoch) # Initialize asynchronous methods variables.
class AsynchronousDistributedTrainer(DistributedTrainer): def __init__(self, keras_model, worker_optimizer, loss, num_workers=2, batch_size=32, features_col="features", label_col="label", num_epoch=1): super(AsynchronousDistributedTrainer, self).__init__(keras_model, worker_optimizer, loss, num_workers, batch_size, features_col, label_col, num_epoch) # Initialize asynchronous methods variables.
2,045
https://:@github.com/JoeriHermans/dist-keras.git
325770acaebbd0a6f05603d1d277d1a7c6d7b0c4
@@ -385,7 +385,7 @@ class ExperimentalParameterServer(SocketParameterServer): data = recv_data(conn) # Extract the data from the dictionary. r = data['residual'] - worker_id = r['worker_id'] + worker_id = data['worker_id'] with self.mutex: self.add_staleness(worker_id) # Update the center variable.
distkeras/parameter_servers.py
ReplaceText(target='data' @(388,20)->(388,21))
class ExperimentalParameterServer(SocketParameterServer): data = recv_data(conn) # Extract the data from the dictionary. r = data['residual'] worker_id = r['worker_id'] with self.mutex: self.add_staleness(worker_id) # Update the center variable.
class ExperimentalParameterServer(SocketParameterServer): data = recv_data(conn) # Extract the data from the dictionary. r = data['residual'] worker_id = data['worker_id'] with self.mutex: self.add_staleness(worker_id) # Update the center variable.
2,046
https://:@github.com/K1DV5/ScpyCalc.git
a4f2b442c7db6ed936811d4fea74854dc03ceb4a
@@ -306,7 +306,7 @@ class MathVisitor(ast.NodeVisitor): return to_math(tree, mul=self.mul, div=self.div, mat_size=self.mat_size, decimal=self.decimal, syntax=self.s, ital=self.ital) - if not self.subs and not shallow: + if not self.subs or not shallow: return self.format_name(n.id) # substitute the value of the variable by formatted value try:
docal/parsing.py
ReplaceText(target='or' @(309,25)->(309,28))
class MathVisitor(ast.NodeVisitor): return to_math(tree, mul=self.mul, div=self.div, mat_size=self.mat_size, decimal=self.decimal, syntax=self.s, ital=self.ital) if not self.subs and not shallow: return self.format_name(n.id) # substitute the value of the variable by formatted value try:
class MathVisitor(ast.NodeVisitor): return to_math(tree, mul=self.mul, div=self.div, mat_size=self.mat_size, decimal=self.decimal, syntax=self.s, ital=self.ital) if not self.subs or not shallow: return self.format_name(n.id) # substitute the value of the variable by formatted value try:
2,047
https://:@github.com/kylebittinger/unassigner.git
20c134aeafa34ed6c626677064c0b67183f812a4
@@ -61,7 +61,7 @@ def blastdb_fps(fp): def get_url(url): fp = url_fp(url) - if not os.path.exists(fp): + if os.path.exists(fp): os.remove(fp) subprocess.check_call(["wget", url]) return fp
unassign/download.py
ReplaceText(target='' @(64,7)->(64,11))
def blastdb_fps(fp): def get_url(url): fp = url_fp(url) if not os.path.exists(fp): os.remove(fp) subprocess.check_call(["wget", url]) return fp
def blastdb_fps(fp): def get_url(url): fp = url_fp(url) if os.path.exists(fp): os.remove(fp) subprocess.check_call(["wget", url]) return fp
2,048
https://:@github.com/AFM-analysis/PyJibe.git
9946f6261a7ea6bbf7309e077788cafd3f6ba7e6
@@ -57,7 +57,7 @@ class PyJibe(QtWidgets.QMainWindow, MainBase): # Add export choices if hasattr(inst, "get_export_choices"): choices = inst.get_export_choices() - menobj = self.menuExport.addMenu(inst.windowTitle()) + menobj = self.menuExport.addMenu(sub.windowTitle()) for choice in choices: action = menobj.addAction(choice[0]) action.triggered.connect(getattr(inst, choice[1]))
pyjibe/head/main.py
ReplaceText(target='sub' @(60,45)->(60,49))
class PyJibe(QtWidgets.QMainWindow, MainBase): # Add export choices if hasattr(inst, "get_export_choices"): choices = inst.get_export_choices() menobj = self.menuExport.addMenu(inst.windowTitle()) for choice in choices: action = menobj.addAction(choice[0]) action.triggered.connect(getattr(inst, choice[1]))
class PyJibe(QtWidgets.QMainWindow, MainBase): # Add export choices if hasattr(inst, "get_export_choices"): choices = inst.get_export_choices() menobj = self.menuExport.addMenu(sub.windowTitle()) for choice in choices: action = menobj.addAction(choice[0]) action.triggered.connect(getattr(inst, choice[1]))
2,049
https://:@gitlab.com/petra-sim/petra.git
dda7d9769193fbcf1419f7f55353819a805193e6
@@ -107,7 +107,7 @@ def create_mesh(structure, path): for element in structure.elements]) min_size = 1 * c.nm - with geo.Geo(points_geo) as g: + with geo.Geo(mesh_geo) as g: g.include("structure.geo") g.include("points.geo") g.attractor(1, geo.range(idx0, idx))
transport/poisson/geometry.py
ReplaceText(target='mesh_geo' @(110,17)->(110,27))
def create_mesh(structure, path): for element in structure.elements]) min_size = 1 * c.nm with geo.Geo(points_geo) as g: g.include("structure.geo") g.include("points.geo") g.attractor(1, geo.range(idx0, idx))
def create_mesh(structure, path): for element in structure.elements]) min_size = 1 * c.nm with geo.Geo(mesh_geo) as g: g.include("structure.geo") g.include("points.geo") g.attractor(1, geo.range(idx0, idx))
2,050
https://:@github.com/robertbuecker/diffractem.git
9eb32dde6f3998ccdf27c5c5a64cd0a6b62f3415
@@ -588,7 +588,7 @@ def lorentz_fast(img, x_0: float = None, y_0: float = None, amp: float = None, """ if (x_0 is None) or (not np.isfinite(x_0)): x_0 = img.shape[1] / 2 - if (y_0 is None) or (not np.isfinite(x_0)): + if (y_0 is None) or (not np.isfinite(y_0)): y_0 = img.shape[0] / 2 if radius is not None: x1 = int(x_0 - radius)
diffractem/proc2d.py
ReplaceText(target='y_0' @(591,41)->(591,44))
def lorentz_fast(img, x_0: float = None, y_0: float = None, amp: float = None, """ if (x_0 is None) or (not np.isfinite(x_0)): x_0 = img.shape[1] / 2 if (y_0 is None) or (not np.isfinite(x_0)): y_0 = img.shape[0] / 2 if radius is not None: x1 = int(x_0 - radius)
def lorentz_fast(img, x_0: float = None, y_0: float = None, amp: float = None, """ if (x_0 is None) or (not np.isfinite(x_0)): x_0 = img.shape[1] / 2 if (y_0 is None) or (not np.isfinite(y_0)): y_0 = img.shape[0] / 2 if radius is not None: x1 = int(x_0 - radius)
2,051
https://:@github.com/kikuchi-m/ceryle.git
03f69077698b989fa1fb344d3fb1a29a7e2e10dc
@@ -165,7 +165,7 @@ def main(argv): }[args.pop('log_level')], console=args.pop('log_stream'), filename=args.pop('log_filename')) - logger.debug(f'arguments: {argv}') + logger.debug(f'arguments: {args}') try: if args.pop('list_tasks', False):
ceryle/main.py
ReplaceText(target='args' @(168,31)->(168,35))
def main(argv): }[args.pop('log_level')], console=args.pop('log_stream'), filename=args.pop('log_filename')) logger.debug(f'arguments: {argv}') try: if args.pop('list_tasks', False):
def main(argv): }[args.pop('log_level')], console=args.pop('log_stream'), filename=args.pop('log_filename')) logger.debug(f'arguments: {args}') try: if args.pop('list_tasks', False):
2,052
https://:@github.com/persepolisdm/persepolis.git
3765f351be8734881e5556f75e7f4fdba8991f82
@@ -38,7 +38,7 @@ class Tor: def check_tor(self): """ True If Tor Is Installed """ - return (self.tor is None) + return (self.tor is not None) def socks_tor(self): """ Checks If Socks Proxy Is Configured For Tor """
persepolis/scripts/check_proxy.py
ReplaceText(target=' is not ' @(41,24)->(41,28))
class Tor: def check_tor(self): """ True If Tor Is Installed """ return (self.tor is None) def socks_tor(self): """ Checks If Socks Proxy Is Configured For Tor """
class Tor: def check_tor(self): """ True If Tor Is Installed """ return (self.tor is not None) def socks_tor(self): """ Checks If Socks Proxy Is Configured For Tor """
2,053
https://:@github.com/persepolisdm/persepolis.git
11559164cbc09d141949b464e4d5ec7ea07ec016
@@ -103,7 +103,7 @@ def spider(add_link_dictionary): file_size = humanReadbleSize(file_size) # return results - return filename, filesize + return filename, file_size # this function finds and returns file name for links.
persepolis/scripts/spider.py
ReplaceText(target='file_size' @(106,21)->(106,29))
def spider(add_link_dictionary): file_size = humanReadbleSize(file_size) # return results return filename, filesize # this function finds and returns file name for links.
def spider(add_link_dictionary): file_size = humanReadbleSize(file_size) # return results return filename, file_size # this function finds and returns file name for links.
2,054
https://:@gitlab.com/sumner/sublime-music.git
fab385a778c4486043bc069ce46accac6f0cffa7
@@ -159,7 +159,7 @@ class PlayerControls(Gtk.ActionBar): self.album_art.set_loading(False) def update_scrubber(self, current, duration): - if current is None and duration is None: + if current is None or duration is None: self.song_duration_label.set_text('-:--') self.song_progress_label.set_text('-:--') self.song_scrubber.set_value(0)
libremsonic/ui/player_controls.py
ReplaceText(target='or' @(162,27)->(162,30))
class PlayerControls(Gtk.ActionBar): self.album_art.set_loading(False) def update_scrubber(self, current, duration): if current is None and duration is None: self.song_duration_label.set_text('-:--') self.song_progress_label.set_text('-:--') self.song_scrubber.set_value(0)
class PlayerControls(Gtk.ActionBar): self.album_art.set_loading(False) def update_scrubber(self, current, duration): if current is None or duration is None: self.song_duration_label.set_text('-:--') self.song_progress_label.set_text('-:--') self.song_scrubber.set_value(0)
2,055
https://:@github.com/INM-6/hybridLFPy.git
e165d8eb36122f4647297989e4da50c25e445c5f
@@ -48,7 +48,7 @@ if __name__ == '__main__':
 fname = os.path.join(jobscriptdir, job + '.job')
 f = open(fname, 'w')
- f.write(content.format(job, stime, oe, oe, ntasks, memPerCPU, mpiexec, sim))
+ f.write(content.format(job, stime, oe, oe, memPerCPU, ntasks, mpiexec, sim))
 f.close()
 jobscripts.append(fname)
examples/Hagen_et_al_2016_cercor/run_all_jobs.py
ArgSwap(idxs=4<->5 @(51,16)->(51,30))
if __name__ == '__main__': fname = os.path.join(jobscriptdir, job + '.job') f = open(fname, 'w') f.write(content.format(job, stime, oe, oe, ntasks, memPerCPU, mpiexec, sim)) f.close() jobscripts.append(fname)
if __name__ == '__main__': fname = os.path.join(jobscriptdir, job + '.job') f = open(fname, 'w') f.write(content.format(job, stime, oe, oe, memPerCPU, ntasks, mpiexec, sim)) f.close() jobscripts.append(fname)
2,056
https://:@github.com/tgalal/microbus.git
ad908781df4151ae453e0d832fe6324d470363d7
@@ -13,7 +13,7 @@ class BusSchedulerTest(unittest.TestCase):
 self.stop2 = microbus.BusStop("stop2")
 self.stop3 = microbus.BusStop("stop3")
 self.stops = [self.stop1, self.stop2, self.stop3]
- self.busRoute1 = microbus.BusRoute("test", self.stops)
+ self.busRoute1 = microbus.BusRoute(self.stops, "test")
 self.busRoute2 = self.busRoute1[::-1]
 self.bus = Bus(keep_prev=2)
 self.scheduler = BusScheduler(self.bus)
microbus/test_scheduler.py
ArgSwap(idxs=0<->1 @(16,25)->(16,42))
class BusSchedulerTest(unittest.TestCase): self.stop2 = microbus.BusStop("stop2") self.stop3 = microbus.BusStop("stop3") self.stops = [self.stop1, self.stop2, self.stop3] self.busRoute1 = microbus.BusRoute("test", self.stops) self.busRoute2 = self.busRoute1[::-1] self.bus = Bus(keep_prev=2) self.scheduler = BusScheduler(self.bus)
class BusSchedulerTest(unittest.TestCase): self.stop2 = microbus.BusStop("stop2") self.stop3 = microbus.BusStop("stop3") self.stops = [self.stop1, self.stop2, self.stop3] self.busRoute1 = microbus.BusRoute(self.stops, "test") self.busRoute2 = self.busRoute1[::-1] self.bus = Bus(keep_prev=2) self.scheduler = BusScheduler(self.bus)
2,057
https://:@github.com/nanvel/c2p2.git
554470ca3827bf276f15159434de4996a65bc130
@@ -64,6 +64,6 @@ class GitHubPullHandler(RequestHandler):
 if event == 'push':
 ref = json.loads(self.request.body.decode('utf8'))['ref']
- if ref != 'refs/heads/{branch}'.format(branch=options.GITHUB_BRANCH):
+ if ref == 'refs/heads/{branch}'.format(branch=options.GITHUB_BRANCH):
 result = yield github_pull()
 logger.warning(result)
mdpages/handlers/github.py
ReplaceText(target='==' @(67,19)->(67,21))
class GitHubPullHandler(RequestHandler): if event == 'push': ref = json.loads(self.request.body.decode('utf8'))['ref'] if ref != 'refs/heads/{branch}'.format(branch=options.GITHUB_BRANCH): result = yield github_pull() logger.warning(result)
class GitHubPullHandler(RequestHandler): if event == 'push': ref = json.loads(self.request.body.decode('utf8'))['ref'] if ref == 'refs/heads/{branch}'.format(branch=options.GITHUB_BRANCH): result = yield github_pull() logger.warning(result)
2,058
https://:@github.com/caleblareau/bap.git
063364d986c20b9eeca8072b763feead8573038e
@@ -68,7 +68,7 @@ class bapProject():
 self.minimum_cell_fragments = minimum_cell_fragments
 self.minimum_jaccard_fragments = minimum_jaccard_fragments
 self.extract_mito = extract_mito
- self.drop_tag = barcode_tag
+ self.drop_tag = drop_tag
 self.barcode_tag = barcode_tag
 # Figure out operating system just for funzies; not presently used
bap/bapProjectClass.py
ReplaceText(target='drop_tag' @(71,18)->(71,29))
class bapProject(): self.minimum_cell_fragments = minimum_cell_fragments self.minimum_jaccard_fragments = minimum_jaccard_fragments self.extract_mito = extract_mito self.drop_tag = barcode_tag self.barcode_tag = barcode_tag # Figure out operating system just for funzies; not presently used
class bapProject(): self.minimum_cell_fragments = minimum_cell_fragments self.minimum_jaccard_fragments = minimum_jaccard_fragments self.extract_mito = extract_mito self.drop_tag = drop_tag self.barcode_tag = barcode_tag # Figure out operating system just for funzies; not presently used
2,059
https://:@github.com/skblaz/tax2vec.git
722d18184211d2f1e4934cdac4464be2a37de869
@@ -370,7 +370,7 @@ class tax2vec:
 if out is not None:
 hypernyms.extend(out)
- for h in hypernyms:
+ for h in out:
 local_graph.append((str(token), h))
 return initial_terms, idx, hypernyms, local_graph
tax2vec/__init__.py
ReplaceText(target='out' @(373,29)->(373,38))
class tax2vec: if out is not None: hypernyms.extend(out) for h in hypernyms: local_graph.append((str(token), h)) return initial_terms, idx, hypernyms, local_graph
class tax2vec: if out is not None: hypernyms.extend(out) for h in out: local_graph.append((str(token), h)) return initial_terms, idx, hypernyms, local_graph
2,060
https://:@github.com/apertif/apercal.git
e0a3e59de9c906da7bd7b22b99e0eed7a75b4b05
@@ -144,7 +144,7 @@ def run_casa(cmd, raise_on_severe=False, timeout=1800):
 casa = drivecasa.Casapy()
 try:
 casa_output, casa_error = casa.run_script(cmd, raise_on_severe=True, timeout=timeout)
- logger.debug('\n'.join(casa_output))
+ logger.debug('\n'.join(casa_error))
 except RuntimeError:
 logger.error("Casa command failed")
 if raise_on_severe:
apercal/libs/lib.py
ReplaceText(target='casa_error' @(147,31)->(147,42))
def run_casa(cmd, raise_on_severe=False, timeout=1800): casa = drivecasa.Casapy() try: casa_output, casa_error = casa.run_script(cmd, raise_on_severe=True, timeout=timeout) logger.debug('\n'.join(casa_output)) except RuntimeError: logger.error("Casa command failed") if raise_on_severe:
def run_casa(cmd, raise_on_severe=False, timeout=1800): casa = drivecasa.Casapy() try: casa_output, casa_error = casa.run_script(cmd, raise_on_severe=True, timeout=timeout) logger.debug('\n'.join(casa_error)) except RuntimeError: logger.error("Casa command failed") if raise_on_severe:
2,061
https://:@github.com/apertif/apercal.git
be536be1cbe8f89017d77b6139d6742dbc059a42
@@ -68,7 +68,7 @@ class transfer(BaseModule):
 logger.debug(
 "Setting amplitude selfcal file name: {}".format(datasetname_amp))
 logger.debug(
- "Setting phase selfcal file name: {}".format(datasetname_amp))
+ "Setting phase selfcal file name: {}".format(datasetname_phase))
 # datasetname_amp = self.get_target_path().rstrip('.mir') + '_amp.mir'
 # datasetname_phase = self.get_target_path()
 if os.path.isdir(datasetname_amp) and selfcaltargetbeamsampstatus:
apercal/modules/transfer.py
ReplaceText(target='datasetname_phase' @(71,65)->(71,80))
class transfer(BaseModule): logger.debug( "Setting amplitude selfcal file name: {}".format(datasetname_amp)) logger.debug( "Setting phase selfcal file name: {}".format(datasetname_amp)) # datasetname_amp = self.get_target_path().rstrip('.mir') + '_amp.mir' # datasetname_phase = self.get_target_path() if os.path.isdir(datasetname_amp) and selfcaltargetbeamsampstatus:
class transfer(BaseModule): logger.debug( "Setting amplitude selfcal file name: {}".format(datasetname_amp)) logger.debug( "Setting phase selfcal file name: {}".format(datasetname_phase)) # datasetname_amp = self.get_target_path().rstrip('.mir') + '_amp.mir' # datasetname_phase = self.get_target_path() if os.path.isdir(datasetname_amp) and selfcaltargetbeamsampstatus:
2,062
https://:@github.com/apertif/apercal.git
2bd6291a92fe9fd1e9c175ab79fea9e03ef56cc4
@@ -41,7 +41,7 @@ def create_beam(beam, beam_map_dir, corrtype = 'Gaussian', primary_beam_path = N
 beamoutname = 'beam_{}.map'.format(beam.zfill(2))
 # check if file exists:
- if os.path.isdir(beamoutname):
+ if not os.path.isdir(beamoutname):
 #then test type and proceed for different types
 if corrtype == 'Gaussian':
 make_gaussian_beam(beam_map_dir,beamoutname,bm_size,cell,fwhm,cutoff)
apercal/subs/mosaic_utils.py
ReplaceText(target='not ' @(44,7)->(44,7))
def create_beam(beam, beam_map_dir, corrtype = 'Gaussian', primary_beam_path = N beamoutname = 'beam_{}.map'.format(beam.zfill(2)) # check if file exists: if os.path.isdir(beamoutname): #then test type and proceed for different types if corrtype == 'Gaussian': make_gaussian_beam(beam_map_dir,beamoutname,bm_size,cell,fwhm,cutoff)
def create_beam(beam, beam_map_dir, corrtype = 'Gaussian', primary_beam_path = N beamoutname = 'beam_{}.map'.format(beam.zfill(2)) # check if file exists: if not os.path.isdir(beamoutname): #then test type and proceed for different types if corrtype == 'Gaussian': make_gaussian_beam(beam_map_dir,beamoutname,bm_size,cell,fwhm,cutoff)
2,063
https://:@github.com/GalakVince/skin_lesion_symmetry.git
cbad346417689f3a698035f0393fef710ae6dedd
@@ -338,7 +338,7 @@ def classifierTrainer(maxLeafNodes, data=None, data_backup_file='patchesDataSet/
 clf: The fitted classifier.
 acc: The accuracy score of the classifier
 """
- if data is not None:
+ if data is None:
 data = pd.read_csv(f"{package_path()}/data/patchesDataSet/{data_backup_file}.csv", index_col=False)
 features = list(data)
 del features[0]
dermoscopic_symmetry/classifier_feeder.py
ReplaceText(target=' is ' @(341,11)->(341,19))
def classifierTrainer(maxLeafNodes, data=None, data_backup_file='patchesDataSet/ clf: The fitted classifier. acc: The accuracy score of the classifier """ if data is not None: data = pd.read_csv(f"{package_path()}/data/patchesDataSet/{data_backup_file}.csv", index_col=False) features = list(data) del features[0]
def classifierTrainer(maxLeafNodes, data=None, data_backup_file='patchesDataSet/ clf: The fitted classifier. acc: The accuracy score of the classifier """ if data is None: data = pd.read_csv(f"{package_path()}/data/patchesDataSet/{data_backup_file}.csv", index_col=False) features = list(data) del features[0]
2,064
https://:@github.com/Infinidat/infi.hbaapi.git
b0e2322452b83d0c9d037a502eee5e3b90ce1344
@@ -32,7 +32,7 @@ class GeneratorTestCase(unittest.TestCase):
 port_test_class.assert_port(port)
 def _assert_wwn_translation(self, expected, actual):
- self.assertEquals(expected, sysfs.translate_wwn(actual))
+ self.assertEquals(sysfs.translate_wwn(actual), expected)
 def test_wwn_translation(self):
 for expected, actual in [('01:02:03:04:05:06:07:08', '01:02:03:04:05:06:07:08'),
src/infi/hbaapi/generators/sysfs/tests/__init__.py
ArgSwap(idxs=0<->1 @(35,8)->(35,25))
class GeneratorTestCase(unittest.TestCase): port_test_class.assert_port(port) def _assert_wwn_translation(self, expected, actual): self.assertEquals(expected, sysfs.translate_wwn(actual)) def test_wwn_translation(self): for expected, actual in [('01:02:03:04:05:06:07:08', '01:02:03:04:05:06:07:08'),
class GeneratorTestCase(unittest.TestCase): port_test_class.assert_port(port) def _assert_wwn_translation(self, expected, actual): self.assertEquals(sysfs.translate_wwn(actual), expected) def test_wwn_translation(self): for expected, actual in [('01:02:03:04:05:06:07:08', '01:02:03:04:05:06:07:08'),
2,065
https://:@github.com/PanDAWMS/pilot2.git
d501b7defb163d213b08e69100e1b4ddc30b68f0
@@ -154,7 +154,7 @@ class BaseData(object):
 logger.warning('failed to convert data for key=%s, raw=%s to type=%s' % (kname, raw, ktype))
 return defval
- return raw.lower() in ['1', 'true', 'yes']
+ return val.lower() in ['1', 'true', 'yes']
 def clean_dictdata(self, raw, ktype, kname=None, defval=None):
 """
pilot/info/basedata.py
ReplaceText(target='val' @(157,15)->(157,18))
class BaseData(object): logger.warning('failed to convert data for key=%s, raw=%s to type=%s' % (kname, raw, ktype)) return defval return raw.lower() in ['1', 'true', 'yes'] def clean_dictdata(self, raw, ktype, kname=None, defval=None): """
class BaseData(object): logger.warning('failed to convert data for key=%s, raw=%s to type=%s' % (kname, raw, ktype)) return defval return val.lower() in ['1', 'true', 'yes'] def clean_dictdata(self, raw, ktype, kname=None, defval=None): """
2,066
https://:@github.com/PanDAWMS/pilot2.git
db2a9ae0873c0c53e97e4a43f2d929ca89729128
@@ -36,7 +36,7 @@ def verify_proxy(limit=None):
 # add setup for arcproxy if it exists
 arcproxy_setup = "%s/atlas.cern.ch/repo/sw/arc/client/latest/slc6/x86_64/setup.sh" % get_file_system_root_path()
- envsetup += ". %s;" % (arcproxy_setup)
+ envsetup = ". %s;" % (arcproxy_setup)
 # first try to use arcproxy since voms-proxy-info is not working properly on SL6
 # (memory issues on queues with limited memory)
pilot/user/atlas/proxy.py
ReplaceText(target='=' @(39,13)->(39,15))
def verify_proxy(limit=None): # add setup for arcproxy if it exists arcproxy_setup = "%s/atlas.cern.ch/repo/sw/arc/client/latest/slc6/x86_64/setup.sh" % get_file_system_root_path() envsetup += ". %s;" % (arcproxy_setup) # first try to use arcproxy since voms-proxy-info is not working properly on SL6 # (memory issues on queues with limited memory)
def verify_proxy(limit=None): # add setup for arcproxy if it exists arcproxy_setup = "%s/atlas.cern.ch/repo/sw/arc/client/latest/slc6/x86_64/setup.sh" % get_file_system_root_path() envsetup = ". %s;" % (arcproxy_setup) # first try to use arcproxy since voms-proxy-info is not working properly on SL6 # (memory issues on queues with limited memory)
2,067
https://:@github.com/PanDAWMS/pilot2.git
11ef7369f5f48c183709c3f4143deef9c8d9335a
@@ -101,7 +101,7 @@ class Analytics(Services):
 else:
 raise NotDefined('Fit has not been defined')
- return intersect
+ return x2
 class Fit(object):
pilot/api/analytics.py
ReplaceText(target='x2' @(104,15)->(104,24))
class Analytics(Services): else: raise NotDefined('Fit has not been defined') return intersect class Fit(object):
class Analytics(Services): else: raise NotDefined('Fit has not been defined') return x2 class Fit(object):
2,068
https://:@github.com/PanDAWMS/pilot2.git
bf8e5359f24dd7da724fe770d893ba3e3af41ef7
@@ -194,7 +194,7 @@ def copy_output(job, job_scratch_dir, work_dir):
 try:
 for outfile in job.output_files.keys():
 if os.path.exists(outfile):
- copy(os.path.join(job_scratch_dir, outfile), os.path.join(job_scratch_dir, outfile))
+ copy(os.path.join(job_scratch_dir, outfile), os.path.join(work_dir, outfile))
 os.chdir(work_dir)
 except IOError:
 raise FileHandlingFailure("Copy from scratch dir to access point failed")
pilot/workflow/generic_hpc.py
ReplaceText(target='work_dir' @(197,74)->(197,89))
def copy_output(job, job_scratch_dir, work_dir): try: for outfile in job.output_files.keys(): if os.path.exists(outfile): copy(os.path.join(job_scratch_dir, outfile), os.path.join(job_scratch_dir, outfile)) os.chdir(work_dir) except IOError: raise FileHandlingFailure("Copy from scratch dir to access point failed")
def copy_output(job, job_scratch_dir, work_dir): try: for outfile in job.output_files.keys(): if os.path.exists(outfile): copy(os.path.join(job_scratch_dir, outfile), os.path.join(work_dir, outfile)) os.chdir(work_dir) except IOError: raise FileHandlingFailure("Copy from scratch dir to access point failed")
2,069
https://:@github.com/PanDAWMS/pilot2.git
db0b653efeda7aed7af5ce710b7f57d94c241124
@@ -148,7 +148,7 @@ def get_proper_pid(pid, command, use_container=True, transformation=""): imax = 120 while i < imax: # abort if main process has finished already - if is_process_running(pid): + if not is_process_running(pid): return -1 ps = get_ps_info()
pilot/user/atlas/utilities.py
ReplaceText(target='not ' @(151,11)->(151,11))
def get_proper_pid(pid, command, use_container=True, transformation=""): imax = 120 while i < imax: # abort if main process has finished already if is_process_running(pid): return -1 ps = get_ps_info()
def get_proper_pid(pid, command, use_container=True, transformation=""): imax = 120 while i < imax: # abort if main process has finished already if not is_process_running(pid): return -1 ps = get_ps_info()
2,070
https://:@github.com/PanDAWMS/pilot2.git
7810aa90923f7e6380f364a173db0fdedc65f07c
@@ -113,7 +113,7 @@ def get_copysetup(copytools, copytool_name):
 """
 copysetup = ""
- if not copysetup:
+ if not copytools:
 return ""
 for ct in list(copytools.keys()): # Python 2/3
pilot/copytool/common.py
ReplaceText(target='copytools' @(116,11)->(116,20))
def get_copysetup(copytools, copytool_name): """ copysetup = "" if not copysetup: return "" for ct in list(copytools.keys()): # Python 2/3
def get_copysetup(copytools, copytool_name): """ copysetup = "" if not copytools: return "" for ct in list(copytools.keys()): # Python 2/3
2,071
https://:@github.com/PanDAWMS/pilot2.git
4d3fafc26c46daa348d7c9e61bf60f3f73562cba
@@ -288,7 +288,7 @@ def send_state(job, args, state, xml=None, metadata=None): # noqa: C901
 time_before = int(time.time())
 res = https.request('{pandaserver}/server/panda/updateJob'.format(pandaserver=pandaserver), data=data)
 time_after = int(time.time())
- log.info('server updateJob request completed in %ds for job %s' % (time_after - time_after, job.jobid))
+ log.info('server updateJob request completed in %ds for job %s' % (time_after - time_before, job.jobid))
 log.info("server responded with: res = %s" % str(res))
 if res is not None:
 # does the server update contain any backchannel information? if so, update the job object
pilot/control/job.py
ReplaceText(target='time_before' @(291,92)->(291,102))
def send_state(job, args, state, xml=None, metadata=None): # noqa: C901 time_before = int(time.time()) res = https.request('{pandaserver}/server/panda/updateJob'.format(pandaserver=pandaserver), data=data) time_after = int(time.time()) log.info('server updateJob request completed in %ds for job %s' % (time_after - time_after, job.jobid)) log.info("server responded with: res = %s" % str(res)) if res is not None: # does the server update contain any backchannel information? if so, update the job object
def send_state(job, args, state, xml=None, metadata=None): # noqa: C901 time_before = int(time.time()) res = https.request('{pandaserver}/server/panda/updateJob'.format(pandaserver=pandaserver), data=data) time_after = int(time.time()) log.info('server updateJob request completed in %ds for job %s' % (time_after - time_before, job.jobid)) log.info("server responded with: res = %s" % str(res)) if res is not None: # does the server update contain any backchannel information? if so, update the job object
2,072
https://:@github.com/jmoswalt/django-things.git
d83bb4dc5f222f3074ad91eb63089989bc41dc57
@@ -12,7 +12,7 @@ class ThingDetailView(DetailView):
 default_template_name = "things/thing_detail.html"
 def get_object(self, **kwargs):
- return get_thing_object(self.model, self.request.user, self.kwargs['slug'])
+ return get_thing_object(self.model, self.kwargs['slug'], self.request.user)
 def get_template_names(self):
 names = []
things/views.py
ArgSwap(idxs=1<->2 @(15,15)->(15,31))
class ThingDetailView(DetailView): default_template_name = "things/thing_detail.html" def get_object(self, **kwargs): return get_thing_object(self.model, self.request.user, self.kwargs['slug']) def get_template_names(self): names = []
class ThingDetailView(DetailView): default_template_name = "things/thing_detail.html" def get_object(self, **kwargs): return get_thing_object(self.model, self.kwargs['slug'], self.request.user) def get_template_names(self): names = []
2,073
https://:@github.com/dignio/py-smsframework-pswin.git
fcebe208a6ec6de47a187295adceb9e49b6cad97
@@ -15,7 +15,7 @@ class PswinProvider(IProvider):
 :param password: Account password
 :param https: Use HTTPS for outgoing messages?
 """
- self.api = PswinHttpApi(user, password, https, hostname)
+ self.api = PswinHttpApi(user, password, hostname, https)
 super(PswinProvider, self).__init__(gateway, name)
 def send(self, message):
smsframework_pswin/provider.py
ArgSwap(idxs=2<->3 @(18,19)->(18,31))
class PswinProvider(IProvider): :param password: Account password :param https: Use HTTPS for outgoing messages? """ self.api = PswinHttpApi(user, password, https, hostname) super(PswinProvider, self).__init__(gateway, name) def send(self, message):
class PswinProvider(IProvider): :param password: Account password :param https: Use HTTPS for outgoing messages? """ self.api = PswinHttpApi(user, password, hostname, https) super(PswinProvider, self).__init__(gateway, name) def send(self, message):
2,074
https://:@github.com/hsolbrig/pyjsg.git
951dac74c9d9c99b601b350330d985e368a1a64b
@@ -99,7 +99,7 @@ def iterable_conforms(element, etype, namespace: Dict[str, Any]) -> bool:
 def element_conforms(element, etype) -> bool:
 if element is None and etype == object:
- return True
+ return False
 elif isinstance(etype, type(type)) and (issubclass(etype, type(None))):
 return element is None
 elif element is None:
pyjsg/jsglib/typing_patch_36.py
ReplaceText(target='False' @(102,15)->(102,19))
def iterable_conforms(element, etype, namespace: Dict[str, Any]) -> bool: def element_conforms(element, etype) -> bool: if element is None and etype == object: return True elif isinstance(etype, type(type)) and (issubclass(etype, type(None))): return element is None elif element is None:
def iterable_conforms(element, etype, namespace: Dict[str, Any]) -> bool: def element_conforms(element, etype) -> bool: if element is None and etype == object: return False elif isinstance(etype, type(type)) and (issubclass(etype, type(None))): return element is None elif element is None:
2,075
https://:@github.com/TrackerSB/DriveBuild.git
cb617d93003c0bd64fd35a0289615224cac6baaf
@@ -324,7 +324,7 @@ def generate_scenario(env: _ElementTree, participants_node: _Element) -> Scenari
 movements = list()
 waypoint_nodes = xpath(node, "db:movement/db:waypoint")
 for wp_node in waypoint_nodes:
- common_state_vals = _extract_common_state_vals(initial_state_node)
+ common_state_vals = _extract_common_state_vals(wp_node)
 movements.append(WayPoint(
 (float(wp_node.get("x")), float(wp_node.get("y"))),
 float(wp_node.get("tolerance")),
generator.py
ReplaceText(target='wp_node' @(327,59)->(327,77))
def generate_scenario(env: _ElementTree, participants_node: _Element) -> Scenari movements = list() waypoint_nodes = xpath(node, "db:movement/db:waypoint") for wp_node in waypoint_nodes: common_state_vals = _extract_common_state_vals(initial_state_node) movements.append(WayPoint( (float(wp_node.get("x")), float(wp_node.get("y"))), float(wp_node.get("tolerance")),
def generate_scenario(env: _ElementTree, participants_node: _Element) -> Scenari movements = list() waypoint_nodes = xpath(node, "db:movement/db:waypoint") for wp_node in waypoint_nodes: common_state_vals = _extract_common_state_vals(wp_node) movements.append(WayPoint( (float(wp_node.get("x")), float(wp_node.get("y"))), float(wp_node.get("tolerance")),
2,076
https://:@bitbucket.org/madssj/fabric-coat.git
093e2a926dce52b47a5f47b98413287cd9f31588
@@ -22,7 +22,7 @@ def update_env(*args, **kwargs):
 env.versions_dir = env.base_dir + "/versions"
- if 'wsgi_file' in env:
+ if 'wsgi_file' not in env:
 env.wsgi_file = env.django_appname + ".wsgi"
 if 'local_base_dir' not in env:
src/coat/django.py
ReplaceText(target=' not in ' @(25,18)->(25,22))
def update_env(*args, **kwargs): env.versions_dir = env.base_dir + "/versions" if 'wsgi_file' in env: env.wsgi_file = env.django_appname + ".wsgi" if 'local_base_dir' not in env:
def update_env(*args, **kwargs): env.versions_dir = env.base_dir + "/versions" if 'wsgi_file' not in env: env.wsgi_file = env.django_appname + ".wsgi" if 'local_base_dir' not in env:
2,077
https://:@github.com/soreau/catt-qt.git
b795ef486b5cdbb3a7d4758a8e5acd978423db3f
@@ -764,7 +764,7 @@ class App(QMainWindow):
 if self.combo_box.currentIndex() == device.index:
 self.play_button.setEnabled(True)
 self.stop_button.setEnabled(True)
- d.disconnect_volume = round(device.cast.status.volume_level * 100)
+ device.disconnect_volume = round(device.cast.status.volume_level * 100)
 if self.reconnect_volume == -1:
 if last_volume != round(device.cast.status.volume_level * 100):
 d.volume(last_volume / 100)
cattqt/cattqt.py
ReplaceText(target='device' @(767,8)->(767,9))
class App(QMainWindow): if self.combo_box.currentIndex() == device.index: self.play_button.setEnabled(True) self.stop_button.setEnabled(True) d.disconnect_volume = round(device.cast.status.volume_level * 100) if self.reconnect_volume == -1: if last_volume != round(device.cast.status.volume_level * 100): d.volume(last_volume / 100)
class App(QMainWindow): if self.combo_box.currentIndex() == device.index: self.play_button.setEnabled(True) self.stop_button.setEnabled(True) device.disconnect_volume = round(device.cast.status.volume_level * 100) if self.reconnect_volume == -1: if last_volume != round(device.cast.status.volume_level * 100): d.volume(last_volume / 100)
2,078
https://:@github.com/FredHutch/find-cags.git
e45d29450aad390dd8da28ed6285ba089728abd8
@@ -271,7 +271,7 @@ def make_summary_abund_df(df, cags, singletons):
 cag_ix: df.loc[cag].mean()
 for cag_ix, cag in cags.items()
 }).T,
- cags.loc[singletons]
+ df.loc[singletons]
 ])
 assert summary_df.shape[0] == len(cags) + len(singletons)
find-cags.py
ReplaceText(target='df' @(274,8)->(274,12))
def make_summary_abund_df(df, cags, singletons): cag_ix: df.loc[cag].mean() for cag_ix, cag in cags.items() }).T, cags.loc[singletons] ]) assert summary_df.shape[0] == len(cags) + len(singletons)
def make_summary_abund_df(df, cags, singletons): cag_ix: df.loc[cag].mean() for cag_ix, cag in cags.items() }).T, df.loc[singletons] ]) assert summary_df.shape[0] == len(cags) + len(singletons)
2,079
https://:@github.com/thuctran289/aztex.git
3732482a5ea5d7a6972e8adb33a286b77fb77fef
@@ -104,7 +104,7 @@ class Parser(object):
 match = container.get()
 em = LinkTextMatch(match)
 subelement = self.parseText(em.text())
- element = LinkElement(element, em.url())
+ element = LinkElement(subelement, em.url())
 elif container.set(self.matcher.matchImage(block)):
 match = container.get()
Parser.py
ReplaceText(target='subelement' @(107,26)->(107,33))
class Parser(object): match = container.get() em = LinkTextMatch(match) subelement = self.parseText(em.text()) element = LinkElement(element, em.url()) elif container.set(self.matcher.matchImage(block)): match = container.get()
class Parser(object): match = container.get() em = LinkTextMatch(match) subelement = self.parseText(em.text()) element = LinkElement(subelement, em.url()) elif container.set(self.matcher.matchImage(block)): match = container.get()
2,080
https://:@github.com/sviete/home-assistant.git
917db18b29e37685517bde78b827c41729f3512d
@@ -56,7 +56,7 @@ def get_scanner(hass, config):
 _LOGGER.warning('Found username or password but no host')
 return None
- scanner = NetgearDeviceScanner(host, password, username)
+ scanner = NetgearDeviceScanner(host, username, password)
 return scanner if scanner.success_init else None
homeassistant/components/device_tracker/netgear.py
ArgSwap(idxs=1<->2 @(59,14)->(59,34))
def get_scanner(hass, config): _LOGGER.warning('Found username or password but no host') return None scanner = NetgearDeviceScanner(host, password, username) return scanner if scanner.success_init else None
def get_scanner(hass, config): _LOGGER.warning('Found username or password but no host') return None scanner = NetgearDeviceScanner(host, username, password) return scanner if scanner.success_init else None
2,081
https://:@github.com/sviete/home-assistant.git
33b0f4d05d5e7479fce392053763f98d650085b3
@@ -45,7 +45,7 @@ def trigger(hass, config, action):
 and not convert(seconds.lstrip('/'), int) % 60 == 0:
 _LOGGER.warning('Periodic seconds should be divisible with 60'
 'there will be an offset every minute')
- if isinstance(minutes, str) and hours.startswith('/') \
+ if isinstance(hours, str) and hours.startswith('/') \
 and not convert(hours.lstrip('/'), int) % 24 == 0:
 _LOGGER.warning('Periodic hours should be divisible with 24'
 'there will be an offset every midnight')
homeassistant/components/automation/time.py
ReplaceText(target='hours' @(48,22)->(48,29))
def trigger(hass, config, action): and not convert(seconds.lstrip('/'), int) % 60 == 0: _LOGGER.warning('Periodic seconds should be divisible with 60' 'there will be an offset every minute') if isinstance(minutes, str) and hours.startswith('/') \ and not convert(hours.lstrip('/'), int) % 24 == 0: _LOGGER.warning('Periodic hours should be divisible with 24' 'there will be an offset every midnight')
def trigger(hass, config, action): and not convert(seconds.lstrip('/'), int) % 60 == 0: _LOGGER.warning('Periodic seconds should be divisible with 60' 'there will be an offset every minute') if isinstance(hours, str) and hours.startswith('/') \ and not convert(hours.lstrip('/'), int) % 24 == 0: _LOGGER.warning('Periodic hours should be divisible with 24' 'there will be an offset every midnight')
2,082
https://:@github.com/sviete/home-assistant.git
f5227e1de07066b2ae67b48a3f2312f0057207be
@@ -109,7 +109,7 @@ def setup(hass, base_config):
 (LIGHT, DISCOVER_LIGHTS),
 (SWITCH, DISCOVER_SWITCHES))):
 component = get_component(comp_name)
- bootstrap.setup_component(hass, component.DOMAIN, config)
+ bootstrap.setup_component(hass, component.DOMAIN, base_config)
 hass.bus.fire(EVENT_PLATFORM_DISCOVERED,
 {ATTR_SERVICE: discovery,
 ATTR_DISCOVERED: {}})
homeassistant/components/vera.py
ReplaceText(target='base_config' @(112,58)->(112,64))
def setup(hass, base_config): (LIGHT, DISCOVER_LIGHTS), (SWITCH, DISCOVER_SWITCHES))): component = get_component(comp_name) bootstrap.setup_component(hass, component.DOMAIN, config) hass.bus.fire(EVENT_PLATFORM_DISCOVERED, {ATTR_SERVICE: discovery, ATTR_DISCOVERED: {}})
def setup(hass, base_config): (LIGHT, DISCOVER_LIGHTS), (SWITCH, DISCOVER_SWITCHES))): component = get_component(comp_name) bootstrap.setup_component(hass, component.DOMAIN, base_config) hass.bus.fire(EVENT_PLATFORM_DISCOVERED, {ATTR_SERVICE: discovery, ATTR_DISCOVERED: {}})
2,083
https://:@github.com/sviete/home-assistant.git
e44c2a4016a12c992154b62ecfbed5a6013623e2
@@ -39,7 +39,7 @@ def _conf_preprocess(value):
 return value
 _SINGLE_GROUP_CONFIG = vol.Schema(vol.All(_conf_preprocess, {
- vol.Optional(CONF_ENTITIES): vol.Any(None, cv.entity_ids),
+ vol.Optional(CONF_ENTITIES): vol.Any(cv.entity_ids, None),
 CONF_VIEW: bool,
 CONF_NAME: str,
 CONF_ICON: cv.icon,
homeassistant/components/group.py
ArgSwap(idxs=0<->1 @(42,33)->(42,40))
def _conf_preprocess(value): return value _SINGLE_GROUP_CONFIG = vol.Schema(vol.All(_conf_preprocess, { vol.Optional(CONF_ENTITIES): vol.Any(None, cv.entity_ids), CONF_VIEW: bool, CONF_NAME: str, CONF_ICON: cv.icon,
def _conf_preprocess(value): return value _SINGLE_GROUP_CONFIG = vol.Schema(vol.All(_conf_preprocess, { vol.Optional(CONF_ENTITIES): vol.Any(cv.entity_ids, None), CONF_VIEW: bool, CONF_NAME: str, CONF_ICON: cv.icon,
2,084
https://:@github.com/sviete/home-assistant.git
b1736994b72a90ff18d7461b0afe00dd93ba2cdc
@@ -258,7 +258,7 @@ class AndroidIPCamEntity(Entity):
 def device_state_attributes(self):
 """Return the state attributes."""
 state_attr = {}
- if self._ipcam.status_data is not None:
+ if self._ipcam.status_data is None:
 return state_attr
 state_attr[ATTR_VID_CONNS] = \
homeassistant/components/android_ip_webcam.py
ReplaceText(target=' is ' @(261,34)->(261,42))
class AndroidIPCamEntity(Entity): def device_state_attributes(self): """Return the state attributes.""" state_attr = {} if self._ipcam.status_data is not None: return state_attr state_attr[ATTR_VID_CONNS] = \
class AndroidIPCamEntity(Entity): def device_state_attributes(self): """Return the state attributes.""" state_attr = {} if self._ipcam.status_data is None: return state_attr state_attr[ATTR_VID_CONNS] = \
2,085
https://:@github.com/sviete/home-assistant.git
32b7f4d16f3c790d3370df3f4bcaf1a1462c8944
@@ -288,7 +288,7 @@ class TadoClimate(ClimateDevice):
 if 'setting' in overlay_data:
 setting_data = overlay_data['setting']
- setting = setting is not None
+ setting = setting_data is not None
 if setting:
 if 'mode' in setting_data:
homeassistant/components/climate/tado.py
ReplaceText(target='setting_data' @(291,26)->(291,33))
class TadoClimate(ClimateDevice): if 'setting' in overlay_data: setting_data = overlay_data['setting'] setting = setting is not None if setting: if 'mode' in setting_data:
class TadoClimate(ClimateDevice): if 'setting' in overlay_data: setting_data = overlay_data['setting'] setting = setting_data is not None if setting: if 'mode' in setting_data:
2,086
https://:@github.com/sviete/home-assistant.git
6505019701d57bf497cd42fc087e4a2e7a9ef546
@@ -89,7 +89,7 @@ class PushBulletNotificationService(BaseNotificationService):
 if not targets:
 # Backward compatibility, notify all devices in own account
- self._push_data(filepath, message, title, self.pushbullet, url)
+ self._push_data(filepath, message, title, url, self.pushbullet)
 _LOGGER.info("Sent notification to self")
 return
homeassistant/components/notify/pushbullet.py
ArgSwap(idxs=3<->4 @(92,12)->(92,27))
class PushBulletNotificationService(BaseNotificationService): if not targets: # Backward compatibility, notify all devices in own account self._push_data(filepath, message, title, self.pushbullet, url) _LOGGER.info("Sent notification to self") return
class PushBulletNotificationService(BaseNotificationService): if not targets: # Backward compatibility, notify all devices in own account self._push_data(filepath, message, title, url, self.pushbullet) _LOGGER.info("Sent notification to self") return
2,087
https://:@github.com/sviete/home-assistant.git
e2ce1d05aeb825efcc324d0e4ac38fd868e80875
@@ -27,7 +27,7 @@ def setup_platform(hass, config, add_devices, discovery_info=None):
 # Get all regular switches that are not excluded or marked as lights
 for device in data.abode.get_devices(generic_type=CONST.TYPE_SWITCH):
- if data.is_excluded(device) or not data.is_light(device):
+ if data.is_excluded(device) or data.is_light(device):
 continue
 devices.append(AbodeSwitch(data, device))
homeassistant/components/switch/abode.py
ReplaceText(target='' @(30,39)->(30,43))
def setup_platform(hass, config, add_devices, discovery_info=None): # Get all regular switches that are not excluded or marked as lights for device in data.abode.get_devices(generic_type=CONST.TYPE_SWITCH): if data.is_excluded(device) or not data.is_light(device): continue devices.append(AbodeSwitch(data, device))
def setup_platform(hass, config, add_devices, discovery_info=None): # Get all regular switches that are not excluded or marked as lights for device in data.abode.get_devices(generic_type=CONST.TYPE_SWITCH): if data.is_excluded(device) or data.is_light(device): continue devices.append(AbodeSwitch(data, device))
2,088
https://:@github.com/sviete/home-assistant.git
486263fff771a5f647d70d062e67022ae5031378
@@ -118,7 +118,7 @@ class Concord232ZoneSensor(BinarySensorDevice):
 def is_on(self):
 """Return true if the binary sensor is on."""
 # True means "faulted" or "open" or "abnormal state"
- return bool(self._zone['state'] == 'Normal')
+ return bool(self._zone['state'] != 'Normal')
 def update(self):
 """Get updated stats from API."""
homeassistant/components/binary_sensor/concord232.py
ReplaceText(target='!=' @(121,40)->(121,42))
class Concord232ZoneSensor(BinarySensorDevice): def is_on(self): """Return true if the binary sensor is on.""" # True means "faulted" or "open" or "abnormal state" return bool(self._zone['state'] == 'Normal') def update(self): """Get updated stats from API."""
class Concord232ZoneSensor(BinarySensorDevice): def is_on(self): """Return true if the binary sensor is on.""" # True means "faulted" or "open" or "abnormal state" return bool(self._zone['state'] != 'Normal') def update(self): """Get updated stats from API."""
2,089
https://:@github.com/sviete/home-assistant.git
cf3f1c3081405034dcd96cf5d2ae6f070c5bbfa8
@@ -48,7 +48,7 @@ def setup_platform(hass, config, add_devices, discovery_info=None):
 dev = []
 for pmname in coll.supported_values():
- if config.get(CONF_NAME) is None:
+ if config.get(CONF_NAME) is not None:
 name = '{} PM{}'.format(config.get(CONF_NAME), pmname)
 else:
 name = 'PM{}'.format(pmname)
homeassistant/components/sensor/serial_pm.py
ReplaceText(target=' is not ' @(51,32)->(51,36))
def setup_platform(hass, config, add_devices, discovery_info=None): dev = [] for pmname in coll.supported_values(): if config.get(CONF_NAME) is None: name = '{} PM{}'.format(config.get(CONF_NAME), pmname) else: name = 'PM{}'.format(pmname)
def setup_platform(hass, config, add_devices, discovery_info=None): dev = [] for pmname in coll.supported_values(): if config.get(CONF_NAME) is not None: name = '{} PM{}'.format(config.get(CONF_NAME), pmname) else: name = 'PM{}'.format(pmname)
2,090
https://:@github.com/sviete/home-assistant.git
74c249e57d16340ebd89fcd989942ff8b2fac26f
@@ -194,7 +194,7 @@ class SpotifyMediaPlayer(MediaPlayerDevice):
 self._title = item.get('name')
 self._artist = ', '.join([artist.get('name')
 for artist in item.get('artists')])
- self._uri = current.get('uri')
+ self._uri = item.get('uri')
 images = item.get('album').get('images')
 self._image_url = images[0].get('url') if images else None
 # Playing state
homeassistant/components/media_player/spotify.py
ReplaceText(target='item' @(197,24)->(197,31))
class SpotifyMediaPlayer(MediaPlayerDevice): self._title = item.get('name') self._artist = ', '.join([artist.get('name') for artist in item.get('artists')]) self._uri = current.get('uri') images = item.get('album').get('images') self._image_url = images[0].get('url') if images else None # Playing state
class SpotifyMediaPlayer(MediaPlayerDevice): self._title = item.get('name') self._artist = ', '.join([artist.get('name') for artist in item.get('artists')]) self._uri = item.get('uri') images = item.get('album').get('images') self._image_url = images[0].get('url') if images else None # Playing state
2,091
https://:@github.com/sviete/home-assistant.git
2a5751c09d62823371da14e9bdb1b19143851c85
@@ -173,7 +173,7 @@ class TestHomeKit(unittest.TestCase):
 self.assertEqual(mock_add_bridge_acc.mock_calls, [call(state)])
 self.assertEqual(mock_show_setup_msg.mock_calls, [
- call(homekit.bridge, self.hass)])
+ call(self.hass, homekit.bridge)])
 self.assertEqual(homekit.driver.mock_calls, [call.start()])
 self.assertTrue(homekit.started)
tests/components/homekit/test_homekit.py
ArgSwap(idxs=0<->1 @(176,12)->(176,16))
class TestHomeKit(unittest.TestCase): self.assertEqual(mock_add_bridge_acc.mock_calls, [call(state)]) self.assertEqual(mock_show_setup_msg.mock_calls, [ call(homekit.bridge, self.hass)]) self.assertEqual(homekit.driver.mock_calls, [call.start()]) self.assertTrue(homekit.started)
class TestHomeKit(unittest.TestCase): self.assertEqual(mock_add_bridge_acc.mock_calls, [call(state)]) self.assertEqual(mock_show_setup_msg.mock_calls, [ call(self.hass, homekit.bridge)]) self.assertEqual(homekit.driver.mock_calls, [call.start()]) self.assertTrue(homekit.started)
2,092
https://:@github.com/sviete/home-assistant.git
bd23145331c2a3497160d311da5853393852df61
@@ -185,7 +185,7 @@ class EntityRegistry:
 for listener_ref in new.update_listeners:
 listener = listener_ref()
 if listener is None:
- to_remove.append(listener)
+ to_remove.append(listener_ref)
 else:
 try:
 listener.async_registry_updated(old, new)
homeassistant/helpers/entity_registry.py
ReplaceText(target='listener_ref' @(188,33)->(188,41))
class EntityRegistry: for listener_ref in new.update_listeners: listener = listener_ref() if listener is None: to_remove.append(listener) else: try: listener.async_registry_updated(old, new)
class EntityRegistry: for listener_ref in new.update_listeners: listener = listener_ref() if listener is None: to_remove.append(listener_ref) else: try: listener.async_registry_updated(old, new)
2,093
https://:@github.com/sviete/home-assistant.git
34d7758b4a04b6cdc44a763cb1da194d4168b833
@@ -46,7 +46,7 @@ def async_register_http(hass, cfg):
 entity_config.get(entity.entity_id, {}).get(CONF_EXPOSE)
 domain_exposed_by_default = \
- expose_by_default and entity.domain in exposed_domains
+ expose_by_default or entity.domain in exposed_domains
 # Expose an entity if the entity's domain is exposed by default and
 # the configuration doesn't explicitly exclude it from being
homeassistant/components/google_assistant/http.py
ReplaceText(target='or' @(49,30)->(49,33))
def async_register_http(hass, cfg): entity_config.get(entity.entity_id, {}).get(CONF_EXPOSE) domain_exposed_by_default = \ expose_by_default and entity.domain in exposed_domains # Expose an entity if the entity's domain is exposed by default and # the configuration doesn't explicitly exclude it from being
def async_register_http(hass, cfg): entity_config.get(entity.entity_id, {}).get(CONF_EXPOSE) domain_exposed_by_default = \ expose_by_default or entity.domain in exposed_domains # Expose an entity if the entity's domain is exposed by default and # the configuration doesn't explicitly exclude it from being
2,094
https://:@github.com/sviete/home-assistant.git
04c7d5c128c61bc26caf6950ccb231cb27faacac
@@ -48,7 +48,7 @@ def async_register_http(hass, cfg):
 entity_config.get(entity.entity_id, {}).get(CONF_EXPOSE)
 domain_exposed_by_default = \
- expose_by_default or entity.domain in exposed_domains
+ expose_by_default and entity.domain in exposed_domains
 # Expose an entity if the entity's domain is exposed by default and
 # the configuration doesn't explicitly exclude it from being
homeassistant/components/google_assistant/http.py
ReplaceText(target='and' @(51,30)->(51,32))
def async_register_http(hass, cfg): entity_config.get(entity.entity_id, {}).get(CONF_EXPOSE) domain_exposed_by_default = \ expose_by_default or entity.domain in exposed_domains # Expose an entity if the entity's domain is exposed by default and # the configuration doesn't explicitly exclude it from being
def async_register_http(hass, cfg): entity_config.get(entity.entity_id, {}).get(CONF_EXPOSE) domain_exposed_by_default = \ expose_by_default and entity.domain in exposed_domains # Expose an entity if the entity's domain is exposed by default and # the configuration doesn't explicitly exclude it from being
2,095
https://:@github.com/sviete/home-assistant.git
d13c892b281049415c67370d34ca711b5c3691c5
@@ -19,7 +19,7 @@ async def async_handle_state_update(hass, context, msg):
 _LOGGER.debug("[state handler] context: %s msg: %s", context, msg)
 entity_id = context.get(ATTR_ENTITY_ID)
 state = bool(int(msg.get(ATTR_STATE)))
- if msg.get(CONF_INVERSE):
+ if context.get(CONF_INVERSE):
 state = not state
 async_dispatcher_send(
homeassistant/components/konnected/handlers.py
ReplaceText(target='context' @(22,7)->(22,10))
async def async_handle_state_update(hass, context, msg): _LOGGER.debug("[state handler] context: %s msg: %s", context, msg) entity_id = context.get(ATTR_ENTITY_ID) state = bool(int(msg.get(ATTR_STATE))) if msg.get(CONF_INVERSE): state = not state async_dispatcher_send(
async def async_handle_state_update(hass, context, msg): _LOGGER.debug("[state handler] context: %s msg: %s", context, msg) entity_id = context.get(ATTR_ENTITY_ID) state = bool(int(msg.get(ATTR_STATE))) if context.get(CONF_INVERSE): state = not state async_dispatcher_send(
2,096
https://:@github.com/sviete/home-assistant.git
3d91d76d3d87dc28958c70c25cbd7568c8c20d4c
@@ -355,7 +355,7 @@ async def _async_set_up_integrations(
 if stage_1_domains:
 await asyncio.gather(*[
 async_setup_component(hass, domain, config)
- for domain in logging_domains
+ for domain in stage_1_domains
 ])
 # Load all integrations
homeassistant/bootstrap.py
ReplaceText(target='stage_1_domains' @(358,26)->(358,41))
async def _async_set_up_integrations( if stage_1_domains: await asyncio.gather(*[ async_setup_component(hass, domain, config) for domain in logging_domains ]) # Load all integrations
async def _async_set_up_integrations( if stage_1_domains: await asyncio.gather(*[ async_setup_component(hass, domain, config) for domain in stage_1_domains ]) # Load all integrations
2,097
https://:@github.com/sviete/home-assistant.git
02f927ae2dfc1ed8de305a3cb7a7ee2b955b97cf
@@ -62,7 +62,7 @@ async def async_setup_entry(hass, entry, async_add_entities):
 return
 if _token_info:
- await store.async_save(token_info)
+ await store.async_save(_token_info)
 token_info = _token_info
 data_connection = ambiclimate.AmbiclimateConnection(oauth,
homeassistant/components/ambiclimate/climate.py
ReplaceText(target='_token_info' @(65,31)->(65,41))
async def async_setup_entry(hass, entry, async_add_entities): return if _token_info: await store.async_save(token_info) token_info = _token_info data_connection = ambiclimate.AmbiclimateConnection(oauth,
async def async_setup_entry(hass, entry, async_add_entities): return if _token_info: await store.async_save(_token_info) token_info = _token_info data_connection = ambiclimate.AmbiclimateConnection(oauth,
2,098
https://:@github.com/sviete/home-assistant.git
e824c553ca72c2b6de0197ae515486d32785e8a2
@@ -254,7 +254,7 @@ class WazeTravelTimeData():
 if self.exclude is not None:
 routes = {k: v for k, v in routes.items() if
- self.exclude.lower() in k.lower()}
+ self.exclude.lower() not in k.lower()}
 route = sorted(routes, key=(lambda key: routes[key][0]))[0]
homeassistant/components/waze_travel_time/sensor.py
ReplaceText(target=' not in ' @(257,50)->(257,54))
class WazeTravelTimeData(): if self.exclude is not None: routes = {k: v for k, v in routes.items() if self.exclude.lower() in k.lower()} route = sorted(routes, key=(lambda key: routes[key][0]))[0]
class WazeTravelTimeData(): if self.exclude is not None: routes = {k: v for k, v in routes.items() if self.exclude.lower() not in k.lower()} route = sorted(routes, key=(lambda key: routes[key][0]))[0]
2,099
https://:@github.com/sviete/home-assistant.git
0653f57fb41563385b141efcd2ffcfb60572042b
@@ -433,7 +433,7 @@ class Entity:
 async def _async_registry_updated(self, event):
 """Handle entity registry update."""
 data = event.data
- if data['action'] != 'update' and data.get(
+ if data['action'] != 'update' or data.get(
 'old_entity_id', data['entity_id']) != self.entity_id:
 return
homeassistant/helpers/entity.py
ReplaceText(target='or' @(436,38)->(436,41))
class Entity: async def _async_registry_updated(self, event): """Handle entity registry update.""" data = event.data if data['action'] != 'update' and data.get( 'old_entity_id', data['entity_id']) != self.entity_id: return
class Entity: async def _async_registry_updated(self, event): """Handle entity registry update.""" data = event.data if data['action'] != 'update' or data.get( 'old_entity_id', data['entity_id']) != self.entity_id: return