Column schema (name: type, value range):

project: string, length 1–98
commit_sha: string, length 40
parent_sha: string, length 40
file_path: string, length 4–209
project_url: string, length 23–132
likely_bug: bool, 1 class
comodified: bool, 1 class
in_function: bool, 2 classes
diff: string, length 27–9.71k
before: string, length 1–8.91k
after: string, length 1–6k
sstub_pattern: string (categorical), 23 values
edit_script: string, length 33–158k
key: string, length 45–154
commit_message: string, length 3–65.5k
files: list
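The rows that follow list one record per bug-fixing commit, with values given in the column order above. As a rough sketch only, a record could be modeled in Python like this; the class name and the per-field comments are inferred from the column names and the sample values, not taken from any official schema documentation:

```python
from dataclasses import dataclass, field
from typing import Any, List


@dataclass
class BugFixRecord:
    """Hypothetical container mirroring the columns listed above."""
    project: str          # repository name, e.g. "controller"
    commit_sha: str       # 40-character SHA of the fixing commit
    parent_sha: str       # 40-character SHA of the parent (pre-fix) commit
    file_path: str        # path of the changed file inside the repository
    project_url: str      # GitHub URL of the repository
    likely_bug: bool      # flag: the change is likely a bug fix
    comodified: bool      # boolean flag from the dataset (see schema above)
    in_function: bool     # flag: the changed statement sits inside a function body
    diff: str             # unified-diff hunk of the single-statement change
    before: str           # tokenized statement before the change
    after: str            # tokenized statement after the change
    sstub_pattern: str    # one of 23 pattern labels, e.g. "WRONG_FUNCTION_NAME"
    edit_script: str      # JSON-encoded list of tree-edit operations
    key: str              # "<owner>/<repo>@<commit_sha>"
    commit_message: str   # message of the fixing commit
    files: List[Any] = field(default_factory=list)  # per-file metadata from the GitHub API
```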
controller
f9f60b3df7b078d2b23c97551853141200f7991d
887732e92e2641f6b6794f3a0b29fbd221e5efb4
docs/conf.py
https://github.com/Codaisseur/controller
true
false
false
@@ -21,7 +21,7 @@ import sys sys.path.insert(0, os.path.abspath('..')) # create local_settings.py for SECRET_KEY if necessary local_settings_path = os.path.abspath( - os.path.join('..', 'deis', 'local_settings.py')) + os.path.join('..', 'controller', 'deis', 'local_settings.py')) if not os.path.exists(local_settings_path): with open(local_settings_path, 'w') as local_settings: local_settings.write("SECRET_KEY = 'DummySecretKey'\n")
local_settings_path = os . path . abspath ( os . path . join ( '..' , 'deis' , 'local_settings.py' ) )
local_settings_path = os . path . abspath ( os . path . join ( '..' , 'controller' , 'deis' , 'local_settings.py' ) )
SAME_FUNCTION_MORE_ARGS
[["Move", ["string:'deis'", 3, 24, 3, 30], ["argument_list", 3, 17, 3, 52], 4], ["Insert", ["argument_list", 3, 17, 3, 52], ["string:'controller'", "T"], 3], ["Insert", ["argument_list", 3, 17, 3, 52], [",:,", "T"], 5]]
Codaisseur/controller@f9f60b3df7b078d2b23c97551853141200f7991d
fix docs path issue
[ { "sha": "5b40e14df122c550e4f7611c67d14c0dcc7842d5", "filename": "docs/conf.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/Codaisseur/controller/blob/f9f60b3df7b078d2b23c97551853141200f7991d/docs%2Fconf.py", "raw_url": "https://github.com/Codaisseur/controller/raw/f9f60b3df7b078d2b23c97551853141200f7991d/docs%2Fconf.py", "contents_url": "https://api.github.com/repos/Codaisseur/controller/contents/docs%2Fconf.py?ref=f9f60b3df7b078d2b23c97551853141200f7991d", "patch": "@@ -21,7 +21,7 @@\n sys.path.insert(0, os.path.abspath('..'))\n # create local_settings.py for SECRET_KEY if necessary\n local_settings_path = os.path.abspath(\n- os.path.join('..', 'deis', 'local_settings.py'))\n+ os.path.join('..', 'controller', 'deis', 'local_settings.py'))\n if not os.path.exists(local_settings_path):\n with open(local_settings_path, 'w') as local_settings:\n local_settings.write(\"SECRET_KEY = 'DummySecretKey'\\n\")" } ]
controller
3dda9fbeab1c546b8f137ea22bb4492d7e6f9112
dfd202b6e43d4440ef767ed5298df0e561898e93
controller/api/admin.py
https://github.com/Codaisseur/controller
true
false
false
@@ -63,7 +63,7 @@ class ContainerAdmin(admin.ModelAdmin): date_hierarchy = 'created' - list_display = ('short_name', 'owner', 'cluster', 'app', 'state') + list_display = ('short_name', 'owner', 'app', 'state') list_filter = ('owner', 'cluster', 'app', 'state') admin.site.register(Container, ContainerAdmin)
list_display = ( 'short_name' , 'owner' , 'cluster' , 'app' , 'state' )
list_display = ( 'short_name' , 'owner' , 'app' , 'state' )
SINGLE_STMT
[["Move", [",:,", 1, 53, 1, 54], ["tuple", 1, 20, 1, 70], 7], ["Delete", ["string:'cluster'", 1, 44, 1, 53]], ["Delete", [",:,", 1, 60, 1, 61]]]
Codaisseur/controller@3dda9fbeab1c546b8f137ea22bb4492d7e6f9112
fix(controller): fix container admin view A container does not have a cluster attribute, but an app does! fixes #743
[ { "sha": "e922b3586263324bd7fe42ed30fec28066cd6b46", "filename": "controller/api/admin.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/Codaisseur/controller/blob/3dda9fbeab1c546b8f137ea22bb4492d7e6f9112/controller%2Fapi%2Fadmin.py", "raw_url": "https://github.com/Codaisseur/controller/raw/3dda9fbeab1c546b8f137ea22bb4492d7e6f9112/controller%2Fapi%2Fadmin.py", "contents_url": "https://api.github.com/repos/Codaisseur/controller/contents/controller%2Fapi%2Fadmin.py?ref=3dda9fbeab1c546b8f137ea22bb4492d7e6f9112", "patch": "@@ -63,7 +63,7 @@ class ContainerAdmin(admin.ModelAdmin):\n in the Django admin.\n \"\"\"\n date_hierarchy = 'created'\n- list_display = ('short_name', 'owner', 'cluster', 'app', 'state')\n+ list_display = ('short_name', 'owner', 'app', 'state')\n list_filter = ('owner', 'cluster', 'app', 'state')\n admin.site.register(Container, ContainerAdmin)\n " } ]
controller
746e140cbd89aa29d3a813c270611394851bfb80
3dda9fbeab1c546b8f137ea22bb4492d7e6f9112
controller/api/models.py
https://github.com/Codaisseur/controller
true
false
false
@@ -67,7 +67,7 @@ class Cluster(UuidAuditedModel): owner = models.ForeignKey(settings.AUTH_USER_MODEL) id = models.CharField(max_length=128, unique=True) - type = models.CharField(max_length=16, choices=CLUSTER_TYPES) + type = models.CharField(max_length=16, choices=CLUSTER_TYPES, default='coreos') domain = models.CharField(max_length=128) hosts = models.CharField(max_length=256)
type = models . CharField ( max_length = 16 , choices = CLUSTER_TYPES )
type = models . CharField ( max_length = 16 , choices = CLUSTER_TYPES , default = 'coreos' )
SAME_FUNCTION_MORE_ARGS
[["Insert", ["argument_list", 3, 28, 3, 66], [",:,", "T"], 4], ["Insert", ["argument_list", 3, 28, 3, 66], ["keyword_argument", "N0"], 5], ["Insert", "N0", ["identifier:default", "T"], 0], ["Insert", "N0", ["=:=", "T"], 1], ["Insert", "N0", ["string:'coreos'", "T"], 2]]
Codaisseur/controller@746e140cbd89aa29d3a813c270611394851bfb80
feat(controller): make coreos the default cluster
[ { "sha": "8034b566966541d13fd8c4c3e20f2d347757f4c2", "filename": "controller/api/models.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/Codaisseur/controller/blob/746e140cbd89aa29d3a813c270611394851bfb80/controller%2Fapi%2Fmodels.py", "raw_url": "https://github.com/Codaisseur/controller/raw/746e140cbd89aa29d3a813c270611394851bfb80/controller%2Fapi%2Fmodels.py", "contents_url": "https://api.github.com/repos/Codaisseur/controller/contents/controller%2Fapi%2Fmodels.py?ref=746e140cbd89aa29d3a813c270611394851bfb80", "patch": "@@ -67,7 +67,7 @@ class Cluster(UuidAuditedModel):\n \n owner = models.ForeignKey(settings.AUTH_USER_MODEL)\n id = models.CharField(max_length=128, unique=True)\n- type = models.CharField(max_length=16, choices=CLUSTER_TYPES)\n+ type = models.CharField(max_length=16, choices=CLUSTER_TYPES, default='coreos')\n \n domain = models.CharField(max_length=128)\n hosts = models.CharField(max_length=256)" } ]
controller
1d05e0d18061ad2559ecf16dead104dffb2c632c
8517d7dff3e57b839a3c1825843e24486a8c5a22
controller/api/admin.py
https://github.com/Codaisseur/controller
true
false
false
@@ -64,7 +64,7 @@ class ContainerAdmin(admin.ModelAdmin): date_hierarchy = 'created' list_display = ('short_name', 'owner', 'app', 'state') - list_filter = ('owner', 'cluster', 'app', 'state') + list_filter = ('owner', 'app', 'state') admin.site.register(Container, ContainerAdmin)
list_filter = ( 'owner' , 'cluster' , 'app' , 'state' )
list_filter = ( 'owner' , 'app' , 'state' )
SINGLE_STMT
[["Move", [",:,", 2, 38, 2, 39], ["tuple", 2, 19, 2, 55], 5], ["Delete", ["string:'cluster'", 2, 29, 2, 38]], ["Delete", [",:,", 2, 45, 2, 46]]]
Codaisseur/controller@1d05e0d18061ad2559ecf16dead104dffb2c632c
fix(controller): remove cluster from list_filter Forgot to add this into 3dda9fbeab1c546b8f137ea22bb4492d7e6f9112
[ { "sha": "c4f34efcf7177aeaca973449f329f82e08c5036b", "filename": "controller/api/admin.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/Codaisseur/controller/blob/1d05e0d18061ad2559ecf16dead104dffb2c632c/controller%2Fapi%2Fadmin.py", "raw_url": "https://github.com/Codaisseur/controller/raw/1d05e0d18061ad2559ecf16dead104dffb2c632c/controller%2Fapi%2Fadmin.py", "contents_url": "https://api.github.com/repos/Codaisseur/controller/contents/controller%2Fapi%2Fadmin.py?ref=1d05e0d18061ad2559ecf16dead104dffb2c632c", "patch": "@@ -64,7 +64,7 @@ class ContainerAdmin(admin.ModelAdmin):\n \"\"\"\n date_hierarchy = 'created'\n list_display = ('short_name', 'owner', 'app', 'state')\n- list_filter = ('owner', 'cluster', 'app', 'state')\n+ list_filter = ('owner', 'app', 'state')\n admin.site.register(Container, ContainerAdmin)\n \n " } ]
controller
07d49ef782248dc8effaef56c5142b0d382ed141
82a0339c467ce10f9d5c2f155eda1b69b86054b4
controller/scheduler/coreos.py
https://github.com/Codaisseur/controller
true
false
true
@@ -112,7 +112,7 @@ class FleetClient(object): status = None for _ in range(60): status = subprocess.check_output( - "fleetctl.sh list-units | grep {name}-announce.service | awk '{{print $4}}'".format(**locals()), + "fleetctl.sh list-units | grep {name}-announce.service | awk '{{print $5}}'".format(**locals()), shell=True, env=env).strip('\n') if status == 'running': break
status = subprocess . check_output ( "fleetctl.sh list-units | grep {name}-announce.service | awk '{{print $4}}'" . format ( ** locals ( ) ) , shell = True , env = env ) . strip ( '\n' )
status = subprocess . check_output ( "fleetctl.sh list-units | grep {name}-announce.service | awk '{{print $5}}'" . format ( ** locals ( ) ) , shell = True , env = env ) . strip ( '\n' )
CHANGE_STRING_LITERAL
[["Update", ["string:\"fleetctl.sh list-units | grep {name}-announce.service | awk '{{print $4}}'\"", 3, 17, 3, 93], "\"fleetctl.sh list-units | grep {name}-announce.service | awk '{{print $5}}'\""]]
Codaisseur/controller@07d49ef782248dc8effaef56c5142b0d382ed141
fix(controller): watch fifth column for state fleetctl v0.3.4 introduced a fifth column, so we need to update the location of the state column.
[ { "sha": "134054d903edb49f18cc3b002696eafcdc34cf5b", "filename": "controller/scheduler/coreos.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/Codaisseur/controller/blob/07d49ef782248dc8effaef56c5142b0d382ed141/controller%2Fscheduler%2Fcoreos.py", "raw_url": "https://github.com/Codaisseur/controller/raw/07d49ef782248dc8effaef56c5142b0d382ed141/controller%2Fscheduler%2Fcoreos.py", "contents_url": "https://api.github.com/repos/Codaisseur/controller/contents/controller%2Fscheduler%2Fcoreos.py?ref=07d49ef782248dc8effaef56c5142b0d382ed141", "patch": "@@ -112,7 +112,7 @@ def _wait_for_announcer(self, name, env):\n status = None\n for _ in range(60):\n status = subprocess.check_output(\n- \"fleetctl.sh list-units | grep {name}-announce.service | awk '{{print $4}}'\".format(**locals()),\n+ \"fleetctl.sh list-units | grep {name}-announce.service | awk '{{print $5}}'\".format(**locals()),\n shell=True, env=env).strip('\\n')\n if status == 'running':\n break" } ]
CAPP-30254
58d49b1375654cc49ecfcd3dc53d4f4897f9e642
54d015231265f85d9e46b248c8654caa21e0db09
Project/Pipeline/pipe.py
https://github.com/abhig94/CAPP-30254
true
false
true
@@ -104,7 +104,7 @@ Functions dealing with the actual pipeLine ''' Remove a key from a dictionary. Used in makeDicts. ''' -def removeKey(d, ey): +def removeKey(d, key): r = dict(d) del r[key] return r
def removeKey ( d , ey ) : r = dict ( d ) del r [ key ] return r
def removeKey ( d , key ) : r = dict ( d ) del r [ key ] return r
CHANGE_IDENTIFIER_USED
[["Update", ["identifier:ey", 3, 18, 3, 20], "key"]]
abhig94/CAPP-30254@58d49b1375654cc49ecfcd3dc53d4f4897f9e642
fixed
[ { "sha": "7e524b48c361dffb97b95adb7d472fdd0e9f4f12", "filename": "Project/Pipeline/pipe.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/abhig94/CAPP-30254/blob/58d49b1375654cc49ecfcd3dc53d4f4897f9e642/Project%2FPipeline%2Fpipe.py", "raw_url": "https://github.com/abhig94/CAPP-30254/raw/58d49b1375654cc49ecfcd3dc53d4f4897f9e642/Project%2FPipeline%2Fpipe.py", "contents_url": "https://api.github.com/repos/abhig94/CAPP-30254/contents/Project%2FPipeline%2Fpipe.py?ref=58d49b1375654cc49ecfcd3dc53d4f4897f9e642", "patch": "@@ -104,7 +104,7 @@\n '''\n Remove a key from a dictionary. Used in makeDicts.\n '''\n-def removeKey(d, ey):\n+def removeKey(d, key):\n r = dict(d)\n del r[key]\n return r" } ]
CAPP-30254
231e9d17ef0c83c29ff3f884af9d9ae707598c44
7f401960cf3d7c96c63e0ead307a6e51dbaa7ff1
Project/Pipeline/pipe.py
https://github.com/abhig94/CAPP-30254
true
false
true
@@ -460,7 +460,7 @@ def clf_loop_reloaded(X,y,k,clf_list,discr_var_names, bin_nums, weights, train_s fitted = clf.fit(XTrain, yTrain, train_cross_weights) except: print("fuck me") - print params['solver'] + print(params['solver']) res[z] = {} carry_on_son = False break
print params [ 'solver' ]
print ( params [ 'solver' ] )
SINGLE_STMT
[["Insert", ["module", 0, 25, 7, 0], ["expression_statement", "N0"], 2], ["Insert", "N0", ["call", "N1"], 0], ["Insert", "N1", ["identifier:print", "T"], 0], ["Insert", "N1", ["argument_list", "N2"], 1], ["Insert", "N2", ["(:(", "T"], 0], ["Move", "N2", ["subscript", 3, 31, 3, 47], 1], ["Insert", "N2", ["):)", "T"], 2], ["Delete", ["print:print", 3, 25, 3, 30]], ["Delete", ["print_statement", 3, 25, 3, 47]]]
abhig94/CAPP-30254@231e9d17ef0c83c29ff3f884af9d9ae707598c44
fuckfix
[ { "sha": "50ae7ad93834c7a7e6b613f2eb087458c5818667", "filename": "Project/Pipeline/pipe.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/abhig94/CAPP-30254/blob/231e9d17ef0c83c29ff3f884af9d9ae707598c44/Project%2FPipeline%2Fpipe.py", "raw_url": "https://github.com/abhig94/CAPP-30254/raw/231e9d17ef0c83c29ff3f884af9d9ae707598c44/Project%2FPipeline%2Fpipe.py", "contents_url": "https://api.github.com/repos/abhig94/CAPP-30254/contents/Project%2FPipeline%2Fpipe.py?ref=231e9d17ef0c83c29ff3f884af9d9ae707598c44", "patch": "@@ -460,7 +460,7 @@ def clf_loop_reloaded(X,y,k,clf_list,discr_var_names, bin_nums, weights, train_s\n fitted = clf.fit(XTrain, yTrain, train_cross_weights)\n except:\n print(\"fuck me\")\n- print params['solver']\n+ print(params['solver'])\n res[z] = {}\n carry_on_son = False\n break" } ]
CAPP-30254
1388556e70dfc393c8fe639a2cbbd1dcc2708f85
60cdd78197ad5a9bc98d2c895b865313d1066b72
Project/Pipeline/pipe.py
https://github.com/abhig94/CAPP-30254
true
false
true
@@ -274,7 +274,7 @@ def clf_loop_revolutions(X,y,k,clf_list,discr_var_names, bin_nums, s_weights, s y_tests[indx] = yTest XTrain_discrete, train_bins = discretize(XTrain_init, discr_var_names, bin_nums) - XTrain = create_dummies(XTrain_discrete, discr_var_names) + XTrain_update = create_dummies(XTrain_discrete, discr_var_names) XTest_discrete = discretize_given_bins(XTest_init, discr_var_names, train_bins)
XTrain = create_dummies ( XTrain_discrete , discr_var_names )
XTrain_update = create_dummies ( XTrain_discrete , discr_var_names )
CHANGE_IDENTIFIER_USED
[["Update", ["identifier:XTrain", 3, 21, 3, 27], "XTrain_update"]]
abhig94/CAPP-30254@1388556e70dfc393c8fe639a2cbbd1dcc2708f85
buggy name
[ { "sha": "83e8831c4a0fcdb727b74a8eeddd36a8761e166a", "filename": "Project/Pipeline/pipe.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/abhig94/CAPP-30254/blob/1388556e70dfc393c8fe639a2cbbd1dcc2708f85/Project%2FPipeline%2Fpipe.py", "raw_url": "https://github.com/abhig94/CAPP-30254/raw/1388556e70dfc393c8fe639a2cbbd1dcc2708f85/Project%2FPipeline%2Fpipe.py", "contents_url": "https://api.github.com/repos/abhig94/CAPP-30254/contents/Project%2FPipeline%2Fpipe.py?ref=1388556e70dfc393c8fe639a2cbbd1dcc2708f85", "patch": "@@ -274,7 +274,7 @@ def clf_loop_revolutions(X,y,k,clf_list,discr_var_names, bin_nums, s_weights, s\n y_tests[indx] = yTest\n \n XTrain_discrete, train_bins = discretize(XTrain_init, discr_var_names, bin_nums)\n- XTrain = create_dummies(XTrain_discrete, discr_var_names)\n+ XTrain_update = create_dummies(XTrain_discrete, discr_var_names)\n \n XTest_discrete = discretize_given_bins(XTest_init, discr_var_names, train_bins)\n " } ]
CAPP-30254
b59d77539e65a7b7dd22cdb5db93a2e8c9c83cab
d19d96a60ced9d43b96b62ed2df57ed70fa4f5b6
Project/Pipeline/pipe.py
https://github.com/abhig94/CAPP-30254
true
false
true
@@ -246,7 +246,7 @@ def clf_loop_revolutions(X,y,k,clf_list,discr_var_names, bin_nums, s_weights, s for item in subsects: y_use = y[X[item] == 1] x_use = X[X[item] == 1] - weight_use = s_weights[X[item] == 1] + weight_use = s_weights[X[item] == 1].as_matrix() for clf_d in clf_list: print("\nIter: " + str(indexer) + "\n") param_grid = parameter_grid(clf_d)
weight_use = s_weights [ X [ item ] == 1 ]
weight_use = s_weights [ X [ item ] == 1 ] . as_matrix ( )
ADD_METHOD_CALL
[["Insert", ["assignment", 3, 9, 3, 45], ["call", "N0"], 2], ["Insert", "N0", ["attribute", "N1"], 0], ["Insert", "N0", ["argument_list", "N2"], 1], ["Move", "N1", ["subscript", 3, 22, 3, 45], 0], ["Insert", "N1", [".:.", "T"], 1], ["Insert", "N1", ["identifier:as_matrix", "T"], 2], ["Insert", "N2", ["(:(", "T"], 0], ["Insert", "N2", ["):)", "T"], 1]]
abhig94/CAPP-30254@b59d77539e65a7b7dd22cdb5db93a2e8c9c83cab
pipe fix
[ { "sha": "eb65fee4c1a5887049912c37debb835e0a558970", "filename": "Project/Pipeline/pipe.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/abhig94/CAPP-30254/blob/b59d77539e65a7b7dd22cdb5db93a2e8c9c83cab/Project%2FPipeline%2Fpipe.py", "raw_url": "https://github.com/abhig94/CAPP-30254/raw/b59d77539e65a7b7dd22cdb5db93a2e8c9c83cab/Project%2FPipeline%2Fpipe.py", "contents_url": "https://api.github.com/repos/abhig94/CAPP-30254/contents/Project%2FPipeline%2Fpipe.py?ref=b59d77539e65a7b7dd22cdb5db93a2e8c9c83cab", "patch": "@@ -246,7 +246,7 @@ def clf_loop_revolutions(X,y,k,clf_list,discr_var_names, bin_nums, s_weights, s\n for item in subsects:\n y_use = y[X[item] == 1]\n x_use = X[X[item] == 1]\n- weight_use = s_weights[X[item] == 1]\n+ weight_use = s_weights[X[item] == 1].as_matrix()\n for clf_d in clf_list:\n print(\"\\nIter: \" + str(indexer) + \"\\n\")\n param_grid = parameter_grid(clf_d)" } ]
CAPP-30254
517db3489469caff2878e3f4f69be085dc4a2694
137695fdc61e0cf837c4ae633c2b30b67a63ef05
Project/Pipeline/true_run.py
https://github.com/abhig94/CAPP-30254
true
false
false
@@ -25,7 +25,7 @@ modelDT = {'model': DecisionTreeClassifier, 'criterion': ['gini', 'entropy'], ' 'max_features': ['sqrt','log2'],'min_samples_split': [2, 10, 50]} modelRF = {'model': RandomForestClassifier, 'n_estimators': [25, 100], 'criterion': ['gini', 'entropy'], 'max_features': ['sqrt', 'log2'], 'max_depth': depth, 'min_samples_split': [2,10, 50], #min sample split also had 2, 5, 10 - 'bootstrap': [True], 'n_jobs':[cores], 'warm_start':True} #bootstrap also had False + 'bootstrap': [True], 'n_jobs':[cores], 'warm_start':[True]} #bootstrap also had False modelAB = {'model': AdaBoostClassifier, 'algorithm': ['SAMME', 'SAMME.R'], 'n_estimators': [25, 50]}#, 200]} modelET = {'model': ExtraTreesClassifier, 'n_estimators': [25, 100], 'criterion': ['gini', 'entropy'], 'max_features': ['sqrt', 'log2'], 'max_depth': depth,
modelRF = { 'model' : RandomForestClassifier , 'n_estimators' : [ 25 , 100 ] , 'criterion' : [ 'gini' , 'entropy' ] , 'max_features' : [ 'sqrt' , 'log2' ] , 'max_depth' : depth , 'min_samples_split' : [ 2 , 10 , 50 ] , 'bootstrap' : [ True ] , 'n_jobs' : [ cores ] , 'warm_start' : True }
modelRF = { 'model' : RandomForestClassifier , 'n_estimators' : [ 25 , 100 ] , 'criterion' : [ 'gini' , 'entropy' ] , 'max_features' : [ 'sqrt' , 'log2' ] , 'max_depth' : depth , 'min_samples_split' : [ 2 , 10 , 50 ] , 'bootstrap' : [ True ] , 'n_jobs' : [ cores ] , 'warm_start' : [ True ] }
SINGLE_STMT
[["Insert", ["pair", 3, 52, 3, 69], ["list", "N0"], 2], ["Insert", "N0", ["[:[", "T"], 0], ["Move", "N0", ["true:True", 3, 65, 3, 69], 1], ["Insert", "N0", ["]:]", "T"], 2]]
abhig94/CAPP-30254@517db3489469caff2878e3f4f69be085dc4a2694
abhiiiii your bug
[ { "sha": "8a575ee9786242bac5f7522a4f3fa30976fede10", "filename": "Project/Pipeline/true_run.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/abhig94/CAPP-30254/blob/517db3489469caff2878e3f4f69be085dc4a2694/Project%2FPipeline%2Ftrue_run.py", "raw_url": "https://github.com/abhig94/CAPP-30254/raw/517db3489469caff2878e3f4f69be085dc4a2694/Project%2FPipeline%2Ftrue_run.py", "contents_url": "https://api.github.com/repos/abhig94/CAPP-30254/contents/Project%2FPipeline%2Ftrue_run.py?ref=517db3489469caff2878e3f4f69be085dc4a2694", "patch": "@@ -25,7 +25,7 @@\n 'max_features': ['sqrt','log2'],'min_samples_split': [2, 10, 50]}\n modelRF = {'model': RandomForestClassifier, 'n_estimators': [25, 100], 'criterion': ['gini', 'entropy'],\n 'max_features': ['sqrt', 'log2'], 'max_depth': depth, 'min_samples_split': [2,10, 50], #min sample split also had 2, 5, 10\n- 'bootstrap': [True], 'n_jobs':[cores], 'warm_start':True} #bootstrap also had False\n+ 'bootstrap': [True], 'n_jobs':[cores], 'warm_start':[True]} #bootstrap also had False\n modelAB = {'model': AdaBoostClassifier, 'algorithm': ['SAMME', 'SAMME.R'], 'n_estimators': [25, 50]}#, 200]}\n modelET = {'model': ExtraTreesClassifier, 'n_estimators': [25, 100], 'criterion': ['gini', 'entropy'],\n 'max_features': ['sqrt', 'log2'], 'max_depth': depth," } ]
pefile
19e33592456543fca5360615dda6659600b26ec8
51e79b3d1241da788217d45df371133118356b31
pefile.py
https://github.com/rmusser01/pefile
true
false
true
@@ -850,7 +850,7 @@ class SectionStructure(Structure): # bss and other sections containing only uninitialized data must have 0 # and do not take space in the file return False - return self.PointerToRawData <= offset < self.VirtualAddress + self.SizeOfRawData + return self.PointerToRawData <= offset < self.PointerToRawData + self.SizeOfRawData def contains_rva(self, rva):
return self . PointerToRawData <= offset < self . VirtualAddress + self . SizeOfRawData
return self . PointerToRawData <= offset < self . PointerToRawData + self . SizeOfRawData
CHANGE_ATTRIBUTE_USED
[["Update", ["identifier:VirtualAddress", 3, 55, 3, 69], "PointerToRawData"]]
rmusser01/pefile@19e33592456543fca5360615dda6659600b26ec8
-Fixed bug in contains_offset(). The end of the section's data on disk was being calculated as VirtualAddress + SizeOfRawData instead of the correct: PointerToRawData + SizeOfRawData
[ { "sha": "01dcfc2d4e48141a806c1a022b0e4c1a1d4afeb7", "filename": "pefile.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/rmusser01/pefile/blob/19e33592456543fca5360615dda6659600b26ec8/pefile.py", "raw_url": "https://github.com/rmusser01/pefile/raw/19e33592456543fca5360615dda6659600b26ec8/pefile.py", "contents_url": "https://api.github.com/repos/rmusser01/pefile/contents/pefile.py?ref=19e33592456543fca5360615dda6659600b26ec8", "patch": "@@ -850,7 +850,7 @@ def contains_offset(self, offset):\n # bss and other sections containing only uninitialized data must have 0\n # and do not take space in the file\n return False\n- return self.PointerToRawData <= offset < self.VirtualAddress + self.SizeOfRawData\n+ return self.PointerToRawData <= offset < self.PointerToRawData + self.SizeOfRawData\n \n \n def contains_rva(self, rva):" } ]
pefile
5e383f0e81d666a3dc7cc263dce7370728fe1c29
6a5a03c5748bb98340676c55315e8aacee9a4a15
pefile.py
https://github.com/rmusser01/pefile
true
false
true
@@ -2181,7 +2181,7 @@ class PE: u'\0' ] * remainder*2 if is_bytearray_available(): - new_file_data = file_data + new_file_data = ''.join( chr(c) for c in file_data ) else: new_file_data = ''.join( [ chr(ord(c)) for c in file_data] )
new_file_data = file_data
new_file_data = '' . join ( chr ( c ) for c in file_data )
SINGLE_STMT
[["Insert", ["assignment", 3, 13, 3, 38], ["call", "N0"], 2], ["Insert", "N0", ["attribute", "N1"], 0], ["Insert", "N0", ["generator_expression", "N2"], 1], ["Insert", "N1", ["string:''", "T"], 0], ["Insert", "N1", [".:.", "T"], 1], ["Insert", "N1", ["identifier:join", "T"], 2], ["Insert", "N2", ["(:(", "T"], 0], ["Insert", "N2", ["call", "N3"], 1], ["Insert", "N2", ["for_in_clause", "N4"], 2], ["Insert", "N2", ["):)", "T"], 3], ["Insert", "N3", ["identifier:chr", "T"], 0], ["Insert", "N3", ["argument_list", "N5"], 1], ["Insert", "N4", ["for:for", "T"], 0], ["Insert", "N4", ["identifier:c", "T"], 1], ["Insert", "N4", ["in:in", "T"], 2], ["Move", "N4", ["identifier:file_data", 3, 29, 3, 38], 3], ["Insert", "N5", ["(:(", "T"], 0], ["Insert", "N5", ["identifier:c", "T"], 1], ["Insert", "N5", ["):)", "T"], 2]]
rmusser01/pefile@5e383f0e81d666a3dc7cc263dce7370728fe1c29
-Minor tweaks to fix the regression tests.
[ { "sha": "f3d53ce0e5c531b2894984eeaae0f11d31def16a", "filename": "pefile.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/rmusser01/pefile/blob/5e383f0e81d666a3dc7cc263dce7370728fe1c29/pefile.py", "raw_url": "https://github.com/rmusser01/pefile/raw/5e383f0e81d666a3dc7cc263dce7370728fe1c29/pefile.py", "contents_url": "https://api.github.com/repos/rmusser01/pefile/contents/pefile.py?ref=5e383f0e81d666a3dc7cc263dce7370728fe1c29", "patch": "@@ -2181,7 +2181,7 @@ def write(self, filename=None):\n u'\\0' ] * remainder*2\n \n if is_bytearray_available():\n- new_file_data = file_data\n+ new_file_data = ''.join( chr(c) for c in file_data )\n else:\n new_file_data = ''.join( [ chr(ord(c)) for c in file_data] )\n " } ]
osiris
3ede6f2c0d6921e2eb2ac8205c5debff5ee96111
c2665f1f3c90a67e12377cc64f867939d3c08dfa
coeus-attendence/main.py
https://github.com/debuggerman/osiris
true
false
false
@@ -19,7 +19,7 @@ from controllers import MainHandler, LeaveRequestHandler,EmployeeHandler app = webapp2.WSGIApplication( [ ('/', MainHandler), - ('/request_leave.do', LeaveRequestHandler) + ('/request_leave.do', LeaveRequestHandler), ('/user',EmployeeHandler) ], debug=True)
app = webapp2 . WSGIApplication ( [ ( '/' , MainHandler ) , ( '/request_leave.do' , LeaveRequestHandler ) ( '/user' , EmployeeHandler ) ] , debug = True )
app = webapp2 . WSGIApplication ( [ ( '/' , MainHandler ) , ( '/request_leave.do' , LeaveRequestHandler ) , ( '/user' , EmployeeHandler ) ] , debug = True )
SINGLE_STMT
[["Move", ["list", 1, 32, 5, 3], ["tuple", 3, 2, 3, 44], 3], ["Insert", ["list", 1, 32, 5, 3], [",:,", "T"], 4], ["Insert", ["list", 1, 32, 5, 3], ["tuple", "N0"], 5], ["Move", "N0", ["(:(", 4, 2, 4, 3], 0], ["Move", "N0", ["string:'/user'", 4, 3, 4, 10], 1], ["Move", "N0", [",:,", 4, 10, 4, 11], 2], ["Move", "N0", ["identifier:EmployeeHandler", 4, 11, 4, 26], 3], ["Move", "N0", ["):)", 4, 26, 4, 27], 4], ["Delete", ["argument_list", 4, 2, 4, 27]], ["Delete", ["call", 3, 2, 4, 27]]]
debuggerman/osiris@3ede6f2c0d6921e2eb2ac8205c5debff5ee96111
handler url error fixed
[ { "sha": "28e5682cda0fd0cf142bcc2cdd65c57455800c75", "filename": "coeus-attendence/main.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/debuggerman/osiris/blob/3ede6f2c0d6921e2eb2ac8205c5debff5ee96111/coeus-attendence%2Fmain.py", "raw_url": "https://github.com/debuggerman/osiris/raw/3ede6f2c0d6921e2eb2ac8205c5debff5ee96111/coeus-attendence%2Fmain.py", "contents_url": "https://api.github.com/repos/debuggerman/osiris/contents/coeus-attendence%2Fmain.py?ref=3ede6f2c0d6921e2eb2ac8205c5debff5ee96111", "patch": "@@ -19,7 +19,7 @@\n \n app = webapp2.WSGIApplication( [ \n \t('/', MainHandler),\n-\t('/request_leave.do', LeaveRequestHandler)\n+\t('/request_leave.do', LeaveRequestHandler),\n \t('/user',EmployeeHandler)\n ], debug=True)\n " } ]
osiris
7badb7a5da7607d10b6c785d527907c16878a399
a386a0f5f5896efdce60a4d6a9efb9686e55bffe
coeus-attendence/main.py
https://github.com/debuggerman/osiris
true
false
false
@@ -21,6 +21,6 @@ app = webapp2.WSGIApplication( [ webapp2.Route(r'/', handler=MainHandler, name='home'), webapp2.Route(r'/respond_leave.do/<leave_id:\d+>/<method:\w+>', handler=LeaveResponseHandler, name='leaveResponse'), webapp2.Route('/employee_register.do', handler=RegisterHandler, name="register"), - webapp2.Route('/request_leave.do?', handler=LeaveRequestHandler, nae="requestLeave"), + webapp2.Route('/request_leave.do?', handler=LeaveRequestHandler, name="requestLeave"), ], debug=True)
webapp2 . Route ( '/request_leave.do?' , handler = LeaveRequestHandler , nae = "requestLeave" ) ,
webapp2 . Route ( '/request_leave.do?' , handler = LeaveRequestHandler , name = "requestLeave" ) ,
CHANGE_KEYWORD_ARGUMENT_USED
[["Update", ["identifier:nae", 3, 67, 3, 70], "name"]]
debuggerman/osiris@7badb7a5da7607d10b6c785d527907c16878a399
updated error
[ { "sha": "2333d3c2119c2ea4c924d42963fd0d698ce1f223", "filename": "coeus-attendence/main.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/debuggerman/osiris/blob/7badb7a5da7607d10b6c785d527907c16878a399/coeus-attendence%2Fmain.py", "raw_url": "https://github.com/debuggerman/osiris/raw/7badb7a5da7607d10b6c785d527907c16878a399/coeus-attendence%2Fmain.py", "contents_url": "https://api.github.com/repos/debuggerman/osiris/contents/coeus-attendence%2Fmain.py?ref=7badb7a5da7607d10b6c785d527907c16878a399", "patch": "@@ -21,6 +21,6 @@\n \twebapp2.Route(r'/', handler=MainHandler, name='home'),\n \twebapp2.Route(r'/respond_leave.do/<leave_id:\\d+>/<method:\\w+>', handler=LeaveResponseHandler, name='leaveResponse'),\n \twebapp2.Route('/employee_register.do', handler=RegisterHandler, name=\"register\"),\n-\twebapp2.Route('/request_leave.do?', handler=LeaveRequestHandler, nae=\"requestLeave\"),\n+\twebapp2.Route('/request_leave.do?', handler=LeaveRequestHandler, name=\"requestLeave\"),\n ], debug=True)\n " } ]
osiris
ff7b761590d3d403f86821411fe5d44eb6489df4
44090f3905eb4f39ee3450476a9a57cd48f81319
coeus-attendence/controllers/RegisterHandler.py
https://github.com/debuggerman/osiris
true
false
true
@@ -24,7 +24,7 @@ class RegisterHandler(webapp2.RequestHandler): def post(self): employee = Employee() employee.name = (cgi.escape(self.request.get('name'))) - employee.email = (cgi.escape(self.request.get('email'))) + employee.email = user.email() employee.designation = (cgi.escape(self.request.get('designation'))) employee.phone = [(cgi.escape(self.request.get('phone1'))),(cgi.escape(self.request.get('phone2')))] employee.address = [(cgi.escape(self.request.get('address1'))),(cgi.escape(self.request.get('address2')))]
employee . email = ( cgi . escape ( self . request . get ( 'email' ) ) )
employee . email = user . email ( )
SINGLE_STMT
[["Move", ["assignment", 3, 3, 3, 59], ["call", 3, 32, 3, 57], 2], ["Move", ["call", 3, 32, 3, 57], ["attribute", 3, 32, 3, 44], 0], ["Update", ["identifier:self", 3, 32, 3, 36], "user"], ["Update", ["identifier:request", 3, 37, 3, 44], "email"], ["Delete", ["(:(", 3, 20, 3, 21]], ["Delete", ["identifier:cgi", 3, 21, 3, 24]], ["Delete", [".:.", 3, 24, 3, 25]], ["Delete", ["identifier:escape", 3, 25, 3, 31]], ["Delete", ["attribute", 3, 21, 3, 31]], ["Delete", ["(:(", 3, 31, 3, 32]], ["Delete", [".:.", 3, 44, 3, 45]], ["Delete", ["identifier:get", 3, 45, 3, 48]], ["Delete", ["attribute", 3, 32, 3, 48]], ["Delete", ["string:'email'", 3, 49, 3, 56]], ["Delete", ["):)", 3, 57, 3, 58]], ["Delete", ["argument_list", 3, 31, 3, 58]], ["Delete", ["call", 3, 21, 3, 58]], ["Delete", ["):)", 3, 58, 3, 59]], ["Delete", ["parenthesized_expression", 3, 20, 3, 59]]]
debuggerman/osiris@ff7b761590d3d403f86821411fe5d44eb6489df4
email error fixed
[ { "sha": "113d594238f8a95038a1a1870d1c33d7e062de47", "filename": "coeus-attendence/controllers/RegisterHandler.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/debuggerman/osiris/blob/ff7b761590d3d403f86821411fe5d44eb6489df4/coeus-attendence%2Fcontrollers%2FRegisterHandler.py", "raw_url": "https://github.com/debuggerman/osiris/raw/ff7b761590d3d403f86821411fe5d44eb6489df4/coeus-attendence%2Fcontrollers%2FRegisterHandler.py", "contents_url": "https://api.github.com/repos/debuggerman/osiris/contents/coeus-attendence%2Fcontrollers%2FRegisterHandler.py?ref=ff7b761590d3d403f86821411fe5d44eb6489df4", "patch": "@@ -24,7 +24,7 @@ def get(self):\n \tdef post(self):\n \t\temployee = Employee()\n \t\temployee.name = (cgi.escape(self.request.get('name')))\n-\t\temployee.email = (cgi.escape(self.request.get('email')))\n+\t\temployee.email = user.email()\n \t\temployee.designation = (cgi.escape(self.request.get('designation')))\n \t\temployee.phone = [(cgi.escape(self.request.get('phone1'))),(cgi.escape(self.request.get('phone2')))]\n \t\temployee.address = [(cgi.escape(self.request.get('address1'))),(cgi.escape(self.request.get('address2')))]" } ]
osiris
6e3d4063674d5054f5f15611c5a7386467979dfc
167cb36d69d1fba5b881a49b0e2fad1a3e864b8d
coeus-attendence/controllers/MainHandler.py
https://github.com/debuggerman/osiris
true
false
true
@@ -29,6 +29,6 @@ class MainHandler(webapp2.RequestHandler): self.response.write(template.render(path,{'employee_name':user.nickname(),'employee_email':user.email()})) else: path = os.path.join(os.path.dirname(__file__),'../views', 'index.html') - self.response.write(template.render(path,{'employee_name':user.nickname(),'employee_email':user.email()})) + self.response.write(template.render(path,{'username':user.nickname(),'employee_email':user.email()}))
self . response . write ( template . render ( path , { 'employee_name' : user . nickname ( ) , 'employee_email' : user . email ( ) } ) )
self . response . write ( template . render ( path , { 'username' : user . nickname ( ) , 'employee_email' : user . email ( ) } ) )
CHANGE_STRING_LITERAL
[["Update", ["string:'employee_name'", 3, 59, 3, 74], "'username'"]]
debuggerman/osiris@6e3d4063674d5054f5f15611c5a7386467979dfc
fixed home page
[ { "sha": "cbee40473cc472113ab4cdd30d86ef6b08c64242", "filename": "coeus-attendence/controllers/MainHandler.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/debuggerman/osiris/blob/6e3d4063674d5054f5f15611c5a7386467979dfc/coeus-attendence%2Fcontrollers%2FMainHandler.py", "raw_url": "https://github.com/debuggerman/osiris/raw/6e3d4063674d5054f5f15611c5a7386467979dfc/coeus-attendence%2Fcontrollers%2FMainHandler.py", "contents_url": "https://api.github.com/repos/debuggerman/osiris/contents/coeus-attendence%2Fcontrollers%2FMainHandler.py?ref=6e3d4063674d5054f5f15611c5a7386467979dfc", "patch": "@@ -29,6 +29,6 @@ def get(self):\n self.response.write(template.render(path,{'employee_name':user.nickname(),'employee_email':user.email()}))\n else:\n path = os.path.join(os.path.dirname(__file__),'../views', 'index.html')\n- self.response.write(template.render(path,{'employee_name':user.nickname(),'employee_email':user.email()})) \n+ self.response.write(template.render(path,{'username':user.nickname(),'employee_email':user.email()})) \n \n " } ]
html_table_to_excel
0f7e674c21911a548ffae1971d3fa79f469e3005
a9fb4fdf26bac25101a2e34a15e98069596276c0
main.py
https://github.com/m9psy/html_table_to_excel
true
false
true
@@ -16,7 +16,7 @@ def html_to_workbook(html_text): - tables = bs(html_text).findAll('table') + tables = BeautifulSoup(html_text).findAll('table') wb = openpyxl.Workbook() for table_index in range(len(tables)): if table_index == 0:
tables = bs ( html_text ) . findAll ( 'table' )
tables = BeautifulSoup ( html_text ) . findAll ( 'table' )
WRONG_FUNCTION_NAME
[["Update", ["identifier:bs", 0, 14, 0, 16], "BeautifulSoup"]]
m9psy/html_table_to_excel@0f7e674c21911a548ffae1971d3fa79f469e3005
Fix bug.
[ { "sha": "683a66f828302594ed20f476e32df6e6609fd16c", "filename": "main.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/m9psy/html_table_to_excel/blob/0f7e674c21911a548ffae1971d3fa79f469e3005/main.py", "raw_url": "https://github.com/m9psy/html_table_to_excel/raw/0f7e674c21911a548ffae1971d3fa79f469e3005/main.py", "contents_url": "https://api.github.com/repos/m9psy/html_table_to_excel/contents/main.py?ref=0f7e674c21911a548ffae1971d3fa79f469e3005", "patch": "@@ -16,7 +16,7 @@ def html_to_workbook(html_text):\n :param html_text: Raw html table.\n :return: openpyxl.Workbook object\n \"\"\"\n- tables = bs(html_text).findAll('table')\n+ tables = BeautifulSoup(html_text).findAll('table')\n wb = openpyxl.Workbook()\n for table_index in range(len(tables)):\n if table_index == 0:" } ]
controller
d3207bab692f033a2f0382daa59bfcb651f0526f
e3450e4eb09bcf8fa40c6c310763347a7d9f27db
rootfs/api/models/release.py
https://github.com/Codaisseur/controller
true
false
true
@@ -153,7 +153,7 @@ class Release(UuidAuditedModel): try: labels = { 'app': self.app.id, - 'version': self.version + 'version': 'v{}'.format(self.version) } controllers = self._scheduler.get_rcs(self.app.id, labels=labels) for controller in controllers.json()['items']:
labels = { 'app' : self . app . id , 'version' : self . version }
labels = { 'app' : self . app . id , 'version' : 'v{}' . format ( self . version ) }
ADD_FUNCTION_AROUND_EXPRESSION
[["Insert", ["pair", 3, 17, 3, 40], ["call", "N0"], 2], ["Insert", "N0", ["attribute", "N1"], 0], ["Insert", "N0", ["argument_list", "N2"], 1], ["Insert", "N1", ["string:'v{}'", "T"], 0], ["Insert", "N1", [".:.", "T"], 1], ["Insert", "N1", ["identifier:format", "T"], 2], ["Insert", "N2", ["(:(", "T"], 0], ["Move", "N2", ["attribute", 3, 28, 3, 40], 1], ["Insert", "N2", ["):)", "T"], 2]]
Codaisseur/controller@d3207bab692f033a2f0382daa59bfcb651f0526f
fix(release): versions are stored with v appended in the RC version
[ { "sha": "199a86dab4d53359acc2b9e496a78b660e619780", "filename": "rootfs/api/models/release.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/Codaisseur/controller/blob/d3207bab692f033a2f0382daa59bfcb651f0526f/rootfs%2Fapi%2Fmodels%2Frelease.py", "raw_url": "https://github.com/Codaisseur/controller/raw/d3207bab692f033a2f0382daa59bfcb651f0526f/rootfs%2Fapi%2Fmodels%2Frelease.py", "contents_url": "https://api.github.com/repos/Codaisseur/controller/contents/rootfs%2Fapi%2Fmodels%2Frelease.py?ref=d3207bab692f033a2f0382daa59bfcb651f0526f", "patch": "@@ -153,7 +153,7 @@ def delete(self, *args, **kwargs):\n try:\n labels = {\n 'app': self.app.id,\n- 'version': self.version\n+ 'version': 'v{}'.format(self.version)\n }\n controllers = self._scheduler.get_rcs(self.app.id, labels=labels)\n for controller in controllers.json()['items']:" } ]
controller
67cb296f9abaa901c9961217075d9764fe802c6b
3c07d6c35c3509bf71c548328911ed85e0ef69b0
rootfs/api/models/release.py
https://github.com/Codaisseur/controller
true
false
true
@@ -155,7 +155,7 @@ class Release(UuidAuditedModel): 'app': self.app.id, 'version': 'v{}'.format(self.version) } - controllers = self._scheduler.get_rcs(self.app.id, labels=labels) + controllers = self._scheduler._get_rcs(self.app.id, labels=labels) for controller in controllers.json()['items']: self._scheduler._delete_rc(self.app.id, controller['metadata']['name']) except KubeHTTPException as e:
controllers = self . _scheduler . get_rcs ( self . app . id , labels = labels )
controllers = self . _scheduler . _get_rcs ( self . app . id , labels = labels )
WRONG_FUNCTION_NAME
[["Update", ["identifier:get_rcs", 3, 43, 3, 50], "_get_rcs"]]
Codaisseur/controller@67cb296f9abaa901c9961217075d9764fe802c6b
fix(release): fix function call to scheduler, missing _
[ { "sha": "fb97b685170c74d3646332ee2cf521eef35316c4", "filename": "rootfs/api/models/release.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/Codaisseur/controller/blob/67cb296f9abaa901c9961217075d9764fe802c6b/rootfs%2Fapi%2Fmodels%2Frelease.py", "raw_url": "https://github.com/Codaisseur/controller/raw/67cb296f9abaa901c9961217075d9764fe802c6b/rootfs%2Fapi%2Fmodels%2Frelease.py", "contents_url": "https://api.github.com/repos/Codaisseur/controller/contents/rootfs%2Fapi%2Fmodels%2Frelease.py?ref=67cb296f9abaa901c9961217075d9764fe802c6b", "patch": "@@ -155,7 +155,7 @@ def delete(self, *args, **kwargs):\n 'app': self.app.id,\n 'version': 'v{}'.format(self.version)\n }\n- controllers = self._scheduler.get_rcs(self.app.id, labels=labels)\n+ controllers = self._scheduler._get_rcs(self.app.id, labels=labels)\n for controller in controllers.json()['items']:\n self._scheduler._delete_rc(self.app.id, controller['metadata']['name'])\n except KubeHTTPException as e:" } ]
facebook-python-ads-sdk
f737fec11ddd420057520295b8a3fba430003fea
2845b95d2d7afdf2ae66739dfe8955bfec57b5f3
facebookads/objects.py
https://github.com/haocafes/facebook-python-ads-sdk
true
false
false
@@ -1283,7 +1283,7 @@ class AdCampaign(CanValidate, HasStatus, HasObjective, HasAdLabels, CanArchive, class BuyingType(object): auction = 'AUCTION' fixed_cpm = 'FIXED_CPM' - mixed = 'MIXED' + reserved = 'RESERVED' @classmethod def get_endpoint(cls):
mixed = 'MIXED'
reserved = 'RESERVED'
SINGLE_STMT
[["Update", ["identifier:mixed", 3, 9, 3, 14], "reserved"], ["Update", ["string:'MIXED'", 3, 17, 3, 24], "'RESERVED'"]]
haocafes/facebook-python-ads-sdk@f737fec11ddd420057520295b8a3fba430003fea
Adding 'RESERVED' buying type for ad campaigns on Python SDK Summary: Marketing API Python SDK campaign BuyingType misses "RESERVED" which needs to be added and contains 'MIXED' which needs to be removed Test Plan: All Unit tests and Integration tests pass- ritu-mbp:python-ads-sdk ritu$ python3 -m facebookads.test.integration .................... ---------------------------------------------------------------------- Ran 20 tests in 16.283s OK ritu-mbp:python-ads-sdk ritu$ python3 -m facebookads.test.unit ............................ ---------------------------------------------------------------------- Ran 28 tests in 0.015s OK
[ { "sha": "ed050fa5f39dc91b01ad51597984181487754ca3", "filename": "facebookads/objects.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/haocafes/facebook-python-ads-sdk/blob/f737fec11ddd420057520295b8a3fba430003fea/facebookads%2Fobjects.py", "raw_url": "https://github.com/haocafes/facebook-python-ads-sdk/raw/f737fec11ddd420057520295b8a3fba430003fea/facebookads%2Fobjects.py", "contents_url": "https://api.github.com/repos/haocafes/facebook-python-ads-sdk/contents/facebookads%2Fobjects.py?ref=f737fec11ddd420057520295b8a3fba430003fea", "patch": "@@ -1283,7 +1283,7 @@ class Field(object):\n class BuyingType(object):\n auction = 'AUCTION'\n fixed_cpm = 'FIXED_CPM'\n- mixed = 'MIXED'\n+ reserved = 'RESERVED'\n \n @classmethod\n def get_endpoint(cls):" } ]
facebook-python-ads-sdk
f0164e3f28e720c2a68e98cbd27f5accdbcd4854
124ccdb7c6e480e1c1090a78ceedeea74bf82625
facebookads/objects.py
https://github.com/haocafes/facebook-python-ads-sdk
true
false
false
@@ -955,7 +955,7 @@ class AdAccount(CannotCreate, CannotDelete, HasAdLabels, AbstractCrudObject): partner = 'partner' spend_cap = 'spend_cap' tax_id_status = 'tax_id_status' - timezon_id = 'timezone_id' + timezone_id = 'timezone_id' timezone_name = 'timezone_name' timezone_offset_hours_utc = 'timezone_offset_hours_utc' tos_accepted = 'tos_accepted'
timezon_id = 'timezone_id'
timezone_id = 'timezone_id'
CHANGE_IDENTIFIER_USED
[["Update", ["identifier:timezon_id", 3, 9, 3, 19], "timezone_id"]]
haocafes/facebook-python-ads-sdk@f0164e3f28e720c2a68e98cbd27f5accdbcd4854
Correcting spell error on AdAccount object Summary: Correcting the spell check error on timezone_id field in ad account object Test Plan: Unit tests and Integration tests, all pass: ritu-mbp:python-ads-sdk ritu$ python -m facebookads.test.unit ............................ ---------------------------------------------------------------------- Ran 28 tests in 0.017s OK ritu-mbp:python-ads-sdk ritu$ python -m facebookads.test.integration .................... ---------------------------------------------------------------------- Ran 20 tests in 17.914s
[ { "sha": "ee64fd50e190f0484df7802f9b402650077aa753", "filename": "facebookads/objects.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/haocafes/facebook-python-ads-sdk/blob/f0164e3f28e720c2a68e98cbd27f5accdbcd4854/facebookads%2Fobjects.py", "raw_url": "https://github.com/haocafes/facebook-python-ads-sdk/raw/f0164e3f28e720c2a68e98cbd27f5accdbcd4854/facebookads%2Fobjects.py", "contents_url": "https://api.github.com/repos/haocafes/facebook-python-ads-sdk/contents/facebookads%2Fobjects.py?ref=f0164e3f28e720c2a68e98cbd27f5accdbcd4854", "patch": "@@ -955,7 +955,7 @@ class Field(object):\n partner = 'partner'\n spend_cap = 'spend_cap'\n tax_id_status = 'tax_id_status'\n- timezon_id = 'timezone_id'\n+ timezone_id = 'timezone_id'\n timezone_name = 'timezone_name'\n timezone_offset_hours_utc = 'timezone_offset_hours_utc'\n tos_accepted = 'tos_accepted'" } ]
facebook-python-ads-sdk
098bcfb03d40fe6fad40fab86328a57851c901cd
bd20bc4467cb1d55119328a12a4a7b58798684c4
facebookads/objects.py
https://github.com/haocafes/facebook-python-ads-sdk
true
false
false
@@ -2699,7 +2699,7 @@ class Product(AbstractCrudObject): return 'products' -class ProductAudience(CannotUpdate, CannotDelete, AbstractCrudObject): +class ProductAudience(AbstractCrudObject): class Field(object): description = 'description'
class ProductAudience ( CannotUpdate , CannotDelete , AbstractCrudObject ) : class Field ( object ) : description = 'description'
class ProductAudience ( AbstractCrudObject ) : class Field ( object ) : description = 'description'
SINGLE_STMT
[["Delete", ["identifier:CannotUpdate", 3, 23, 3, 35]], ["Delete", [",:,", 3, 35, 3, 36]], ["Delete", ["identifier:CannotDelete", 3, 37, 3, 49]], ["Delete", [",:,", 3, 49, 3, 50]]]
haocafes/facebook-python-ads-sdk@098bcfb03d40fe6fad40fab86328a57851c901cd
Product audiences have full CRUD capability Summary: Fix wrong declaration of CRUD capabilities for product audiences Test Plan: remote_delete(), remote_update()
[ { "sha": "c1f3a4c46e9e4370e3f26128061c5ccf28b2a764", "filename": "facebookads/objects.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/haocafes/facebook-python-ads-sdk/blob/098bcfb03d40fe6fad40fab86328a57851c901cd/facebookads%2Fobjects.py", "raw_url": "https://github.com/haocafes/facebook-python-ads-sdk/raw/098bcfb03d40fe6fad40fab86328a57851c901cd/facebookads%2Fobjects.py", "contents_url": "https://api.github.com/repos/haocafes/facebook-python-ads-sdk/contents/facebookads%2Fobjects.py?ref=098bcfb03d40fe6fad40fab86328a57851c901cd", "patch": "@@ -2699,7 +2699,7 @@ def get_endpoint(cls):\n return 'products'\n \n \n-class ProductAudience(CannotUpdate, CannotDelete, AbstractCrudObject):\n+class ProductAudience(AbstractCrudObject):\n \n class Field(object):\n description = 'description'" } ]
facebook-python-ads-sdk
31f077626c7979b1a3979e86e87148fb5e7b867d
d4f113e746fee6d5c0967a44685c755cc745c799
facebookads/objects.py
https://github.com/haocafes/facebook-python-ads-sdk
true
false
true
@@ -138,7 +138,7 @@ class EdgeIterator(object): # Load next page at end. # If the queue counter equals the length of the queue and # If load_next_page returns False, raise StopIteration exception - if (self._count == len(self._queue) and not self.load_next_page()) or (len(self._queue) == 0): + if (self._count > len(self._queue) and not self.load_next_page()) or (len(self._queue) == 0): raise StopIteration() return self._queue[self._count-1]
if ( self . _count == len ( self . _queue ) and not self . load_next_page ( ) ) or ( len ( self . _queue ) == 0 ) : raise StopIteration ( )
if ( self . _count > len ( self . _queue ) and not self . load_next_page ( ) ) or ( len ( self . _queue ) == 0 ) : raise StopIteration ( )
CHANGE_BINARY_OPERATOR
[["Insert", ["comparison_operator", 3, 13, 3, 44], [">:>", "T"], 1], ["Delete", ["==:==", 3, 25, 3, 27]]]
haocafes/facebook-python-ads-sdk@31f077626c7979b1a3979e86e87148fb5e7b867d
Fixing edge iterator bug to ensure that last element is returned
[ { "sha": "0528dad6b22697362273dc40410233539344d931", "filename": "facebookads/objects.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/haocafes/facebook-python-ads-sdk/blob/31f077626c7979b1a3979e86e87148fb5e7b867d/facebookads%2Fobjects.py", "raw_url": "https://github.com/haocafes/facebook-python-ads-sdk/raw/31f077626c7979b1a3979e86e87148fb5e7b867d/facebookads%2Fobjects.py", "contents_url": "https://api.github.com/repos/haocafes/facebook-python-ads-sdk/contents/facebookads%2Fobjects.py?ref=31f077626c7979b1a3979e86e87148fb5e7b867d", "patch": "@@ -138,7 +138,7 @@ def __next__(self):\n # Load next page at end.\n # If the queue counter equals the length of the queue and\n # If load_next_page returns False, raise StopIteration exception\n- if (self._count == len(self._queue) and not self.load_next_page()) or (len(self._queue) == 0):\n+ if (self._count > len(self._queue) and not self.load_next_page()) or (len(self._queue) == 0):\n raise StopIteration()\n \n return self._queue[self._count-1]" } ]
scikit-learn
e74b59aeadc00e3964c1acccea74887e135c68be
840443b5d6f9fee728edf08e5dd7256ab6133442
sklearn/linear_model/coordinate_descent.py
https://github.com/paulha/scikit-learn
true
false
false
@@ -1863,7 +1863,7 @@ class MultiTaskElasticNetCV(LinearModelCV, RegressorMixin): i.e. the sum of norm of each row. - Read more in the :ref:`User Guide <multi_task_lasso>`. + Read more in the :ref:`User Guide <multi_task_elastic_net>`. Parameters ----------
more in the : ref : `User Guide <multi_task_lasso>` . Parameters
more in the : ref : `User Guide <multi_task_elastic_net>` . Parameters
CHANGE_STRING_LITERAL
[["Update", ["string:`User Guide <multi_task_lasso>`", 3, 27, 3, 58], "`User Guide <multi_task_elastic_net>`"]]
paulha/scikit-learn@e74b59aeadc00e3964c1acccea74887e135c68be
DOC Fix user guide link in MultiTaskElasticNetCV docstring (#8880)
[ { "sha": "2a38eb65831f6f80940f41cd8329a0913c5c8d4d", "filename": "sklearn/linear_model/coordinate_descent.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/paulha/scikit-learn/blob/e74b59aeadc00e3964c1acccea74887e135c68be/sklearn%2Flinear_model%2Fcoordinate_descent.py", "raw_url": "https://github.com/paulha/scikit-learn/raw/e74b59aeadc00e3964c1acccea74887e135c68be/sklearn%2Flinear_model%2Fcoordinate_descent.py", "contents_url": "https://api.github.com/repos/paulha/scikit-learn/contents/sklearn%2Flinear_model%2Fcoordinate_descent.py?ref=e74b59aeadc00e3964c1acccea74887e135c68be", "patch": "@@ -1863,7 +1863,7 @@ class MultiTaskElasticNetCV(LinearModelCV, RegressorMixin):\n \n i.e. the sum of norm of each row.\n \n- Read more in the :ref:`User Guide <multi_task_lasso>`.\n+ Read more in the :ref:`User Guide <multi_task_elastic_net>`.\n \n Parameters\n ----------" } ]
scikit-learn
38a67f598b30648f16b12bc08f99b9ff2b72ca89
be5ed9398bd4da029da936f4efe75d276293c66f
sklearn/model_selection/tests/test_validation.py
https://github.com/paulha/scikit-learn
true
false
true
@@ -921,7 +921,7 @@ def check_cross_val_predict_with_method(est): X, y = shuffle(X, y, random_state=0) classes = len(set(y)) - kfold = KFold(len(iris.target)) + kfold = KFold() methods = ['decision_function', 'predict_proba', 'predict_log_proba'] for method in methods:
kfold = KFold ( len ( iris . target ) )
kfold = KFold ( )
SAME_FUNCTION_LESS_ARGS
[["Move", ["argument_list", 3, 18, 3, 36], ["):)", 3, 34, 3, 35], 1], ["Delete", ["identifier:len", 3, 19, 3, 22]], ["Delete", ["(:(", 3, 22, 3, 23]], ["Delete", ["identifier:iris", 3, 23, 3, 27]], ["Delete", [".:.", 3, 27, 3, 28]], ["Delete", ["identifier:target", 3, 28, 3, 34]], ["Delete", ["attribute", 3, 23, 3, 34]], ["Delete", ["argument_list", 3, 22, 3, 35]], ["Delete", ["call", 3, 19, 3, 35]], ["Delete", ["):)", 3, 35, 3, 36]]]
paulha/scikit-learn@38a67f598b30648f16b12bc08f99b9ff2b72ca89
Fix n_splits in KFold instantiation in model_selection tests (#8910)
[ { "sha": "c05b25ce67f12ab85994c32ba79af44bd130f84b", "filename": "sklearn/model_selection/tests/test_validation.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/paulha/scikit-learn/blob/38a67f598b30648f16b12bc08f99b9ff2b72ca89/sklearn%2Fmodel_selection%2Ftests%2Ftest_validation.py", "raw_url": "https://github.com/paulha/scikit-learn/raw/38a67f598b30648f16b12bc08f99b9ff2b72ca89/sklearn%2Fmodel_selection%2Ftests%2Ftest_validation.py", "contents_url": "https://api.github.com/repos/paulha/scikit-learn/contents/sklearn%2Fmodel_selection%2Ftests%2Ftest_validation.py?ref=38a67f598b30648f16b12bc08f99b9ff2b72ca89", "patch": "@@ -921,7 +921,7 @@ def check_cross_val_predict_with_method(est):\n X, y = shuffle(X, y, random_state=0)\n classes = len(set(y))\n \n- kfold = KFold(len(iris.target))\n+ kfold = KFold()\n \n methods = ['decision_function', 'predict_proba', 'predict_log_proba']\n for method in methods:" } ]
scikit-learn
9d21d9ab2e671b80581357002629235c5520a7df
e121da71a6fa6d027a30ccf8450f96f312e1ace5
examples/decomposition/plot_image_denoising.py
https://github.com/paulha/scikit-learn
true
false
false
@@ -53,7 +53,7 @@ except ImportError: # Convert from uint8 representation with values between 0 and 255 to # a floating point representation with values between 0 and 1. -face = face / 255 +face = face / 255. # downsample for higher speed face = face[::2, ::2] + face[1::2, ::2] + face[::2, 1::2] + face[1::2, 1::2]
face = face / 255
face = face / 255.
CHANGE_CONSTANT_TYPE
[["Insert", ["binary_operator", 3, 8, 3, 18], ["float:255.", "T"], 2], ["Delete", ["integer:255", 3, 15, 3, 18]]]
paulha/scikit-learn@9d21d9ab2e671b80581357002629235c5520a7df
BF: fixing error on plot_image_denoising.py with Python 2.7 (#9003)
[ { "sha": "29bdf6ba6521716d6f5a7d6dded11e064128ad4a", "filename": "examples/decomposition/plot_image_denoising.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/paulha/scikit-learn/blob/9d21d9ab2e671b80581357002629235c5520a7df/examples%2Fdecomposition%2Fplot_image_denoising.py", "raw_url": "https://github.com/paulha/scikit-learn/raw/9d21d9ab2e671b80581357002629235c5520a7df/examples%2Fdecomposition%2Fplot_image_denoising.py", "contents_url": "https://api.github.com/repos/paulha/scikit-learn/contents/examples%2Fdecomposition%2Fplot_image_denoising.py?ref=9d21d9ab2e671b80581357002629235c5520a7df", "patch": "@@ -53,7 +53,7 @@\n \n # Convert from uint8 representation with values between 0 and 255 to\n # a floating point representation with values between 0 and 1.\n-face = face / 255\n+face = face / 255.\n \n # downsample for higher speed\n face = face[::2, ::2] + face[1::2, ::2] + face[::2, 1::2] + face[1::2, 1::2]" } ]
scikit-learn
5de910d56381573be2b2a8951bab6a5d3a550341
7d49de47e5937fc6d2fad4975672b3c76602da06
sklearn/neural_network/tests/test_mlp.py
https://github.com/paulha/scikit-learn
true
false
false
@@ -564,7 +564,7 @@ def test_adaptive_learning_rate(): assert_greater(1e-6, clf._optimizer.learning_rate) -@ignore_warnings(RuntimeError) +@ignore_warnings(category=RuntimeWarning) def test_warm_start(): X = X_iris y = y_iris
@ ignore_warnings ( RuntimeError ) def test_warm_start ( ) : X = X_iris y = y_iris
@ ignore_warnings ( category = RuntimeWarning ) def test_warm_start ( ) : X = X_iris y = y_iris
SINGLE_STMT
[["Insert", ["argument_list", 3, 17, 3, 31], ["keyword_argument", "N0"], 1], ["Update", ["identifier:RuntimeError", 3, 18, 3, 30], "category"], ["Move", "N0", ["identifier:RuntimeError", 3, 18, 3, 30], 0], ["Insert", "N0", ["=:=", "T"], 1], ["Insert", "N0", ["identifier:RuntimeWarning", "T"], 2]]
paulha/scikit-learn@5de910d56381573be2b2a8951bab6a5d3a550341
Fix decorator called without kwarg that would prevent test from running. (#9083)
[ { "sha": "9c42b7c930cdf0596b43363599c7c2a583d50021", "filename": "sklearn/neural_network/tests/test_mlp.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/paulha/scikit-learn/blob/5de910d56381573be2b2a8951bab6a5d3a550341/sklearn%2Fneural_network%2Ftests%2Ftest_mlp.py", "raw_url": "https://github.com/paulha/scikit-learn/raw/5de910d56381573be2b2a8951bab6a5d3a550341/sklearn%2Fneural_network%2Ftests%2Ftest_mlp.py", "contents_url": "https://api.github.com/repos/paulha/scikit-learn/contents/sklearn%2Fneural_network%2Ftests%2Ftest_mlp.py?ref=5de910d56381573be2b2a8951bab6a5d3a550341", "patch": "@@ -564,7 +564,7 @@ def test_adaptive_learning_rate():\n assert_greater(1e-6, clf._optimizer.learning_rate)\n \n \n-@ignore_warnings(RuntimeError)\n+@ignore_warnings(category=RuntimeWarning)\n def test_warm_start():\n X = X_iris\n y = y_iris" } ]
scikit-learn
cacc4ca7f7c4b616638609508b7f5008e7001f1c
ccbc83116f3dc1a8eebb3fe4f7a771a05866540a
sklearn/__init__.py
https://github.com/paulha/scikit-learn
true
false
false
@@ -91,7 +91,7 @@ def config_context(**new_config): # Make sure that DeprecationWarning within this package always gets printed warnings.filterwarnings('always', category=DeprecationWarning, - module='^{0}\.'.format(re.escape(__name__))) + module=r'^{0}\.'.format(re.escape(__name__))) # PEP0440 compatible formatted version, see: # https://www.python.org/dev/peps/pep-0440/
warnings . filterwarnings ( 'always' , category = DeprecationWarning , module = '^{0}\.' . format ( re . escape ( __name__ ) ) )
warnings . filterwarnings ( 'always' , category = DeprecationWarning , module = r'^{0}\.' . format ( re . escape ( __name__ ) ) )
CHANGE_STRING_LITERAL
[["Update", ["string:'^{0}\\.'", 3, 32, 3, 40], "r'^{0}\\.'"]]
paulha/scikit-learn@cacc4ca7f7c4b616638609508b7f5008e7001f1c
Fixed sklearn-related invalid escape sequence DesprecationWarnings (#8951)
[ { "sha": "3ca2a6814e70b1cb9544796e8ff28f8ed93c0a4f", "filename": "sklearn/__init__.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/paulha/scikit-learn/blob/cacc4ca7f7c4b616638609508b7f5008e7001f1c/sklearn%2F__init__.py", "raw_url": "https://github.com/paulha/scikit-learn/raw/cacc4ca7f7c4b616638609508b7f5008e7001f1c/sklearn%2F__init__.py", "contents_url": "https://api.github.com/repos/paulha/scikit-learn/contents/sklearn%2F__init__.py?ref=cacc4ca7f7c4b616638609508b7f5008e7001f1c", "patch": "@@ -91,7 +91,7 @@ def config_context(**new_config):\n \n # Make sure that DeprecationWarning within this package always gets printed\n warnings.filterwarnings('always', category=DeprecationWarning,\n- module='^{0}\\.'.format(re.escape(__name__)))\n+ module=r'^{0}\\.'.format(re.escape(__name__)))\n \n # PEP0440 compatible formatted version, see:\n # https://www.python.org/dev/peps/pep-0440/" } ]
scikit-learn
aa28633add954740bbe2b996ec1cabccb9f30ad8
8d8bc1f72055bf1eec0f864c146c0b4c53935553
sklearn/mixture/base.py
https://github.com/paulha/scikit-learn
true
false
true
@@ -417,7 +417,7 @@ class BaseMixture(six.with_metaclass(ABCMeta, DensityMixin, BaseEstimator)): Returns ------- - weighted_log_prob : array, shape (n_features, n_component) + weighted_log_prob : array, shape (n_samples, n_component)
- - - - - - - weighted_log_prob : array , shape ( n_features , n_component )
- - - - - - - weighted_log_prob : array , shape ( n_samples , n_component )
CHANGE_IDENTIFIER_USED
[["Update", ["identifier:n_features", 3, 43, 3, 53], "n_samples"]]
paulha/scikit-learn@aa28633add954740bbe2b996ec1cabccb9f30ad8
Documentation fix in mixture/base.py (#9237) Documentation of the shape of _estimate_weighted_log_prob() function's return value is (n_samples, n_component)
[ { "sha": "e88b00cd325b3f6391378086eddf3e1624b39e36", "filename": "sklearn/mixture/base.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/paulha/scikit-learn/blob/aa28633add954740bbe2b996ec1cabccb9f30ad8/sklearn%2Fmixture%2Fbase.py", "raw_url": "https://github.com/paulha/scikit-learn/raw/aa28633add954740bbe2b996ec1cabccb9f30ad8/sklearn%2Fmixture%2Fbase.py", "contents_url": "https://api.github.com/repos/paulha/scikit-learn/contents/sklearn%2Fmixture%2Fbase.py?ref=aa28633add954740bbe2b996ec1cabccb9f30ad8", "patch": "@@ -417,7 +417,7 @@ def _estimate_weighted_log_prob(self, X):\n \n Returns\n -------\n- weighted_log_prob : array, shape (n_features, n_component)\n+ weighted_log_prob : array, shape (n_samples, n_component)\n \"\"\"\n return self._estimate_log_prob(X) + self._estimate_log_weights()\n " } ]
WNTR
e315443738f8b3dc69e7964d092527753f092e5c
c97b3748dfa781bd0115e3fc35d626aa2598c347
wntr/tests/test_sim_performance.py
https://github.com/sandialabs/WNTR
true
false
true
@@ -224,7 +224,7 @@ class TestPerformance(unittest.TestCase): self.assertLess(demand_diff.mean().mean(), 3.4e-7) self.assertLess(flow_diff.mean().mean(), 2.0e-7) self.assertLess(head_diff.max().max(), 3.3e-4) - self.assertLess(demand_diff.max().max(), 3.1e-6) + self.assertLess(demand_diff.max().max(), 3.2e-6) self.assertLess(flow_diff.max().max(), 4.8e-5) self.assertLess(head_diff.std().mean(), 2.6e-5) self.assertLess(demand_diff.std().mean(), 3.6e-7)
self . assertLess ( demand_diff . max ( ) . max ( ) , 3.1e-6 )
self . assertLess ( demand_diff . max ( ) . max ( ) , 3.2e-6 )
CHANGE_NUMERIC_LITERAL
[["Update", ["float:3.1e-6", 3, 50, 3, 56], "3.2e-6"]]
sandialabs/WNTR@e315443738f8b3dc69e7964d092527753f092e5c
fixing test
[ { "sha": "047155bb4927cb3b0402551903ab0ceb1975368c", "filename": "wntr/tests/test_sim_performance.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/sandialabs/WNTR/blob/e315443738f8b3dc69e7964d092527753f092e5c/wntr%2Ftests%2Ftest_sim_performance.py", "raw_url": "https://github.com/sandialabs/WNTR/raw/e315443738f8b3dc69e7964d092527753f092e5c/wntr%2Ftests%2Ftest_sim_performance.py", "contents_url": "https://api.github.com/repos/sandialabs/WNTR/contents/wntr%2Ftests%2Ftest_sim_performance.py?ref=e315443738f8b3dc69e7964d092527753f092e5c", "patch": "@@ -224,7 +224,7 @@ def test_Net3_performance(self):\n self.assertLess(demand_diff.mean().mean(), 3.4e-7)\n self.assertLess(flow_diff.mean().mean(), 2.0e-7)\n self.assertLess(head_diff.max().max(), 3.3e-4)\n- self.assertLess(demand_diff.max().max(), 3.1e-6)\n+ self.assertLess(demand_diff.max().max(), 3.2e-6)\n self.assertLess(flow_diff.max().max(), 4.8e-5)\n self.assertLess(head_diff.std().mean(), 2.6e-5)\n self.assertLess(demand_diff.std().mean(), 3.6e-7)" } ]
WNTR
43bf20c273283f299347b2234f973f4d9f3a079b
6d65a81d4e9147099e0bcf1256af3dafd8896821
wntr/sim/hydraulics.py
https://github.com/sandialabs/WNTR
true
false
false
@@ -7,7 +7,7 @@ from wntr.network.model import WaterNetworkModel from wntr.network.base import NodeType, LinkType, LinkStatus from wntr.network.elements import Junction, Tank, Reservoir, Pipe, HeadPump, PowerPump, PRValve, PSValve, FCValve, \ TCValve, GPValve, PBValve -import wntr.aml.aml.aml as aml +from wntr.aml.aml import aml from collections import OrderedDict from wntr.utils.ordered_set import OrderedSet
import wntr . aml . aml . aml as aml
from wntr . aml . aml import aml
SINGLE_STMT
[["Insert", ["module", 0, 1, 7, 0], ["import_from_statement", "N0"], 2], ["Insert", "N0", ["from:from", "T"], 0], ["Move", "N0", ["dotted_name", 3, 8, 3, 24], 1], ["Insert", "N0", ["import:import", "T"], 2], ["Insert", "N0", ["dotted_name", "N1"], 3], ["Move", "N1", ["identifier:aml", 3, 28, 3, 31], 0], ["Delete", ["import:import", 3, 1, 3, 7]], ["Delete", [".:.", 3, 20, 3, 21]], ["Delete", ["identifier:aml", 3, 21, 3, 24]], ["Delete", ["as:as", 3, 25, 3, 27]], ["Delete", ["aliased_import", 3, 8, 3, 31]], ["Delete", ["import_statement", 3, 1, 3, 31]]]
sandialabs/WNTR@43bf20c273283f299347b2234f973f4d9f3a079b
bug
[ { "sha": "527c4d913b6d7906c351d7dd99f3fbc0fbe43a76", "filename": "wntr/sim/hydraulics.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/sandialabs/WNTR/blob/43bf20c273283f299347b2234f973f4d9f3a079b/wntr%2Fsim%2Fhydraulics.py", "raw_url": "https://github.com/sandialabs/WNTR/raw/43bf20c273283f299347b2234f973f4d9f3a079b/wntr%2Fsim%2Fhydraulics.py", "contents_url": "https://api.github.com/repos/sandialabs/WNTR/contents/wntr%2Fsim%2Fhydraulics.py?ref=43bf20c273283f299347b2234f973f4d9f3a079b", "patch": "@@ -7,7 +7,7 @@\n from wntr.network.base import NodeType, LinkType, LinkStatus\n from wntr.network.elements import Junction, Tank, Reservoir, Pipe, HeadPump, PowerPump, PRValve, PSValve, FCValve, \\\n TCValve, GPValve, PBValve\n-import wntr.aml.aml.aml as aml\n+from wntr.aml.aml import aml\n from collections import OrderedDict\n from wntr.utils.ordered_set import OrderedSet\n " } ]
WNTR
26c2008cfdf7a8a9a0d02e4f50333603a09f91df
1070ce31de37bcb7c4aca9c19462f4ce4e379049
wntr/network/model.py
https://github.com/sandialabs/WNTR
true
false
true
@@ -919,7 +919,7 @@ class WaterNetworkModel(AbstractModel): def _get_valve_controls(self): valve_controls = [] - for valve_name, valve in self.valves: + for valve_name, valve in self.valves(): new_setting_action = ControlAction(valve, 'status', LinkStatus.Active) new_setting_condition = _ValveNewSettingCondition(valve)
for valve_name , valve in self . valves : new_setting_action = ControlAction ( valve , 'status' , LinkStatus . Active ) new_setting_condition = _ValveNewSettingCondition ( valve )
for valve_name , valve in self . valves ( ) : new_setting_action = ControlAction ( valve , 'status' , LinkStatus . Active ) new_setting_condition = _ValveNewSettingCondition ( valve )
SINGLE_STMT
[["Insert", ["for_statement", 3, 9, 6, 69], ["call", "N0"], 3], ["Move", "N0", ["attribute", 3, 34, 3, 45], 0], ["Insert", "N0", ["argument_list", "N1"], 1], ["Insert", "N1", ["(:(", "T"], 0], ["Insert", "N1", ["):)", "T"], 1]]
sandialabs/WNTR@26c2008cfdf7a8a9a0d02e4f50333603a09f91df
fixing some tests
[ { "sha": "e9ae3eca465791e4c5d84f6dfaff36ba89452e03", "filename": "wntr/network/model.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/sandialabs/WNTR/blob/26c2008cfdf7a8a9a0d02e4f50333603a09f91df/wntr%2Fnetwork%2Fmodel.py", "raw_url": "https://github.com/sandialabs/WNTR/raw/26c2008cfdf7a8a9a0d02e4f50333603a09f91df/wntr%2Fnetwork%2Fmodel.py", "contents_url": "https://api.github.com/repos/sandialabs/WNTR/contents/wntr%2Fnetwork%2Fmodel.py?ref=26c2008cfdf7a8a9a0d02e4f50333603a09f91df", "patch": "@@ -919,7 +919,7 @@ def _get_pump_controls(self):\n \n def _get_valve_controls(self):\n valve_controls = []\n- for valve_name, valve in self.valves:\n+ for valve_name, valve in self.valves():\n \n new_setting_action = ControlAction(valve, 'status', LinkStatus.Active)\n new_setting_condition = _ValveNewSettingCondition(valve)" } ]
WNTR
bc7b5b0c95cb260eb51f6e965736e1876e051f94
26c2008cfdf7a8a9a0d02e4f50333603a09f91df
wntr/network/model.py
https://github.com/sandialabs/WNTR
true
false
true
@@ -959,7 +959,7 @@ class WaterNetworkModel(AbstractModel): def _get_demand_status_controls(self): demand_status_controls = [] - for node_name, node in self.junctions: + for node_name, node in self.junctions(): partial_action = ControlAction(node, '_demand_status', _DemandStatus.Partial) zero_action = ControlAction(node, '_demand_status', _DemandStatus.Zero) full_action = ControlAction(node, '_demand_status', _DemandStatus.Full)
for node_name , node in self . junctions : partial_action = ControlAction ( node , '_demand_status' , _DemandStatus . Partial ) zero_action = ControlAction ( node , '_demand_status' , _DemandStatus . Zero ) full_action = ControlAction ( node , '_demand_status' , _DemandStatus . Full )
for node_name , node in self . junctions ( ) : partial_action = ControlAction ( node , '_demand_status' , _DemandStatus . Partial ) zero_action = ControlAction ( node , '_demand_status' , _DemandStatus . Zero ) full_action = ControlAction ( node , '_demand_status' , _DemandStatus . Full )
SINGLE_STMT
[["Insert", ["for_statement", 3, 9, 6, 84], ["call", "N0"], 3], ["Move", "N0", ["attribute", 3, 32, 3, 46], 0], ["Insert", "N0", ["argument_list", "N1"], 1], ["Insert", "N1", ["(:(", "T"], 0], ["Insert", "N1", ["):)", "T"], 1]]
sandialabs/WNTR@bc7b5b0c95cb260eb51f6e965736e1876e051f94
bug
[ { "sha": "d334dbe3fea983c77c58ae5825d9dac89922d8a7", "filename": "wntr/network/model.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/sandialabs/WNTR/blob/bc7b5b0c95cb260eb51f6e965736e1876e051f94/wntr%2Fnetwork%2Fmodel.py", "raw_url": "https://github.com/sandialabs/WNTR/raw/bc7b5b0c95cb260eb51f6e965736e1876e051f94/wntr%2Fnetwork%2Fmodel.py", "contents_url": "https://api.github.com/repos/sandialabs/WNTR/contents/wntr%2Fnetwork%2Fmodel.py?ref=bc7b5b0c95cb260eb51f6e965736e1876e051f94", "patch": "@@ -959,7 +959,7 @@ def _get_valve_controls(self):\n \n def _get_demand_status_controls(self):\n demand_status_controls = []\n- for node_name, node in self.junctions:\n+ for node_name, node in self.junctions():\n partial_action = ControlAction(node, '_demand_status', _DemandStatus.Partial)\n zero_action = ControlAction(node, '_demand_status', _DemandStatus.Zero)\n full_action = ControlAction(node, '_demand_status', _DemandStatus.Full)" } ]
WNTR
ba09cd1b3e39eb138672b4e9f1c798333d2269e3
512b49057a669af0de3d888dba7d675466c456cb
wntr/epanet/io.py
https://github.com/sandialabs/WNTR
true
false
true
@@ -1151,7 +1151,7 @@ class InpFile(object): # control_name = control_name + '/' + str(run_at_time) elif len(current) == 7: # at clocktime run_at_time = int(_clock_time_to_sec(current[5], current[6])) - control_obj = Control._time_control(self.wn, run_at_time, 'SHIFTED_TIME', True, action_obj, control_name) + control_obj = Control._time_control(self.wn, run_at_time, 'CLOCK_TIME', True, action_obj, control_name) # control_name = '' # for i in range(len(current)-1): # control_name = control_name + '/' + current[i]
control_obj = Control . _time_control ( self . wn , run_at_time , 'SHIFTED_TIME' , True , action_obj , control_name )
control_obj = Control . _time_control ( self . wn , run_at_time , 'CLOCK_TIME' , True , action_obj , control_name )
CHANGE_STRING_LITERAL
[["Update", ["string:'SHIFTED_TIME'", 3, 79, 3, 93], "'CLOCK_TIME'"]]
sandialabs/WNTR@ba09cd1b3e39eb138672b4e9f1c798333d2269e3
Bug fix in time control, use CLOCK_TIME instead of SHIFTED_TIME
[ { "sha": "d5882f73556a4b4d339e79fef709f5bd077279ad", "filename": "wntr/epanet/io.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/sandialabs/WNTR/blob/ba09cd1b3e39eb138672b4e9f1c798333d2269e3/wntr%2Fepanet%2Fio.py", "raw_url": "https://github.com/sandialabs/WNTR/raw/ba09cd1b3e39eb138672b4e9f1c798333d2269e3/wntr%2Fepanet%2Fio.py", "contents_url": "https://api.github.com/repos/sandialabs/WNTR/contents/wntr%2Fepanet%2Fio.py?ref=ba09cd1b3e39eb138672b4e9f1c798333d2269e3", "patch": "@@ -1151,7 +1151,7 @@ def _read_controls(self):\n # control_name = control_name + '/' + str(run_at_time)\n elif len(current) == 7: # at clocktime\n run_at_time = int(_clock_time_to_sec(current[5], current[6]))\n- control_obj = Control._time_control(self.wn, run_at_time, 'SHIFTED_TIME', True, action_obj, control_name)\n+ control_obj = Control._time_control(self.wn, run_at_time, 'CLOCK_TIME', True, action_obj, control_name)\n # control_name = ''\n # for i in range(len(current)-1):\n # control_name = control_name + '/' + current[i]" } ]
WNTR
aed32326241d35287020e2ccc31f51e6af93e295
60c126e26fcd2f588f8072108281c30655548a65
wntr/sim/solvers.py
https://github.com/sandialabs/WNTR
true
false
true
@@ -91,7 +91,7 @@ class NewtonSolver(object): if r_norm < self.tol: return SolverStatus.converged, 'Solved Successfully' - J = model.evaluate_jacobian(x=None, new_eval=False) + J = model.evaluate_jacobian(x=None) # Call Linear solver try:
J = model . evaluate_jacobian ( x = None , new_eval = False )
J = model . evaluate_jacobian ( x = None )
SAME_FUNCTION_LESS_ARGS
[["Delete", [",:,", 3, 47, 3, 48]], ["Delete", ["identifier:new_eval", 3, 49, 3, 57]], ["Delete", ["=:=", 3, 57, 3, 58]], ["Delete", ["false:False", 3, 58, 3, 63]], ["Delete", ["keyword_argument", 3, 49, 3, 63]]]
sandialabs/WNTR@aed32326241d35287020e2ccc31f51e6af93e295
debugging
[ { "sha": "6e524deb36e796d365b047381d38d36f0d3e728b", "filename": "wntr/sim/solvers.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/sandialabs/WNTR/blob/aed32326241d35287020e2ccc31f51e6af93e295/wntr%2Fsim%2Fsolvers.py", "raw_url": "https://github.com/sandialabs/WNTR/raw/aed32326241d35287020e2ccc31f51e6af93e295/wntr%2Fsim%2Fsolvers.py", "contents_url": "https://api.github.com/repos/sandialabs/WNTR/contents/wntr%2Fsim%2Fsolvers.py?ref=aed32326241d35287020e2ccc31f51e6af93e295", "patch": "@@ -91,7 +91,7 @@ def solve(self, model):\n if r_norm < self.tol:\n return SolverStatus.converged, 'Solved Successfully'\n \n- J = model.evaluate_jacobian(x=None, new_eval=False)\n+ J = model.evaluate_jacobian(x=None)\n \n # Call Linear solver\n try:" } ]
WNTR
f73f0618e22201c0d15e5d1c62537564fc8840cb
423f89122f74e590b2b38bdca5580e4c1e9f58dc
setup.py
https://github.com/sandialabs/WNTR
true
false
false
@@ -5,7 +5,7 @@ import os import sys from distutils.spawn import find_executable -use_swig = True +use_swig = False try: numpy_include = numpy.get_include()
use_swig = True
use_swig = False
CHANGE_BOOLEAN_LITERAL
[["Insert", ["assignment", 3, 1, 3, 16], ["false:False", "T"], 2], ["Delete", ["true:True", 3, 12, 3, 16]]]
sandialabs/WNTR@f73f0618e22201c0d15e5d1c62537564fc8840cb
dont use swig in setup.py by default
[ { "sha": "c6f22a8700160f317b0f88d007b6f9ee44cb91d4", "filename": "setup.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/sandialabs/WNTR/blob/f73f0618e22201c0d15e5d1c62537564fc8840cb/setup.py", "raw_url": "https://github.com/sandialabs/WNTR/raw/f73f0618e22201c0d15e5d1c62537564fc8840cb/setup.py", "contents_url": "https://api.github.com/repos/sandialabs/WNTR/contents/setup.py?ref=f73f0618e22201c0d15e5d1c62537564fc8840cb", "patch": "@@ -5,7 +5,7 @@\n import sys\n from distutils.spawn import find_executable\n \n-use_swig = True\n+use_swig = False\n \n try:\n numpy_include = numpy.get_include()" } ]
WNTR
04cd9d09740893ac63c7d7b66f78fb1222cc8970
f5c8b8248c77a981fad8c2364394fa1d6d6e6d76
wntr/sim/hydraulics.py
https://github.com/sandialabs/WNTR
true
false
true
@@ -74,7 +74,7 @@ def create_hydraulic_model(wn, mode='DD'): constraint.pdd_constraint.build(m, wn, model_updater) else: raise ValueError('mode not recognized: ' + str(mode)) - constraint.approx_hazen_williams_headloss_constraint.build(m, wn, model_updater) + constraint.hazen_williams_headloss_constraint.build(m, wn, model_updater) constraint.head_pump_headloss_constraint.build(m, wn, model_updater) constraint.power_pump_headloss_constraint.build(m, wn, model_updater) constraint.prv_headloss_constraint.build(m, wn, model_updater)
constraint . approx_hazen_williams_headloss_constraint . build ( m , wn , model_updater )
constraint . hazen_williams_headloss_constraint . build ( m , wn , model_updater )
CHANGE_ATTRIBUTE_USED
[["Update", ["identifier:approx_hazen_williams_headloss_constraint", 3, 16, 3, 57], "hazen_williams_headloss_constraint"]]
sandialabs/WNTR@04cd9d09740893ac63c7d7b66f78fb1222cc8970
default hw headloss constraint
[ { "sha": "51d4349e7964a27568fdc599517922a04e5d91a9", "filename": "wntr/sim/hydraulics.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/sandialabs/WNTR/blob/04cd9d09740893ac63c7d7b66f78fb1222cc8970/wntr%2Fsim%2Fhydraulics.py", "raw_url": "https://github.com/sandialabs/WNTR/raw/04cd9d09740893ac63c7d7b66f78fb1222cc8970/wntr%2Fsim%2Fhydraulics.py", "contents_url": "https://api.github.com/repos/sandialabs/WNTR/contents/wntr%2Fsim%2Fhydraulics.py?ref=04cd9d09740893ac63c7d7b66f78fb1222cc8970", "patch": "@@ -74,7 +74,7 @@ def create_hydraulic_model(wn, mode='DD'):\n constraint.pdd_constraint.build(m, wn, model_updater)\n else:\n raise ValueError('mode not recognized: ' + str(mode))\n- constraint.approx_hazen_williams_headloss_constraint.build(m, wn, model_updater)\n+ constraint.hazen_williams_headloss_constraint.build(m, wn, model_updater)\n constraint.head_pump_headloss_constraint.build(m, wn, model_updater)\n constraint.power_pump_headloss_constraint.build(m, wn, model_updater)\n constraint.prv_headloss_constraint.build(m, wn, model_updater)" } ]
WNTR
dcd4d132543ce3ab8dda6b422d5d9bb75ff96938
34a83c477f65392d8810d00bb7fd5681715724a2
setup.py
https://github.com/sandialabs/WNTR
true
false
false
@@ -3,7 +3,7 @@ from setuptools.extension import Extension import numpy import os -use_swig = True +use_swig = False build = True extension_modules = list()
use_swig = True
use_swig = False
CHANGE_BOOLEAN_LITERAL
[["Insert", ["assignment", 3, 1, 3, 16], ["false:False", "T"], 2], ["Delete", ["true:True", 3, 12, 3, 16]]]
sandialabs/WNTR@dcd4d132543ce3ab8dda6b422d5d9bb75ff96938
dont use swig by default
[ { "sha": "dd37749b0acd469070d180ebf3c923e2812ee9ac", "filename": "setup.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/sandialabs/WNTR/blob/dcd4d132543ce3ab8dda6b422d5d9bb75ff96938/setup.py", "raw_url": "https://github.com/sandialabs/WNTR/raw/dcd4d132543ce3ab8dda6b422d5d9bb75ff96938/setup.py", "contents_url": "https://api.github.com/repos/sandialabs/WNTR/contents/setup.py?ref=dcd4d132543ce3ab8dda6b422d5d9bb75ff96938", "patch": "@@ -3,7 +3,7 @@\n import numpy\n import os\n \n-use_swig = True\n+use_swig = False\n build = True\n \n extension_modules = list()" } ]
WNTR
dbd23448e2441eea49648aca3930f483bc45931a
f5e4fa7453b106be5d4d1f046cfa2a07d6baadcd
setup.py
https://github.com/sandialabs/WNTR
true
false
false
@@ -4,7 +4,7 @@ import numpy import os use_swig = False -build = True +build = False extension_modules = list()
build = True
build = False
CHANGE_BOOLEAN_LITERAL
[["Insert", ["assignment", 3, 1, 3, 13], ["false:False", "T"], 2], ["Delete", ["true:True", 3, 9, 3, 13]]]
sandialabs/WNTR@dbd23448e2441eea49648aca3930f483bc45931a
setting build to False by default in setup.py
[ { "sha": "53e171e729b6d6b8a7cff6883ddd6a31cb7dce98", "filename": "setup.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/sandialabs/WNTR/blob/dbd23448e2441eea49648aca3930f483bc45931a/setup.py", "raw_url": "https://github.com/sandialabs/WNTR/raw/dbd23448e2441eea49648aca3930f483bc45931a/setup.py", "contents_url": "https://api.github.com/repos/sandialabs/WNTR/contents/setup.py?ref=dbd23448e2441eea49648aca3930f483bc45931a", "patch": "@@ -4,7 +4,7 @@\n import os\n \n use_swig = False\n-build = True\n+build = False\n \n extension_modules = list()\n " } ]
ssh
13f818f04f53573f873a019874dbdaec79e5365e
a8a023a2432753bc6bdfdd0011b66869642845d0
paramiko/channel.py
https://github.com/haonature/ssh
true
false
true
@@ -828,7 +828,7 @@ class Channel (object): return x = self.in_buffer[:nbytes] self.in_buffer = self.in_buffer[nbytes:] - os.write(self.pipd_wfd, x) + os.write(self.pipe_wfd, x) def _unlink(self): if self.closed or not self.active:
os . write ( self . pipd_wfd , x )
os . write ( self . pipe_wfd , x )
CHANGE_ATTRIBUTE_USED
[["Update", ["identifier:pipd_wfd", 3, 23, 3, 31], "pipe_wfd"]]
haonature/ssh@13f818f04f53573f873a019874dbdaec79e5365e
[project @ Arch-1:[email protected]%secsh--dev--1.0--patch-114] fix typo in channel fix typo that alain found: pipd_wfd -> pipe_wfd.
[ { "sha": "833ae62b3226fd14af6f69476f7985a13cbca995", "filename": "paramiko/channel.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/haonature/ssh/blob/13f818f04f53573f873a019874dbdaec79e5365e/paramiko%2Fchannel.py", "raw_url": "https://github.com/haonature/ssh/raw/13f818f04f53573f873a019874dbdaec79e5365e/paramiko%2Fchannel.py", "contents_url": "https://api.github.com/repos/haonature/ssh/contents/paramiko%2Fchannel.py?ref=13f818f04f53573f873a019874dbdaec79e5365e", "patch": "@@ -828,7 +828,7 @@ def _push_pipe(self, nbytes):\n return\n x = self.in_buffer[:nbytes]\n self.in_buffer = self.in_buffer[nbytes:]\n- os.write(self.pipd_wfd, x)\n+ os.write(self.pipe_wfd, x)\n \n def _unlink(self):\n if self.closed or not self.active:" } ]
ssh
4862d5955bdc8275fe5c03cceffd73448b130812
112b72511eb186175839fc0b8d66b6fc6dcdb534
demo_simple.py
https://github.com/haonature/ssh
true
false
false
@@ -113,7 +113,7 @@ try: t.close() except Exception, e: - print '*** Caught exception: ' + str(e.__class__) + ': ' + str(e) + print '*** Caught exception: %s: %s' % (e.__class__, e) traceback.print_exc() try: t.close()
'*** Caught exception: ' + str ( e . __class__ ) + ': ' + str ( e )
'*** Caught exception: %s: %s' % ( e . __class__ , e )
SINGLE_STMT
[["Update", ["string:'*** Caught exception: '", 3, 11, 3, 35], "'*** Caught exception: %s: %s'"], ["Move", ["binary_operator", 3, 11, 3, 70], ["string:'*** Caught exception: '", 3, 11, 3, 35], 0], ["Insert", ["binary_operator", 3, 11, 3, 70], ["%:%", "T"], 1], ["Insert", ["binary_operator", 3, 11, 3, 70], ["tuple", "N0"], 2], ["Move", "N0", ["(:(", 3, 41, 3, 42], 0], ["Move", "N0", ["attribute", 3, 42, 3, 53], 1], ["Insert", "N0", [",:,", "T"], 2], ["Move", "N0", ["identifier:e", 3, 68, 3, 69], 3], ["Move", "N0", ["):)", 3, 69, 3, 70], 4], ["Delete", ["+:+", 3, 36, 3, 37]], ["Delete", ["identifier:str", 3, 38, 3, 41]], ["Delete", ["):)", 3, 53, 3, 54]], ["Delete", ["argument_list", 3, 41, 3, 54]], ["Delete", ["call", 3, 38, 3, 54]], ["Delete", ["binary_operator", 3, 11, 3, 54]], ["Delete", ["+:+", 3, 55, 3, 56]], ["Delete", ["string:': '", 3, 57, 3, 61]], ["Delete", ["binary_operator", 3, 11, 3, 61]], ["Delete", ["+:+", 3, 62, 3, 63]], ["Delete", ["identifier:str", 3, 64, 3, 67]], ["Delete", ["(:(", 3, 67, 3, 68]], ["Delete", ["argument_list", 3, 67, 3, 70]], ["Delete", ["call", 3, 64, 3, 70]]]
haonature/ssh@4862d5955bdc8275fe5c03cceffd73448b130812
[project @ Arch-1:[email protected]%paramiko--dev--1--patch-57] simplify a line of debug output in demo_simple that bothered me one day
[ { "sha": "7eade45bb3913d9382addbddb8cfe0208dcfe7c0", "filename": "demo_simple.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/haonature/ssh/blob/4862d5955bdc8275fe5c03cceffd73448b130812/demo_simple.py", "raw_url": "https://github.com/haonature/ssh/raw/4862d5955bdc8275fe5c03cceffd73448b130812/demo_simple.py", "contents_url": "https://api.github.com/repos/haonature/ssh/contents/demo_simple.py?ref=4862d5955bdc8275fe5c03cceffd73448b130812", "patch": "@@ -113,7 +113,7 @@\n t.close()\n \n except Exception, e:\n- print '*** Caught exception: ' + str(e.__class__) + ': ' + str(e)\n+ print '*** Caught exception: %s: %s' % (e.__class__, e)\n traceback.print_exc()\n try:\n t.close()" } ]
ssh
be7d99886c2f6223d8f0899e4864c11f922dfde8
cb3008b402a6a411204d0dc060c2f85a548c1c7e
paramiko/util.py
https://github.com/haonature/ssh
true
false
true
@@ -294,7 +294,7 @@ def lookup_ssh_host_config(hostname, config): matches = [x for x in config if fnmatch.fnmatch(hostname, x['host'])] # sort in order of shortest match (usually '*') to longest - matches.sort(key=lambda x: len(x['host'])) + matches.sort(lambda x,y: cmp(len(x['host']), len(x['host']))) ret = {} for m in matches: ret.update(m)
matches . sort ( key = lambda x : len ( x [ 'host' ] ) )
matches . sort ( lambda x , y : cmp ( len ( x [ 'host' ] ) , len ( x [ 'host' ] ) ) )
SINGLE_STMT
[["Move", ["argument_list", 2, 17, 2, 47], ["lambda", 2, 22, 2, 46], 1], ["Insert", ["lambda", 2, 22, 2, 46], ["call", "N0"], 3], ["Insert", ["lambda_parameters", 2, 29, 2, 30], [",:,", "T"], 1], ["Insert", ["lambda_parameters", 2, 29, 2, 30], ["identifier:y", "T"], 2], ["Insert", "N0", ["identifier:cmp", "T"], 0], ["Insert", "N0", ["argument_list", "N1"], 1], ["Insert", "N1", ["(:(", "T"], 0], ["Move", "N1", ["call", 2, 32, 2, 46], 1], ["Insert", "N1", [",:,", "T"], 2], ["Insert", "N1", ["call", "N2"], 3], ["Insert", "N1", ["):)", "T"], 4], ["Insert", "N2", ["identifier:len", "T"], 0], ["Insert", "N2", ["argument_list", "N3"], 1], ["Insert", "N3", ["(:(", "T"], 0], ["Insert", "N3", ["subscript", "N4"], 1], ["Insert", "N3", ["):)", "T"], 2], ["Insert", "N4", ["identifier:x", "T"], 0], ["Insert", "N4", ["[:[", "T"], 1], ["Insert", "N4", ["string:'host'", "T"], 2], ["Insert", "N4", ["]:]", "T"], 3], ["Delete", ["identifier:key", 2, 18, 2, 21]], ["Delete", ["=:=", 2, 21, 2, 22]], ["Delete", ["keyword_argument", 2, 18, 2, 46]]]
haonature/ssh@be7d99886c2f6223d8f0899e4864c11f922dfde8
[project @ [email protected]] patch from jan hudec to fix a python 2.4-ism
[ { "sha": "809b850d43a790806c23117d7cef11259de839c7", "filename": "paramiko/util.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/haonature/ssh/blob/be7d99886c2f6223d8f0899e4864c11f922dfde8/paramiko%2Futil.py", "raw_url": "https://github.com/haonature/ssh/raw/be7d99886c2f6223d8f0899e4864c11f922dfde8/paramiko%2Futil.py", "contents_url": "https://api.github.com/repos/haonature/ssh/contents/paramiko%2Futil.py?ref=be7d99886c2f6223d8f0899e4864c11f922dfde8", "patch": "@@ -294,7 +294,7 @@ def lookup_ssh_host_config(hostname, config):\n \"\"\"\n matches = [x for x in config if fnmatch.fnmatch(hostname, x['host'])]\n # sort in order of shortest match (usually '*') to longest\n- matches.sort(key=lambda x: len(x['host']))\n+ matches.sort(lambda x,y: cmp(len(x['host']), len(x['host'])))\n ret = {}\n for m in matches:\n ret.update(m)" } ]
ipython
13bd783565dcc01ebf3acd475a8bdf6d705faa43
42a53e105d45af4a93f5046795e5081302ff0bba
IPython/zmq/iostream.py
https://github.com/ChinaQuants/ipython
true
false
true
@@ -69,7 +69,7 @@ def write(self, string): else: # Make sure that we're handling unicode if not isinstance(string, unicode): - enc = encoding.getdefaultencoding() + enc = encoding.DEFAULT_ENCODING string = string.decode(enc, 'replace') self._buffer.write(string)
else : if not isinstance ( string , unicode ) : enc = encoding . getdefaultencoding ( )
else : if not isinstance ( string , unicode ) : enc = encoding . DEFAULT_ENCODING
SINGLE_STMT
[["Move", ["assignment", 0, 9, 3, 52], ["attribute", 3, 23, 3, 50], 5], ["Update", ["identifier:getdefaultencoding", 3, 32, 3, 50], "DEFAULT_ENCODING"], ["Delete", ["(:(", 3, 50, 3, 51]], ["Delete", ["):)", 3, 51, 3, 52]], ["Delete", ["argument_list", 3, 50, 3, 52]], ["Delete", ["call", 3, 23, 3, 52]]]
ChinaQuants/ipython@13bd783565dcc01ebf3acd475a8bdf6d705faa43
fix missed case of getdefaultencoding() -> DEFAULT_ENCODING
[ { "sha": "7fa896c06c5767ae758614cae029f65106ebec1b", "filename": "IPython/zmq/iostream.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/ChinaQuants/ipython/blob/13bd783565dcc01ebf3acd475a8bdf6d705faa43/IPython%2Fzmq%2Fiostream.py", "raw_url": "https://github.com/ChinaQuants/ipython/raw/13bd783565dcc01ebf3acd475a8bdf6d705faa43/IPython%2Fzmq%2Fiostream.py", "contents_url": "https://api.github.com/repos/ChinaQuants/ipython/contents/IPython%2Fzmq%2Fiostream.py?ref=13bd783565dcc01ebf3acd475a8bdf6d705faa43", "patch": "@@ -69,7 +69,7 @@ def write(self, string):\n else:\n # Make sure that we're handling unicode\n if not isinstance(string, unicode):\n- enc = encoding.getdefaultencoding()\n+ enc = encoding.DEFAULT_ENCODING\n string = string.decode(enc, 'replace')\n \n self._buffer.write(string)" } ]
ipython
c7e190dab47875a958ca022ea953e6bed84b238d
cffb287e1deb7ed62904b2f76a75451a9c0d11f5
IPython/frontend/qt/console/rich_ipython_widget.py
https://github.com/ChinaQuants/ipython
true
false
true
@@ -231,7 +231,7 @@ def _get_image_tag(self, match, path = None, format = "png"): try: svg = str(self._name_to_svg_map[match.group("name")]) except KeyError: - return "<b>Couldn't find image %s</b>" % match.group("name") + return "<b>Cannot convert a PNG to SVG. </b>To fix this, add this to your config: <span>c.InlineBackendConfig.figure_format = 'svg'</span>" # Not currently checking path, because it's tricky to find a # cross-browser way to embed external SVG images (e.g., via
return "<b>Couldn't find image %s</b>" % match . group ( "name" )
return "<b>Cannot convert a PNG to SVG. </b>To fix this, add this to your config: <span>c.InlineBackendConfig.figure_format = 'svg'</span>"
SINGLE_STMT
[["Update", ["string:\"<b>Couldn't find image %s</b>\"", 3, 24, 3, 55], "\"<b>Cannot convert a PNG to SVG. </b>To fix this, add this to your config: <span>c.InlineBackendConfig.figure_format = 'svg'</span>\""], ["Move", ["return_statement", 3, 17, 3, 77], ["string:\"<b>Couldn't find image %s</b>\"", 3, 24, 3, 55], 1], ["Delete", ["%:%", 3, 56, 3, 57]], ["Delete", ["identifier:match", 3, 58, 3, 63]], ["Delete", [".:.", 3, 63, 3, 64]], ["Delete", ["identifier:group", 3, 64, 3, 69]], ["Delete", ["attribute", 3, 58, 3, 69]], ["Delete", ["(:(", 3, 69, 3, 70]], ["Delete", ["string:\"name\"", 3, 70, 3, 76]], ["Delete", ["):)", 3, 76, 3, 77]], ["Delete", ["argument_list", 3, 69, 3, 77]], ["Delete", ["call", 3, 58, 3, 77]], ["Delete", ["binary_operator", 3, 24, 3, 77]]]
ChinaQuants/ipython@c7e190dab47875a958ca022ea953e6bed84b238d
Fixed #735. More useful error message in html output
[ { "sha": "18edd496fad64031332d444acc870063dc483b2c", "filename": "IPython/frontend/qt/console/rich_ipython_widget.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/ChinaQuants/ipython/blob/c7e190dab47875a958ca022ea953e6bed84b238d/IPython%2Ffrontend%2Fqt%2Fconsole%2Frich_ipython_widget.py", "raw_url": "https://github.com/ChinaQuants/ipython/raw/c7e190dab47875a958ca022ea953e6bed84b238d/IPython%2Ffrontend%2Fqt%2Fconsole%2Frich_ipython_widget.py", "contents_url": "https://api.github.com/repos/ChinaQuants/ipython/contents/IPython%2Ffrontend%2Fqt%2Fconsole%2Frich_ipython_widget.py?ref=c7e190dab47875a958ca022ea953e6bed84b238d", "patch": "@@ -231,7 +231,7 @@ def _get_image_tag(self, match, path = None, format = \"png\"):\n try:\n svg = str(self._name_to_svg_map[match.group(\"name\")])\n except KeyError:\n- return \"<b>Couldn't find image %s</b>\" % match.group(\"name\")\n+ return \"<b>Cannot convert a PNG to SVG. </b>To fix this, add this to your config: <span>c.InlineBackendConfig.figure_format = 'svg'</span>\"\n \n # Not currently checking path, because it's tricky to find a\n # cross-browser way to embed external SVG images (e.g., via" } ]
ipython
b0856dffe8876ca4b84613da7f099301fdd08a0f
fa31376612c997996f144802f2338ebf3acefe78
IPython/frontend/qt/console/console_widget.py
https://github.com/ChinaQuants/ipython
true
false
true
@@ -1670,7 +1670,7 @@ def _page(self, text, html=False): else: self.layout().setCurrentWidget(self._page_control) elif html: - self._append_plain_html(text) + self._append_html(text) else: self._append_plain_text(text)
elif html : self . _append_plain_html ( text )
elif html : self . _append_html ( text )
WRONG_FUNCTION_NAME
[["Update", ["identifier:_append_plain_html", 3, 18, 3, 36], "_append_html"]]
ChinaQuants/ipython@b0856dffe8876ca4b84613da7f099301fdd08a0f
[qtconsole] fix append_plain_html -> append_html
[ { "sha": "e4b3d6c2371735c7b05e644e40380e1c17a94c92", "filename": "IPython/frontend/qt/console/console_widget.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/ChinaQuants/ipython/blob/b0856dffe8876ca4b84613da7f099301fdd08a0f/IPython%2Ffrontend%2Fqt%2Fconsole%2Fconsole_widget.py", "raw_url": "https://github.com/ChinaQuants/ipython/raw/b0856dffe8876ca4b84613da7f099301fdd08a0f/IPython%2Ffrontend%2Fqt%2Fconsole%2Fconsole_widget.py", "contents_url": "https://api.github.com/repos/ChinaQuants/ipython/contents/IPython%2Ffrontend%2Fqt%2Fconsole%2Fconsole_widget.py?ref=b0856dffe8876ca4b84613da7f099301fdd08a0f", "patch": "@@ -1670,7 +1670,7 @@ def _page(self, text, html=False):\n else:\n self.layout().setCurrentWidget(self._page_control)\n elif html:\n- self._append_plain_html(text)\n+ self._append_html(text)\n else:\n self._append_plain_text(text)\n " } ]
ipython
c92f639c4e55004a532fa6937984f1de343fe4ce
e223878f8ed69da8c9ac26c5d82ac86b97b20bec
IPython/core/tests/test_history.py
https://github.com/ChinaQuants/ipython
true
false
true
@@ -92,7 +92,7 @@ def test_history(): # Cross testing: check that magic %save can get previous session. testfilename = os.path.realpath(os.path.join(tmpdir, "test.py")) - ip.magic_save(testfilename + " ~1/1-3") + ip.magic("save " + testfilename + " ~1/1-3") with py3compat.open(testfilename) as testfile: nt.assert_equal(testfile.read(), u"# coding: utf-8\n" + u"\n".join(hist))
ip . magic_save ( testfilename + " ~1/1-3" )
ip . magic ( "save " + testfilename + " ~1/1-3" )
SINGLE_STMT
[["Update", ["identifier:magic_save", 3, 16, 3, 26], "magic"], ["Insert", ["binary_operator", 3, 27, 3, 51], ["binary_operator", "N0"], 0], ["Insert", "N0", ["string:\"save \"", "T"], 0], ["Insert", "N0", ["+:+", "T"], 1], ["Move", "N0", ["identifier:testfilename", 3, 27, 3, 39], 2]]
ChinaQuants/ipython@c92f639c4e55004a532fa6937984f1de343fe4ce
Fix failing test that did direct magic access.
[ { "sha": "9e5e758936f0ca41a9275bf3d01eb93c522bc4b5", "filename": "IPython/core/tests/test_history.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/ChinaQuants/ipython/blob/c92f639c4e55004a532fa6937984f1de343fe4ce/IPython%2Fcore%2Ftests%2Ftest_history.py", "raw_url": "https://github.com/ChinaQuants/ipython/raw/c92f639c4e55004a532fa6937984f1de343fe4ce/IPython%2Fcore%2Ftests%2Ftest_history.py", "contents_url": "https://api.github.com/repos/ChinaQuants/ipython/contents/IPython%2Fcore%2Ftests%2Ftest_history.py?ref=c92f639c4e55004a532fa6937984f1de343fe4ce", "patch": "@@ -92,7 +92,7 @@ def test_history():\n \n # Cross testing: check that magic %save can get previous session.\n testfilename = os.path.realpath(os.path.join(tmpdir, \"test.py\"))\n- ip.magic_save(testfilename + \" ~1/1-3\")\n+ ip.magic(\"save \" + testfilename + \" ~1/1-3\")\n with py3compat.open(testfilename) as testfile:\n nt.assert_equal(testfile.read(),\n u\"# coding: utf-8\\n\" + u\"\\n\".join(hist))" } ]
ipython
e3752a8b2043460a8c1b421d3ef4ff46c2c7d10f
3f85cbcc8254e33140e2764306e87bf0c4e1f7a3
IPython/core/magic_functions.py
https://github.com/ChinaQuants/ipython
true
false
true
@@ -122,7 +122,7 @@ def magic(self, parameter_s=''): for mtype in ('line', 'cell'): escape = escapes[mtype] - for fname, fn in magics: + for fname, fn in magics[mtype].iteritems(): if mode == 'brief': # only first line
for fname , fn in magics : if mode == 'brief' :
for fname , fn in magics [ mtype ] . iteritems ( ) : if mode == 'brief' :
SINGLE_STMT
[["Insert", ["for_statement", 3, 13, 6, 38], ["call", "N0"], 3], ["Insert", "N0", ["attribute", "N1"], 0], ["Insert", "N0", ["argument_list", "N2"], 1], ["Insert", "N1", ["subscript", "N3"], 0], ["Insert", "N1", [".:.", "T"], 1], ["Insert", "N1", ["identifier:iteritems", "T"], 2], ["Insert", "N2", ["(:(", "T"], 0], ["Insert", "N2", ["):)", "T"], 1], ["Move", "N3", ["identifier:magics", 3, 30, 3, 36], 0], ["Insert", "N3", ["[:[", "T"], 1], ["Insert", "N3", ["identifier:mtype", "T"], 2], ["Insert", "N3", ["]:]", "T"], 3]]
ChinaQuants/ipython@e3752a8b2043460a8c1b421d3ef4ff46c2c7d10f
Fix %magic
[ { "sha": "e6866468ed4f10d0954eef86c1d08bf49a589096", "filename": "IPython/core/magic_functions.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/ChinaQuants/ipython/blob/e3752a8b2043460a8c1b421d3ef4ff46c2c7d10f/IPython%2Fcore%2Fmagic_functions.py", "raw_url": "https://github.com/ChinaQuants/ipython/raw/e3752a8b2043460a8c1b421d3ef4ff46c2c7d10f/IPython%2Fcore%2Fmagic_functions.py", "contents_url": "https://api.github.com/repos/ChinaQuants/ipython/contents/IPython%2Fcore%2Fmagic_functions.py?ref=e3752a8b2043460a8c1b421d3ef4ff46c2c7d10f", "patch": "@@ -122,7 +122,7 @@ def magic(self, parameter_s=''):\n \n for mtype in ('line', 'cell'):\n escape = escapes[mtype]\n- for fname, fn in magics:\n+ for fname, fn in magics[mtype].iteritems():\n \n if mode == 'brief':\n # only first line" } ]
ipython
f179c85c08c4dc100014b53dc0885274a906120d
6c9a5759a96824c767c18085249d7811f2740293
IPython/zmq/zmqshell.py
https://github.com/ChinaQuants/ipython
true
false
false
@@ -33,7 +33,7 @@ from IPython.core.autocall import ZMQExitAutocall from IPython.core.displaypub import DisplayPublisher from IPython.core.macro import Macro -from IPython.core.magic import MacroToEdit +from IPython.core.magic_functions import MacroToEdit from IPython.core.payloadpage import install_payload_page from IPython.lib.kernel import ( get_connection_file, get_connection_info, connect_qtconsole
from IPython . core . magic import MacroToEdit
from IPython . core . magic_functions import MacroToEdit
CHANGE_IDENTIFIER_USED
[["Update", ["identifier:magic", 3, 19, 3, 24], "magic_functions"]]
ChinaQuants/ipython@f179c85c08c4dc100014b53dc0885274a906120d
Fix import for zmqshell
[ { "sha": "518c76c1ee8e0f2c574b80a3b9eb99fcc3f1d37f", "filename": "IPython/zmq/zmqshell.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/ChinaQuants/ipython/blob/f179c85c08c4dc100014b53dc0885274a906120d/IPython%2Fzmq%2Fzmqshell.py", "raw_url": "https://github.com/ChinaQuants/ipython/raw/f179c85c08c4dc100014b53dc0885274a906120d/IPython%2Fzmq%2Fzmqshell.py", "contents_url": "https://api.github.com/repos/ChinaQuants/ipython/contents/IPython%2Fzmq%2Fzmqshell.py?ref=f179c85c08c4dc100014b53dc0885274a906120d", "patch": "@@ -33,7 +33,7 @@\n from IPython.core.autocall import ZMQExitAutocall\n from IPython.core.displaypub import DisplayPublisher\n from IPython.core.macro import Macro\n-from IPython.core.magic import MacroToEdit\n+from IPython.core.magic_functions import MacroToEdit\n from IPython.core.payloadpage import install_payload_page\n from IPython.lib.kernel import (\n get_connection_file, get_connection_info, connect_qtconsole" } ]
ipython
b537de1513fbab47bdb04e830ad6d8a8baca719b
cf15c8f0ed4493492b50e8fccb72175157afac7c
IPython/core/history.py
https://github.com/ChinaQuants/ipython
true
false
false
@@ -72,7 +72,7 @@ class HistoryAccessor(Configurable): By default, IPython will put the history database in the IPython profile directory. If you would rather share one history among - profiles, you ca set this value in each, so that they are consistent. + profiles, you can set this value in each, so that they are consistent. Due to an issue with fcntl, SQLite is known to misbehave on some NFS mounts. If you see IPython hanging, try setting this to something on a
profiles , you ca set this value in each , so that they are consistent . Due to an issue
profiles , you can set this value in each , so that they are consistent . Due to an issue
CHANGE_IDENTIFIER_USED
[["Update", ["identifier:ca", 3, 23, 3, 25], "can"]]
ChinaQuants/ipython@b537de1513fbab47bdb04e830ad6d8a8baca719b
Minor fixes as per @Carreau's review
[ { "sha": "317f76c1c51b197c30b05d21083acf543f8db5e2", "filename": "IPython/core/history.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/ChinaQuants/ipython/blob/b537de1513fbab47bdb04e830ad6d8a8baca719b/IPython%2Fcore%2Fhistory.py", "raw_url": "https://github.com/ChinaQuants/ipython/raw/b537de1513fbab47bdb04e830ad6d8a8baca719b/IPython%2Fcore%2Fhistory.py", "contents_url": "https://api.github.com/repos/ChinaQuants/ipython/contents/IPython%2Fcore%2Fhistory.py?ref=b537de1513fbab47bdb04e830ad6d8a8baca719b", "patch": "@@ -72,7 +72,7 @@ class HistoryAccessor(Configurable):\n \n By default, IPython will put the history database in the IPython\n profile directory. If you would rather share one history among\n- profiles, you ca set this value in each, so that they are consistent.\n+ profiles, you can set this value in each, so that they are consistent.\n \n Due to an issue with fcntl, SQLite is known to misbehave on some NFS\n mounts. If you see IPython hanging, try setting this to something on a" } ]
createsend-python
29e8f327059004d3241a10a9c07b7a7f9f8c80a2
633c37ee4adb1fe81690c7ada6267de9b535b920
setup.py
https://github.com/haonaturel/createsend-python
true
false
false
@@ -2,7 +2,7 @@ import sys import os from distutils.core import setup -from createsend import __version__ +from createsend.createsend import __version__ setup(name = "createsend", version = __version__,
from createsend import __version__
from createsend . createsend import __version__
SINGLE_STMT
[["Insert", ["dotted_name", 3, 6, 3, 16], [".:.", "T"], 1], ["Insert", ["dotted_name", 3, 6, 3, 16], ["identifier:createsend", "T"], 2]]
haonaturel/createsend-python@29e8f327059004d3241a10a9c07b7a7f9f8c80a2
fix setup in Python 3
[ { "sha": "147b6f22b192d11fc8e46b4885958b939f58e64b", "filename": "setup.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/haonaturel/createsend-python/blob/29e8f327059004d3241a10a9c07b7a7f9f8c80a2/setup.py", "raw_url": "https://github.com/haonaturel/createsend-python/raw/29e8f327059004d3241a10a9c07b7a7f9f8c80a2/setup.py", "contents_url": "https://api.github.com/repos/haonaturel/createsend-python/contents/setup.py?ref=29e8f327059004d3241a10a9c07b7a7f9f8c80a2", "patch": "@@ -2,7 +2,7 @@\n import os\n from distutils.core import setup\n \n-from createsend import __version__\n+from createsend.createsend import __version__\n \n setup(name = \"createsend\",\n version = __version__," } ]
facebook-python-ads-sdk
0c3bd271d238d0601b61bdb54c75b5273bd26476
c94697d84095d0ee00fc943fb01ec2ad6291187b
facebookads/objects.py
https://github.com/haonaturel/facebook-python-ads-sdk
true
false
false
@@ -848,7 +848,7 @@ class AdAccount(CannotCreate, CannotDelete, AbstractCrudObject): partner = 'partner' spend_cap = 'spend_cap' tax_id_status = 'tax_id_status' - timezon_id = 'timezone_id' + timezone_id = 'timezone_id' timezone_name = 'timezone_name' timezone_offset_hours_utc = 'timezone_offset_hours_utc' tos_accepted = 'tos_accepted'
timezon_id = 'timezone_id'
timezone_id = 'timezone_id'
CHANGE_IDENTIFIER_USED
[["Update", ["identifier:timezon_id", 3, 9, 3, 19], "timezone_id"]]
haonaturel/facebook-python-ads-sdk@0c3bd271d238d0601b61bdb54c75b5273bd26476
Fix typo timezone_id
[ { "sha": "f60241a1624d71bd9833171c74a469392e3bc473", "filename": "facebookads/objects.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/haonaturel/facebook-python-ads-sdk/blob/0c3bd271d238d0601b61bdb54c75b5273bd26476/facebookads%2Fobjects.py", "raw_url": "https://github.com/haonaturel/facebook-python-ads-sdk/raw/0c3bd271d238d0601b61bdb54c75b5273bd26476/facebookads%2Fobjects.py", "contents_url": "https://api.github.com/repos/haonaturel/facebook-python-ads-sdk/contents/facebookads%2Fobjects.py?ref=0c3bd271d238d0601b61bdb54c75b5273bd26476", "patch": "@@ -848,7 +848,7 @@ class Field(object):\n partner = 'partner'\n spend_cap = 'spend_cap'\n tax_id_status = 'tax_id_status'\n- timezon_id = 'timezone_id'\n+ timezone_id = 'timezone_id'\n timezone_name = 'timezone_name'\n timezone_offset_hours_utc = 'timezone_offset_hours_utc'\n tos_accepted = 'tos_accepted'" } ]
bot
4005c8071f6d005fcd415ee0190ba1f36a00fbaa
d90afa3014ff763a42a992a27f5e6883808e68db
start-payday.py
https://github.com/haonaturel/bot
true
false
true
@@ -8,7 +8,7 @@ import botlib class Paydays(botlib.Issues): def find_previous(self): - return self.hit_api('get', params={'labels': 'Payday'})[0] + return self.hit_api('get', params={'state': 'all', 'labels': 'Payday'})[0] def get_crew(self): return ('@whit537', '@clone1018', '@rohitpaulk')
return self . hit_api ( 'get' , params = { 'labels' : 'Payday' } ) [ 0 ]
return self . hit_api ( 'get' , params = { 'state' : 'all' , 'labels' : 'Payday' } ) [ 0 ]
ADD_ELEMENTS_TO_ITERABLE
[["Insert", ["dictionary", 3, 43, 3, 63], ["pair", "N0"], 1], ["Insert", ["dictionary", 3, 43, 3, 63], [",:,", "T"], 2], ["Insert", "N0", ["string:'state'", "T"], 0], ["Insert", "N0", [":::", "T"], 1], ["Insert", "N0", ["string:'all'", "T"], 2]]
haonaturel/bot@4005c8071f6d005fcd415ee0190ba1f36a00fbaa
Fix bug where we choked on closed paydays
[ { "sha": "bd90d0fcaf6d0f1516f5a7e841896609dd53d32f", "filename": "start-payday.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/haonaturel/bot/blob/4005c8071f6d005fcd415ee0190ba1f36a00fbaa/start-payday.py", "raw_url": "https://github.com/haonaturel/bot/raw/4005c8071f6d005fcd415ee0190ba1f36a00fbaa/start-payday.py", "contents_url": "https://api.github.com/repos/haonaturel/bot/contents/start-payday.py?ref=4005c8071f6d005fcd415ee0190ba1f36a00fbaa", "patch": "@@ -8,7 +8,7 @@\n class Paydays(botlib.Issues):\n \n def find_previous(self):\n- return self.hit_api('get', params={'labels': 'Payday'})[0]\n+ return self.hit_api('get', params={'state': 'all', 'labels': 'Payday'})[0]\n \n def get_crew(self):\n return ('@whit537', '@clone1018', '@rohitpaulk')" } ]
bot
daeeddcad51b3e6cafad579304c8b7286226c3d7
d70603e12880f586ebfe59592f71296091050daa
kick-off-payday.py
https://github.com/haonaturel/bot
true
false
true
@@ -19,7 +19,7 @@ class Paydays(botlib.Issues): prev_ticket_number = previous['number'] assert type(prev_ticket_number) is int, prev_ticket_number - prev_link = '[&larr; Payday {}]({}/{})' + prev_link = '[&larr; Payday {}]({}/{})\n\n------\n\n' prev_link = prev_link.format(prev_payday_number, self.urls['html'], previous['number']) n = int(prev_title.split()[-1])
prev_link = '[&larr; Payday {}]({}/{})'
prev_link = '[&larr; Payday {}]({}/{})\n\n------\n\n'
CHANGE_STRING_LITERAL
[["Update", ["string:'[&larr; Payday {}]({}/{})'", 3, 21, 3, 48], "'[&larr; Payday {}]({}/{})\\n\\n------\\n\\n'"]]
haonaturel/bot@daeeddcad51b3e6cafad579304c8b7286226c3d7
Fix bug where payday was getting eaten alive
[ { "sha": "5b9f23f3a145ac339fef174d17ecd7146b4de1c4", "filename": "kick-off-payday.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/haonaturel/bot/blob/daeeddcad51b3e6cafad579304c8b7286226c3d7/kick-off-payday.py", "raw_url": "https://github.com/haonaturel/bot/raw/daeeddcad51b3e6cafad579304c8b7286226c3d7/kick-off-payday.py", "contents_url": "https://api.github.com/repos/haonaturel/bot/contents/kick-off-payday.py?ref=daeeddcad51b3e6cafad579304c8b7286226c3d7", "patch": "@@ -19,7 +19,7 @@ def create_next(self, previous):\n prev_ticket_number = previous['number']\n assert type(prev_ticket_number) is int, prev_ticket_number\n \n- prev_link = '[&larr; Payday {}]({}/{})'\n+ prev_link = '[&larr; Payday {}]({}/{})\\n\\n------\\n\\n'\n prev_link = prev_link.format(prev_payday_number, self.urls['html'], previous['number'])\n \n n = int(prev_title.split()[-1])" } ]
ssh
13f818f04f53573f873a019874dbdaec79e5365e
a8a023a2432753bc6bdfdd0011b66869642845d0
paramiko/channel.py
https://github.com/haonaturel/ssh
true
false
true
@@ -828,7 +828,7 @@ class Channel (object): return x = self.in_buffer[:nbytes] self.in_buffer = self.in_buffer[nbytes:] - os.write(self.pipd_wfd, x) + os.write(self.pipe_wfd, x) def _unlink(self): if self.closed or not self.active:
os . write ( self . pipd_wfd , x )
os . write ( self . pipe_wfd , x )
CHANGE_ATTRIBUTE_USED
[["Update", ["identifier:pipd_wfd", 3, 23, 3, 31], "pipe_wfd"]]
haonaturel/ssh@13f818f04f53573f873a019874dbdaec79e5365e
[project @ Arch-1:[email protected]%secsh--dev--1.0--patch-114] fix typo in channel fix typo that alain found: pipd_wfd -> pipe_wfd.
[ { "sha": "833ae62b3226fd14af6f69476f7985a13cbca995", "filename": "paramiko/channel.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/haonaturel/ssh/blob/13f818f04f53573f873a019874dbdaec79e5365e/paramiko%2Fchannel.py", "raw_url": "https://github.com/haonaturel/ssh/raw/13f818f04f53573f873a019874dbdaec79e5365e/paramiko%2Fchannel.py", "contents_url": "https://api.github.com/repos/haonaturel/ssh/contents/paramiko%2Fchannel.py?ref=13f818f04f53573f873a019874dbdaec79e5365e", "patch": "@@ -828,7 +828,7 @@ def _push_pipe(self, nbytes):\n return\n x = self.in_buffer[:nbytes]\n self.in_buffer = self.in_buffer[nbytes:]\n- os.write(self.pipd_wfd, x)\n+ os.write(self.pipe_wfd, x)\n \n def _unlink(self):\n if self.closed or not self.active:" } ]
python-instagram
87dc812ac90befa931908c5712983276a3b3d58a
e885e7d004e42ddedbace604709f44e927575024
instagram/oauth2.py
https://github.com/haonature888/python-instagram
true
false
true
@@ -153,7 +153,7 @@ class OAuth2Request(object): def _post_body(self, params): return urlencode(params) - def _encode_multipart(params, files): + def _encode_multipart(self, params, files): boundary = "MuL7Ip4rt80uND4rYF0o" def get_content_type(file_name):
def _encode_multipart ( params , files ) : boundary = "MuL7Ip4rt80uND4rYF0o" def get_content_type ( file_name ) :
def _encode_multipart ( self , params , files ) : boundary = "MuL7Ip4rt80uND4rYF0o" def get_content_type ( file_name ) :
SINGLE_STMT
[["Move", ["identifier:params", 3, 27, 3, 33], ["parameters", 3, 26, 3, 41], 2], ["Insert", ["parameters", 3, 26, 3, 41], ["identifier:self", "T"], 1], ["Insert", ["parameters", 3, 26, 3, 41], [",:,", "T"], 3]]
haonature888/python-instagram@87dc812ac90befa931908c5712983276a3b3d58a
fix _encode_multipart bug _encode_multipart add self param
[ { "sha": "52272f1c0b9ed901e29b1df91eb0cbf4061ee8ac", "filename": "instagram/oauth2.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/haonature888/python-instagram/blob/87dc812ac90befa931908c5712983276a3b3d58a/instagram%2Foauth2.py", "raw_url": "https://github.com/haonature888/python-instagram/raw/87dc812ac90befa931908c5712983276a3b3d58a/instagram%2Foauth2.py", "contents_url": "https://api.github.com/repos/haonature888/python-instagram/contents/instagram%2Foauth2.py?ref=87dc812ac90befa931908c5712983276a3b3d58a", "patch": "@@ -153,7 +153,7 @@ def _auth_query(self, include_secret=False):\n def _post_body(self, params):\n return urlencode(params)\n \n- def _encode_multipart(params, files):\n+ def _encode_multipart(self, params, files):\n boundary = \"MuL7Ip4rt80uND4rYF0o\"\n \n def get_content_type(file_name):" } ]
ssh
13f818f04f53573f873a019874dbdaec79e5365e
a8a023a2432753bc6bdfdd0011b66869642845d0
paramiko/channel.py
https://github.com/haonature888/ssh
true
false
true
@@ -828,7 +828,7 @@ class Channel (object): return x = self.in_buffer[:nbytes] self.in_buffer = self.in_buffer[nbytes:] - os.write(self.pipd_wfd, x) + os.write(self.pipe_wfd, x) def _unlink(self): if self.closed or not self.active:
os . write ( self . pipd_wfd , x )
os . write ( self . pipe_wfd , x )
CHANGE_ATTRIBUTE_USED
[["Update", ["identifier:pipd_wfd", 3, 23, 3, 31], "pipe_wfd"]]
haonature888/ssh@13f818f04f53573f873a019874dbdaec79e5365e
[project @ Arch-1:[email protected]%secsh--dev--1.0--patch-114] fix typo in channel fix typo that alain found: pipd_wfd -> pipe_wfd.
[ { "sha": "833ae62b3226fd14af6f69476f7985a13cbca995", "filename": "paramiko/channel.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/haonature888/ssh/blob/13f818f04f53573f873a019874dbdaec79e5365e/paramiko%2Fchannel.py", "raw_url": "https://github.com/haonature888/ssh/raw/13f818f04f53573f873a019874dbdaec79e5365e/paramiko%2Fchannel.py", "contents_url": "https://api.github.com/repos/haonature888/ssh/contents/paramiko%2Fchannel.py?ref=13f818f04f53573f873a019874dbdaec79e5365e", "patch": "@@ -828,7 +828,7 @@ def _push_pipe(self, nbytes):\n return\n x = self.in_buffer[:nbytes]\n self.in_buffer = self.in_buffer[nbytes:]\n- os.write(self.pipd_wfd, x)\n+ os.write(self.pipe_wfd, x)\n \n def _unlink(self):\n if self.closed or not self.active:" } ]
ssh
4862d5955bdc8275fe5c03cceffd73448b130812
112b72511eb186175839fc0b8d66b6fc6dcdb534
demo_simple.py
https://github.com/haonature888/ssh
true
false
false
@@ -113,7 +113,7 @@ try: t.close() except Exception, e: - print '*** Caught exception: ' + str(e.__class__) + ': ' + str(e) + print '*** Caught exception: %s: %s' % (e.__class__, e) traceback.print_exc() try: t.close()
'*** Caught exception: ' + str ( e . __class__ ) + ': ' + str ( e )
'*** Caught exception: %s: %s' % ( e . __class__ , e )
SINGLE_STMT
[["Update", ["string:'*** Caught exception: '", 3, 11, 3, 35], "'*** Caught exception: %s: %s'"], ["Move", ["binary_operator", 3, 11, 3, 70], ["string:'*** Caught exception: '", 3, 11, 3, 35], 0], ["Insert", ["binary_operator", 3, 11, 3, 70], ["%:%", "T"], 1], ["Insert", ["binary_operator", 3, 11, 3, 70], ["tuple", "N0"], 2], ["Move", "N0", ["(:(", 3, 41, 3, 42], 0], ["Move", "N0", ["attribute", 3, 42, 3, 53], 1], ["Insert", "N0", [",:,", "T"], 2], ["Move", "N0", ["identifier:e", 3, 68, 3, 69], 3], ["Move", "N0", ["):)", 3, 69, 3, 70], 4], ["Delete", ["+:+", 3, 36, 3, 37]], ["Delete", ["identifier:str", 3, 38, 3, 41]], ["Delete", ["):)", 3, 53, 3, 54]], ["Delete", ["argument_list", 3, 41, 3, 54]], ["Delete", ["call", 3, 38, 3, 54]], ["Delete", ["binary_operator", 3, 11, 3, 54]], ["Delete", ["+:+", 3, 55, 3, 56]], ["Delete", ["string:': '", 3, 57, 3, 61]], ["Delete", ["binary_operator", 3, 11, 3, 61]], ["Delete", ["+:+", 3, 62, 3, 63]], ["Delete", ["identifier:str", 3, 64, 3, 67]], ["Delete", ["(:(", 3, 67, 3, 68]], ["Delete", ["argument_list", 3, 67, 3, 70]], ["Delete", ["call", 3, 64, 3, 70]]]
haonature888/ssh@4862d5955bdc8275fe5c03cceffd73448b130812
[project @ Arch-1:[email protected]%paramiko--dev--1--patch-57] simplify a line of debug output in demo_simple that bothered me one day
[ { "sha": "7eade45bb3913d9382addbddb8cfe0208dcfe7c0", "filename": "demo_simple.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/haonature888/ssh/blob/4862d5955bdc8275fe5c03cceffd73448b130812/demo_simple.py", "raw_url": "https://github.com/haonature888/ssh/raw/4862d5955bdc8275fe5c03cceffd73448b130812/demo_simple.py", "contents_url": "https://api.github.com/repos/haonature888/ssh/contents/demo_simple.py?ref=4862d5955bdc8275fe5c03cceffd73448b130812", "patch": "@@ -113,7 +113,7 @@\n t.close()\n \n except Exception, e:\n- print '*** Caught exception: ' + str(e.__class__) + ': ' + str(e)\n+ print '*** Caught exception: %s: %s' % (e.__class__, e)\n traceback.print_exc()\n try:\n t.close()" } ]
ssh
be7d99886c2f6223d8f0899e4864c11f922dfde8
cb3008b402a6a411204d0dc060c2f85a548c1c7e
paramiko/util.py
https://github.com/haonature888/ssh
true
false
true
@@ -294,7 +294,7 @@ def lookup_ssh_host_config(hostname, config): matches = [x for x in config if fnmatch.fnmatch(hostname, x['host'])] # sort in order of shortest match (usually '*') to longest - matches.sort(key=lambda x: len(x['host'])) + matches.sort(lambda x,y: cmp(len(x['host']), len(x['host']))) ret = {} for m in matches: ret.update(m)
matches . sort ( key = lambda x : len ( x [ 'host' ] ) )
matches . sort ( lambda x , y : cmp ( len ( x [ 'host' ] ) , len ( x [ 'host' ] ) ) )
SINGLE_STMT
[["Move", ["argument_list", 2, 17, 2, 47], ["lambda", 2, 22, 2, 46], 1], ["Insert", ["lambda", 2, 22, 2, 46], ["call", "N0"], 3], ["Insert", ["lambda_parameters", 2, 29, 2, 30], [",:,", "T"], 1], ["Insert", ["lambda_parameters", 2, 29, 2, 30], ["identifier:y", "T"], 2], ["Insert", "N0", ["identifier:cmp", "T"], 0], ["Insert", "N0", ["argument_list", "N1"], 1], ["Insert", "N1", ["(:(", "T"], 0], ["Move", "N1", ["call", 2, 32, 2, 46], 1], ["Insert", "N1", [",:,", "T"], 2], ["Insert", "N1", ["call", "N2"], 3], ["Insert", "N1", ["):)", "T"], 4], ["Insert", "N2", ["identifier:len", "T"], 0], ["Insert", "N2", ["argument_list", "N3"], 1], ["Insert", "N3", ["(:(", "T"], 0], ["Insert", "N3", ["subscript", "N4"], 1], ["Insert", "N3", ["):)", "T"], 2], ["Insert", "N4", ["identifier:x", "T"], 0], ["Insert", "N4", ["[:[", "T"], 1], ["Insert", "N4", ["string:'host'", "T"], 2], ["Insert", "N4", ["]:]", "T"], 3], ["Delete", ["identifier:key", 2, 18, 2, 21]], ["Delete", ["=:=", 2, 21, 2, 22]], ["Delete", ["keyword_argument", 2, 18, 2, 46]]]
haonature888/ssh@be7d99886c2f6223d8f0899e4864c11f922dfde8
[project @ [email protected]] patch from jan hudec to fix a python 2.4-ism
[ { "sha": "809b850d43a790806c23117d7cef11259de839c7", "filename": "paramiko/util.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/haonature888/ssh/blob/be7d99886c2f6223d8f0899e4864c11f922dfde8/paramiko%2Futil.py", "raw_url": "https://github.com/haonature888/ssh/raw/be7d99886c2f6223d8f0899e4864c11f922dfde8/paramiko%2Futil.py", "contents_url": "https://api.github.com/repos/haonature888/ssh/contents/paramiko%2Futil.py?ref=be7d99886c2f6223d8f0899e4864c11f922dfde8", "patch": "@@ -294,7 +294,7 @@ def lookup_ssh_host_config(hostname, config):\n \"\"\"\n matches = [x for x in config if fnmatch.fnmatch(hostname, x['host'])]\n # sort in order of shortest match (usually '*') to longest\n- matches.sort(key=lambda x: len(x['host']))\n+ matches.sort(lambda x,y: cmp(len(x['host']), len(x['host'])))\n ret = {}\n for m in matches:\n ret.update(m)" } ]
cpython
ceb93f4540981e3f9af66bd936920186aba813fc
b331f80b4765c3201674a7ca8b35c9d4a65efe79
Lib/test/test_listcomps.py
https://github.com/mtorromeo/cpython
true
false
true
@@ -139,7 +139,7 @@ def test_main(verbose=None): import gc counts = [None] * 5 for i in range(len(counts)): - support.run_doctest(test_genexps, verbose) + support.run_doctest(test_listcomps, verbose) gc.collect() counts[i] = sys.gettotalrefcount() print(counts)
support . run_doctest ( test_genexps , verbose )
support . run_doctest ( test_listcomps , verbose )
CHANGE_IDENTIFIER_USED
[["Update", ["identifier:test_genexps", 3, 33, 3, 45], "test_listcomps"]]
mtorromeo/cpython@ceb93f4540981e3f9af66bd936920186aba813fc
bpo-31027: Fix test_listcomps failure when run directly (#2939) Bug appears to be incomplete copy-paste-edit.
[ { "sha": "ddb169fe58957c22e38593ee69acc8663024838d", "filename": "Lib/test/test_listcomps.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/mtorromeo/cpython/blob/ceb93f4540981e3f9af66bd936920186aba813fc/Lib%2Ftest%2Ftest_listcomps.py", "raw_url": "https://github.com/mtorromeo/cpython/raw/ceb93f4540981e3f9af66bd936920186aba813fc/Lib%2Ftest%2Ftest_listcomps.py", "contents_url": "https://api.github.com/repos/mtorromeo/cpython/contents/Lib%2Ftest%2Ftest_listcomps.py?ref=ceb93f4540981e3f9af66bd936920186aba813fc", "patch": "@@ -139,7 +139,7 @@ def test_main(verbose=None):\n import gc\n counts = [None] * 5\n for i in range(len(counts)):\n- support.run_doctest(test_genexps, verbose)\n+ support.run_doctest(test_listcomps, verbose)\n gc.collect()\n counts[i] = sys.gettotalrefcount()\n print(counts)" } ]
cpython
ebfaa71c2e8c018f72c179395dafaf06dcaf29e2
d34d8fc24f23ccff5de03c9277da5acbbdc30e90
Lib/test/test_pydoc.py
https://github.com/mtorromeo/cpython
true
false
true
@@ -357,7 +357,7 @@ def get_pydoc_html(module): def get_pydoc_link(module): "Returns a documentation web link of a module" dirname = os.path.dirname - basedir = dirname(dirname(os.path.realpath(__file__))) + basedir = dirname(dirname(__file__)) doc = pydoc.TextDoc() loc = doc.getdocloc(module, basedir=basedir) return loc
basedir = dirname ( dirname ( os . path . realpath ( __file__ ) ) )
basedir = dirname ( dirname ( __file__ ) )
SINGLE_STMT
[["Delete", ["(:(", 3, 30, 3, 31]], ["Delete", ["identifier:os", 3, 31, 3, 33]], ["Delete", [".:.", 3, 33, 3, 34]], ["Delete", ["identifier:path", 3, 34, 3, 38]], ["Delete", ["attribute", 3, 31, 3, 38]], ["Delete", [".:.", 3, 38, 3, 39]], ["Delete", ["identifier:realpath", 3, 39, 3, 47]], ["Delete", ["attribute", 3, 31, 3, 47]], ["Delete", ["call", 3, 31, 3, 57]], ["Delete", ["):)", 3, 57, 3, 58]]]
mtorromeo/cpython@ebfaa71c2e8c018f72c179395dafaf06dcaf29e2
bpo-32031: Fix pydoc `test_mixed_case_module_names_are_lower_cased` (GH-4441) When there is a symlink in the directory path of the standard library.
[ { "sha": "1926cffba263a27bbe1c260e1bd4d1da4a773842", "filename": "Lib/test/test_pydoc.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/mtorromeo/cpython/blob/ebfaa71c2e8c018f72c179395dafaf06dcaf29e2/Lib%2Ftest%2Ftest_pydoc.py", "raw_url": "https://github.com/mtorromeo/cpython/raw/ebfaa71c2e8c018f72c179395dafaf06dcaf29e2/Lib%2Ftest%2Ftest_pydoc.py", "contents_url": "https://api.github.com/repos/mtorromeo/cpython/contents/Lib%2Ftest%2Ftest_pydoc.py?ref=ebfaa71c2e8c018f72c179395dafaf06dcaf29e2", "patch": "@@ -357,7 +357,7 @@ def get_pydoc_html(module):\n def get_pydoc_link(module):\n \"Returns a documentation web link of a module\"\n dirname = os.path.dirname\n- basedir = dirname(dirname(os.path.realpath(__file__)))\n+ basedir = dirname(dirname(__file__))\n doc = pydoc.TextDoc()\n loc = doc.getdocloc(module, basedir=basedir)\n return loc" } ]
cpython
28e61650b23119b68cd7943ccc01b8b9af1b4103
c9409f7c4533f75b11a4c44e839d95a1403f8a0a
Lib/asyncio/base_events.py
https://github.com/mtorromeo/cpython
true
false
false
@@ -860,7 +860,7 @@ class BaseEventLoop(events.AbstractEventLoop): addr_pairs_info = (((family, proto), (None, None)),) elif hasattr(socket, 'AF_UNIX') and family == socket.AF_UNIX: for addr in (local_addr, remote_addr): - if addr is not None and not isistance(addr, str): + if addr is not None and not isinstance(addr, str): raise TypeError('string is expected') addr_pairs_info = (((family, proto), (local_addr, remote_addr)), )
if addr is not None and not isistance ( addr , str ) : raise TypeError ( 'string is expected' )
if addr is not None and not isinstance ( addr , str ) : raise TypeError ( 'string is expected' )
WRONG_FUNCTION_NAME
[["Update", ["identifier:isistance", 3, 49, 3, 58], "isinstance"]]
mtorromeo/cpython@28e61650b23119b68cd7943ccc01b8b9af1b4103
bpo-31245: asyncio: Fix typo, isistance => isinstance (#4594)
[ { "sha": "ffdb50f4beea3ea07ed309cdead7251f7a906be5", "filename": "Lib/asyncio/base_events.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/mtorromeo/cpython/blob/28e61650b23119b68cd7943ccc01b8b9af1b4103/Lib%2Fasyncio%2Fbase_events.py", "raw_url": "https://github.com/mtorromeo/cpython/raw/28e61650b23119b68cd7943ccc01b8b9af1b4103/Lib%2Fasyncio%2Fbase_events.py", "contents_url": "https://api.github.com/repos/mtorromeo/cpython/contents/Lib%2Fasyncio%2Fbase_events.py?ref=28e61650b23119b68cd7943ccc01b8b9af1b4103", "patch": "@@ -860,7 +860,7 @@ def create_datagram_endpoint(self, protocol_factory,\n addr_pairs_info = (((family, proto), (None, None)),)\n elif hasattr(socket, 'AF_UNIX') and family == socket.AF_UNIX:\n for addr in (local_addr, remote_addr):\n- if addr is not None and not isistance(addr, str):\n+ if addr is not None and not isinstance(addr, str):\n raise TypeError('string is expected')\n addr_pairs_info = (((family, proto),\n (local_addr, remote_addr)), )" } ]
PyPDF2
7456f0acea0a68a155d0a734b4d1997023eb462c
cf269ddfa9f22a29676e3cb6e1e3b622199aa000
PyPDF2/pdf.py
https://github.com/Shoobx/PyPDF2
true
false
true
@@ -2142,7 +2142,7 @@ class PageObject(DictionaryObject): page2Res = res2.get(resource, DictionaryObject()).getObject() renameRes = {} for key in list(page2Res.keys()): - if key in newRes and newRes[key] != page2Res[key]: + if key in newRes and newRes.raw_get(key) != page2Res.raw_get(key): newname = NameObject(key + str(uuid.uuid4())) renameRes[key] = newname newRes[newname] = page2Res[key]
if key in newRes and newRes [ key ] != page2Res [ key ] : newname = NameObject ( key + str ( uuid . uuid4 ( ) ) ) renameRes [ key ] = newname newRes [ newname ] = page2Res [ key ]
if key in newRes and newRes . raw_get ( key ) != page2Res . raw_get ( key ) : newname = NameObject ( key + str ( uuid . uuid4 ( ) ) ) renameRes [ key ] = newname newRes [ newname ] = page2Res [ key ]
CHANGE_BINARY_OPERAND
[["Insert", ["comparison_operator", 3, 34, 3, 62], ["call", "N0"], 0], ["Insert", ["comparison_operator", 3, 34, 3, 62], ["call", "N1"], 3], ["Insert", "N0", ["attribute", "N2"], 0], ["Insert", "N0", ["argument_list", "N3"], 1], ["Insert", "N1", ["attribute", "N4"], 0], ["Insert", "N1", ["argument_list", "N5"], 1], ["Move", "N2", ["identifier:newRes", 3, 34, 3, 40], 0], ["Insert", "N2", [".:.", "T"], 1], ["Insert", "N2", ["identifier:raw_get", "T"], 2], ["Insert", "N3", ["(:(", "T"], 0], ["Move", "N3", ["identifier:key", 3, 41, 3, 44], 1], ["Insert", "N3", ["):)", "T"], 2], ["Move", "N4", ["identifier:page2Res", 3, 49, 3, 57], 0], ["Insert", "N4", [".:.", "T"], 1], ["Insert", "N4", ["identifier:raw_get", "T"], 2], ["Insert", "N5", ["(:(", "T"], 0], ["Move", "N5", ["identifier:key", 3, 58, 3, 61], 1], ["Insert", "N5", ["):)", "T"], 2], ["Delete", ["[:[", 3, 40, 3, 41]], ["Delete", ["]:]", 3, 44, 3, 45]], ["Delete", ["subscript", 3, 34, 3, 45]], ["Delete", ["[:[", 3, 57, 3, 58]], ["Delete", ["]:]", 3, 61, 3, 62]], ["Delete", ["subscript", 3, 49, 3, 62]]]
Shoobx/PyPDF2@7456f0acea0a68a155d0a734b4d1997023eb462c
Stronger equality test for resource values Fixes #182
[ { "sha": "f54680a74be629b1a7c3c15f19b0d54af2dfcd81", "filename": "PyPDF2/pdf.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/Shoobx/PyPDF2/blob/7456f0acea0a68a155d0a734b4d1997023eb462c/PyPDF2%2Fpdf.py", "raw_url": "https://github.com/Shoobx/PyPDF2/raw/7456f0acea0a68a155d0a734b4d1997023eb462c/PyPDF2%2Fpdf.py", "contents_url": "https://api.github.com/repos/Shoobx/PyPDF2/contents/PyPDF2%2Fpdf.py?ref=7456f0acea0a68a155d0a734b4d1997023eb462c", "patch": "@@ -2142,7 +2142,7 @@ def _mergeResources(res1, res2, resource):\n page2Res = res2.get(resource, DictionaryObject()).getObject()\n renameRes = {}\n for key in list(page2Res.keys()):\n- if key in newRes and newRes[key] != page2Res[key]:\n+ if key in newRes and newRes.raw_get(key) != page2Res.raw_get(key):\n newname = NameObject(key + str(uuid.uuid4()))\n renameRes[key] = newname\n newRes[newname] = page2Res[key]" } ]
PyPDF2
8ba44f2099f8971cd8885c8bc3786ca2455157f6
4fc7f9d14adb2a9b890aea2616955ec54229f48c
PyPDF2/merger.py
https://github.com/Shoobx/PyPDF2
true
false
true
@@ -113,7 +113,7 @@ class PdfFileMerger(object): if isString(fileobj): fileobj = file(fileobj, 'rb') my_file = True - elif isinstance(fileobj, file): + elif hasattr(fileobj, "seek") and hasattr(fileobj, "read"): fileobj.seek(0) filecontent = fileobj.read() fileobj = StreamIO(filecontent)
if isString ( fileobj ) : fileobj = file ( fileobj , 'rb' ) my_file = True elif isinstance ( fileobj , file ) : fileobj . seek ( 0 ) filecontent = fileobj . read ( ) fileobj = StreamIO ( filecontent )
if isString ( fileobj ) : fileobj = file ( fileobj , 'rb' ) my_file = True elif hasattr ( fileobj , "seek" ) and hasattr ( fileobj , "read" ) : fileobj . seek ( 0 ) filecontent = fileobj . read ( ) fileobj = StreamIO ( filecontent )
SINGLE_STMT
[["Insert", ["elif_clause", 3, 9, 6, 44], ["boolean_operator", "N0"], 1], ["Move", "N0", ["call", 3, 14, 3, 39], 0], ["Insert", "N0", ["and:and", "T"], 1], ["Insert", "N0", ["call", "N1"], 2], ["Update", ["identifier:isinstance", 3, 14, 3, 24], "hasattr"], ["Insert", "N1", ["identifier:hasattr", "T"], 0], ["Insert", "N1", ["argument_list", "N2"], 1], ["Insert", ["argument_list", 3, 24, 3, 39], ["string:\"seek\"", "T"], 3], ["Insert", "N2", ["(:(", "T"], 0], ["Insert", "N2", ["identifier:fileobj", "T"], 1], ["Insert", "N2", [",:,", "T"], 2], ["Insert", "N2", ["string:\"read\"", "T"], 3], ["Insert", "N2", ["):)", "T"], 4], ["Delete", ["identifier:file", 3, 34, 3, 38]]]
Shoobx/PyPDF2@8ba44f2099f8971cd8885c8bc3786ca2455157f6
Fix PdfFileMerger for file objects on Python 3. The previous check was always evaluated to False on Python 3, so I replaced it with a duck-typing one compatible with both Python versions.
[ { "sha": "c3373e445963ab731d7bcdadb08b4d1eaff4125c", "filename": "PyPDF2/merger.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/Shoobx/PyPDF2/blob/8ba44f2099f8971cd8885c8bc3786ca2455157f6/PyPDF2%2Fmerger.py", "raw_url": "https://github.com/Shoobx/PyPDF2/raw/8ba44f2099f8971cd8885c8bc3786ca2455157f6/PyPDF2%2Fmerger.py", "contents_url": "https://api.github.com/repos/Shoobx/PyPDF2/contents/PyPDF2%2Fmerger.py?ref=8ba44f2099f8971cd8885c8bc3786ca2455157f6", "patch": "@@ -113,7 +113,7 @@ def merge(self, position, fileobj, bookmark=None, pages=None, import_bookmarks=T\n if isString(fileobj):\n fileobj = file(fileobj, 'rb')\n my_file = True\n- elif isinstance(fileobj, file):\n+ elif hasattr(fileobj, \"seek\") and hasattr(fileobj, \"read\"):\n fileobj.seek(0)\n filecontent = fileobj.read()\n fileobj = StreamIO(filecontent)" } ]
PyPDF2
77629e6266709b65cce24d6346b8be0bfa29cf90
d7f5eafddb46106850c036d118d33f0416ef3441
PyPDF2/generic.py
https://github.com/Shoobx/PyPDF2
true
false
true
@@ -363,7 +363,7 @@ def readStringFromStream(stream): b_('$') : b_('$'), } try: - tok = escape_dict[tok] + tok = ESCAPE_DICT[tok] except KeyError: if tok.isdigit(): # "The number ddd may consist of one, two, or three
tok = escape_dict [ tok ]
tok = ESCAPE_DICT [ tok ]
CHANGE_IDENTIFIER_USED
[["Update", ["identifier:escape_dict", 3, 23, 3, 34], "ESCAPE_DICT"]]
Shoobx/PyPDF2@77629e6266709b65cce24d6346b8be0bfa29cf90
Correct name error
[ { "sha": "959957ddee7bfa3fead1fade0986518c0cab2ee1", "filename": "PyPDF2/generic.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/Shoobx/PyPDF2/blob/77629e6266709b65cce24d6346b8be0bfa29cf90/PyPDF2%2Fgeneric.py", "raw_url": "https://github.com/Shoobx/PyPDF2/raw/77629e6266709b65cce24d6346b8be0bfa29cf90/PyPDF2%2Fgeneric.py", "contents_url": "https://api.github.com/repos/Shoobx/PyPDF2/contents/PyPDF2%2Fgeneric.py?ref=77629e6266709b65cce24d6346b8be0bfa29cf90", "patch": "@@ -363,7 +363,7 @@ def readStringFromStream(stream):\n b_('$') : b_('$'),\n }\n try:\n- tok = escape_dict[tok]\n+ tok = ESCAPE_DICT[tok]\n except KeyError:\n if tok.isdigit():\n # \"The number ddd may consist of one, two, or three" } ]
PyPDF2
83ff6fea00de5ab0b82704c4e8aa4258c9e504d6
a1bfcedf8083c85ec8d80583e8c0e853366d25fc
PyPDF2/pdf.py
https://github.com/Shoobx/PyPDF2
true
false
true
@@ -2057,7 +2057,7 @@ class PdfFileReader(object): if encrypt['/Filter'] != '/Standard': raise NotImplementedError("only Standard PDF encryption handler is available") if not (encrypt['/V'] in (1, 2)): - raise NotImplementedError("only algorithm code 1 and 2 are supported") + raise NotImplementedError("only algorithm code 1 and 2 are supported. This PDF uses code %s" % encrypt['/V']) user_password, key = self._authenticateUserPassword(password) if user_password: self._decryption_key = key
raise NotImplementedError ( "only algorithm code 1 and 2 are supported" )
raise NotImplementedError ( "only algorithm code 1 and 2 are supported. This PDF uses code %s" % encrypt [ '/V' ] )
SINGLE_STMT
[["Insert", ["argument_list", 3, 38, 3, 83], ["binary_operator", "N0"], 1], ["Update", ["string:\"only algorithm code 1 and 2 are supported\"", 3, 39, 3, 82], "\"only algorithm code 1 and 2 are supported. This PDF uses code %s\""], ["Move", "N0", ["string:\"only algorithm code 1 and 2 are supported\"", 3, 39, 3, 82], 0], ["Insert", "N0", ["%:%", "T"], 1], ["Insert", "N0", ["subscript", "N1"], 2], ["Insert", "N1", ["identifier:encrypt", "T"], 0], ["Insert", "N1", ["[:[", "T"], 1], ["Insert", "N1", ["string:'/V'", "T"], 2], ["Insert", "N1", ["]:]", "T"], 3]]
Shoobx/PyPDF2@83ff6fea00de5ab0b82704c4e8aa4258c9e504d6
Additional error output for failed encryption
[ { "sha": "58f4ca6ee8344e20965515bc046b3396cc66533e", "filename": "PyPDF2/pdf.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/Shoobx/PyPDF2/blob/83ff6fea00de5ab0b82704c4e8aa4258c9e504d6/PyPDF2%2Fpdf.py", "raw_url": "https://github.com/Shoobx/PyPDF2/raw/83ff6fea00de5ab0b82704c4e8aa4258c9e504d6/PyPDF2%2Fpdf.py", "contents_url": "https://api.github.com/repos/Shoobx/PyPDF2/contents/PyPDF2%2Fpdf.py?ref=83ff6fea00de5ab0b82704c4e8aa4258c9e504d6", "patch": "@@ -2057,7 +2057,7 @@ def _decrypt(self, password):\n if encrypt['/Filter'] != '/Standard':\n raise NotImplementedError(\"only Standard PDF encryption handler is available\")\n if not (encrypt['/V'] in (1, 2)):\n- raise NotImplementedError(\"only algorithm code 1 and 2 are supported\")\n+ raise NotImplementedError(\"only algorithm code 1 and 2 are supported. This PDF uses code %s\" % encrypt['/V'])\n user_password, key = self._authenticateUserPassword(password)\n if user_password:\n self._decryption_key = key" } ]
PyPDF2
5971df1aeaa29065e18ff2c23443e4abef9082e8
1775bdc4b9b3281a31b7d966223b8f02f53ba5fc
PyPDF2/generic.py
https://github.com/Shoobx/PyPDF2
true
false
true
@@ -459,7 +459,7 @@ class TextStringObject(utils.string_type, PdfObject): else: stream.write(b_("(")) for c in bytearr: - if not chr_(c).isalnum() and c != b_(' '): + if not chr_(c).isalnum() and c not in b_(' /_'): stream.write(b_("\\%03o" % ord_(c))) else: stream.write(b_(chr_(c)))
if not chr_ ( c ) . isalnum ( ) and c != b_ ( ' ' ) : stream . write ( b_ ( "\\%03o" % ord_ ( c ) ) ) else : stream . write ( b_ ( chr_ ( c ) ) )
if not chr_ ( c ) . isalnum ( ) and c not in b_ ( ' /_' ) : stream . write ( b_ ( "\\%03o" % ord_ ( c ) ) ) else : stream . write ( b_ ( chr_ ( c ) ) )
CHANGE_BINARY_OPERAND
[["Insert", ["comparison_operator", 3, 46, 3, 58], ["not:not", "T"], 1], ["Insert", ["comparison_operator", 3, 46, 3, 58], ["in:in", "T"], 2], ["Update", ["string:' '", 3, 54, 3, 57], "' /_'"], ["Delete", ["!=:!=", 3, 48, 3, 50]]]
Shoobx/PyPDF2@5971df1aeaa29065e18ff2c23443e4abef9082e8
Allow underscores and slashes in text strings Text strings that has anything except A-Z, a-z and 0-9 got scrambled for reasons I don't entirely understand. This means that "/DA (/Font ..." got the /Font changed to \\057Font, and any form field name with an underscore would get scrambled. This fixes that. Closes #425
[ { "sha": "dd0a5cc8caa7e13cb86858e220a8709d641d1da5", "filename": "PyPDF2/generic.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/Shoobx/PyPDF2/blob/5971df1aeaa29065e18ff2c23443e4abef9082e8/PyPDF2%2Fgeneric.py", "raw_url": "https://github.com/Shoobx/PyPDF2/raw/5971df1aeaa29065e18ff2c23443e4abef9082e8/PyPDF2%2Fgeneric.py", "contents_url": "https://api.github.com/repos/Shoobx/PyPDF2/contents/PyPDF2%2Fgeneric.py?ref=5971df1aeaa29065e18ff2c23443e4abef9082e8", "patch": "@@ -459,7 +459,7 @@ def writeToStream(self, stream, encryption_key):\n else:\n stream.write(b_(\"(\"))\n for c in bytearr:\n- if not chr_(c).isalnum() and c != b_(' '):\n+ if not chr_(c).isalnum() and c not in b_(' /_'):\n stream.write(b_(\"\\\\%03o\" % ord_(c)))\n else:\n stream.write(b_(chr_(c)))" } ]
asciidoc
63dce9bac4a681ddbf2154653de6b33724b4c67a
a50b8e6c0a13f95fb15127530db0efd3421d3af6
asciidoc.py
https://github.com/pepr/asciidoc
true
false
true
@@ -4157,7 +4157,7 @@ def main(): 'help','no-conf','no-header-footer','out-file=','profile', 'section-numbers','verbose','version','safe','unsafe']) except getopt.GetoptError: - usage(msg) + usage() sys.exit(1) if len(args) > 1: usage()
'section-numbers' , 'verbose' , 'version' , 'safe' , 'unsafe' ] ) except getopt . GetoptError : usage ( msg )
'section-numbers' , 'verbose' , 'version' , 'safe' , 'unsafe' ] ) except getopt . GetoptError : usage ( )
SAME_FUNCTION_LESS_ARGS
[["Delete", ["identifier:msg", 3, 15, 3, 18]]]
pepr/asciidoc@63dce9bac4a681ddbf2154653de6b33724b4c67a
fixed exception thrown by illegal command-line arguments
[ { "sha": "834179bf5c5c22167b2bb1f588808ffebdb0a594", "filename": "asciidoc.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/pepr/asciidoc/blob/63dce9bac4a681ddbf2154653de6b33724b4c67a/asciidoc.py", "raw_url": "https://github.com/pepr/asciidoc/raw/63dce9bac4a681ddbf2154653de6b33724b4c67a/asciidoc.py", "contents_url": "https://api.github.com/repos/pepr/asciidoc/contents/asciidoc.py?ref=63dce9bac4a681ddbf2154653de6b33724b4c67a", "patch": "@@ -4157,7 +4157,7 @@ def main():\n 'help','no-conf','no-header-footer','out-file=','profile',\n 'section-numbers','verbose','version','safe','unsafe'])\n except getopt.GetoptError:\n- usage(msg)\n+ usage()\n sys.exit(1)\n if len(args) > 1:\n usage()" } ]
asciidoc
bc0fcb3bba33fe83b76c8046892553118ece9216
55279c59c5c5d15c0f77170588d16081c771eadf
asciidoc.py
https://github.com/pepr/asciidoc
true
false
true
@@ -3119,7 +3119,7 @@ class CalloutMap: result += ' ' + self.calloutid(self.listnumber,coindex) return result.strip() else: - error('no callouts refer to list item '+str(listindex)) + warning('no callouts refer to list item '+str(listindex)) return '' def validate(self,maxlistindex): # Check that all list indexes referenced by callouts exist.
else : error ( 'no callouts refer to list item ' + str ( listindex ) )
else : warning ( 'no callouts refer to list item ' + str ( listindex ) )
WRONG_FUNCTION_NAME
[["Update", ["identifier:error", 3, 13, 3, 18], "warning"]]
pepr/asciidoc@bc0fcb3bba33fe83b76c8046892553118ece9216
Changed 'no callouts refer to ...' error to warning as it can't be suppressed in HTML source highlight elements.
[ { "sha": "3c5a5cfaad3087271e2f3b7cedb9c9527ecf278a", "filename": "asciidoc.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/pepr/asciidoc/blob/bc0fcb3bba33fe83b76c8046892553118ece9216/asciidoc.py", "raw_url": "https://github.com/pepr/asciidoc/raw/bc0fcb3bba33fe83b76c8046892553118ece9216/asciidoc.py", "contents_url": "https://api.github.com/repos/pepr/asciidoc/contents/asciidoc.py?ref=bc0fcb3bba33fe83b76c8046892553118ece9216", "patch": "@@ -3119,7 +3119,7 @@ def calloutids(self,listindex):\n result += ' ' + self.calloutid(self.listnumber,coindex)\n return result.strip()\n else:\n- error('no callouts refer to list item '+str(listindex))\n+ warning('no callouts refer to list item '+str(listindex))\n return ''\n def validate(self,maxlistindex):\n # Check that all list indexes referenced by callouts exist." } ]
asciidoc
de52a403c930b93f923f376a6329b17ac0192daf
eacda0d284e01dc56f38e1ea0ae68772a998b20c
asciidoc.py
https://github.com/pepr/asciidoc
true
false
true
@@ -124,7 +124,7 @@ def file_in(fname, directory): directory = os.getcwd() else: assert os.path.isdir(directory) - directory = os.path.abspath(directory) + directory = os.path.realpath(directory) fname = os.path.realpath(fname) return os.path.commonprefix((directory, fname)) == directory
directory = os . path . abspath ( directory )
directory = os . path . realpath ( directory )
WRONG_FUNCTION_NAME
[["Update", ["identifier:abspath", 3, 29, 3, 36], "realpath"]]
pepr/asciidoc@de52a403c930b93f923f376a6329b17ac0192daf
FIXED: file_in bug that was causing false negative safe mode warnings (patch submitted by Julien Palmas, see http://groups.google.com/group/asciidoc/browse_thread/thread/81c8e722a88c7960/693b0bc284795fe).
[ { "sha": "83376d79c9b1eabb9ddfe281fbed64307d667123", "filename": "asciidoc.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/pepr/asciidoc/blob/de52a403c930b93f923f376a6329b17ac0192daf/asciidoc.py", "raw_url": "https://github.com/pepr/asciidoc/raw/de52a403c930b93f923f376a6329b17ac0192daf/asciidoc.py", "contents_url": "https://api.github.com/repos/pepr/asciidoc/contents/asciidoc.py?ref=de52a403c930b93f923f376a6329b17ac0192daf", "patch": "@@ -124,7 +124,7 @@ def file_in(fname, directory):\n directory = os.getcwd()\n else:\n assert os.path.isdir(directory)\n- directory = os.path.abspath(directory)\n+ directory = os.path.realpath(directory)\n fname = os.path.realpath(fname)\n return os.path.commonprefix((directory, fname)) == directory\n " } ]
asciidoc
ba8d3dee0f73b226584bd1a7ecacc0f067a2fd75
af906b768b64f2eaed22a0fd7a9be8a0b410ef19
asciidoc.py
https://github.com/pepr/asciidoc
true
false
true
@@ -384,7 +384,7 @@ def subs_quotes(text): mo = reo.search(text,pos) if not mo: break if text[mo.start()] == '\\': - pos = mo.end() + pos += 1 # Skip over backslash. else: attrs = {} parse_attributes(mo.group('attrs'), attrs)
pos = mo . end ( )
pos += 1
SINGLE_STMT
[["Insert", ["expression_statement", 3, 17, 3, 31], ["augmented_assignment", "N0"], 0], ["Move", "N0", ["identifier:pos", 3, 17, 3, 20], 0], ["Insert", "N0", ["+=:+=", "T"], 1], ["Insert", "N0", ["integer:1", "T"], 2], ["Delete", ["=:=", 3, 21, 3, 22]], ["Delete", ["identifier:mo", 3, 23, 3, 25]], ["Delete", [".:.", 3, 25, 3, 26]], ["Delete", ["identifier:end", 3, 26, 3, 29]], ["Delete", ["attribute", 3, 23, 3, 29]], ["Delete", ["(:(", 3, 29, 3, 30]], ["Delete", ["):)", 3, 30, 3, 31]], ["Delete", ["argument_list", 3, 29, 3, 31]], ["Delete", ["call", 3, 23, 3, 31]], ["Delete", ["assignment", 3, 17, 3, 31]]]
pepr/asciidoc@ba8d3dee0f73b226584bd1a7ecacc0f067a2fd75
FIXED: Escaped quote skipped over leading and trailing quote instead of just the leading quote.
[ { "sha": "e6377d3deaa230f9f020de3467f8b542bc00d45d", "filename": "asciidoc.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/pepr/asciidoc/blob/ba8d3dee0f73b226584bd1a7ecacc0f067a2fd75/asciidoc.py", "raw_url": "https://github.com/pepr/asciidoc/raw/ba8d3dee0f73b226584bd1a7ecacc0f067a2fd75/asciidoc.py", "contents_url": "https://api.github.com/repos/pepr/asciidoc/contents/asciidoc.py?ref=ba8d3dee0f73b226584bd1a7ecacc0f067a2fd75", "patch": "@@ -384,7 +384,7 @@ def subs_quotes(text):\n mo = reo.search(text,pos)\n if not mo: break\n if text[mo.start()] == '\\\\':\n- pos = mo.end()\n+ pos += 1 # Skip over backslash.\n else:\n attrs = {}\n parse_attributes(mo.group('attrs'), attrs)" } ]
ssh
4862d5955bdc8275fe5c03cceffd73448b130812
112b72511eb186175839fc0b8d66b6fc6dcdb534
demo_simple.py
https://github.com/haonaturel/ssh
true
false
false
@@ -113,7 +113,7 @@ try: t.close() except Exception, e: - print '*** Caught exception: ' + str(e.__class__) + ': ' + str(e) + print '*** Caught exception: %s: %s' % (e.__class__, e) traceback.print_exc() try: t.close()
'*** Caught exception: ' + str ( e . __class__ ) + ': ' + str ( e )
'*** Caught exception: %s: %s' % ( e . __class__ , e )
SINGLE_STMT
[["Update", ["string:'*** Caught exception: '", 3, 11, 3, 35], "'*** Caught exception: %s: %s'"], ["Move", ["binary_operator", 3, 11, 3, 70], ["string:'*** Caught exception: '", 3, 11, 3, 35], 0], ["Insert", ["binary_operator", 3, 11, 3, 70], ["%:%", "T"], 1], ["Insert", ["binary_operator", 3, 11, 3, 70], ["tuple", "N0"], 2], ["Move", "N0", ["(:(", 3, 41, 3, 42], 0], ["Move", "N0", ["attribute", 3, 42, 3, 53], 1], ["Insert", "N0", [",:,", "T"], 2], ["Move", "N0", ["identifier:e", 3, 68, 3, 69], 3], ["Move", "N0", ["):)", 3, 69, 3, 70], 4], ["Delete", ["+:+", 3, 36, 3, 37]], ["Delete", ["identifier:str", 3, 38, 3, 41]], ["Delete", ["):)", 3, 53, 3, 54]], ["Delete", ["argument_list", 3, 41, 3, 54]], ["Delete", ["call", 3, 38, 3, 54]], ["Delete", ["binary_operator", 3, 11, 3, 54]], ["Delete", ["+:+", 3, 55, 3, 56]], ["Delete", ["string:': '", 3, 57, 3, 61]], ["Delete", ["binary_operator", 3, 11, 3, 61]], ["Delete", ["+:+", 3, 62, 3, 63]], ["Delete", ["identifier:str", 3, 64, 3, 67]], ["Delete", ["(:(", 3, 67, 3, 68]], ["Delete", ["argument_list", 3, 67, 3, 70]], ["Delete", ["call", 3, 64, 3, 70]]]
haonaturel/ssh@4862d5955bdc8275fe5c03cceffd73448b130812
[project @ Arch-1:[email protected]%paramiko--dev--1--patch-57] simplify a line of debug output in demo_simple that bothered me one day
[ { "sha": "7eade45bb3913d9382addbddb8cfe0208dcfe7c0", "filename": "demo_simple.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/haonaturel/ssh/blob/4862d5955bdc8275fe5c03cceffd73448b130812/demo_simple.py", "raw_url": "https://github.com/haonaturel/ssh/raw/4862d5955bdc8275fe5c03cceffd73448b130812/demo_simple.py", "contents_url": "https://api.github.com/repos/haonaturel/ssh/contents/demo_simple.py?ref=4862d5955bdc8275fe5c03cceffd73448b130812", "patch": "@@ -113,7 +113,7 @@\n t.close()\n \n except Exception, e:\n- print '*** Caught exception: ' + str(e.__class__) + ': ' + str(e)\n+ print '*** Caught exception: %s: %s' % (e.__class__, e)\n traceback.print_exc()\n try:\n t.close()" } ]
ssh
be7d99886c2f6223d8f0899e4864c11f922dfde8
cb3008b402a6a411204d0dc060c2f85a548c1c7e
paramiko/util.py
https://github.com/haonaturel/ssh
true
false
true
@@ -294,7 +294,7 @@ def lookup_ssh_host_config(hostname, config): matches = [x for x in config if fnmatch.fnmatch(hostname, x['host'])] # sort in order of shortest match (usually '*') to longest - matches.sort(key=lambda x: len(x['host'])) + matches.sort(lambda x,y: cmp(len(x['host']), len(x['host']))) ret = {} for m in matches: ret.update(m)
matches . sort ( key = lambda x : len ( x [ 'host' ] ) )
matches . sort ( lambda x , y : cmp ( len ( x [ 'host' ] ) , len ( x [ 'host' ] ) ) )
SINGLE_STMT
[["Move", ["argument_list", 2, 17, 2, 47], ["lambda", 2, 22, 2, 46], 1], ["Insert", ["lambda", 2, 22, 2, 46], ["call", "N0"], 3], ["Insert", ["lambda_parameters", 2, 29, 2, 30], [",:,", "T"], 1], ["Insert", ["lambda_parameters", 2, 29, 2, 30], ["identifier:y", "T"], 2], ["Insert", "N0", ["identifier:cmp", "T"], 0], ["Insert", "N0", ["argument_list", "N1"], 1], ["Insert", "N1", ["(:(", "T"], 0], ["Move", "N1", ["call", 2, 32, 2, 46], 1], ["Insert", "N1", [",:,", "T"], 2], ["Insert", "N1", ["call", "N2"], 3], ["Insert", "N1", ["):)", "T"], 4], ["Insert", "N2", ["identifier:len", "T"], 0], ["Insert", "N2", ["argument_list", "N3"], 1], ["Insert", "N3", ["(:(", "T"], 0], ["Insert", "N3", ["subscript", "N4"], 1], ["Insert", "N3", ["):)", "T"], 2], ["Insert", "N4", ["identifier:x", "T"], 0], ["Insert", "N4", ["[:[", "T"], 1], ["Insert", "N4", ["string:'host'", "T"], 2], ["Insert", "N4", ["]:]", "T"], 3], ["Delete", ["identifier:key", 2, 18, 2, 21]], ["Delete", ["=:=", 2, 21, 2, 22]], ["Delete", ["keyword_argument", 2, 18, 2, 46]]]
haonaturel/ssh@be7d99886c2f6223d8f0899e4864c11f922dfde8
[project @ [email protected]] patch from jan hudec to fix a python 2.4-ism
[ { "sha": "809b850d43a790806c23117d7cef11259de839c7", "filename": "paramiko/util.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/haonaturel/ssh/blob/be7d99886c2f6223d8f0899e4864c11f922dfde8/paramiko%2Futil.py", "raw_url": "https://github.com/haonaturel/ssh/raw/be7d99886c2f6223d8f0899e4864c11f922dfde8/paramiko%2Futil.py", "contents_url": "https://api.github.com/repos/haonaturel/ssh/contents/paramiko%2Futil.py?ref=be7d99886c2f6223d8f0899e4864c11f922dfde8", "patch": "@@ -294,7 +294,7 @@ def lookup_ssh_host_config(hostname, config):\n \"\"\"\n matches = [x for x in config if fnmatch.fnmatch(hostname, x['host'])]\n # sort in order of shortest match (usually '*') to longest\n- matches.sort(key=lambda x: len(x['host']))\n+ matches.sort(lambda x,y: cmp(len(x['host']), len(x['host'])))\n ret = {}\n for m in matches:\n ret.update(m)" } ]
ssh
a3128c63de7fc1aaeb44c455495accd3149a773c
92e92a9297770661eb578bfab2735136e3d98a5b
tests/test_transport.py
https://github.com/haonaturel/ssh
true
false
true
@@ -699,4 +699,4 @@ class TransportTest (unittest.TestCase): schan.close() chan.close() - self.assertEquals(chan.send_read(), True) + self.assertEquals(chan.send_ready(), True)
self . assertEquals ( chan . send_read ( ) , True )
self . assertEquals ( chan . send_ready ( ) , True )
WRONG_FUNCTION_NAME
[["Update", ["identifier:send_read", 3, 32, 3, 41], "send_ready"]]
haonaturel/ssh@a3128c63de7fc1aaeb44c455495accd3149a773c
[project @ [email protected]] oops, fix typo.
[ { "sha": "d9ac1780ba02a15b54de4b4f5e5579afd9ac3c14", "filename": "tests/test_transport.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/haonaturel/ssh/blob/a3128c63de7fc1aaeb44c455495accd3149a773c/tests%2Ftest_transport.py", "raw_url": "https://github.com/haonaturel/ssh/raw/a3128c63de7fc1aaeb44c455495accd3149a773c/tests%2Ftest_transport.py", "contents_url": "https://api.github.com/repos/haonaturel/ssh/contents/tests%2Ftest_transport.py?ref=a3128c63de7fc1aaeb44c455495accd3149a773c", "patch": "@@ -699,4 +699,4 @@ def test_L_send_ready(self):\n \n schan.close()\n chan.close()\n- self.assertEquals(chan.send_read(), True)\n+ self.assertEquals(chan.send_ready(), True)" } ]
ssh
0e2e882d0dd9e237019b74fb0cbe73d3000675d7
e06dbde805df433e4c2c36d72276e45aba147b0b
paramiko/auth_handler.py
https://github.com/haonaturel/ssh
true
false
true
@@ -363,7 +363,7 @@ class AuthHandler (object): self.transport._log(DEBUG, 'Methods: ' + str(authlist)) self.transport.saved_exception = PartialAuthentication(authlist) elif self.auth_method not in authlist: - self.transport._log(INFO, 'Authentication type (%s) not permitted.' % self.auth_method) + self.transport._log(DEBUG, 'Authentication type (%s) not permitted.' % self.auth_method) self.transport._log(DEBUG, 'Allowed methods: ' + str(authlist)) self.transport.saved_exception = BadAuthenticationType('Bad authentication type', authlist) else:
elif self . auth_method not in authlist : self . transport . _log ( INFO , 'Authentication type (%s) not permitted.' % self . auth_method )
elif self . auth_method not in authlist : self . transport . _log ( DEBUG , 'Authentication type (%s) not permitted.' % self . auth_method )
CHANGE_IDENTIFIER_USED
[["Update", ["identifier:INFO", 3, 33, 3, 37], "DEBUG"]]
haonaturel/ssh@0e2e882d0dd9e237019b74fb0cbe73d3000675d7
make the "auth type not permitted" message debug level, for bazaar.
[ { "sha": "a542f767dfcbddb7ea8b36a242d367fc24b8d671", "filename": "paramiko/auth_handler.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/haonaturel/ssh/blob/0e2e882d0dd9e237019b74fb0cbe73d3000675d7/paramiko%2Fauth_handler.py", "raw_url": "https://github.com/haonaturel/ssh/raw/0e2e882d0dd9e237019b74fb0cbe73d3000675d7/paramiko%2Fauth_handler.py", "contents_url": "https://api.github.com/repos/haonaturel/ssh/contents/paramiko%2Fauth_handler.py?ref=0e2e882d0dd9e237019b74fb0cbe73d3000675d7", "patch": "@@ -363,7 +363,7 @@ def _parse_userauth_failure(self, m):\n self.transport._log(DEBUG, 'Methods: ' + str(authlist))\n self.transport.saved_exception = PartialAuthentication(authlist)\n elif self.auth_method not in authlist:\n- self.transport._log(INFO, 'Authentication type (%s) not permitted.' % self.auth_method)\n+ self.transport._log(DEBUG, 'Authentication type (%s) not permitted.' % self.auth_method)\n self.transport._log(DEBUG, 'Allowed methods: ' + str(authlist))\n self.transport.saved_exception = BadAuthenticationType('Bad authentication type', authlist)\n else:" } ]
ipyparallel
3b77aa14b0d5db831923d057629e265c4143c42d
5d3982073600eef29d81438e346dd26d1e6b05c3
ipyparallel/apps/ipcontrollerapp.py
https://github.com/ChinaQuants/ipyparallel
true
false
true
@@ -403,7 +403,7 @@ class IPControllerApp(BaseParallelApplication): if 'TaskScheduler.scheme_name' in self.config: scheme = self.config.TaskScheduler.scheme_name else: - scheme = TaskScheduler.scheme_name.get_default_value() + scheme = TaskScheduler.scheme_name.default_value # Task Queue (in a Process) if scheme == 'pure': self.log.warn("task::using pure DEALER Task scheduler")
scheme = TaskScheduler . scheme_name . get_default_value ( )
scheme = TaskScheduler . scheme_name . default_value
SINGLE_STMT
[["Move", ["assignment", 3, 13, 3, 67], ["attribute", 3, 22, 3, 65], 2], ["Update", ["identifier:get_default_value", 3, 48, 3, 65], "default_value"], ["Delete", ["(:(", 3, 65, 3, 66]], ["Delete", ["):)", 3, 66, 3, 67]], ["Delete", ["argument_list", 3, 65, 3, 67]], ["Delete", ["call", 3, 22, 3, 67]]]
ChinaQuants/ipyparallel@3b77aa14b0d5db831923d057629e265c4143c42d
ipyparallel/apps/ipcontrollerapp.py: Switch to using `default_value` attribute.
[ { "sha": "3f11c7ec5558ce86351f3fb6053b989fc740b10d", "filename": "ipyparallel/apps/ipcontrollerapp.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/ChinaQuants/ipyparallel/blob/3b77aa14b0d5db831923d057629e265c4143c42d/ipyparallel%2Fapps%2Fipcontrollerapp.py", "raw_url": "https://github.com/ChinaQuants/ipyparallel/raw/3b77aa14b0d5db831923d057629e265c4143c42d/ipyparallel%2Fapps%2Fipcontrollerapp.py", "contents_url": "https://api.github.com/repos/ChinaQuants/ipyparallel/contents/ipyparallel%2Fapps%2Fipcontrollerapp.py?ref=3b77aa14b0d5db831923d057629e265c4143c42d", "patch": "@@ -403,7 +403,7 @@ def init_schedulers(self):\n if 'TaskScheduler.scheme_name' in self.config:\n scheme = self.config.TaskScheduler.scheme_name\n else:\n- scheme = TaskScheduler.scheme_name.get_default_value()\n+ scheme = TaskScheduler.scheme_name.default_value\n # Task Queue (in a Process)\n if scheme == 'pure':\n self.log.warn(\"task::using pure DEALER Task scheduler\")" } ]
ipyparallel
0f8b1f3b01bc359065f65a5e9a87220cc1dcd842
3b77aa14b0d5db831923d057629e265c4143c42d
ipyparallel/controller/hub.py
https://github.com/ChinaQuants/ipyparallel
true
false
true
@@ -258,7 +258,7 @@ class HubFactory(RegistrationFactory): scheme = self.config.TaskScheduler.scheme_name else: from .scheduler import TaskScheduler - scheme = TaskScheduler.scheme_name.get_default_value() + scheme = TaskScheduler.scheme_name.default_value # build connection dicts engine = self.engine_info = {
scheme = TaskScheduler . scheme_name . get_default_value ( )
scheme = TaskScheduler . scheme_name . default_value
SINGLE_STMT
[["Move", ["assignment", 3, 13, 3, 67], ["attribute", 3, 22, 3, 65], 2], ["Update", ["identifier:get_default_value", 3, 48, 3, 65], "default_value"], ["Delete", ["(:(", 3, 65, 3, 66]], ["Delete", ["):)", 3, 66, 3, 67]], ["Delete", ["argument_list", 3, 65, 3, 67]], ["Delete", ["call", 3, 22, 3, 67]]]
ChinaQuants/ipyparallel@0f8b1f3b01bc359065f65a5e9a87220cc1dcd842
ipyparallel/controller/hub.py: Switch to using `default_value` attribute.
[ { "sha": "98da619c6dfa0a8555847813faafa88b019f42dc", "filename": "ipyparallel/controller/hub.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/ChinaQuants/ipyparallel/blob/0f8b1f3b01bc359065f65a5e9a87220cc1dcd842/ipyparallel%2Fcontroller%2Fhub.py", "raw_url": "https://github.com/ChinaQuants/ipyparallel/raw/0f8b1f3b01bc359065f65a5e9a87220cc1dcd842/ipyparallel%2Fcontroller%2Fhub.py", "contents_url": "https://api.github.com/repos/ChinaQuants/ipyparallel/contents/ipyparallel%2Fcontroller%2Fhub.py?ref=0f8b1f3b01bc359065f65a5e9a87220cc1dcd842", "patch": "@@ -258,7 +258,7 @@ def init_hub(self):\n scheme = self.config.TaskScheduler.scheme_name\n else:\n from .scheduler import TaskScheduler\n- scheme = TaskScheduler.scheme_name.get_default_value()\n+ scheme = TaskScheduler.scheme_name.default_value\n \n # build connection dicts\n engine = self.engine_info = {" } ]
Theano
4e61200c7f1615cbcfcccb4be302e660965b66c8
9b5f64932eff018549fba17fcff574a2f6e8eec5
theano/compile/debugmode.py
https://github.com/ChinaQuants/Theano
true
false
true
@@ -372,7 +372,7 @@ def _check_inputs(node, storage_map, r_vals, dr_vals, active_nodes, clobber_dr_v destroyed_res_list = [node.inputs[i] for i in destroyed_idx_list] for r_idx, r in enumerate(node.inputs): - if not r.type.values_eq_approx(r_vals[r], storage_map[r][0]): + if not r.type.values_eq(r_vals[r], storage_map[r][0]): # some input node 'r' got changed by running the node # this may or may not be ok... if r in destroyed_res_list:
if not r . type . values_eq_approx ( r_vals [ r ] , storage_map [ r ] [ 0 ] ) : if r in destroyed_res_list :
if not r . type . values_eq ( r_vals [ r ] , storage_map [ r ] [ 0 ] ) : if r in destroyed_res_list :
WRONG_FUNCTION_NAME
[["Update", ["identifier:values_eq_approx", 3, 23, 3, 39], "values_eq"]]
ChinaQuants/Theano@4e61200c7f1615cbcfcccb4be302e660965b66c8
Strict equality checking in debugmode to detect BadDestroyMap
[ { "sha": "09b88950600d4aa89ec8f04ad76f1605d85b3e81", "filename": "theano/compile/debugmode.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/ChinaQuants/Theano/blob/4e61200c7f1615cbcfcccb4be302e660965b66c8/theano%2Fcompile%2Fdebugmode.py", "raw_url": "https://github.com/ChinaQuants/Theano/raw/4e61200c7f1615cbcfcccb4be302e660965b66c8/theano%2Fcompile%2Fdebugmode.py", "contents_url": "https://api.github.com/repos/ChinaQuants/Theano/contents/theano%2Fcompile%2Fdebugmode.py?ref=4e61200c7f1615cbcfcccb4be302e660965b66c8", "patch": "@@ -372,7 +372,7 @@ def _check_inputs(node, storage_map, r_vals, dr_vals, active_nodes, clobber_dr_v\n destroyed_res_list = [node.inputs[i] for i in destroyed_idx_list]\n \n for r_idx, r in enumerate(node.inputs):\n- if not r.type.values_eq_approx(r_vals[r], storage_map[r][0]):\n+ if not r.type.values_eq(r_vals[r], storage_map[r][0]):\n # some input node 'r' got changed by running the node\n # this may or may not be ok...\n if r in destroyed_res_list:" } ]
cpython
eb52ac89929bb09b15c014ab8ff60eee685e86c7
d79c1d4a9406384f10a37f26a7515ce79f9fdd78
Lib/test/test_capi.py
https://github.com/mtorromeo/cpython
true
false
true
@@ -480,7 +480,7 @@ class EmbeddingTests(unittest.TestCase): def test_forced_io_encoding(self): # Checks forced configuration of embedded interpreter IO streams - env = {"PYTHONIOENCODING": "utf-8:surrogateescape"} + env = dict(os.environ, PYTHONIOENCODING="utf-8:surrogateescape") out, err = self.run_embedded_interpreter("forced_io_encoding", env=env) if support.verbose > 1: print()
env = { "PYTHONIOENCODING" : "utf-8:surrogateescape" }
env = dict ( os . environ , PYTHONIOENCODING = "utf-8:surrogateescape" )
SINGLE_STMT
[["Insert", ["assignment", 3, 9, 3, 60], ["call", "N0"], 2], ["Insert", "N0", ["identifier:dict", "T"], 0], ["Insert", "N0", ["argument_list", "N1"], 1], ["Insert", "N1", ["(:(", "T"], 0], ["Insert", "N1", ["attribute", "N2"], 1], ["Insert", "N1", [",:,", "T"], 2], ["Insert", "N1", ["keyword_argument", "N3"], 3], ["Insert", "N1", ["):)", "T"], 4], ["Insert", "N2", ["identifier:os", "T"], 0], ["Insert", "N2", [".:.", "T"], 1], ["Insert", "N2", ["identifier:environ", "T"], 2], ["Insert", "N3", ["identifier:PYTHONIOENCODING", "T"], 0], ["Insert", "N3", ["=:=", "T"], 1], ["Move", "N3", ["string:\"utf-8:surrogateescape\"", 3, 36, 3, 59], 2], ["Delete", ["{:{", 3, 15, 3, 16]], ["Delete", ["string:\"PYTHONIOENCODING\"", 3, 16, 3, 34]], ["Delete", [":::", 3, 34, 3, 35]], ["Delete", ["pair", 3, 16, 3, 59]], ["Delete", ["}:}", 3, 59, 3, 60]], ["Delete", ["dictionary", 3, 15, 3, 60]]]
mtorromeo/cpython@eb52ac89929bb09b15c014ab8ff60eee685e86c7
bpo-28180: Fix test_capi.test_forced_io_encoding() (#2155) Don't run Python in an empty environment, but copy the current environment and set PYTHONIOENCODING. So the test works also on Python compiled in shared mode (using libpython).
[ { "sha": "1cf5cd73f0939108604d298fe07f337e572e58b5", "filename": "Lib/test/test_capi.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/mtorromeo/cpython/blob/eb52ac89929bb09b15c014ab8ff60eee685e86c7/Lib%2Ftest%2Ftest_capi.py", "raw_url": "https://github.com/mtorromeo/cpython/raw/eb52ac89929bb09b15c014ab8ff60eee685e86c7/Lib%2Ftest%2Ftest_capi.py", "contents_url": "https://api.github.com/repos/mtorromeo/cpython/contents/Lib%2Ftest%2Ftest_capi.py?ref=eb52ac89929bb09b15c014ab8ff60eee685e86c7", "patch": "@@ -480,7 +480,7 @@ def test_subinterps_distinct_state(self):\n \n def test_forced_io_encoding(self):\n # Checks forced configuration of embedded interpreter IO streams\n- env = {\"PYTHONIOENCODING\": \"utf-8:surrogateescape\"}\n+ env = dict(os.environ, PYTHONIOENCODING=\"utf-8:surrogateescape\")\n out, err = self.run_embedded_interpreter(\"forced_io_encoding\", env=env)\n if support.verbose > 1:\n print()" } ]
cpython
a0e911b190a229693a1b9948caf768de0069046b
f3cffd2b7879d209f982de899b782fb89cfc410a
Lib/idlelib/config_key.py
https://github.com/mtorromeo/cpython
true
false
true
@@ -186,7 +186,7 @@ class GetKeysDialog(Toplevel): def LoadFinalKeyList(self): #these tuples are also available for use in validity checks - self.functionKeys=('F1','F2','F2','F4','F5','F6','F7','F8','F9', + self.functionKeys=('F1','F2','F3','F4','F5','F6','F7','F8','F9', 'F10','F11','F12') self.alphanumKeys=tuple(string.ascii_lowercase+string.digits) self.punctuationKeys=tuple('~!@#%^&*()_-+={}[]|;:,.<>/?')
self . functionKeys = ( 'F1' , 'F2' , 'F2' , 'F4' , 'F5' , 'F6' , 'F7' , 'F8' , 'F9' , 'F10' , 'F11' , 'F12' )
self . functionKeys = ( 'F1' , 'F2' , 'F3' , 'F4' , 'F5' , 'F6' , 'F7' , 'F8' , 'F9' , 'F10' , 'F11' , 'F12' )
CHANGE_STRING_LITERAL
[["Update", ["string:'F2'", 3, 38, 3, 42], "'F3'"]]
mtorromeo/cpython@a0e911b190a229693a1b9948caf768de0069046b
Fix typo in idlelib.config_key.py (#2322)
[ { "sha": "479d6ad313e35f22f5de7b2eb094cf4d45079d7f", "filename": "Lib/idlelib/config_key.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/mtorromeo/cpython/blob/a0e911b190a229693a1b9948caf768de0069046b/Lib%2Fidlelib%2Fconfig_key.py", "raw_url": "https://github.com/mtorromeo/cpython/raw/a0e911b190a229693a1b9948caf768de0069046b/Lib%2Fidlelib%2Fconfig_key.py", "contents_url": "https://api.github.com/repos/mtorromeo/cpython/contents/Lib%2Fidlelib%2Fconfig_key.py?ref=a0e911b190a229693a1b9948caf768de0069046b", "patch": "@@ -186,7 +186,7 @@ def ClearKeySeq(self):\n \n def LoadFinalKeyList(self):\n #these tuples are also available for use in validity checks\n- self.functionKeys=('F1','F2','F2','F4','F5','F6','F7','F8','F9',\n+ self.functionKeys=('F1','F2','F3','F4','F5','F6','F7','F8','F9',\n 'F10','F11','F12')\n self.alphanumKeys=tuple(string.ascii_lowercase+string.digits)\n self.punctuationKeys=tuple('~!@#%^&*()_-+={}[]|;:,.<>/?')" } ]
cpython
5e742fa922ea70131b4c63451c87cf0347532806
73528640ffd872141e126d2c4a103126055ec9ce
Lib/site.py
https://github.com/mtorromeo/cpython
true
false
true
@@ -343,8 +343,8 @@ def getsitepackages(prefixes=None): # for framework builds *only* we add the standard Apple locations. if sys.platform == "darwin" and sys._framework: sitepackages.append( - os.path.join("/Library", framework, - '%d.%d' % sys.version_info[:2], "site-packages")) + os.path.join("/Library", sys._framework, + '%d.%d' % sys.version_info[:2], "site-packages")) return sitepackages def addsitepackages(known_paths, prefixes=None):
sitepackages . append ( os . path . join ( "/Library" , framework , '%d.%d' % sys . version_info [ : 2 ] , "site-packages" ) )
sitepackages . append ( os . path . join ( "/Library" , sys . _framework , '%d.%d' % sys . version_info [ : 2 ] , "site-packages" ) )
SINGLE_STMT
[["Insert", ["argument_list", 3, 33, 4, 73], ["attribute", "N0"], 3], ["Update", ["identifier:framework", 3, 46, 3, 55], "sys"], ["Move", "N0", ["identifier:framework", 3, 46, 3, 55], 0], ["Insert", "N0", [".:.", "T"], 1], ["Insert", "N0", ["identifier:_framework", "T"], 2]]
mtorromeo/cpython@5e742fa922ea70131b4c63451c87cf0347532806
bpo-30804: fix macOS build with framework enabled. (#2516)
[ { "sha": "7dc1b041c192c889df3171c4ddb6381ecee9c2ad", "filename": "Lib/site.py", "status": "modified", "additions": 2, "deletions": 2, "changes": 4, "blob_url": "https://github.com/mtorromeo/cpython/blob/5e742fa922ea70131b4c63451c87cf0347532806/Lib%2Fsite.py", "raw_url": "https://github.com/mtorromeo/cpython/raw/5e742fa922ea70131b4c63451c87cf0347532806/Lib%2Fsite.py", "contents_url": "https://api.github.com/repos/mtorromeo/cpython/contents/Lib%2Fsite.py?ref=5e742fa922ea70131b4c63451c87cf0347532806", "patch": "@@ -343,8 +343,8 @@ def getsitepackages(prefixes=None):\n # for framework builds *only* we add the standard Apple locations.\n if sys.platform == \"darwin\" and sys._framework:\n sitepackages.append(\n- os.path.join(\"/Library\", framework,\n- '%d.%d' % sys.version_info[:2], \"site-packages\"))\n+ os.path.join(\"/Library\", sys._framework,\n+ '%d.%d' % sys.version_info[:2], \"site-packages\"))\n return sitepackages\n \n def addsitepackages(known_paths, prefixes=None):" } ]
cpython
25a4206c243e3b1fa6f5b1c72a11b409b007694d
529746c90584069270cd122920270bd7be38bca3
Lib/idlelib/idle_test/test_configdialog.py
https://github.com/mtorromeo/cpython
true
false
true
@@ -44,7 +44,7 @@ def setUpModule(): def tearDownModule(): global root, configure - idleConf.userCfg = testcfg + idleConf.userCfg = usercfg configure.remove_var_callbacks() del configure root.update_idletasks()
idleConf . userCfg = testcfg
idleConf . userCfg = usercfg
CHANGE_IDENTIFIER_USED
[["Update", ["identifier:testcfg", 3, 24, 3, 31], "usercfg"]]
mtorromeo/cpython@25a4206c243e3b1fa6f5b1c72a11b409b007694d
bpo-30780: Fix error in idlelib.test_idle.test_configdialog (#2606)
[ { "sha": "ffc8a723ca0e6a80003ebbf9851d82050a34de2c", "filename": "Lib/idlelib/idle_test/test_configdialog.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/mtorromeo/cpython/blob/25a4206c243e3b1fa6f5b1c72a11b409b007694d/Lib%2Fidlelib%2Fidle_test%2Ftest_configdialog.py", "raw_url": "https://github.com/mtorromeo/cpython/raw/25a4206c243e3b1fa6f5b1c72a11b409b007694d/Lib%2Fidlelib%2Fidle_test%2Ftest_configdialog.py", "contents_url": "https://api.github.com/repos/mtorromeo/cpython/contents/Lib%2Fidlelib%2Fidle_test%2Ftest_configdialog.py?ref=25a4206c243e3b1fa6f5b1c72a11b409b007694d", "patch": "@@ -44,7 +44,7 @@ def setUpModule():\n \n def tearDownModule():\n global root, configure\n- idleConf.userCfg = testcfg\n+ idleConf.userCfg = usercfg\n configure.remove_var_callbacks()\n del configure\n root.update_idletasks()" } ]
cpython
9f9192afbb4e45d09f0d3d717b457d157dc46398
856cbcc12f2e4cca93af5dc7ed6bcea4dd942f10
Lib/idlelib/idle_test/test_config.py
https://github.com/mtorromeo/cpython
true
false
true
@@ -608,7 +608,7 @@ class IdleConfTest(unittest.TestCase): f = Font.actual(Font(name='TkFixedFont', exists=True, root=root)) self.assertEqual( conf.GetFont(root, 'main', 'EditorWindow'), - (f['family'], 10 if f['size'] < 10 else f['size'], f['weight'])) + (f['family'], 10 if f['size'] <= 0 else f['size'], f['weight'])) # Cleanup root root.destroy()
self . assertEqual ( conf . GetFont ( root , 'main' , 'EditorWindow' ) , ( f [ 'family' ] , 10 if f [ 'size' ] < 10 else f [ 'size' ] , f [ 'weight' ] ) )
self . assertEqual ( conf . GetFont ( root , 'main' , 'EditorWindow' ) , ( f [ 'family' ] , 10 if f [ 'size' ] <= 0 else f [ 'size' ] , f [ 'weight' ] ) )
SINGLE_STMT
[["Insert", ["comparison_operator", 3, 33, 3, 47], ["<=:<=", "T"], 1], ["Update", ["integer:10", 3, 45, 3, 47], "0"], ["Delete", ["<:<", 3, 43, 3, 44]]]
mtorromeo/cpython@9f9192afbb4e45d09f0d3d717b457d157dc46398
bpo-30968: Fix test_get_font in IDLE's test_config. (#2769)
[ { "sha": "bbf06504fc25527b8b17d0d390d6885dc59901de", "filename": "Lib/idlelib/idle_test/test_config.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/mtorromeo/cpython/blob/9f9192afbb4e45d09f0d3d717b457d157dc46398/Lib%2Fidlelib%2Fidle_test%2Ftest_config.py", "raw_url": "https://github.com/mtorromeo/cpython/raw/9f9192afbb4e45d09f0d3d717b457d157dc46398/Lib%2Fidlelib%2Fidle_test%2Ftest_config.py", "contents_url": "https://api.github.com/repos/mtorromeo/cpython/contents/Lib%2Fidlelib%2Fidle_test%2Ftest_config.py?ref=9f9192afbb4e45d09f0d3d717b457d157dc46398", "patch": "@@ -608,7 +608,7 @@ def test_get_font(self):\n f = Font.actual(Font(name='TkFixedFont', exists=True, root=root))\n self.assertEqual(\n conf.GetFont(root, 'main', 'EditorWindow'),\n- (f['family'], 10 if f['size'] < 10 else f['size'], f['weight']))\n+ (f['family'], 10 if f['size'] <= 0 else f['size'], f['weight']))\n \n # Cleanup root\n root.destroy()" } ]
cpython
ff92ff53665b1606a17231972b48ad894459f761
e72b1359f81d1dd42bd8a5c5cc2b3928b74f8023
Lib/test/_test_multiprocessing.py
https://github.com/mtorromeo/cpython
true
false
false
@@ -109,7 +109,7 @@ try: from ctypes import Structure, c_int, c_double, c_longlong except ImportError: Structure = object - c_int = c_double = None + c_int = c_double = c_longlong = None def check_enough_semaphores():
c_int = c_double = None
c_int = c_double = c_longlong = None
SINGLE_STMT
[["Insert", ["assignment", 3, 13, 3, 28], ["assignment", "N0"], 2], ["Insert", "N0", ["identifier:c_longlong", "T"], 0], ["Insert", "N0", ["=:=", "T"], 1], ["Move", "N0", ["none:None", 3, 24, 3, 28], 2]]
mtorromeo/cpython@ff92ff53665b1606a17231972b48ad894459f761
Fix test failure without ctypes (#2802)
[ { "sha": "329a6d29acadf57a10b77844ff04448727b1433b", "filename": "Lib/test/_test_multiprocessing.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/mtorromeo/cpython/blob/ff92ff53665b1606a17231972b48ad894459f761/Lib%2Ftest%2F_test_multiprocessing.py", "raw_url": "https://github.com/mtorromeo/cpython/raw/ff92ff53665b1606a17231972b48ad894459f761/Lib%2Ftest%2F_test_multiprocessing.py", "contents_url": "https://api.github.com/repos/mtorromeo/cpython/contents/Lib%2Ftest%2F_test_multiprocessing.py?ref=ff92ff53665b1606a17231972b48ad894459f761", "patch": "@@ -109,7 +109,7 @@ def wait_for_handle(handle, timeout):\n from ctypes import Structure, c_int, c_double, c_longlong\n except ImportError:\n Structure = object\n- c_int = c_double = None\n+ c_int = c_double = c_longlong = None\n \n \n def check_enough_semaphores():" } ]
cpython
fd46561167af6cd697191dd7ebb8c2fef5ad6493
a80e985c493d2ab9df0832c99d9ddb798d2e66cf
Lib/test/test_pydoc.py
https://github.com/mtorromeo/cpython
true
false
true
@@ -360,7 +360,7 @@ def get_pydoc_html(module): def get_pydoc_link(module): "Returns a documentation web link of a module" dirname = os.path.dirname - basedir = dirname(dirname(__file__)) + basedir = dirname(dirname(os.path.realpath(__file__))) doc = pydoc.TextDoc() loc = doc.getdocloc(module, basedir=basedir) return loc
basedir = dirname ( dirname ( __file__ ) )
basedir = dirname ( dirname ( os . path . realpath ( __file__ ) ) )
ADD_FUNCTION_AROUND_EXPRESSION
[["Insert", ["argument_list", 3, 30, 3, 40], ["(:(", "T"], 0], ["Insert", ["argument_list", 3, 30, 3, 40], ["call", "N0"], 1], ["Insert", ["argument_list", 3, 30, 3, 40], ["):)", "T"], 2], ["Insert", "N0", ["attribute", "N1"], 0], ["Move", "N0", ["argument_list", 3, 30, 3, 40], 1], ["Insert", "N1", ["attribute", "N2"], 0], ["Insert", "N1", [".:.", "T"], 1], ["Insert", "N1", ["identifier:realpath", "T"], 2], ["Insert", "N2", ["identifier:os", "T"], 0], ["Insert", "N2", [".:.", "T"], 1], ["Insert", "N2", ["identifier:path", "T"], 2]]
mtorromeo/cpython@fd46561167af6cd697191dd7ebb8c2fef5ad6493
bpo-31028: Fix test_pydoc when run directly (#2864) * bpo-31028: Fix test_pydoc when run directly Fix get_pydoc_link() of test_pydoc to fix "./python Lib/test/test_pydoc.py": get the absolute path to __file__ to prevent relative directories. * Use realpath() instead of abspath()
[ { "sha": "d68ab5595e877b6e7db839c90ca5307a79d679c1", "filename": "Lib/test/test_pydoc.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/mtorromeo/cpython/blob/fd46561167af6cd697191dd7ebb8c2fef5ad6493/Lib%2Ftest%2Ftest_pydoc.py", "raw_url": "https://github.com/mtorromeo/cpython/raw/fd46561167af6cd697191dd7ebb8c2fef5ad6493/Lib%2Ftest%2Ftest_pydoc.py", "contents_url": "https://api.github.com/repos/mtorromeo/cpython/contents/Lib%2Ftest%2Ftest_pydoc.py?ref=fd46561167af6cd697191dd7ebb8c2fef5ad6493", "patch": "@@ -360,7 +360,7 @@ def get_pydoc_html(module):\n def get_pydoc_link(module):\n \"Returns a documentation web link of a module\"\n dirname = os.path.dirname\n- basedir = dirname(dirname(__file__))\n+ basedir = dirname(dirname(os.path.realpath(__file__)))\n doc = pydoc.TextDoc()\n loc = doc.getdocloc(module, basedir=basedir)\n return loc" } ]
WNTR
0ccdfc50d94349eb55153464934581f6394d6aff
f7b62d68d48fe48c426825c4f29b373b17de87f0
wntr/scenario/tests/test_FragilityCurve.py
https://github.com/sandialabs/WNTR
true
false
true
@@ -26,7 +26,7 @@ FC2.add_state('Major', 2, {'Default': lognorm(0.25, loc=1, scale=2)}) #plt.legend() def test_get_priority_map(): - priority_map = FC.get_priority_map() + priority_map = FC1.get_priority_map() assert_dict_equal(priority_map, {None: 0, 'Minor': 1, 'Major': 2}) def test_cdf_probability():
priority_map = FC . get_priority_map ( )
priority_map = FC1 . get_priority_map ( )
SAME_FUNCTION_WRONG_CALLER
[["Update", ["identifier:FC", 3, 20, 3, 22], "FC1"]]
sandialabs/WNTR@0ccdfc50d94349eb55153464934581f6394d6aff
bug fix
[ { "sha": "edbd5090819fc94304b4ab414fbe95237f6626e3", "filename": "wntr/scenario/tests/test_FragilityCurve.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/sandialabs/WNTR/blob/0ccdfc50d94349eb55153464934581f6394d6aff/wntr%2Fscenario%2Ftests%2Ftest_FragilityCurve.py", "raw_url": "https://github.com/sandialabs/WNTR/raw/0ccdfc50d94349eb55153464934581f6394d6aff/wntr%2Fscenario%2Ftests%2Ftest_FragilityCurve.py", "contents_url": "https://api.github.com/repos/sandialabs/WNTR/contents/wntr%2Fscenario%2Ftests%2Ftest_FragilityCurve.py?ref=0ccdfc50d94349eb55153464934581f6394d6aff", "patch": "@@ -26,7 +26,7 @@\n #plt.legend()\n \n def test_get_priority_map():\n- priority_map = FC.get_priority_map()\n+ priority_map = FC1.get_priority_map()\n assert_dict_equal(priority_map, {None: 0, 'Minor': 1, 'Major': 2})\n \n def test_cdf_probability():" } ]
WNTR
f7f9b73f80b19e6e678443a4792c1280f321093c
c12df23374174d8b0cddb502924c56c333d1a6ca
wntr/sim/NewtonSolver.py
https://github.com/sandialabs/WNTR
true
false
true
@@ -37,7 +37,7 @@ class NewtonSolver(object): self.bt_maxiter = self._options['BT_MAXITER'] if 'BACKTRACKING' not in self._options: - self.bt = True + self.bt = False else: self.bt = self._options['BACKTRACKING']
self . bt = True
self . bt = False
CHANGE_BOOLEAN_LITERAL
[["Insert", ["assignment", 3, 13, 3, 27], ["false:False", "T"], 2], ["Delete", ["true:True", 3, 23, 3, 27]]]
sandialabs/WNTR@f7f9b73f80b19e6e678443a4792c1280f321093c
no line search by default
[ { "sha": "52f4e9fb35bb96d5c18e6c8c59b0ffa464963c5d", "filename": "wntr/sim/NewtonSolver.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/sandialabs/WNTR/blob/f7f9b73f80b19e6e678443a4792c1280f321093c/wntr%2Fsim%2FNewtonSolver.py", "raw_url": "https://github.com/sandialabs/WNTR/raw/f7f9b73f80b19e6e678443a4792c1280f321093c/wntr%2Fsim%2FNewtonSolver.py", "contents_url": "https://api.github.com/repos/sandialabs/WNTR/contents/wntr%2Fsim%2FNewtonSolver.py?ref=f7f9b73f80b19e6e678443a4792c1280f321093c", "patch": "@@ -37,7 +37,7 @@ def __init__(self, options={}):\n self.bt_maxiter = self._options['BT_MAXITER']\n \n if 'BACKTRACKING' not in self._options:\n- self.bt = True\n+ self.bt = False\n else:\n self.bt = self._options['BACKTRACKING']\n " } ]
WNTR
87f3de3c55a70e3ac422584760eb5b90896b0928
8999b52458b731e71128caaae0475d7d0e4d18d7
wntr/tests/test_examples.py
https://github.com/sandialabs/WNTR
true
false
true
@@ -32,5 +32,5 @@ class TestExamples(unittest.TestCase): example_files = [f for f in listdir(os.path.join(resilienceMainDir,'examples')) if isfile(os.path.join(resilienceMainDir,'examples',f)) and f.endswith('.py') and not f.startswith('test')] flag = 1 for f in example_files: - flag = call(['sys.executable', os.path.join(resilienceMainDir,'examples',f)]) + flag = call([sys.executable, os.path.join(resilienceMainDir,'examples',f)]) self.assertEqual(flag,0)
flag = call ( [ 'sys.executable' , os . path . join ( resilienceMainDir , 'examples' , f ) ] )
flag = call ( [ sys . executable , os . path . join ( resilienceMainDir , 'examples' , f ) ] )
SINGLE_STMT
[["Insert", ["list", 3, 25, 3, 89], ["attribute", "N0"], 1], ["Insert", "N0", ["identifier:sys", "T"], 0], ["Insert", "N0", [".:.", "T"], 1], ["Insert", "N0", ["identifier:executable", "T"], 2], ["Delete", ["string:'sys.executable'", 3, 26, 3, 42]]]
sandialabs/WNTR@87f3de3c55a70e3ac422584760eb5b90896b0928
bug fix in test_examples
[ { "sha": "a849a94e92a4cb573b807bd5b05cda47d13f5905", "filename": "wntr/tests/test_examples.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/sandialabs/WNTR/blob/87f3de3c55a70e3ac422584760eb5b90896b0928/wntr%2Ftests%2Ftest_examples.py", "raw_url": "https://github.com/sandialabs/WNTR/raw/87f3de3c55a70e3ac422584760eb5b90896b0928/wntr%2Ftests%2Ftest_examples.py", "contents_url": "https://api.github.com/repos/sandialabs/WNTR/contents/wntr%2Ftests%2Ftest_examples.py?ref=87f3de3c55a70e3ac422584760eb5b90896b0928", "patch": "@@ -32,5 +32,5 @@ def test_that_examples_run(self):\n example_files = [f for f in listdir(os.path.join(resilienceMainDir,'examples')) if isfile(os.path.join(resilienceMainDir,'examples',f)) and f.endswith('.py') and not f.startswith('test')]\n flag = 1\n for f in example_files:\n- flag = call(['sys.executable', os.path.join(resilienceMainDir,'examples',f)])\n+ flag = call([sys.executable, os.path.join(resilienceMainDir,'examples',f)])\n self.assertEqual(flag,0)" } ]
WNTR
412b93ef5187123a0192af3c74259395b9cd83f0
b1a32fbc20da002b22fe017e9b4ae838063dd6d2
wntr/sim/NewtonSolver.py
https://github.com/sandialabs/WNTR
true
false
true
@@ -27,7 +27,7 @@ class NewtonSolver(object): self.flow_filter[self.num_nodes*2:(2*self.num_nodes+self.num_links)] = np.zeros(self.num_links) if 'MAXITER' not in self._options: - self.maxiter = 40 + self.maxiter = 300 else: self.maxiter = self._options['MAXITER']
self . maxiter = 40
self . maxiter = 300
CHANGE_NUMERIC_LITERAL
[["Update", ["integer:40", 3, 28, 3, 30], "300"]]
sandialabs/WNTR@412b93ef5187123a0192af3c74259395b9cd83f0
increasing default maximum number of iterations in NewtonSolver
[ { "sha": "aea01506aee0655bcc8b22d9bdb3539c56166fd0", "filename": "wntr/sim/NewtonSolver.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/sandialabs/WNTR/blob/412b93ef5187123a0192af3c74259395b9cd83f0/wntr%2Fsim%2FNewtonSolver.py", "raw_url": "https://github.com/sandialabs/WNTR/raw/412b93ef5187123a0192af3c74259395b9cd83f0/wntr%2Fsim%2FNewtonSolver.py", "contents_url": "https://api.github.com/repos/sandialabs/WNTR/contents/wntr%2Fsim%2FNewtonSolver.py?ref=412b93ef5187123a0192af3c74259395b9cd83f0", "patch": "@@ -27,7 +27,7 @@ def __init__(self, num_nodes, num_links, num_leaks, options={}):\n self.flow_filter[self.num_nodes*2:(2*self.num_nodes+self.num_links)] = np.zeros(self.num_links)\n \n if 'MAXITER' not in self._options:\n- self.maxiter = 40\n+ self.maxiter = 300\n else:\n self.maxiter = self._options['MAXITER']\n " } ]
WNTR
c6b45eae247ecd12b39601f3fc6e04830c4e7b87
6c97c0c0ffca6e26589ef9b1a396bf6f8350c595
wntr/network/ParseWaterNetwork.py
https://github.com/sandialabs/WNTR
true
false
true
@@ -209,7 +209,7 @@ class ParseWaterNetwork(object): setattr(wn.options, current[0].lower()+'_'+current[1].lower(), float(current[2]) if is_number(current[2]) else current[2].upper()) if type(wn.options.report_timestep)==float or type(wn.options.report_timestep)==int: - if wn.options.report_timestep<=wn.options.hydraulic_timestep: + if wn.options.report_timestep<wn.options.hydraulic_timestep: raise RuntimeError('wn.options.report_timestep must be greater than or equal to wn.options.hydraulic_timestep.') if wn.options.report_timestep%wn.options.hydraulic_timestep != 0: raise RuntimeError('wn.options.report_timestep must be a multiple of wn.options.hydraulic_timestep')
if wn . options . report_timestep <= wn . options . hydraulic_timestep : raise RuntimeError ( 'wn.options.report_timestep must be greater than or equal to wn.options.hydraulic_timestep.' )
if wn . options . report_timestep < wn . options . hydraulic_timestep : raise RuntimeError ( 'wn.options.report_timestep must be greater than or equal to wn.options.hydraulic_timestep.' )
CHANGE_BINARY_OPERATOR
[["Insert", ["comparison_operator", 3, 16, 3, 73], ["<:<", "T"], 1], ["Delete", ["<=:<=", 3, 42, 3, 44]]]
sandialabs/WNTR@c6b45eae247ecd12b39601f3fc6e04830c4e7b87
bug fix
[ { "sha": "302479a27897f30d7be5ce5955de83c2849d036f", "filename": "wntr/network/ParseWaterNetwork.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/sandialabs/WNTR/blob/c6b45eae247ecd12b39601f3fc6e04830c4e7b87/wntr%2Fnetwork%2FParseWaterNetwork.py", "raw_url": "https://github.com/sandialabs/WNTR/raw/c6b45eae247ecd12b39601f3fc6e04830c4e7b87/wntr%2Fnetwork%2FParseWaterNetwork.py", "contents_url": "https://api.github.com/repos/sandialabs/WNTR/contents/wntr%2Fnetwork%2FParseWaterNetwork.py?ref=c6b45eae247ecd12b39601f3fc6e04830c4e7b87", "patch": "@@ -209,7 +209,7 @@ def read_inp_file(self, wn, inp_file_name):\n setattr(wn.options, current[0].lower()+'_'+current[1].lower(), float(current[2]) if is_number(current[2]) else current[2].upper())\n \n if type(wn.options.report_timestep)==float or type(wn.options.report_timestep)==int:\n- if wn.options.report_timestep<=wn.options.hydraulic_timestep:\n+ if wn.options.report_timestep<wn.options.hydraulic_timestep:\n raise RuntimeError('wn.options.report_timestep must be greater than or equal to wn.options.hydraulic_timestep.')\n if wn.options.report_timestep%wn.options.hydraulic_timestep != 0:\n raise RuntimeError('wn.options.report_timestep must be a multiple of wn.options.hydraulic_timestep')" } ]
WNTR
11722d7a4c5e284300c6ddf13d895da30a12c131
2585969ade1e3224ddb1d3e71c2ff326fd5fbba2
wntr/network/WaterNetworkModel.py
https://github.com/sandialabs/WNTR
true
false
true
@@ -1724,7 +1724,7 @@ class WaterNetworkModel(object): sec -= hours*3600 mm = int(sec/60.) sec -= mm*60 - return (hours, mm, sec) + return (hours, mm, int(sec)) class WaterNetworkOptions(object):
return ( hours , mm , sec )
return ( hours , mm , int ( sec ) )
ADD_FUNCTION_AROUND_EXPRESSION
[["Insert", ["tuple", 3, 16, 3, 32], ["call", "N0"], 5], ["Insert", ["tuple", 3, 16, 3, 32], ["):)", "T"], 6], ["Insert", "N0", ["identifier:int", "T"], 0], ["Insert", "N0", ["argument_list", "N1"], 1], ["Insert", "N1", ["(:(", "T"], 0], ["Move", "N1", ["identifier:sec", 3, 28, 3, 31], 1], ["Move", "N1", ["):)", 3, 31, 3, 32], 2]]
sandialabs/WNTR@11722d7a4c5e284300c6ddf13d895da30a12c131
Fixed the time output writer which was not int-ifying the number of seconds
[ { "sha": "1f3f1c980abd384af5025a09f8fdc6c847045248", "filename": "wntr/network/WaterNetworkModel.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/sandialabs/WNTR/blob/11722d7a4c5e284300c6ddf13d895da30a12c131/wntr%2Fnetwork%2FWaterNetworkModel.py", "raw_url": "https://github.com/sandialabs/WNTR/raw/11722d7a4c5e284300c6ddf13d895da30a12c131/wntr%2Fnetwork%2FWaterNetworkModel.py", "contents_url": "https://api.github.com/repos/sandialabs/WNTR/contents/wntr%2Fnetwork%2FWaterNetworkModel.py?ref=11722d7a4c5e284300c6ddf13d895da30a12c131", "patch": "@@ -1724,7 +1724,7 @@ def _sec_to_string(self, sec):\n sec -= hours*3600\n mm = int(sec/60.)\n sec -= mm*60\n- return (hours, mm, sec)\n+ return (hours, mm, int(sec))\n \n class WaterNetworkOptions(object):\n \"\"\"" } ]
asciidoc
e73f188c4d16fb8dca1da7318ea960e0e3b44f1b
cb376ab065bbbf0730ab6ecf04a96a9cc7ce904e
asciidoc.py
https://github.com/pepr/asciidoc
true
false
true
@@ -1201,7 +1201,7 @@ class Document: message.linenos = linenos if lang: if not config.load_lang(lang): - message.error('missing language conf file: lang-%s.conf' % lang) + message.warning('missing language conf file: lang-%s.conf' % lang) self.attributes['lang'] = lang # Reinstate new lang attribute. else: message.error('language attribute (lang) is not defined')
message . error ( 'missing language conf file: lang-%s.conf' % lang )
message . warning ( 'missing language conf file: lang-%s.conf' % lang )
WRONG_FUNCTION_NAME
[["Update", ["identifier:error", 3, 25, 3, 30], "warning"]]
pepr/asciidoc@e73f188c4d16fb8dca1da7318ea960e0e3b44f1b
Missing language file generates a warning instead of an error.
[ { "sha": "c18edb63dac855c008cf253004356e8ed58a4567", "filename": "asciidoc.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/pepr/asciidoc/blob/e73f188c4d16fb8dca1da7318ea960e0e3b44f1b/asciidoc.py", "raw_url": "https://github.com/pepr/asciidoc/raw/e73f188c4d16fb8dca1da7318ea960e0e3b44f1b/asciidoc.py", "contents_url": "https://api.github.com/repos/pepr/asciidoc/contents/asciidoc.py?ref=e73f188c4d16fb8dca1da7318ea960e0e3b44f1b", "patch": "@@ -1201,7 +1201,7 @@ def load_lang(self,linenos=False):\n message.linenos = linenos\n if lang:\n if not config.load_lang(lang):\n- message.error('missing language conf file: lang-%s.conf' % lang)\n+ message.warning('missing language conf file: lang-%s.conf' % lang)\n self.attributes['lang'] = lang # Reinstate new lang attribute.\n else:\n message.error('language attribute (lang) is not defined')" } ]
asciidoc
57c86db2b00f74623fa31faff6e53343c1c95e53
78d4db809182824db8c37ecd41bd18f83f68c9de
asciidoc.py
https://github.com/pepr/asciidoc
true
false
true
@@ -373,7 +373,7 @@ def parse_attributes(attrs,dict): # Attributes must evaluate to strings, numbers or None. for v in d.values(): if not (isinstance(v,str) or isinstance(v,int) or isinstance(v,float) or v is None): - raise + raise Exception except Exception: s = s.replace('"','\\"') s = s.split(',')
raise
raise Exception
SINGLE_STMT
[["Insert", ["raise_statement", 3, 17, 3, 22], ["identifier:Exception", "T"], 1]]
pepr/asciidoc@57c86db2b00f74623fa31faff6e53343c1c95e53
FIXED: asciidoc now runs on Jython 2.5.0.
[ { "sha": "f2a09c82358c1ac97a5043c2d3c6bb4541e454f6", "filename": "asciidoc.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/pepr/asciidoc/blob/57c86db2b00f74623fa31faff6e53343c1c95e53/asciidoc.py", "raw_url": "https://github.com/pepr/asciidoc/raw/57c86db2b00f74623fa31faff6e53343c1c95e53/asciidoc.py", "contents_url": "https://api.github.com/repos/pepr/asciidoc/contents/asciidoc.py?ref=57c86db2b00f74623fa31faff6e53343c1c95e53", "patch": "@@ -373,7 +373,7 @@ def f(*args,**keywords):\n # Attributes must evaluate to strings, numbers or None.\n for v in d.values():\n if not (isinstance(v,str) or isinstance(v,int) or isinstance(v,float) or v is None):\n- raise\n+ raise Exception\n except Exception:\n s = s.replace('\"','\\\\\"')\n s = s.split(',')" } ]
asciidoc
97f3fec003532373cf638ed543cb56b5ac4c5b7a
4c340dc6e915f6e7994dacf0ff66aa1fe9abfddb
asciidoc.py
https://github.com/pepr/asciidoc
true
false
true
@@ -1054,7 +1054,7 @@ def time_str(t): """Convert seconds since the Epoch to formatted local time string.""" t = time.localtime(t) s = time.strftime('%H:%M:%S',t) - if time.daylight: + if time.daylight and t.tm_isdst == 1: result = s + ' ' + time.tzname[1] else: result = s + ' ' + time.tzname[0]
if time . daylight : result = s + ' ' + time . tzname [ 1 ] else : result = s + ' ' + time . tzname [ 0 ]
if time . daylight and t . tm_isdst == 1 : result = s + ' ' + time . tzname [ 1 ] else : result = s + ' ' + time . tzname [ 0 ]
MORE_SPECIFIC_IF
[["Insert", ["if_statement", 3, 5, 6, 42], ["boolean_operator", "N0"], 1], ["Move", "N0", ["attribute", 3, 8, 3, 21], 0], ["Insert", "N0", ["and:and", "T"], 1], ["Insert", "N0", ["comparison_operator", "N1"], 2], ["Insert", "N1", ["attribute", "N2"], 0], ["Insert", "N1", ["==:==", "T"], 1], ["Insert", "N1", ["integer:1", "T"], 2], ["Insert", "N2", ["identifier:t", "T"], 0], ["Insert", "N2", [".:.", "T"], 1], ["Insert", "N2", ["identifier:tm_isdst", "T"], 2]]
pepr/asciidoc@97f3fec003532373cf638ed543cb56b5ac4c5b7a
FIXED: 'localtime' and 'doctime' attributes calculated incorrect daylight saving / non daylight saving timezones and consequently so did HTML footers. Patch submitted by Slawomir Testowy. See http://groups.google.com/group/asciidoc/browse_frm/thread/af652507caf6cec9
[ { "sha": "fd70879afed09ad180c7f341cd1bc4fa405dc0f7", "filename": "asciidoc.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/pepr/asciidoc/blob/97f3fec003532373cf638ed543cb56b5ac4c5b7a/asciidoc.py", "raw_url": "https://github.com/pepr/asciidoc/raw/97f3fec003532373cf638ed543cb56b5ac4c5b7a/asciidoc.py", "contents_url": "https://api.github.com/repos/pepr/asciidoc/contents/asciidoc.py?ref=97f3fec003532373cf638ed543cb56b5ac4c5b7a", "patch": "@@ -1054,7 +1054,7 @@ def time_str(t):\n \"\"\"Convert seconds since the Epoch to formatted local time string.\"\"\"\n t = time.localtime(t)\n s = time.strftime('%H:%M:%S',t)\n- if time.daylight:\n+ if time.daylight and t.tm_isdst == 1:\n result = s + ' ' + time.tzname[1]\n else:\n result = s + ' ' + time.tzname[0]" } ]
asciidoc
c224c2f26817786b29a05297882d7f2c0ad89cf5
82c4d100aa9a3d0958ef52b5bab889aa9886ca1e
tests/testasciidoc.py
https://github.com/pepr/asciidoc
true
false
true
@@ -297,7 +297,7 @@ class AsciiDocTests(object): self.passed = self.failed = self.skipped = 0 for test in self.tests: - if (not test.disabled or number) and (not number or number == test.number): + if (not test.disabled or number) and (not number or number == test.number) and (not backend or backend in test.backends): test.run(backend) self.passed += test.passed self.failed += test.failed
if ( not test . disabled or number ) and ( not number or number == test . number ) : test . run ( backend ) self . passed += test . passed self . failed += test . failed
if ( not test . disabled or number ) and ( not number or number == test . number ) and ( not backend or backend in test . backends ) : test . run ( backend ) self . passed += test . passed self . failed += test . failed
MORE_SPECIFIC_IF
[["Move", ["boolean_operator", 2, 16, 2, 87], ["boolean_operator", 2, 16, 2, 87], 0], ["Insert", ["boolean_operator", 2, 16, 2, 87], ["and:and", "T"], 1], ["Insert", ["boolean_operator", 2, 16, 2, 87], ["parenthesized_expression", "N0"], 2], ["Insert", "N0", ["(:(", "T"], 0], ["Insert", "N0", ["not_operator", "N1"], 1], ["Insert", "N0", ["):)", "T"], 2], ["Insert", "N1", ["not:not", "T"], 0], ["Insert", "N1", ["boolean_operator", "N2"], 1], ["Insert", "N2", ["identifier:backend", "T"], 0], ["Insert", "N2", ["or:or", "T"], 1], ["Insert", "N2", ["comparison_operator", "N3"], 2], ["Insert", "N3", ["identifier:backend", "T"], 0], ["Insert", "N3", ["in:in", "T"], 1], ["Insert", "N3", ["attribute", "N4"], 2], ["Insert", "N4", ["identifier:test", "T"], 0], ["Insert", "N4", [".:.", "T"], 1], ["Insert", "N4", ["identifier:backends", "T"], 2]]
pepr/asciidoc@c224c2f26817786b29a05297882d7f2c0ad89cf5
FIXED: asciidoctest.py: BACKEND command argument was being ignored.
[ { "sha": "673547ae88e99872c5a088776d539ec3725c6234", "filename": "tests/testasciidoc.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/pepr/asciidoc/blob/c224c2f26817786b29a05297882d7f2c0ad89cf5/tests%2Ftestasciidoc.py", "raw_url": "https://github.com/pepr/asciidoc/raw/c224c2f26817786b29a05297882d7f2c0ad89cf5/tests%2Ftestasciidoc.py", "contents_url": "https://api.github.com/repos/pepr/asciidoc/contents/tests%2Ftestasciidoc.py?ref=c224c2f26817786b29a05297882d7f2c0ad89cf5", "patch": "@@ -297,7 +297,7 @@ def run(self, number=None, backend=None):\n \"\"\"\n self.passed = self.failed = self.skipped = 0\n for test in self.tests:\n- if (not test.disabled or number) and (not number or number == test.number):\n+ if (not test.disabled or number) and (not number or number == test.number) and (not backend or backend in test.backends):\n test.run(backend)\n self.passed += test.passed\n self.failed += test.failed" } ]
asciidoc
9125371e7e664c4d0793d08d97c2b556ee36333d
623f97381f5b65ffb627f4365f0ef6c5081045fb
a2x.py
https://github.com/pepr/asciidoc
true
false
true
@@ -429,7 +429,7 @@ class A2X(AttrDict): if self.icons or self.icons_dir: params = [ 'callout.graphics 1', - 'navig.graphics 0', + 'navig.graphics 1', 'admon.textlabel 0', 'admon.graphics 1', ]
params = [ 'callout.graphics 1' , 'navig.graphics 0' , 'admon.textlabel 0' , 'admon.graphics 1' , ]
params = [ 'callout.graphics 1' , 'navig.graphics 1' , 'admon.textlabel 0' , 'admon.graphics 1' , ]
CHANGE_STRING_LITERAL
[["Update", ["string:'navig.graphics 0'", 3, 17, 3, 35], "'navig.graphics 1'"]]
pepr/asciidoc@9125371e7e664c4d0793d08d97c2b556ee36333d
FIXED: a2x: Long-standing bug in a2x which always passes --string-param navig.graphics 0 to xsltproc, no matter whether icons are enabled or not. Reported by Michael Wild: http://groups.google.com/group/asciidoc/browse_thread/thread/59a610068e4acb58
[ { "sha": "b564221abd5ec38fc0babdf8c9226bb5ab3f5bd0", "filename": "a2x.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/pepr/asciidoc/blob/9125371e7e664c4d0793d08d97c2b556ee36333d/a2x.py", "raw_url": "https://github.com/pepr/asciidoc/raw/9125371e7e664c4d0793d08d97c2b556ee36333d/a2x.py", "contents_url": "https://api.github.com/repos/pepr/asciidoc/contents/a2x.py?ref=9125371e7e664c4d0793d08d97c2b556ee36333d", "patch": "@@ -429,7 +429,7 @@ def process_options(self):\n if self.icons or self.icons_dir:\n params = [\n 'callout.graphics 1',\n- 'navig.graphics 0',\n+ 'navig.graphics 1',\n 'admon.textlabel 0',\n 'admon.graphics 1',\n ]" } ]
asciidoc
d8f2a39d548a17f7f2ce1490921d6dca24af4831
bc04d165550ea03017bbdeb5734f57add523d2c8
asciidoc.py
https://github.com/pepr/asciidoc
true
false
true
@@ -4520,7 +4520,7 @@ class Config: if not macro in self.sections: message.warning('missing special word macro: [%s]' % macro) # Check all text quotes have a corresponding tag. - for q in self.quotes.keys(): + for q in self.quotes.keys()[:]: tag = self.quotes[q] if not tag: del self.quotes[q] # Undefine quote.
for q in self . quotes . keys ( ) : tag = self . quotes [ q ] if not tag : del self . quotes [ q ]
for q in self . quotes . keys ( ) [ : ] : tag = self . quotes [ q ] if not tag : del self . quotes [ q ]
SINGLE_STMT
[["Insert", ["for_statement", 3, 9, 6, 35], ["subscript", "N0"], 3], ["Insert", ["for_statement", 3, 9, 6, 35], [":::", "T"], 4], ["Move", "N0", ["call", 3, 18, 3, 36], 0], ["Insert", "N0", ["[:[", "T"], 1], ["Insert", "N0", ["slice", "N1"], 2], ["Insert", "N0", ["]:]", "T"], 3], ["Move", "N1", [":::", 3, 36, 3, 37], 0]]
pepr/asciidoc@d8f2a39d548a17f7f2ce1490921d6dca24af4831
FIXED: An error can occur when more than one consecutive quote is defined as a blank string. Reported by Peggy Russell.
[ { "sha": "b6f0551ccd1d91746fb6bd3437159a367ac81570", "filename": "asciidoc.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/pepr/asciidoc/blob/d8f2a39d548a17f7f2ce1490921d6dca24af4831/asciidoc.py", "raw_url": "https://github.com/pepr/asciidoc/raw/d8f2a39d548a17f7f2ce1490921d6dca24af4831/asciidoc.py", "contents_url": "https://api.github.com/repos/pepr/asciidoc/contents/asciidoc.py?ref=d8f2a39d548a17f7f2ce1490921d6dca24af4831", "patch": "@@ -4520,7 +4520,7 @@ def validate(self):\n if not macro in self.sections:\n message.warning('missing special word macro: [%s]' % macro)\n # Check all text quotes have a corresponding tag.\n- for q in self.quotes.keys():\n+ for q in self.quotes.keys()[:]:\n tag = self.quotes[q]\n if not tag:\n del self.quotes[q] # Undefine quote." } ]
asciidoc
e4b0ce2bd206159adc389ed98f211a778841efa5
55d8b1e1d6ac620e08843901998f2ad597cd2c5b
asciidoc.py
https://github.com/pepr/asciidoc
true
false
true
@@ -5582,7 +5582,7 @@ def asciidoc(backend, doctype, confiles, infile, outfile, options): config.load_file('asciidoc.conf', indir, include=['attributes','titles','specialchars']) else: - load_conffiles() + load_conffiles(include=['attributes','titles','specialchars']) document.update_attributes() # Check the infile exists.
else : load_conffiles ( )
else : load_conffiles ( include = [ 'attributes' , 'titles' , 'specialchars' ] )
SAME_FUNCTION_MORE_ARGS
[["Insert", ["argument_list", 3, 27, 3, 29], ["keyword_argument", "N0"], 1], ["Insert", "N0", ["identifier:include", "T"], 0], ["Insert", "N0", ["=:=", "T"], 1], ["Insert", "N0", ["list", "N1"], 2], ["Insert", "N1", ["[:[", "T"], 0], ["Insert", "N1", ["string:'attributes'", "T"], 1], ["Insert", "N1", [",:,", "T"], 2], ["Insert", "N1", ["string:'titles'", "T"], 3], ["Insert", "N1", [",:,", "T"], 4], ["Insert", "N1", ["string:'specialchars'", "T"], 5], ["Insert", "N1", ["]:]", "T"], 6]]
pepr/asciidoc@e4b0ce2bd206159adc389ed98f211a778841efa5
Configuration files are loaded in two passes when the -e command-line option is used (the same behavior as when the -e option is not used). Patch submitted by haad. See http://groups.google.com/group/asciidoc/browse_thread/thread/cd0f47495fd04181 and http://code.google.com/p/asciidoc/issues/detail?id=6&q=label%3APriority-Medium
[ { "sha": "c46a968271cd8e40c08c07e2e527a2f1e1d7d102", "filename": "asciidoc.py", "status": "modified", "additions": 2, "deletions": 2, "changes": 4, "blob_url": "https://github.com/pepr/asciidoc/blob/e4b0ce2bd206159adc389ed98f211a778841efa5/asciidoc.py", "raw_url": "https://github.com/pepr/asciidoc/raw/e4b0ce2bd206159adc389ed98f211a778841efa5/asciidoc.py", "contents_url": "https://api.github.com/repos/pepr/asciidoc/contents/asciidoc.py?ref=e4b0ce2bd206159adc389ed98f211a778841efa5", "patch": "@@ -5582,7 +5582,7 @@ def load_conffiles(include=[], exclude=[]):\n config.load_file('asciidoc.conf', indir,\n include=['attributes','titles','specialchars'])\n else:\n- load_conffiles()\n+ load_conffiles(include=['attributes','titles','specialchars'])\n document.update_attributes()\n # Check the infile exists.\n if infile != '<stdin>':\n@@ -5618,7 +5618,7 @@ def load_conffiles(include=[], exclude=[]):\n f = os.path.splitext(infile)[0]\n config.load_file(f + '.conf')\n config.load_file(f + '-' + document.backend + '.conf')\n- load_conffiles()\n+ load_conffiles()\n # Build outfile name.\n if outfile is None:\n outfile = os.path.splitext(infile)[0] + '.' + document.backend" } ]
asciidoc
83c03fccbdf694f7803add507ac961076d13f94c
075162eef4ea657383ce5cea50307552c8a46597
filters/latex/latex2png.py
https://github.com/pepr/asciidoc
true
false
true
@@ -142,7 +142,7 @@ def latex2png(infile, outfile, dpi, modified): cmd = 'dvipng' if dpi: cmd += ' -D %s' % dpi - cmd += ' -T tight -x 1000 -z 9 -bg Transparent --truecolor -o "%s" "%s ' \ + cmd += ' -T tight -x 1000 -z 9 -bg Transparent --truecolor -o "%s" "%s" ' \ % (outfile,dvifile) run(cmd) finally:
cmd += ' -T tight -x 1000 -z 9 -bg Transparent --truecolor -o "%s" "%s ' % ( outfile , dvifile )
cmd += ' -T tight -x 1000 -z 9 -bg Transparent --truecolor -o "%s" "%s" ' % ( outfile , dvifile )
CHANGE_BINARY_OPERAND
[["Update", ["string:' -T tight -x 1000 -z 9 -bg Transparent --truecolor -o \"%s\" \"%s '", 3, 16, 3, 81], "' -T tight -x 1000 -z 9 -bg Transparent --truecolor -o \"%s\" \"%s\" '"]]
pepr/asciidoc@83c03fccbdf694f7803add507ac961076d13f94c
Fixed missing quote in preceding LaTeX filter patch. Fix submitted by Simon Ruderich. See: http://groups.google.com/group/asciidoc/browse_thread/thread/6436788a10561851
[ { "sha": "bbafdd2d9754f7ad57c110f0dc46c54a933ad1e4", "filename": "filters/latex/latex2png.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/pepr/asciidoc/blob/83c03fccbdf694f7803add507ac961076d13f94c/filters%2Flatex%2Flatex2png.py", "raw_url": "https://github.com/pepr/asciidoc/raw/83c03fccbdf694f7803add507ac961076d13f94c/filters%2Flatex%2Flatex2png.py", "contents_url": "https://api.github.com/repos/pepr/asciidoc/contents/filters%2Flatex%2Flatex2png.py?ref=83c03fccbdf694f7803add507ac961076d13f94c", "patch": "@@ -142,7 +142,7 @@ def latex2png(infile, outfile, dpi, modified):\n cmd = 'dvipng'\n if dpi:\n cmd += ' -D %s' % dpi\n- cmd += ' -T tight -x 1000 -z 9 -bg Transparent --truecolor -o \"%s\" \"%s ' \\\n+ cmd += ' -T tight -x 1000 -z 9 -bg Transparent --truecolor -o \"%s\" \"%s\" ' \\\n % (outfile,dvifile)\n run(cmd)\n finally:" } ]
asciidoc
065a649f140ef6702e46135617486063536cdbb6
ed76d3ec4513186515870eccd85e4314521d2e77
asciidoc.py
https://github.com/pepr/asciidoc
true
false
true
@@ -1932,7 +1932,7 @@ class AttributeList: reo = re.compile(r"^'.*'$") for k,v in attrs.items(): if reo.match(str(v)): - attrs[k] = Lex.subs_1(v[1:-1],SUBS_NORMAL) + attrs[k] = Lex.subs_1(v[1:-1], config.subsnormal) @staticmethod def style(): return AttributeList.attrs.get('style') or AttributeList.attrs.get('1')
attrs [ k ] = Lex . subs_1 ( v [ 1 : - 1 ] , SUBS_NORMAL )
attrs [ k ] = Lex . subs_1 ( v [ 1 : - 1 ] , config . subsnormal )
SINGLE_STMT
[["Insert", ["argument_list", 3, 38, 3, 59], ["attribute", "N0"], 3], ["Update", ["identifier:SUBS_NORMAL", 3, 47, 3, 58], "config"], ["Move", "N0", ["identifier:SUBS_NORMAL", 3, 47, 3, 58], 0], ["Insert", "N0", [".:.", "T"], 1], ["Insert", "N0", ["identifier:subsnormal", "T"], 2]]
pepr/asciidoc@065a649f140ef6702e46135617486063536cdbb6
FIXED: Use configured normal substitution in preference to the default one.
[ { "sha": "bf1eadc180925f70960ede71378d143a154d7a11", "filename": "asciidoc.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/pepr/asciidoc/blob/065a649f140ef6702e46135617486063536cdbb6/asciidoc.py", "raw_url": "https://github.com/pepr/asciidoc/raw/065a649f140ef6702e46135617486063536cdbb6/asciidoc.py", "contents_url": "https://api.github.com/repos/pepr/asciidoc/contents/asciidoc.py?ref=065a649f140ef6702e46135617486063536cdbb6", "patch": "@@ -1932,7 +1932,7 @@ def subs(attrs):\n reo = re.compile(r\"^'.*'$\")\n for k,v in attrs.items():\n if reo.match(str(v)):\n- attrs[k] = Lex.subs_1(v[1:-1],SUBS_NORMAL)\n+ attrs[k] = Lex.subs_1(v[1:-1], config.subsnormal)\n @staticmethod\n def style():\n return AttributeList.attrs.get('style') or AttributeList.attrs.get('1')" } ]
asciidoc
bf174dcabf0fc61450244d7190e01b1908fc7af6
9493a1778c03c870c8a74ce1bdf37fa34791a8a0
a2x.py
https://github.com/pepr/asciidoc
true
false
true
@@ -440,7 +440,7 @@ class A2X(AttrDict): for r in self.resources: r = os.path.expanduser(r) r = os.path.expandvars(r) - if r.endswith(('/','\\')): + if r.endswith('/') or r.endswith('\\'): if os.path.isdir(r): self.resource_dirs.append(r) else:
if r . endswith ( ( '/' , '\\' ) ) : if os . path . isdir ( r ) : self . resource_dirs . append ( r ) else :
if r . endswith ( '/' ) or r . endswith ( '\\' ) : if os . path . isdir ( r ) : self . resource_dirs . append ( r ) else :
SINGLE_STMT
[["Insert", ["if_statement", 3, 13, 6, 22], ["boolean_operator", "N0"], 1], ["Insert", "N0", ["call", "N1"], 0], ["Insert", "N0", ["or:or", "T"], 1], ["Insert", "N0", ["call", "N2"], 2], ["Move", "N1", ["attribute", 3, 16, 3, 26], 0], ["Insert", "N1", ["argument_list", "N3"], 1], ["Insert", "N2", ["attribute", "N4"], 0], ["Insert", "N2", ["argument_list", "N5"], 1], ["Move", "N3", ["(:(", 3, 26, 3, 27], 0], ["Insert", "N3", ["string:'/'", "T"], 1], ["Insert", "N3", ["):)", "T"], 2], ["Insert", "N4", ["identifier:r", "T"], 0], ["Insert", "N4", [".:.", "T"], 1], ["Insert", "N4", ["identifier:endswith", "T"], 2], ["Insert", "N5", ["(:(", "T"], 0], ["Move", "N5", ["string:'\\\\'", 3, 32, 3, 36], 1], ["Move", "N5", ["):)", 3, 36, 3, 37], 2], ["Delete", ["(:(", 3, 27, 3, 28]], ["Delete", ["string:'/'", 3, 28, 3, 31]], ["Delete", [",:,", 3, 31, 3, 32]], ["Delete", ["tuple", 3, 27, 3, 37]], ["Delete", ["):)", 3, 37, 3, 38]], ["Delete", ["argument_list", 3, 26, 3, 38]], ["Delete", ["call", 3, 16, 3, 38]]]
pepr/asciidoc@bf174dcabf0fc61450244d7190e01b1908fc7af6
Fixed Python 2.4 backward incompatibility (see http://code.google.com/p/asciidoc/issues/detail?id=9)
[ { "sha": "83cfbecd82275c98cdfd81471fbc9da1fa850ec4", "filename": "a2x.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/pepr/asciidoc/blob/bf174dcabf0fc61450244d7190e01b1908fc7af6/a2x.py", "raw_url": "https://github.com/pepr/asciidoc/raw/bf174dcabf0fc61450244d7190e01b1908fc7af6/a2x.py", "contents_url": "https://api.github.com/repos/pepr/asciidoc/contents/a2x.py?ref=bf174dcabf0fc61450244d7190e01b1908fc7af6", "patch": "@@ -440,7 +440,7 @@ def process_options(self):\n for r in self.resources:\n r = os.path.expanduser(r)\n r = os.path.expandvars(r)\n- if r.endswith(('/','\\\\')):\n+ if r.endswith('/') or r.endswith('\\\\'):\n if os.path.isdir(r):\n self.resource_dirs.append(r)\n else:" } ]